diff --git a/.bump2version.cfg b/.bump2version.cfg
deleted file mode 100644
index d3060db996d9a93ff0bed4049e38066b6a4491e9..0000000000000000000000000000000000000000
--- a/.bump2version.cfg
+++ /dev/null
@@ -1,16 +0,0 @@
-[bumpversion]
-current_version = 2.8.2.2
-parse = (?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)(rc)?(?P<rc>\d+)
-serialize = 
-	{major}.{minor}.{patch}rc{rc}
-	{major}.{minor}.{patch}
-
-[bumpversion:glob:apps/cli/utilities/*/*/__init__.py]
-
-[bumpversion:glob:apps/cli/executables/*/*/__init__.py]
-
-[bumpversion:glob:apps/cli/executables/pexable/*/*/__init__.py]
-
-[bumpversion:glob:services/*/*/__init__.py]
-
-[bumpversion:file:testing/testing/_version.py]
diff --git a/.bumpversion.toml b/.bumpversion.toml
new file mode 100644
index 0000000000000000000000000000000000000000..7bcb6b2b775a03791bb02ed1964c573d68d70538
--- /dev/null
+++ b/.bumpversion.toml
@@ -0,0 +1,75 @@
+# Config for bump-my-version
+# Docs: https://callowayproject.github.io/bump-my-version/
+# NB: Run with `--ignore-missing-version` flag to ignore PyCAPO's version
+[tool.bumpversion]
+current_version = "2.8.2.3rc1"
+parse = """(?x)
+    (?P<major>0|[1-9]\\d*)\\.
+    (?P<minor>0|[1-9]\\d*)\\.
+    (?P<patch>0|[1-9]\\d*)
+    (?:
+        \\.(?P<smallpatch>0|[1-9]\\d*)
+    )?                                  # smallpatch section is optional
+    (?:
+        (?P<pre_l>[a-zA-Z-]+)           # pre-release label
+        (?P<pre_n>0|[1-9]\\d*)          # pre-release version number
+    )?                                  # pre-release section is optional
+"""
+serialize = [
+    "{major}.{minor}.{patch}{pre_l}{pre_n}",
+    "{major}.{minor}.{patch}.{smallpatch}{pre_l}{pre_n}",
+    "{major}.{minor}.{patch}.{smallpatch}",
+    "{major}.{minor}.{patch}",
+]
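+# The first listed format able to represent all set parts is used when serializing.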
+allow_dirty = true
+ignore_missing_version = true
+
+[tool.bumpversion.parts.pre_l]
+values = ["dev", "rc", "final"]
+optional_value = "final"
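+# "final" marks a release; as the optional value it is omitted from serialized versions.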
+
+[tool.bumpversion.parts.pre_n]
+first_value = "1"
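+# Pre-release numbering starts at 1, i.e. "rc1" rather than "rc0".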
+
+[[tool.bumpversion.files]]
+glob = "apps/cli/executables/pexable/*/pyproject.toml"
+search = "version = \"{current_version}\""
+replace = "version = \"{new_version}\""
+
+[[tool.bumpversion.files]]
+glob = "apps/cli/executables/pexable/*/*/__init__.py"
+
+[[tool.bumpversion.files]]
+glob = "services/*/pyproject.toml"
+search = "version = \"{current_version}\""
+replace = "version = \"{new_version}\""
+
+[[tool.bumpversion.files]]
+glob = "services/*/*/__init__.py"
+
+[[tool.bumpversion.files]]
+glob = "shared/*/pyproject.toml"
+search = "version = \"{current_version}\""
+replace = "version = \"{new_version}\""
+
+[[tool.bumpversion.files]]
+filename = "shared/workspaces/workspaces/__init__.py"
+
+[[tool.bumpversion.files]]
+glob = "apps/cli/utilities/*/pyproject.toml"
+search = "version = \"{current_version}\""
+replace = "version = \"{new_version}\""
+
+[[tool.bumpversion.files]]
+glob = "apps/cli/utilities/*/*/__init__.py"
+
+[[tool.bumpversion.files]]
+filename = "./.gitlab-ci.yml"
+search = "RELEASE_VERSION: {current_version}"
+replace = "RELEASE_VERSION: {new_version}"
+
+[[tool.bumpversion.files]]
+filename = "services/workflow/gitlab-requirements.txt"
+
+[[tool.bumpversion.files]]
+filename = "apps/cli/executables/go/spelunker/main.go"
diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index c3f247756f238da99ba72414e1a0f6dea4c91972..63da2b63e1371191e072af2a2b326023af4cbc37 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -24,7 +24,7 @@ variables:
     POSTGRES_DB: archive
     POSTGRES_USER: "archive"
     POSTGRES_PASSWORD: "docker"
-    RELEASE_VERSION: 2.8.2.2
+    RELEASE_VERSION: 2.8.2.3rc1
 
 image: docker:19.03.12
 
diff --git a/Makefile b/Makefile
index ebbbecb307e2dab04430e2c6c498e8cfa5853238..13bb9325b2ebe99769a53d075136a9e908b4a63f 100644
--- a/Makefile
+++ b/Makefile
@@ -97,3 +97,37 @@ build base images:
     docker build -t workflow:local -f services/workflow/Dockerfile . --build-arg ENV=local --target dev
     docker build -t workflow:local -f services/workflow/Dockerfile . --build-arg ENV=local --target dev
     docker build -t workflow:local -f services/workflow/Dockerfile . --build-arg ENV=local --target dev
+
+
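+# Regenerate each sub-project's poetry.lock in place; --no-update re-locks
+# against the existing pins instead of upgrading dependencies.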
+poetrylockall: poetry-lock-pexes poetry-lock-utilities poetry-lock-shared poetry-lock-services
+
+poetry-lock-pexes:
+	 poetry lock --no-update -C apps/cli/executables/pexable/carta_envoy
+	 poetry lock --no-update -C apps/cli/executables/pexable/casa_envoy
+	 poetry lock --no-update -C apps/cli/executables/pexable/conveyor
+	 poetry lock --no-update -C apps/cli/executables/pexable/data_annotator
+	 poetry lock --no-update -C apps/cli/executables/pexable/deliver
+	 poetry lock --no-update -C apps/cli/executables/pexable/ingest_envoy
+	 poetry lock --no-update -C apps/cli/executables/pexable/mediator
+	 poetry lock --no-update -C apps/cli/executables/pexable/null
+	 poetry lock --no-update -C apps/cli/executables/pexable/productfetcher
+	 poetry lock --no-update -C apps/cli/executables/pexable/update_stage
+	 poetry lock --no-update -C apps/cli/executables/pexable/vela
+	 poetry lock --no-update -C apps/cli/executables/pexable/wf_inspector
+	 poetry lock --no-update -C apps/cli/executables/pexable/ws_annihilator
+	 poetry lock --no-update -C apps/cli/executables/pexable/ws_metrics
+
+poetry-lock-utilities:
+	 poetry lock --no-update -C apps/cli/utilities/aat_wrest
+	 poetry lock --no-update -C apps/cli/utilities/contacts_wrest
+	 poetry lock --no-update -C apps/cli/utilities/core_sampler
+	 poetry lock --no-update -C apps/cli/utilities/wf_monitor
+
+poetry-lock-shared:
+	 poetry lock --no-update -C shared/messaging
+	 poetry lock --no-update -C shared/workspaces
+
+poetry-lock-services:
+	 poetry lock --no-update -C services/capability
+	 poetry lock --no-update -C services/notification
+	 poetry lock --no-update -C services/workflow
diff --git a/apps/cli/executables/go/gmva_ingester/main.go b/apps/cli/executables/go/gmva_ingester/main.go
index f8e1cf250ca5c98672e189d74b7ac6d745163ed7..930ff0f2e8604d40df9aaa683c8c46e246c68118 100644
--- a/apps/cli/executables/go/gmva_ingester/main.go
+++ b/apps/cli/executables/go/gmva_ingester/main.go
@@ -19,23 +19,23 @@
 package main
 
 import (
-    "flag"
-    "log"
-    "os"
-    "ssa/gmva_ingester/pkg/copy"
+	"flag"
+	"log"
+	"os"
+	"ssa/gmva_ingester/pkg/copy"
 )
 
-func main()  {
-    var gmvaDir string
+func main() {
+	var gmvaDir string
 
-    flag.StringVar(&gmvaDir, "dir", "", "Name of the GMVA observation directory (just the name, not the full path) to be ingested")
-    flag.Parse()
+	flag.StringVar(&gmvaDir, "dir", "", "Name of the GMVA observation directory (just the name, not the full path) to be ingested")
+	flag.Parse()
 
-    if gmvaDir == "" {
-        log.Println("GMVA directory argument required!")
-        flag.Usage()
-        os.Exit(1)
-    }
+	if gmvaDir == "" {
+		log.Println("GMVA directory argument required!")
+		flag.Usage()
+		os.Exit(1)
+	}
 
-    copy.CopyGmva(gmvaDir)
+	copy.CopyGmva(gmvaDir)
 }
diff --git a/apps/cli/executables/go/spelunker/main.go b/apps/cli/executables/go/spelunker/main.go
index 4a3a059c0e8d8a1a03dc62f71684981f1c7a30f6..e30732cf0f58de98a9269585b81b920cfb06becd 100644
--- a/apps/cli/executables/go/spelunker/main.go
+++ b/apps/cli/executables/go/spelunker/main.go
@@ -46,7 +46,7 @@ func main() {
 
 	if *v {
 		//TODO: fix version system for containers
-		log.Println("2.8.2.2")
+		log.Println("2.8.2.3rc1")
 		os.Exit(0)
 	}
 
diff --git a/apps/cli/executables/pexable/carta_envoy/carta_envoy/__init__.py b/apps/cli/executables/pexable/carta_envoy/carta_envoy/__init__.py
index 0e7f759cdccaa9945558493c54f7ed0cfa78c2c9..3f2d9c4ae37fdca3d9990d66d048e84a9f1eb03c 100644
--- a/apps/cli/executables/pexable/carta_envoy/carta_envoy/__init__.py
+++ b/apps/cli/executables/pexable/carta_envoy/carta_envoy/__init__.py
@@ -18,4 +18,4 @@
 """
 Workspaces system for launching CARTA for viewing images
 """
-__version__ = "2.8.2.2"
+__version__ = "2.8.2.3rc1"
diff --git a/apps/cli/executables/pexable/carta_envoy/pyproject.toml b/apps/cli/executables/pexable/carta_envoy/pyproject.toml
index 278c269cf0d562c4198b9bfdbd0aed0438380232..d690e98478def575a84feb6fbcd141071b2567d0 100644
--- a/apps/cli/executables/pexable/carta_envoy/pyproject.toml
+++ b/apps/cli/executables/pexable/carta_envoy/pyproject.toml
@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "carta_envoy"
-version = "2.8.2.2"
+version = "2.8.2.3rc1"
 description = "Workspaces system for launching CARTA for viewing images"
 authors = ["DMS SSA <dms-ssa@nrao.edu>"]
 license = "GPL3+"
diff --git a/apps/cli/executables/pexable/casa_envoy/casa_envoy/__init__.py b/apps/cli/executables/pexable/casa_envoy/casa_envoy/__init__.py
index 086f33d878bb35b29a08f2e1a351b25a78e87b99..f598851ae8f0b4cd7348668e43bfb899af241df0 100644
--- a/apps/cli/executables/pexable/casa_envoy/casa_envoy/__init__.py
+++ b/apps/cli/executables/pexable/casa_envoy/casa_envoy/__init__.py
@@ -18,4 +18,4 @@
 """
 Workspaces CASA functionality bridge
 """
-__version__ = "2.8.2.2"
+__version__ = "2.8.2.3rc1"
diff --git a/apps/cli/executables/pexable/casa_envoy/poetry.lock b/apps/cli/executables/pexable/casa_envoy/poetry.lock
index 8483b3b0111e4897d43ad9380d8b959a1615dda1..f77c0b0380c9e50c3a40f54a16f75232b788a7c4 100644
--- a/apps/cli/executables/pexable/casa_envoy/poetry.lock
+++ b/apps/cli/executables/pexable/casa_envoy/poetry.lock
@@ -186,6 +186,17 @@ files = [
 dev = ["pre-commit", "tox"]
 testing = ["pytest", "pytest-benchmark"]
 
+[[package]]
+name = "prettierfier"
+version = "1.0.3"
+description = "Intelligently pretty-print HTML/XML with inline tags."
+optional = false
+python-versions = "*"
+files = [
+    {file = "prettierfier-1.0.3-py3-none-any.whl", hash = "sha256:5dd22b1141b333c02df87b81062d18c5194e811675fa1c656627f39494431a3d"},
+    {file = "prettierfier-1.0.3.tar.gz", hash = "sha256:15a5b3b46776bb6173c447f2b33bf5002d2d6d219edccd9402ea64fff9c25f68"},
+]
+
 [[package]]
 name = "pycapo"
 version = "0.3.1"
@@ -244,4 +255,4 @@ files = [
 [metadata]
 lock-version = "2.0"
 python-versions = "~3.10"
-content-hash = "816ee5a91f20c0e568de71318a117b8609af5a9cce7fc21f62ffd038db51e9a6"
+content-hash = "ea1f80f2a0398c4c23d5ef3f2530ea19a5da8febdfe12ab0ed054c09043a0354"
diff --git a/apps/cli/executables/pexable/casa_envoy/pyproject.toml b/apps/cli/executables/pexable/casa_envoy/pyproject.toml
index 9095066fff9ed8494d6246263e9626d7d2faa3d3..0393fa2c553183ffe5be60eae215ba5a92b06743 100644
--- a/apps/cli/executables/pexable/casa_envoy/pyproject.toml
+++ b/apps/cli/executables/pexable/casa_envoy/pyproject.toml
@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "casa_envoy"
-version = "2.8.2.2"
+version = "2.8.2.3rc1"
 description = "Workspaces CASA functionality bridge"
 authors = ["DMS SSA <dms-ssa@nrao.edu>"]
 license = "GPL3+"
@@ -11,6 +11,7 @@ python = "~3.10"
 pycapo = "0.3.1"
 beautifulsoup4 = "4.12.2"
 lxml = "4.9.2"
+prettierfier = "1.0.3"
 
 [tool.poetry.group.dev.dependencies]
 pex = "2.1.119"
diff --git a/apps/cli/executables/pexable/conveyor/conveyor/__init__.py b/apps/cli/executables/pexable/conveyor/conveyor/__init__.py
index a0e164b7e5cfbf788904bfde27b4e542ecf3af30..57aed3577af990baa9cbfc59dfea1870c0b2cc83 100644
--- a/apps/cli/executables/pexable/conveyor/conveyor/__init__.py
+++ b/apps/cli/executables/pexable/conveyor/conveyor/__init__.py
@@ -18,4 +18,4 @@
 """
 Conveyor
 """
-__version__ = "2.8.2.2"
+__version__ = "2.8.2.3rc1"
diff --git a/apps/cli/executables/pexable/conveyor/pyproject.toml b/apps/cli/executables/pexable/conveyor/pyproject.toml
index 81dfc767b26af84f8c7f210931f000b1a3e52e55..588b766589e0953dad12cbf44b9f915bb4bec74a 100644
--- a/apps/cli/executables/pexable/conveyor/pyproject.toml
+++ b/apps/cli/executables/pexable/conveyor/pyproject.toml
@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "conveyor"
-version = "2.8.2.2"
+version = "2.8.2.3rc1"
 description = "Conveyor"
 authors = ["DMS SSA <dms-ssa@nrao.edu>"]
 license = "GPL3+"
diff --git a/apps/cli/executables/pexable/data_annotator/data_annotator/__init__.py b/apps/cli/executables/pexable/data_annotator/data_annotator/__init__.py
index cf663b45ed9c1859df58af2820828b0da036f8d6..0f2d42562a9caac9ddc426344cf8929d8478e0a4 100644
--- a/apps/cli/executables/pexable/data_annotator/data_annotator/__init__.py
+++ b/apps/cli/executables/pexable/data_annotator/data_annotator/__init__.py
@@ -18,4 +18,4 @@
 """
 Workspaces metrics reporter for users outside of SSA.
 """
-__version__ = "2.8.2.2"
+__version__ = "2.8.2.3rc1"
diff --git a/apps/cli/executables/pexable/data_annotator/pyproject.toml b/apps/cli/executables/pexable/data_annotator/pyproject.toml
index 7e30f8ffd44ca73f948481c5471a394a8f739656..6a53d46e05028cd93b0c38064b8f012dae8e9a61 100644
--- a/apps/cli/executables/pexable/data_annotator/pyproject.toml
+++ b/apps/cli/executables/pexable/data_annotator/pyproject.toml
@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "data_annotator"
-version = "2.8.2.2"
+version = "2.8.2.3rc1"
 description = "Workspaces CLI tool for creating and deleting comments associated with data quality."
 authors = ["DMS SSA <dms-ssa@nrao.edu>"]
 license = "GPL3+"
diff --git a/apps/cli/executables/pexable/deliver/delivery/__init__.py b/apps/cli/executables/pexable/deliver/delivery/__init__.py
index 027786cbc3e530233a765540c4b969357dd93a1b..9af98d6d8d0d63e7b378165aa8da3ccd4092f240 100644
--- a/apps/cli/executables/pexable/deliver/delivery/__init__.py
+++ b/apps/cli/executables/pexable/deliver/delivery/__init__.py
@@ -18,4 +18,4 @@
 """
 Workspaces data delivery module
 """
-__version__ = "2.8.2.2"
+__version__ = "2.8.2.3rc1"
diff --git a/apps/cli/executables/pexable/deliver/pyproject.toml b/apps/cli/executables/pexable/deliver/pyproject.toml
index 8284270646fabe3620073c879252424f43b373fe..82c8dcc857e253ce0883f9cb1515ad6744ce9b85 100644
--- a/apps/cli/executables/pexable/deliver/pyproject.toml
+++ b/apps/cli/executables/pexable/deliver/pyproject.toml
@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "ssa_deliver"
-version = "2.8.2.2"
+version = "2.8.2.3rc1"
 description = "Workspaces data delivery module"
 authors = ["DMS SSA <dms-ssa@nrao.edu>"]
 license = "GPL3+"
diff --git a/apps/cli/executables/pexable/ingest_envoy/ingest_envoy/__init__.py b/apps/cli/executables/pexable/ingest_envoy/ingest_envoy/__init__.py
index 497b2a52b32e764d62941e50c317ebfe407603fa..d27e1980979a73905a89ad32939829c4c8622ec5 100644
--- a/apps/cli/executables/pexable/ingest_envoy/ingest_envoy/__init__.py
+++ b/apps/cli/executables/pexable/ingest_envoy/ingest_envoy/__init__.py
@@ -18,4 +18,4 @@
 """
 Ingest envoy
 """
-__version__ = "2.8.2.2"
+__version__ = "2.8.2.3rc1"
diff --git a/apps/cli/executables/pexable/ingest_envoy/ingest_envoy/ingest.py b/apps/cli/executables/pexable/ingest_envoy/ingest_envoy/ingest.py
index 4b3ad0beb15ea9a7a2d63917e4ff934d4537fbd1..15ac130cc3b33b5181563a17895689391d1b00ad 100644
--- a/apps/cli/executables/pexable/ingest_envoy/ingest_envoy/ingest.py
+++ b/apps/cli/executables/pexable/ingest_envoy/ingest_envoy/ingest.py
@@ -77,7 +77,6 @@ def _get_settings(
     parameters["useIngest"] = strtobool(ingestion_settings.useIngest)
     parameters["script_location"] = script_location
     parameters["workflowUrl"] = workflow_url
-
     return parameters
 
 
diff --git a/apps/cli/executables/pexable/ingest_envoy/ingest_envoy/ingestion_manifest.py b/apps/cli/executables/pexable/ingest_envoy/ingest_envoy/ingestion_manifest.py
index 6ff299450f61ea2df194d3e46e252ef87f0dfd73..7c4639f2d3a529b48d2dffd9af5f86c09110c029 100644
--- a/apps/cli/executables/pexable/ingest_envoy/ingest_envoy/ingestion_manifest.py
+++ b/apps/cli/executables/pexable/ingest_envoy/ingest_envoy/ingestion_manifest.py
@@ -24,10 +24,11 @@ from pathlib import Path
 from pycapo import CapoConfig
 
 # pylint: disable=C0301, E0401, R0903, R1721
-from typing import Tuple, List
+from typing import List, Optional, Tuple
 
 import arrow
 from ingest_envoy.manifest_components import (
+    CURATOR_MANIFEST_FILENAME,
     INGESTION_ARTIFACTS_NAME,
     INIT_WEBLOG_FILENAME,
     JSON,
@@ -43,20 +44,19 @@ from ingest_envoy.manifest_components import (
     OutputGroup,
     OutputScienceProduct,
     ReingestGroup,
-    CURATOR_MANIFEST_FILENAME,
 )
 from ingest_envoy.schema import AbstractTextFile
 from ingest_envoy.std_img_manifest_utils import ImageIngestionProductsFinder
 from ingest_envoy.std_obs_manifest_utils import ObservationIngestionProductsFinder
 from ingest_envoy.utilities import (
     AncillaryProductType,
+    CuratorType,
     IngestionManifestException,
     NoScienceProductException,
     ScienceProductType,
     Telescope,
     find_output_tars,
     find_weblogs,
-    CuratorType,
 )
 
 logger = logging.getLogger(__name__)
@@ -82,8 +82,11 @@ class IngestionManifest(ManifestComponentIF):
         # every non-curator manifest has at least one output group
         output_group: OutputGroup | None,
         # image manifest has this:
-        additional_metadata: AbstractTextFile = None,
-        filename: str = None,
+        additional_metadata: Optional[AbstractTextFile] = None,
+        filename: Optional[str] = None,
+        destination_dir: Optional[Path] = None,
+        # Alternative to staging_source_dir as a way of getting files
+        file_list: Optional[list[str]] = None,
     ):
         self.staging_source_dir = staging_source_dir
         self.sp_type = sp_type
@@ -93,44 +96,19 @@ class IngestionManifest(ManifestComponentIF):
         self.output_group = output_group
         self.telescope = telescope
         self.filename = filename
+        self.destination_dir = destination_dir
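+        # default: write the manifest alongside the staged files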
+        if destination_dir is None:
+            self.destination_dir = self.staging_source_dir
 
         # Check if NGAS ingestion should be enabled for all manifests in this environment
         self.ngas_ingest = self.get_ngas_flag()
 
         self.parameters = self.build_ingest_parameters(additional_metadata)
-        if staging_source_dir is not None:
+        if file_list:
+            self.files_found = file_list
+        elif staging_source_dir is not None:
             # we are not running curation, find files
-            self.files_found = [file for file in self.staging_source_dir.iterdir()]
-
-    def get_ngas_flag(self) -> bool:
-        """Determine and retrieve the correct ingestNGAS flag for this type of ingestion/curation"""
-
-        match self.sp_type:
-            case ScienceProductType.EXEC_BLOCK:
-                flag = CapoConfig().getboolean("archive-ingestion.ingestNGAS.observation")
-            case ScienceProductType.EVLA_CAL:
-                flag = CapoConfig().getboolean("archive-ingestion.ingestNGAS.calibration")
-            case ScienceProductType.IMAGE | ScienceProductType.VLASS_SECI:
-                flag = CapoConfig().getboolean("archive-ingestion.ingestNGAS.imaging")
-            case _:
-                flag = True
-
-        return flag
-
-    def get_ngas_flag(self) -> bool:
-        """Determine and retrieve the correct ingestNGAS flag for this type of ingestion/curation"""
-
-        match self.sp_type:
-            case ScienceProductType.EXEC_BLOCK:
-                flag = CapoConfig().getboolean("archive-ingestion.ingestNGAS.observation")
-            case ScienceProductType.EVLA_CAL:
-                flag = CapoConfig().getboolean("archive-ingestion.ingestNGAS.calibration")
-            case ScienceProductType.IMAGE | ScienceProductType.VLASS_SECI:
-                flag = CapoConfig().getboolean("archive-ingestion.ingestNGAS.imaging")
-            case _:
-                flag = True
-
-        return flag
+            self.files_found = [file for file in staging_source_dir.iterdir()]
 
     def get_ngas_flag(self) -> bool:
         """Determine and retrieve the correct ingestNGAS flag for this type of ingestion/curation"""
@@ -147,7 +125,7 @@ class IngestionManifest(ManifestComponentIF):
 
         return flag
 
-    def build_ingest_parameters(self, additional_metadata: AbstractTextFile):
+    def build_ingest_parameters(self, additional_metadata: Optional[AbstractTextFile]):
         """
         Make the "parameters" section of the manifest
 
@@ -160,38 +138,15 @@ class IngestionManifest(ManifestComponentIF):
             ScienceProductType.EXEC_BLOCK,
         ]:
             raise NotImplementedError()
+
+        params = ManifestParameters(
+            telescope=self.telescope,
+            ngas_ingest=self.ngas_ingest,
+            staging_source_dir=self.staging_source_dir,
+            additional_metadata=additional_metadata,
+        )
 
         if self.reingest is not None:
-            params = ManifestParameters(
-                telescope=self.telescope,
-                ngas_ingest=False,
-                staging_source_dir=self.staging_source_dir,
-            )
-
-        elif additional_metadata:
-            params = ManifestParameters(
-                telescope=self.telescope,
-                ngas_ingest=self.ngas_ingest,
-                calibrate=False,
-                staging_source_dir=self.staging_source_dir,
-                additional_metadata=additional_metadata,
-            )
-
-        elif self.sp_type == ScienceProductType.IMAGE:
-            params = ManifestParameters(
-                telescope=self.telescope,
-                ngas_ingest=self.ngas_ingest,
-                calibrate=False,
-                staging_source_dir=self.staging_source_dir,
-            )
-
-        else:
-            params = ManifestParameters(
-                telescope=self.telescope,
-                ngas_ingest=self.ngas_ingest,
-                staging_source_dir=self.staging_source_dir,
-            )
-
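+            # reingest (curation) manifests never trigger NGAS ingestion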
+            params.ngas_ingest = False
         return params
 
     def __eq__(self, other):
@@ -220,9 +175,9 @@ class IngestionManifest(ManifestComponentIF):
         """
         me_dict = self.to_dict()
         if self.reingest is not None:
-            output_path = Path.cwd() / CURATOR_MANIFEST_FILENAME
+            output_path = self.destination_dir / CURATOR_MANIFEST_FILENAME
         else:
-            output_path = self.staging_source_dir / MANIFEST_FILENAME
+            output_path = self.destination_dir / MANIFEST_FILENAME
 
         to_write = json.dumps(me_dict, indent=4)
         with open(output_path, "w") as out:
@@ -246,7 +201,7 @@ class IngestionManifest(ManifestComponentIF):
         if IngestionManifestKey.REINGEST.value in me_dict and me_dict[IngestionManifestKey.REINGEST.value] is not None:
             to_return[IngestionManifestKey.REINGEST.value] = me_dict[IngestionManifestKey.REINGEST.value].to_dict()
 
-        # curator manifests have no output groups
+        # partial-curation manifests have no output groups
         if (
             IngestionManifestKey.OUTPUT_GROUP.value in me_dict
             and me_dict[IngestionManifestKey.OUTPUT_GROUP.value] is not None
@@ -273,13 +228,20 @@ class IngestionManifestBuilder:
 
     def __init__(
         self,
-        staging_source_dir: Path | None,  # partial curation has no source path
+        manifest_source_dir: Path | None,  # partial curation has no source path
         sp_type: str,
-        locator: str | None,  # initial observation ingestion has no locator
         telescope: str,
-        additional_metadata: AbstractTextFile = None,
-        filename: str = None,
-        curate: (CuratorType, str, List[str]) = None,
+        locator: Optional[str] = None,  # initial observation ingestion and full curation have no locator
+        product_group: Optional[int] = None,  # used for full curation
+        additional_metadata: Optional[AbstractTextFile] = None,
+        filename: Optional[str] = None,
+        # (curator_type, target_list)
+        curate: Optional[tuple[CuratorType, Optional[List[str]]]] = None,
+        # If None, self.manifest_destination_dir = manifest_source_dir
+        manifest_destination_dir: Optional[Path] = None,
+        # alternative to manifest_source_dir and curation_source for specifying source files
+        file_list: Optional[list[str]] = None,
+        input_group_locator: Optional[str] = None,
     ):
         # get the telescope
         self.telescope = Telescope(telescope)
@@ -287,7 +249,7 @@ class IngestionManifestBuilder:
             raise ValueError("telescope is required.")
 
         # get the directory containing the files to be ingested
-        self.staging_source_dir = staging_source_dir
+        self.manifest_source_dir = manifest_source_dir
 
         # get the additional metadata, if any (EVLA CAL ingestion will have none)
         self.additional_metadata = additional_metadata
@@ -308,25 +270,43 @@ class IngestionManifestBuilder:
             raise NotImplementedError(f"Don't know yet how to build a {self.sp_type.value} manifest")
 
         if self.curation_type is not None:
-            if self.curation_type not in [CuratorType.PARTIAL]:
+            if self.curation_type not in [CuratorType.PARTIAL, CuratorType.FULL]:
                 raise NotImplementedError(
                     f"Don't know how to build a {self.curation_type.value} curation {self.sp_type} manifest"
                 )
-            self.curation_source = Path(curate[1]) if curate and curate[1] else None
-            self.target_list = curate[2]
+            self.target_list = curate[1]
+            if self.curation_type == CuratorType.FULL and self.target_list:
+                raise IngestionManifestException(
+                    f"Expected targets=None for full curation, got targets={self.target_list}"
+                )
 
-        if locator is not None:
-            # we are not running observation ingestion, use a locator
-            self.locator = locator
+        # locator may be None: initial observation ingestion and full curation have none
+        self.locator = locator
+        self.product_group = product_group
+
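+        # locator to use for the input group's science product; defaults to the main locator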
+        self.input_group_locator = input_group_locator
+        if self.input_group_locator is None:
+            self.input_group_locator = locator
+
+        # directory to put the manifest in
+        self.manifest_destination_dir = manifest_destination_dir
+        if self.manifest_destination_dir is None:
+            self.manifest_destination_dir = manifest_source_dir
+        if self.manifest_destination_dir is None:
+            raise IngestionManifestException(
+                f"IngestionManifestBuilder: Expected a directory to house the manifest, found instead staging_source_dir={self.manifest_source_dir}, manifest_destination_dir={self.manifest_destination_dir}"
+            )
 
-        if staging_source_dir is not None:
-            # we are not running curation, look for files
-            self.files_found = [file for file in staging_source_dir.iterdir()]
+        if file_list:
+            self.files_found = [Path(file) for file in file_list]
+        elif self.manifest_source_dir is not None:
+            # we are not running partial curation, look for files
+            self.files_found = [file for file in self.manifest_source_dir.iterdir()]
             if len(self.files_found) == 0:
-                raise IngestionManifestException(f"No ingestion files found at {staging_source_dir}")
+                raise IngestionManifestException(f"No ingestion files found at {manifest_source_dir}")
+        # Partial curation doesn't require source files; productfetcher can fetch them via their SPLs
 
-        if filename is not None:
-            self.filename = filename
+        self.filename = filename
 
     def build(self) -> Tuple[IngestionManifest, Path]:
         """
@@ -336,30 +316,31 @@ class IngestionManifestBuilder:
         :return: the ingestion manifest and the file containing its JSON
         """
         if self.curation_type == CuratorType.PARTIAL:
-            return self._build_curation_manifest()
+            return self._build_partial_curation_manifest()
 
         if self.sp_type == ScienceProductType.EVLA_CAL:
             return self._build_evla_cal_manifest()
         elif self.sp_type == ScienceProductType.EXEC_BLOCK:
-            return self._build_observation_manifest()
+            return self._build_observation_manifest(self.curation_type == CuratorType.FULL)
 
-        return self._build_image_manifest()
+        return self._build_image_manifest(self.curation_type == CuratorType.FULL)
 
-    def _build_curation_manifest(self) -> (IngestionManifest, Path):
+    def _build_partial_curation_manifest(self) -> (IngestionManifest, Path):
         """
-        Build a manifest for curator
+        Build a manifest for partial curation
         Partial curation is simple, only parameters and reingest groups are needed
 
         :return: the manifest file object and path
         """
         manifest = IngestionManifest(
             telescope=self.telescope,
-            staging_source_dir=self.curation_source,
+            staging_source_dir=self.manifest_source_dir,
             locator=self.locator,
             sp_type=self.sp_type,
             reingest_group=self._build_reingest_group(),
             input_group=None,
             output_group=None,
+            destination_dir=self.manifest_destination_dir,
         )
 
         manifest_file = manifest.write()
@@ -372,17 +353,17 @@ class IngestionManifestBuilder:
             telescope=self.telescope,
             locator=self.locator,
             sp_type=self.sp_type,
-            staging_source_dir=self.staging_source_dir,
+            staging_source_dir=self.manifest_source_dir,
             reingest_group=None,
             input_group=self._build_input_group(),
             output_group=self._build_evla_cal_output_group(),
+            destination_dir=self.manifest_destination_dir,
         )
 
         # We can't create the ingestion artifacts tar quite yet,
         # because it will contain the yet-to-be-written manifest itself
         # (required for ingestion, evidently)
-        artifacts_filename = self._build_artifacts_filename()
-        artifacts_ap = AncillaryProduct(AncillaryProductType.INGESTION_ARTIFACTS, filename=artifacts_filename)
+        artifacts_ap = self._build_artifacts_product()
         if artifacts_ap not in manifest.output_group.ancillary_products:
             manifest.output_group.ancillary_products.append(artifacts_ap)
 
@@ -393,35 +374,35 @@ class IngestionManifestBuilder:
             manifest.output_group.ancillary_products.append(weblog_ap)
 
         manifest_file = manifest.write()
-        artifacts_file = self.staging_source_dir / artifacts_filename
-        self.write_ingestion_artifacts_tar(artifacts_file)
+        self.write_ingestion_artifacts_tar(self.manifest_destination_dir / artifacts_ap.filename)
 
         return manifest, manifest_file
 
-    def _build_observation_manifest(self) -> (IngestionManifest, Path):
+    def _build_observation_manifest(self, is_full_curation: bool = False) -> tuple[IngestionManifest, Path]:
+        reingest_group = None
+        if is_full_curation:
+            reingest_group = self._build_reingest_group()
         # create the manifest
         manifest = IngestionManifest(
             telescope=self.telescope,
             locator=None,
             sp_type=self.sp_type,
-            staging_source_dir=self.staging_source_dir,
-            reingest_group=None,
+            staging_source_dir=self.manifest_source_dir,
+            reingest_group=reingest_group,
             input_group=InputGroup([]),
             output_group=self._build_observation_output_group(),
             filename=self.filename,
+            destination_dir=self.manifest_destination_dir,
         )
 
-        artifacts_filename = self._build_artifacts_filename()
-        artifacts_ap = AncillaryProduct(AncillaryProductType.INGESTION_ARTIFACTS, filename=artifacts_filename)
-        if artifacts_ap not in manifest.output_group.ancillary_products:
-            manifest.output_group.ancillary_products.append(artifacts_ap)
-
         if not manifest.output_group.ancillary_products:
             manifest.output_group.ancillary_products = []
+        artifacts_ap = self._build_artifacts_product()
+        if artifacts_ap not in manifest.output_group.ancillary_products:
+            manifest.output_group.ancillary_products.append(artifacts_ap)
 
         manifest_file = manifest.write()
-        artifacts_file = self.staging_source_dir / artifacts_filename
-        self.write_ingestion_artifacts_tar(artifacts_file)
+        self.write_ingestion_artifacts_tar(self.manifest_destination_dir / artifacts_ap.filename)
 
         return manifest, manifest_file
 
@@ -432,36 +413,46 @@ class IngestionManifestBuilder:
 
         :return: initial weblog, if any
         """
-        for file in [file for file in self.staging_source_dir.iterdir()]:
+        for file in self.files_found:
             if file.name == INIT_WEBLOG_FILENAME:
                 return file
 
         return None
 
-    def _build_image_manifest(self) -> (IngestionManifest, Path):
+    def _build_image_manifest(self, is_full_curation: bool = False) -> tuple[IngestionManifest, Path]:
         """
         Image manifest has additional_metadata, and output group is way more complicated
         :return:
         """
-
+        reingest_group = None
+        if is_full_curation:
+            reingest_group = self._build_reingest_group()
         # create the manifest
         manifest = IngestionManifest(
             telescope=self.telescope,
             locator=self.locator,
             additional_metadata=self.additional_metadata,
             sp_type=self.sp_type,
-            staging_source_dir=self.staging_source_dir,
-            reingest_group=None,
+            staging_source_dir=self.manifest_source_dir,
+            reingest_group=reingest_group,
             input_group=self._build_input_group(),
             output_group=self._build_imaging_output_group(),
+            destination_dir=self.manifest_destination_dir,
         )
 
-        artifacts_file = self.staging_source_dir / self._build_artifacts_filename()
-        artifacts_ap = AncillaryProduct(type=AncillaryProductType.INGESTION_ARTIFACTS, filename=artifacts_file.name)
+        if is_full_curation:
+            # find the existing artifacts tar file name
+            artifacts_ap = self._find_existing_record(self.files_found)
+        else:
+            # build new artifacts tar
+            artifacts_ap = self._build_artifacts_product()
+
         if artifacts_ap not in manifest.output_group.ancillary_products:
             manifest.output_group.ancillary_products.append(artifacts_ap)
         manifest_file = manifest.write()
-        self.write_ingestion_artifacts_tar(artifacts_file)
+        if not is_full_curation:
+            # we're running initial ingestion, make a new artifacts tar
+            self.write_ingestion_artifacts_tar(self.manifest_destination_dir / artifacts_ap.filename)
 
         return manifest, manifest_file
 
@@ -472,9 +463,10 @@ class IngestionManifestBuilder:
         :return:
         """
         return ReingestGroup(
-            locator=self.locator,
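+            # full curation identifies its target by product_group; fall back to the locator otherwise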
+            locator=self.product_group if self.product_group else self.locator,
             product_type=self.sp_type,
             targets=self.target_list,
+            # product_group=self.product_group,
         )
 
     def _build_input_group(self) -> InputGroup:
@@ -487,7 +479,7 @@ class IngestionManifestBuilder:
         # N.B. this is sufficient for most types of ingestion,
         # but ALMA CALs will have multiple EB SPs, identified only by locator,
         # and VLBAs have no input group at all.
-        sp_in = InputScienceProduct(locator=self.locator)
+        sp_in = InputScienceProduct(locator=self.input_group_locator)
 
         return InputGroup([sp_in])
 
@@ -500,22 +492,22 @@ class IngestionManifestBuilder:
         """
 
         # find science product (we expect just one for this SP type)
-        tars_found = find_output_tars(self.files_found, self.staging_source_dir)
+        tars_found = find_output_tars(self.files_found, self.manifest_source_dir)
 
         sci_prod = None
         for file in tars_found:
-            sci_prod = OutputScienceProduct(product_type=self.sp_type, filename=file.name)
+            sci_prod = OutputScienceProduct(type=self.sp_type, filename=file.name)
             break
 
         if sci_prod:
-            weblog_files = find_weblogs(self.files_found, self.staging_source_dir)
+            weblog_files = find_weblogs(self.files_found, self.manifest_source_dir)
             weblogs = []
             for file in weblog_files:
                 ap = AncillaryProduct(type=AncillaryProductType.PIPELINE_WEBLOG, filename=file.name)
                 if ap not in weblogs:
                     weblogs.append(ap)
         else:
-            raise NoScienceProductException(f">>> NO SCIENCE PRODUCT FOUND in {self.staging_source_dir}")
+            raise NoScienceProductException(f">>> NO SCIENCE PRODUCT FOUND in {self.manifest_source_dir}")
 
         return OutputGroup(science_products=[sci_prod], ancillary_products=weblogs)
 
@@ -527,7 +519,7 @@ class IngestionManifestBuilder:
         :return:
         """
 
-        products_finder = ImageIngestionProductsFinder(self.staging_source_dir, self.sp_type)
+        products_finder = ImageIngestionProductsFinder(self.files_found, self.sp_type)
         science_products = products_finder.output_science_products
         ancillary_products = products_finder.ancillary_products
 
@@ -541,7 +533,7 @@ class IngestionManifestBuilder:
         :return:
         """
 
-        products_finder = ObservationIngestionProductsFinder(self.staging_source_dir, self.sp_type)
+        products_finder = ObservationIngestionProductsFinder(self.files_found, self.sp_type)
         science_products = products_finder.output_science_products
         ancillary_products = products_finder.ancillary_products
 
@@ -558,20 +550,39 @@ class IngestionManifestBuilder:
         timestamp = format_timestamp(current_time)
         return f"{INGESTION_ARTIFACTS_NAME}{timestamp}{TARFILE_EXT}"
 
+    @staticmethod
+    def _build_artifacts_product() -> AncillaryProduct:
+        return AncillaryProduct(
+            AncillaryProductType.INGESTION_ARTIFACTS, IngestionManifestBuilder._build_artifacts_filename()
+        )
+
+    @staticmethod
+    def _find_existing_record(available_files: list) -> Optional[AncillaryProduct]:
+        """
+        Find an existing artifacts tar for curation
+
+        :param available_files: existing files list
+        :return: AncillaryProduct for the existing tar, or None if no artifacts tar is present
+        """
+        for file in available_files:
+            if INGESTION_ARTIFACTS_NAME in file.name:
+                return AncillaryProduct(AncillaryProductType.INGESTION_ARTIFACTS, file.name)
+
     def write_ingestion_artifacts_tar(self, artifacts_path: Path) -> tarfile.TarFile:
         """
-        Take the list of files and build a tar for inclusion into the archive.
+        Take the list of files and write a tar file for inclusion in the archive.
         This happens in the staging area for ingestion.
         The EVLA CAL tar will contain just the manifest.
 
-        :return: a .tar archive of the ingestion artifacts
+        :param artifacts_path: Path to create the resulting tar file at
+        :return: tar file of the ingestion artifacts
         """
 
         addl_md_file = None
         if self.additional_metadata:
             # find the additional metadata
             addl_md_filename = self.additional_metadata.filename
-            addl_md_file = self.staging_source_dir / addl_md_filename
+            addl_md_file = self.manifest_destination_dir / addl_md_filename
 
         with tarfile.open(artifacts_path, "w") as ingestion_artifacts_tar:
             if addl_md_file:
@@ -579,7 +590,13 @@ class IngestionManifestBuilder:
 
             # The manifest file itself is considered an ingestion artifact.
             # (It's turtles all the way down.)
-            manifest_file = self.staging_source_dir / MANIFEST_FILENAME
+            manifest_file = self.manifest_destination_dir / MANIFEST_FILENAME
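+            # curation manifests are written under CURATOR_MANIFEST_FILENAME; fall back to it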
+            if not manifest_file.exists():
+                manifest_file = self.manifest_destination_dir / CURATOR_MANIFEST_FILENAME
+            if not manifest_file.exists():
+                raise FileNotFoundError(
+                    f"No manifest (i.e. {MANIFEST_FILENAME} or {CURATOR_MANIFEST_FILENAME}) found in {self.manifest_destination_dir}"
+                )
             ingestion_artifacts_tar.add(manifest_file)
 
         return ingestion_artifacts_tar
diff --git a/apps/cli/executables/pexable/ingest_envoy/ingest_envoy/launchers.py b/apps/cli/executables/pexable/ingest_envoy/ingest_envoy/launchers.py
index 774e732de73909d4123d8530f7c7125013067e1a..973c6a7b9381fc65785fd2d45197a65460087840 100644
--- a/apps/cli/executables/pexable/ingest_envoy/ingest_envoy/launchers.py
+++ b/apps/cli/executables/pexable/ingest_envoy/ingest_envoy/launchers.py
@@ -23,14 +23,14 @@ from typing import Union
 
 from ingest_envoy.collectors import (
     ImageCollector,
-    SECICollector,
     ObservationCollector,
+    SECICollector,
     collect_image_metadata,
 )
 from ingest_envoy.ingestion_manifest import IngestionManifestBuilder
 from ingest_envoy.interfaces import LauncherIF
 from ingest_envoy.schema import AbstractTextFile
-from ingest_envoy.utilities import IngestType, VLASSIngestType, CuratorType
+from ingest_envoy.utilities import CuratorType, IngestType, VLASSIngestType
 
 
 def trigger_ingest(real_ingest: bool, staging_dir: str, bin_dir: str = ".") -> int:
@@ -54,19 +54,19 @@ def trigger_ingest(real_ingest: bool, staging_dir: str, bin_dir: str = ".") -> i
         return 0
 
 
-def trigger_curator(real_ingest: bool, bin_dir: str = ".") -> int:
+def trigger_curator(real_ingest: bool, manifest_dir: Path, bin_dir: str = ".") -> int:
     """
     Run curator
 
     :param real_ingest: real curation or testing only?
+    :param manifest_dir: The directory containing the manifest
     :param bin_dir: directory containing the curator utility
     :return: return code
     """
 
     if real_ingest:
-        # manifest location is known, no need for paths
         curator_process = subprocess.run(
-            [f"{bin_dir}/curator", "-m", f"{Path.cwd() / 'manifest.json'}"],
+            [f"{bin_dir}/curator", "-m", f"{manifest_dir / 'manifest.json'}"],
             stdout=sys.stdout,
             stderr=sys.stderr,
         )
@@ -161,7 +161,12 @@ class IngestCalibrationLauncher(LauncherIF):
         spl = self.parameters["spl"]
         telescope = self.parameters["telescope"]
 
-        IngestionManifestBuilder(Path(self.staging_source_dir), self.sci_product_type, spl, telescope).build()
+        IngestionManifestBuilder(
+            staging_source_dir=Path(self.staging_source_dir),
+            sp_type=self.sci_product_type,
+            locator=spl,
+            telescope=telescope,
+        ).build()
 
 
 class IngestImageLauncher(LauncherIF):
@@ -225,11 +230,11 @@ class IngestImageLauncher(LauncherIF):
         additional_metadata = additional_file
 
         IngestionManifestBuilder(
-            Path(self.staging_source_dir),
-            self.sci_product_type,
-            spl,
-            telescope,
-            additional_metadata,
+            staging_source_dir=Path(self.staging_source_dir),
+            sp_type=self.sci_product_type,
+            locator=spl,
+            telescope=telescope,
+            additional_metadata=additional_metadata,
         ).build()
 
 
@@ -287,10 +292,10 @@ class IngestObservationLauncher(LauncherIF):
         filename = self.parameters["filename"]
 
         IngestionManifestBuilder(
-            Path(self.staging_source_dir),
-            self.sci_product_type,
-            None,
-            telescope,
+            staging_source_dir=Path(self.staging_source_dir),
+            sp_type=self.sci_product_type,
+            locator=None,
+            telescope=telescope,
             filename=filename,
         ).build()
 
@@ -302,6 +307,8 @@ class CuratorLauncher(LauncherIF):
         self.logger = logging.getLogger("ingest_envoy")
         self.curator_type = arg_type
         self.sci_product_type = parameters["product_type"]
+        self.curation_source = parameters["curation_source"]
+        self.manifest_destination_dir = Path.cwd()
         self.parameters = parameters
 
     def prepare_for_launch(self):
@@ -326,13 +333,20 @@ class CuratorLauncher(LauncherIF):
         telescope = self.parameters["telescope"]
         curation_source = self.parameters["curation_source"]
         target_list = self.parameters["target_list"]
+        file_list = self.parameters["file_list"]
+        product_group = self.parameters["product_group_id"]
+        input_group_locator = self.parameters["input_group_locator"]
 
         IngestionManifestBuilder(
-            staging_source_dir=None,
+            manifest_source_dir=curation_source,
             sp_type=self.sci_product_type,
             locator=spl,
             telescope=telescope,
-            curate=(self.curator_type, curation_source, target_list),
+            curate=(self.curator_type, target_list),
+            manifest_destination_dir=self.manifest_destination_dir,
+            file_list=file_list,
+            product_group=product_group,
+            input_group_locator=input_group_locator,
         ).build()
 
     def launch(self) -> int:
@@ -345,4 +359,6 @@ class CuratorLauncher(LauncherIF):
         self.prepare_for_launch()
 
         self.logger.info("Running curator!")
-        return trigger_curator(self.parameters["useIngest"], self.parameters["script_location"])
+        return trigger_curator(
+            self.parameters["useIngest"], self.manifest_destination_dir, self.parameters["script_location"]
+        )
diff --git a/apps/cli/executables/pexable/ingest_envoy/ingest_envoy/manifest_components.py b/apps/cli/executables/pexable/ingest_envoy/ingest_envoy/manifest_components.py
index 328cbc3a099342896ff4197b8f724758bf6da79d..6710efd3b94506d936f8720c6b34562ea4aa93ac 100644
--- a/apps/cli/executables/pexable/ingest_envoy/ingest_envoy/manifest_components.py
+++ b/apps/cli/executables/pexable/ingest_envoy/ingest_envoy/manifest_components.py
@@ -19,9 +19,10 @@
 
 import abc
 import re
+from dataclasses import dataclass
 from enum import Enum
 from pathlib import Path
-from typing import Dict, List, Union
+from typing import Dict, List, Optional, Union
 
 from ingest_envoy.schema import AbstractTextFile
 from ingest_envoy.utilities import AncillaryProductType, ScienceProductType, Telescope
@@ -56,7 +57,6 @@ class ParamsKey(Enum):
     TELESCOPE = "telescope"
     REINGEST = "reingest"
     NGAS_INGEST = "ngas_ingest"
-    CALIBRATE = "calibrate"
     INGESTION_PATH = "ingestion_path"
     ADDITIONAL_METADATA = "additional_metadata"
     COLLECTION_METADATA = "additional_metadata"  # needed for VLASS, realfast, elwa, alfalfa. Not yet implemented
@@ -68,6 +68,7 @@ class ReingestKey(Enum):
     TARGETS = "targets"
     LOCATOR = "locator"
     TYPE = "type"
+    PRODUCT_GROUP = "product_group"
 
 
 class ManifestComponentIF(abc.ABC):
@@ -86,15 +87,11 @@ class ManifestComponentIF(abc.ABC):
         """
 
 
+@dataclass
 class InputScienceProduct(ManifestComponentIF):
     """Simplest type of science product: has only a locator"""
 
-    def __init__(self, locator: str):
-        self.locator = locator
-
-    def __eq__(self, other):
-        if isinstance(other, InputScienceProduct):
-            return other.locator == self.locator
+    locator: str
 
     def to_dict(self) -> JSON:
         """
@@ -105,17 +102,11 @@ class InputScienceProduct(ManifestComponentIF):
         return {"locator": self.locator}
 
 
+@dataclass
 class InputGroup(ManifestComponentIF):
     """Generic ingestion manifest input group"""
 
-    def __init__(self, science_products: List[InputScienceProduct]):
-        self.science_products = science_products
-
-    def __eq__(self, other):
-        if isinstance(other, InputGroup):
-            return other.science_products == self.science_products
-
-        return False
+    science_products: List[InputScienceProduct]
 
     def to_dict(self) -> JSON:
         """
@@ -134,43 +125,17 @@ class InputGroup(ManifestComponentIF):
         return to_return
 
 
+@dataclass
 class ManifestParameters(ManifestComponentIF):
     """Represents "parameters" section of ingestion manifest.
     ASSUMPTIONS:
-    * EVLA CAL manifest has no "calibrate" parameter
     * "ngas_ingest" is always True (per our testing examples)
     """
 
-    def __init__(
-        self,
-        telescope: Telescope,
-        ngas_ingest: bool,
-        staging_source_dir: Path | None,
-        additional_metadata: AbstractTextFile = None,
-        calibrate: bool = None,
-    ):
-        self.telescope = telescope
-
-        self.ngas_ingest = ngas_ingest
-        if calibrate is not None:
-            self.calibrate = calibrate
-
-        if staging_source_dir is not None:
-            self.staging_source_dir = staging_source_dir
-        self.additional_metadata = additional_metadata
-
-    def __eq__(self, other):
-        if isinstance(other, ManifestParameters):
-            return (
-                other.telescope.value == self.telescope.value
-                and other.ngas_ingest == self.ngas_ingest
-                and other.calibrate == self.calibrate
-                and other.staging_source_dir == self.staging_source_dir
-                and other.additional_metadata.filename == self.additional_metadata.filename
-                and other.additional_metadata.content == self.additional_metadata.content
-            )
-
-        return False
+    telescope: Telescope
+    ngas_ingest: bool
+    staging_source_dir: Optional[Path]
+    additional_metadata: Optional[AbstractTextFile] = None
 
     def to_dict(self) -> JSON:
         """
@@ -184,43 +149,22 @@ class ManifestParameters(ManifestComponentIF):
             # rather than "True" and "False"
             ParamsKey.NGAS_INGEST.value: self.ngas_ingest,
         }
-        if hasattr(self, "staging_source_dir"):
+        if self.staging_source_dir is not None:
             json_dict[ParamsKey.INGESTION_PATH.value] = str(self.staging_source_dir)
-        if hasattr(self, "calibrate"):
-            json_dict[ParamsKey.CALIBRATE.value] = self.calibrate
         if self.additional_metadata:
             json_dict[ParamsKey.ADDITIONAL_METADATA.value] = self.additional_metadata.filename
 
         return json_dict
 
 
+@dataclass
 class AncillaryProduct(ManifestComponentIF):
     """Represents an ancillary product in an ingestion manifest"""
 
-    def __init__(
-        self,
-        type: AncillaryProductType,
-        filename: str,
-        science_associate: str = None,
-        group_with: str = None,
-    ):
-        self.type = type
-        self.filename = filename
-        self.science_associate = science_associate
-        self.group_with = group_with
-
-        # make this an ancillary to a particular science product (assumes locator string)
-        if science_associate:
-            self.science_associate = science_associate
-        # make this an ancillary to the group of a science product (assumes locator string)
-        if group_with:
-            self.group_with = group_with
-
-    def __eq__(self, other):
-        if isinstance(other, AncillaryProduct) and other.type == self.type and other.filename == self.filename:
-            return other.group_with == self.group_with and other.science_associate == self.science_associate
-
-        return False
+    type: AncillaryProductType
+    filename: str
+    science_associate: Optional[str] = None
+    group_with: Optional[str] = None
 
     def __str__(self):
         return f"{self.filename}: {self.type.value}"
@@ -241,28 +185,13 @@ class AncillaryProduct(ManifestComponentIF):
         return clean_dict
 
 
+@dataclass
 class OutputScienceProduct(ManifestComponentIF):
     """Generic science product contained in manifest output group"""
 
-    def __init__(
-        self,
-        product_type: Union[ScienceProductType, AncillaryProductType],
-        filename: str,
-        ancillary_products: List[AncillaryProduct] = None,
-    ):
-        self.type = product_type
-        self.filename = filename
-        self.ancillary_products = ancillary_products
-
-    def __eq__(self, other):
-        if isinstance(other, OutputScienceProduct):
-            return (
-                other.type == self.type
-                and other.filename == self.filename
-                and other.ancillary_products == self.ancillary_products
-            )
-
-        return False
+    type: Union[ScienceProductType, AncillaryProductType]
+    filename: str
+    ancillary_products: Optional[List[AncillaryProduct]] = None
 
     def __str__(self):
         return f"{Path(self.filename).name}: {self.type.value}, {len(self.ancillary_products)} ancillary products"
@@ -275,24 +204,12 @@ class OutputScienceProduct(ManifestComponentIF):
         return json_dict
 
 
+@dataclass
 class OutputGroup(ManifestComponentIF):
     """Generic ingestion manifest output group"""
 
-    def __init__(
-        self,
-        science_products: List[OutputScienceProduct],
-        ancillary_products: List[AncillaryProduct] = None,
-    ):
-        self.science_products = science_products
-        self.ancillary_products = ancillary_products
-
-    def __eq__(self, other):
-        if isinstance(other, OutputGroup):
-            return (
-                other.science_products == self.science_products and other.ancillary_products == self.ancillary_products
-            )
-
-        return False
+    science_products: List[OutputScienceProduct]
+    ancillary_products: Optional[List[AncillaryProduct]] = None
 
     def to_dict(self) -> JSON:
         """
@@ -314,39 +231,30 @@ class OutputGroup(ManifestComponentIF):
         return me_dict
 
 
+@dataclass
 class ReingestGroup(ManifestComponentIF):
     """Generic manifest reingest group"""
 
-    def __init__(
-        self,
-        locator: str,
-        product_type: ScienceProductType,
-        targets: List[str] = None,  # optional: presence determines Partial vs Full curation.
-    ):
-        self.locator = locator
-        self.product_type = product_type
-        self.targets = targets
-
-    def __eq__(self, other):
-        if isinstance(other, ReingestGroup):
-            return (
-                other.locator == self.locator
-                and other.product_type == self.product_type
-                and other.targets == self.targets
-            )
-
-        return False
+    product_type: ScienceProductType
+    locator: Optional[str] = None  # Replaced by product_group for full curation
+    product_group: Optional[int] = None  # Replaced by locator for partial curation
+    targets: Optional[List[str]] = None  # optional: presence determines Partial vs Full curation.
 
     def to_dict(self) -> JSON:
         """
-        Turn me into a json-ifiable dict
+        Turn me into a json-ifiable dict, excluding None fields
 
         :return: dict
         """
-        json_dict = {
-            ReingestKey.TARGETS.value: self.targets,
-            ReingestKey.LOCATOR.value: self.locator,
-            ReingestKey.TYPE.value: self.product_type.value,
-        }
+        key_value_pairs = [
+            (ReingestKey.TARGETS.value, self.targets),
+            (ReingestKey.LOCATOR.value, self.locator),
+            (ReingestKey.TYPE.value, self.product_type.value),
+            (ReingestKey.PRODUCT_GROUP.value, self.product_group),
+        ]
+        json_dict = dict()
+        for key, value in key_value_pairs:
+            if value is not None:
+                json_dict[key] = value
 
         return json_dict
diff --git a/apps/cli/executables/pexable/ingest_envoy/ingest_envoy/solicitor.py b/apps/cli/executables/pexable/ingest_envoy/ingest_envoy/solicitor.py
index 4696b7ff6df2a598ebd3a45c7d385b8ee305777e..7925088ad634fb239e6a1f716a8a4d7313460b30 100644
--- a/apps/cli/executables/pexable/ingest_envoy/ingest_envoy/solicitor.py
+++ b/apps/cli/executables/pexable/ingest_envoy/ingest_envoy/solicitor.py
@@ -25,7 +25,7 @@ import pathlib
 from typing import List, Union
 
 import requests
-from ingest_envoy.utilities import IngestType, VLASSIngestType, CuratorType
+from ingest_envoy.utilities import CuratorType, IngestType, VLASSIngestType
 
 INVALID_INITIAL_VERSION = "Initial version not valid for ingest"
 
@@ -236,15 +236,15 @@ class Solicitor:
         params = {
             "telescope": self.metadata["projectMetadata"]["telescope"],  # all, needed by manifest generator
             "project": self.metadata["projectMetadata"]["projectCode"],  # needed for post ingestion messaging
-            "spl": self.metadata["product_locator"],
+            "spl": self.metadata.get("product_locator"),
+            "input_group_locator": self.metadata.get("input_group_locator"),
             "product_type": self.metadata["product_type"],
-            "curation_source": self.metadata["data_location"]
-            if "data_location" in self.metadata
-            else None,  # not required for curation
+            "curation_source": self.metadata.get("data_location"),  # not required for curation
+            "file_list": list(filter(None, self.metadata.get("file_list").split(","))),
+            "product_group_id": self.metadata.get("product_group_id"),
             "target_list": targets,
         }
-
-        return {**params}
+        return params
 
     def solicit_seci_params(self) -> dict:
         """
diff --git a/apps/cli/executables/pexable/ingest_envoy/ingest_envoy/std_img_manifest_utils.py b/apps/cli/executables/pexable/ingest_envoy/ingest_envoy/std_img_manifest_utils.py
index d4d6d26497f71a5c7769259e982876b6b8afe0d9..4dbc3468a784a499e3cb67240d6a2867a93dab13 100644
--- a/apps/cli/executables/pexable/ingest_envoy/ingest_envoy/std_img_manifest_utils.py
+++ b/apps/cli/executables/pexable/ingest_envoy/ingest_envoy/std_img_manifest_utils.py
@@ -45,11 +45,10 @@ PBCOR = "pbcor"
 class ImageIngestionProductsFinder:
     """Finds ancillary science products and other ancillary products needed for image ingestion"""
 
-    def __init__(self, staging_source_dir: Path, sp_type: ScienceProductType):
+    def __init__(self, files_found: list[Path], sp_type: ScienceProductType):
         self.logger = logging.getLogger("ingest_envoy")
-        self.staging_source_dir = staging_source_dir
         self.sp_type = sp_type
-        self.files_found = [file for file in self.staging_source_dir.iterdir()]
+        self.files_found = files_found
         self.output_science_products = self._find_output_science_products()
         self.ancillary_products = self._find_other_ancillary_products()
 
@@ -104,7 +103,7 @@ class ImageIngestionProductsFinder:
             # Add all science products and their ancillaries
             science_products.append(
                 OutputScienceProduct(
-                    product_type=AncillaryProductType.FITS,
+                    type=AncillaryProductType.FITS,
                     filename=file.name,
                     ancillary_products=sp_aps.get(file),
                 )
@@ -129,7 +128,7 @@ class ImageIngestionProductsFinder:
                 and file.name.endswith(TARFILE_EXT)
             ][0]
         except IndexError as err:
-            raise FileNotFoundError(f"WARNING: No pipeline artifacts found in {self.staging_source_dir}") from err
+            raise FileNotFoundError(f"WARNING: No pipeline artifacts found in {self.files_found}") from err
 
         ancillary_products = [
             AncillaryProduct(type=AncillaryProductType.PIPELINE_WEBLOG, filename=WEBLOG_FILENAME),
diff --git a/apps/cli/executables/pexable/ingest_envoy/ingest_envoy/std_obs_manifest_utils.py b/apps/cli/executables/pexable/ingest_envoy/ingest_envoy/std_obs_manifest_utils.py
index cb7b40ac53b235408701fcf99e5a3c1d37a8b7fb..c3f9450823db47b7c404d7b595579e4483f402d0 100644
--- a/apps/cli/executables/pexable/ingest_envoy/ingest_envoy/std_obs_manifest_utils.py
+++ b/apps/cli/executables/pexable/ingest_envoy/ingest_envoy/std_obs_manifest_utils.py
@@ -41,17 +41,17 @@ PB = "pb"
 MASK = "mask"
 ALPHA = "alpha"
 PBCOR = "pbcor"
+BIN = "bin"
 
 
 # pylint: disable=R1721
 class ObservationIngestionProductsFinder:
     """Finds ancillary science products and other ancillary products needed for observation ingestion"""
 
-    def __init__(self, staging_source_dir: Path, sp_type: ScienceProductType):
+    def __init__(self, files_found: list[Path], sp_type: ScienceProductType):
         self.logger = logging.getLogger("ingest_envoy")
-        self.staging_source_dir = staging_source_dir
         self.sp_type = sp_type
-        self.files_found = [file for file in self.staging_source_dir.iterdir()]
+        self.files_found = files_found
         self.output_science_products = self._find_output_science_products()
         self.ancillary_products = self._find_ancillary_products()
 
@@ -65,24 +65,18 @@ class ObservationIngestionProductsFinder:
         :return:
         """
 
-        # Currently we only support ingestion of GMVA/VLBI observations. If there
-        # is not yet implemented
-        # is an SDM file present we know it's a different type of observation which
-        for file in self.files_found:
-            if file.name.endswith(XML):
-                self.logger.error("Non-VLBA/GMVA observation ingestion is not currently implemented!")
-                sys.exit(1)
-
-        # Ingest all *fits files as science products
+        # Ingest all *.fits files, or directories containing an ASDM.xml (assumed to be SDMs), as science products
         fits_files = [file for file in self.files_found if file.name.endswith(IDIFITS)]
-        self.logger.info(f"Science Product(s) to ingest: {fits_files}")
+        sdm_dirs = [file.parent for file in self.files_found if file.name == "ASDM.xml"]
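+        # e.g. given <staging_dir>/<eb_name>/ASDM.xml, the EB directory <eb_name> itself is the science product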
+        output_product_file_paths = fits_files + sdm_dirs
+        self.logger.info(f"Science Product(s) to ingest: {output_product_file_paths}")
 
         science_products = []
-        for file in fits_files:
+        for file in output_product_file_paths:
             # Add all science products and their ancillaries
             science_products.append(
                 OutputScienceProduct(
-                    product_type=ScienceProductType.EXEC_BLOCK,
+                    type=ScienceProductType.EXEC_BLOCK,
                     filename=file.name,
                 )
             )
diff --git a/apps/cli/executables/pexable/ingest_envoy/pyproject.toml b/apps/cli/executables/pexable/ingest_envoy/pyproject.toml
index 8a3b080b2f203e4b2acb029a7e9be3b1e144c949..3aa28117c361ce567d8e429e89689a8570f36b4e 100644
--- a/apps/cli/executables/pexable/ingest_envoy/pyproject.toml
+++ b/apps/cli/executables/pexable/ingest_envoy/pyproject.toml
@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "ingest_envoy"
-version = "2.8.2.2"
+version = "2.8.2.3rc1"
 description = "Ingest envoy"
 authors = ["DMS SSA <dms-ssa@nrao.edu>"]
 license = "GPL3+"
diff --git a/apps/cli/executables/pexable/ingest_envoy/test/conftest.py b/apps/cli/executables/pexable/ingest_envoy/test/conftest.py
index 8ed2489cafe48d23a09164945748e27f4f3b9805..fc111f3ae44112303b4469873e22f85dbe3d1f96 100644
--- a/apps/cli/executables/pexable/ingest_envoy/test/conftest.py
+++ b/apps/cli/executables/pexable/ingest_envoy/test/conftest.py
@@ -19,12 +19,14 @@
 
 # pylint: disable=E0401, R1721
 
+from os import mkdir
 from pathlib import Path
 from typing import List
 
 import pytest
 from ingest_envoy.manifest_components import (
     INIT_WEBLOG_FILENAME,
+    TARFILE_EXT,
     WEBLOG_FILENAME,
     AncillaryProduct,
     OutputScienceProduct,
@@ -39,20 +41,33 @@ UNWANTED = ["ignore_me.fits", "just_a_lotta_nothing", "uninteresting_metadata.xm
 
 
 @pytest.fixture(scope="function")
-def ingest_path(tmpdir: Path) -> Path:
+def ingest_path(tmp_path: Path) -> Path:
     """
-    Make an "ingestion path" for tests
+    Make a directory to use as the ingestion staging dir, or curation source
 
-    :param tmpdir: temporary home for ingestion location
-    :return:
+    :param tmp_path: built-in pytest fixture; pytest cleans these directories up periodically
+    :return: Path to new directory
     """
 
-    # cast is necessary because otherwise we get a LocalPath, which doesn't work
-    fake_ingest_path = Path(tmpdir / "ingestion")
+    fake_ingest_path = tmp_path / "ingestion"
     fake_ingest_path.mkdir()
     return fake_ingest_path
 
 
+@pytest.fixture
+def alternate_manifest_destination(tmp_path: Path) -> Path:
+    """
+    Make a directory, separate from ingest_path, for tests to put their manifests in
+
+    :param tmp_path: built-in pytest fixture; pytest cleans these directories up periodically
+    :return: Path to new directory
+    """
+    alternate_manifest_destination = tmp_path / "manifest_destination"
+    alternate_manifest_destination.mkdir()
+    return alternate_manifest_destination
+
+
 def find_example_manifest(manifest_name: str) -> Path:
     """
     Get this example manifest for comparison with one we've generated in a test.
@@ -107,60 +122,75 @@ def populate_fake_final_evla_cal_ingest_path(staging_dir: Path) -> List[Path]:
 # -----------------------------
 #    Image manifest test data
 # -----------------------------
-
-EXAMPLE_MANIFEST_FILE = find_example_manifest("image_manifest_tmpx_ratuqh")
-
+IMAGE_PRODUCT_GROUP = 1
+IMAGE_LOCATOR = "uid://evla/calibration/ea93dae5-3495-47fa-887d-4be2852f5f14"
 ADDITIONAL_METADATA_FILENAME = "aux_image_metadata.json"
 
-PRIMARY_BEAM_ANCILLARY = AncillaryProduct(
-    type=AncillaryProductType.PB_FITS, filename="oussid.J1522+3934_sci.K_band.cont.I.pb.tt0.fits"
+PRIMARY_BEAM_ANCILLARY_K = AncillaryProduct(
+    type=AncillaryProductType.PB_FITS, filename="16B-069.MJD57713.51329133102.J1522+3934_sci.K_band.cont.I.pb.tt0.fits"
 )
-CLEAN_MASK_ANCILLARY = AncillaryProduct(
-    type=AncillaryProductType.FITS_MASK, filename="oussid.J1522+3934_sci.K_band.cont.I.mask.fits"
+CLEAN_MASK_ANCILLARY_K = AncillaryProduct(
+    type=AncillaryProductType.FITS_MASK, filename="16B-069.MJD57713.51329133102.J1522+3934_sci.K_band.cont.I.mask.fits"
 )
-ALPHA_ERROR = AncillaryProduct(
-    type=AncillaryProductType.FITS, filename="oussid.J1522+3934_sci.K_band.cont.I.alpha.error.fits"
+ALPHA_ERROR_K = AncillaryProduct(
+    type=AncillaryProductType.FITS,
+    filename="16B-069.MJD57713.51329133102.J1522+3934_sci.K_band.cont.I.alpha.error.fits",
 )
-ALPHA = AncillaryProduct(
-    type=AncillaryProductType.FITS, filename="oussid.J1522+3934_sci.K_band.cont.I.alpha.fits"
+ALPHA_K = AncillaryProduct(
+    type=AncillaryProductType.FITS, filename="16B-069.MJD57713.51329133102.J1522+3934_sci.K_band.cont.I.alpha.fits"
 )
-TT0 = AncillaryProduct(
-    type=AncillaryProductType.FITS, filename="oussid.J1522+3934_sci.K_band.cont.I.tt0.fits"
+TT0_K = AncillaryProduct(
+    type=AncillaryProductType.FITS, filename="16B-069.MJD57713.51329133102.J1522+3934_sci.K_band.cont.I.tt0.fits"
 )
-TT1 = AncillaryProduct(
-    type=AncillaryProductType.FITS, filename="oussid.J1522+3934_sci.K_band.cont.I.tt1.fits"
+TT1_K = AncillaryProduct(
+    type=AncillaryProductType.FITS, filename="16B-069.MJD57713.51329133102.J1522+3934_sci.K_band.cont.I.tt1.fits"
 )
-OUTPUT_GROUP_SCIENCE_PRODUCT_ANCILLARIES_K = [PRIMARY_BEAM_ANCILLARY, CLEAN_MASK_ANCILLARY, ALPHA_ERROR, ALPHA, TT0, TT1]
+OUTPUT_GROUP_SCIENCE_PRODUCT_ANCILLARIES_K = [
+    PRIMARY_BEAM_ANCILLARY_K,
+    CLEAN_MASK_ANCILLARY_K,
+    ALPHA_ERROR_K,
+    ALPHA_K,
+    TT0_K,
+    TT1_K,
+]
 
 OUTPUT_SCIENCE_PRODUCT_K = OutputScienceProduct(
-    product_type=AncillaryProductType.FITS,
-    filename="oussid.J1522+3934_sci.K_band.cont.I.pbcor.tt0.fits",
+    type=AncillaryProductType.FITS,
+    filename="16B-069.MJD57713.51329133102.J1522+3934_sci.K_band.cont.I.pbcor.tt0.fits",
     ancillary_products=OUTPUT_GROUP_SCIENCE_PRODUCT_ANCILLARIES_K,
 )
 
-PRIMARY_BEAM_ANCILLARY = AncillaryProduct(
-    type=AncillaryProductType.PB_FITS, filename="oussid.J1522+3934_sci.X_band.cont.I.pb.tt0.fits"
+PRIMARY_BEAM_ANCILLARY_X = AncillaryProduct(
+    type=AncillaryProductType.PB_FITS, filename="16B-069.MJD57713.51329133102.J1522+3934_sci.X_band.cont.I.pb.tt0.fits"
 )
-CLEAN_MASK_ANCILLARY = AncillaryProduct(
-    type=AncillaryProductType.FITS_MASK, filename="oussid.J1522+3934_sci.X_band.cont.I.mask.fits"
+CLEAN_MASK_ANCILLARY_X = AncillaryProduct(
+    type=AncillaryProductType.FITS_MASK, filename="16B-069.MJD57713.51329133102.J1522+3934_sci.X_band.cont.I.mask.fits"
 )
-ALPHA_ERROR = AncillaryProduct(
-    type=AncillaryProductType.FITS, filename="oussid.J1522+3934_sci.X_band.cont.I.alpha.error.fits"
+ALPHA_ERROR_X = AncillaryProduct(
+    type=AncillaryProductType.FITS,
+    filename="16B-069.MJD57713.51329133102.J1522+3934_sci.X_band.cont.I.alpha.error.fits",
 )
-ALPHA = AncillaryProduct(
-    type=AncillaryProductType.FITS, filename="oussid.J1522+3934_sci.X_band.cont.I.alpha.fits"
+ALPHA_X = AncillaryProduct(
+    type=AncillaryProductType.FITS, filename="16B-069.MJD57713.51329133102.J1522+3934_sci.X_band.cont.I.alpha.fits"
 )
-TT0 = AncillaryProduct(
-    type=AncillaryProductType.FITS, filename="oussid.J1522+3934_sci.X_band.cont.I.tt0.fits"
+TT0_X = AncillaryProduct(
+    type=AncillaryProductType.FITS, filename="16B-069.MJD57713.51329133102.J1522+3934_sci.X_band.cont.I.tt0.fits"
 )
-TT1 = AncillaryProduct(
-    type=AncillaryProductType.FITS, filename="oussid.J1522+3934_sci.X_band.cont.I.tt1.fits"
+TT1_X = AncillaryProduct(
+    type=AncillaryProductType.FITS, filename="16B-069.MJD57713.51329133102.J1522+3934_sci.X_band.cont.I.tt1.fits"
 )
-OUTPUT_GROUP_SCIENCE_PRODUCT_ANCILLARIES_X = [PRIMARY_BEAM_ANCILLARY, CLEAN_MASK_ANCILLARY, ALPHA_ERROR, ALPHA, TT0, TT1]
+OUTPUT_GROUP_SCIENCE_PRODUCT_ANCILLARIES_X = [
+    PRIMARY_BEAM_ANCILLARY_X,
+    CLEAN_MASK_ANCILLARY_X,
+    ALPHA_ERROR_X,
+    ALPHA_X,
+    TT0_X,
+    TT1_X,
+]
 
 OUTPUT_SCIENCE_PRODUCT_X = OutputScienceProduct(
-    product_type=AncillaryProductType.FITS,
-    filename="oussid.J1522+3934_sci.X_band.cont.I.pbcor.tt0.fits",
+    type=AncillaryProductType.FITS,
+    filename="16B-069.MJD57713.51329133102.J1522+3934_sci.X_band.cont.I.pbcor.tt0.fits",
     ancillary_products=OUTPUT_GROUP_SCIENCE_PRODUCT_ANCILLARIES_X,
 )
 
@@ -185,14 +215,21 @@ STAGING_DIR_FILES = [
 ]
 
 
-def populate_fake_tmpx_ratuqh_ingest_path(staging_source_dir: Path, is_final: bool = False) -> List[Path]:
+def populate_fake_tmpx_ratuqh_ingest_path(
+    staging_source_dir: Path, is_final: bool = False, is_for_curation_test: bool = False
+) -> List[Path]:
     """
     Make a bunch of fake files that should result in the example manifest.
     If this is version 2 or later of a standard calibration, include the initial weblog.
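+    :param staging_source_dir: directory in which to create the fake files
+    :param is_final: if True, include the initial weblog (version 2+ of a standard calibration)
+    :param is_for_curation_test: if True, omit the image metadata file and include the ingestion artifacts tar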
 
     :return:
     """
-    fake_files_to_create = [ADDITIONAL_METADATA_FILENAME]
+    fake_files_to_create = []
+
+    if not is_for_curation_test:
+        # Curator doesn't care about the additional metadata file for images
+        fake_files_to_create.append(ADDITIONAL_METADATA_FILENAME)
+    fake_files_to_create.append(PIPELINE_AF_ANCILLARY.filename)
 
     for product in OUTPUT_GROUP_SCIENCE_PRODUCT_ANCILLARIES_K:
         fake_files_to_create.append(product.filename)
@@ -200,7 +237,6 @@ def populate_fake_tmpx_ratuqh_ingest_path(staging_source_dir: Path, is_final: bo
     for product in OUTPUT_GROUP_SCIENCE_PRODUCT_ANCILLARIES_X:
         fake_files_to_create.append(product.filename)
 
-    fake_files_to_create.append(PIPELINE_AF_ANCILLARY.filename)
     fake_files_to_create.append(WEBLOG_ANCILLARY.filename)
     if is_final:
         fake_files_to_create.append(INIT_WEBLOG_ANCILLARY.filename)
@@ -208,6 +244,9 @@ def populate_fake_tmpx_ratuqh_ingest_path(staging_source_dir: Path, is_final: bo
     fake_files_to_create.append(OUTPUT_SCIENCE_PRODUCT_K.filename)
     fake_files_to_create.append(OUTPUT_SCIENCE_PRODUCT_X.filename)
 
+    if is_for_curation_test:
+        fake_files_to_create.append(INGESTION_AF_ANCILLARY.filename)
+
     files = []
     for filename in fake_files_to_create:
         file = staging_source_dir / filename
@@ -217,3 +256,76 @@ def populate_fake_tmpx_ratuqh_ingest_path(staging_source_dir: Path, is_final: bo
     assert len(files) == len(fake_files_to_create)
 
     return files
+
+
+# -----------------------------
+# SDM manifest test data
+# -----------------------------
+SDM_FILE_LIST = [
+    "Antenna.xml",
+    "ASDM.xml",
+    "CalData.xml",
+    "CalDevice.xml",
+    "CalPointing.xml",
+    "CalReduction.xml",
+    "ConfigDescription.xml",
+    "CorrelatorMode.xml",
+    "DataDescription.xml",
+    "Doppler.xml",
+    "Ephemeris.xml",
+    "ExecBlock.xml",
+    "Feed.xml",
+    "Field.xml",
+    "Flag.xml",
+    "Main.xml",
+    "Pointing.xml",
+    "PointingModel.xml",
+    "Polarization.xml",
+    "Processor.xml",
+    "Receiver.xml",
+    "SBSummary.xml",
+    "Scan.xml",
+    "Source.xml",
+    "SpectralWindow.xml",
+    "State.xml",
+    "Station.xml",
+    "Subscan.xml",
+    "SwitchCycle.xml",
+    "SysCal.xml",
+    "SysPower.bin",
+    "Weather.xml",
+]
+# From file ./examples/full_curation_evla_eb_manifest.json
+EVLA_EB_NAME = "19A-001.sb1234567890.eb233423545632.54321.894327984569"
+EVLA_EB_LOCATOR = "uid://I/am/a/locator"
+EVLA_EB_PRODUCT_GROUP = 2
+
+
+def populate_fake_evla_eb_curator_source_path(staging_dir: Path) -> list[Path]:
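+    """
+    Create a fake EVLA execution block (a directory of SDM tables) for curation tests.
+
+    :param staging_dir: directory in which to create the fake EB
+    :return: list containing just the EB directory
+    """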
+    eb_dir = staging_dir / EVLA_EB_NAME
+    eb_dir.mkdir()
+    for sdm_filename in SDM_FILE_LIST:
+        sdm_file_path = eb_dir / sdm_filename
+        sdm_file_path.touch()
+    # Only really care about the directory for the manifest
+    return [eb_dir]
+
+
+def find_ingestion_artifacts_tar(staging_source_dir: Path):
+    """
+    There should be an ingestion artifacts tar after manifest creation.
+
+    :param staging_source_dir: directory to search for the artifacts tar
+    :return: Path to the artifacts tar, or None if curation left no new tar behind
+    """
+    ing_artifacts_tars = [
+        file
+        for file in staging_source_dir.iterdir()
+        if file.name.startswith(AncillaryProductType.INGESTION_ARTIFACTS.value) and file.name.endswith(TARFILE_EXT)
+    ]
+    if len(ing_artifacts_tars) == 0:
+        # we're testing curation, no new tar in directory
+        return None
+    # otherwise ensure there is only 1 tar file present
+    assert len(ing_artifacts_tars) == 1
+    return ing_artifacts_tars[0]
diff --git a/apps/cli/executables/pexable/ingest_envoy/test/examples/_16B_069_cal_manifest.json b/apps/cli/executables/pexable/ingest_envoy/test/examples/_16B_069_cal_manifest.json
index 849c8021972d718e1f4ffa9ba85fb5d74c61181f..053bde46019b7abd1ceaa4a7ae0878766ce38eee 100644
--- a/apps/cli/executables/pexable/ingest_envoy/test/examples/_16B_069_cal_manifest.json
+++ b/apps/cli/executables/pexable/ingest_envoy/test/examples/_16B_069_cal_manifest.json
@@ -2,7 +2,6 @@
   "parameters": {
     "reingest": "false",
     "ngas_ingest": "false",
-    "calibrate": "false",
     "ingestion_path": "/lustre/aoc/cluster/pipeline/dsoc-dev/workspaces/staging/cal_test6",
     "telescope": "EVLA"
   },
diff --git a/apps/cli/executables/pexable/ingest_envoy/test/examples/full_curation_evla_eb_manifest.json b/apps/cli/executables/pexable/ingest_envoy/test/examples/full_curation_evla_eb_manifest.json
new file mode 100644
index 0000000000000000000000000000000000000000..e642dae699a88ba45f9a54bad0bf403a3e1aa0cb
--- /dev/null
+++ b/apps/cli/executables/pexable/ingest_envoy/test/examples/full_curation_evla_eb_manifest.json
@@ -0,0 +1,25 @@
+{
+  "parameters": {
+    "ngas_ingest": false,
+    "telescope": "EVLA",
+    "ingestion_path": "/lustre/aoc/cluster/pipeline/dsoc-prod/workspaces/spool/tmp8gfknlo9/19A-001/observation.54321.894327984569"
+  },
+  "reingest": {
+    "type": "execution_block",
+    "locator": 2
+  },
+  "output_group": {
+    "science_products": [
+      {
+        "type": "execution_block",
+        "filename": "19A-001.sb1234567890.eb233423545632.54321.894327984569"
+      }
+    ],
+    "ancillary_products": [
+      {
+        "type": "ingestion_artifacts",
+        "filename": "ingestion_artifacts_2019_07_26_T10_49_44.890.tar"
+      }
+    ]
+  }
+}
diff --git a/apps/cli/executables/pexable/ingest_envoy/test/examples/full_curation_evla_image_manifest.json b/apps/cli/executables/pexable/ingest_envoy/test/examples/full_curation_evla_image_manifest.json
new file mode 100644
index 0000000000000000000000000000000000000000..8e7000dbb61baa46ed99def2a47348dbc3304965
--- /dev/null
+++ b/apps/cli/executables/pexable/ingest_envoy/test/examples/full_curation_evla_image_manifest.json
@@ -0,0 +1,97 @@
+{
+  "parameters": {
+    "telescope": "EVLA",
+    "ngas_ingest": false,
+    "ingestion_path": "/lustre/aoc/cluster/pipeline/dsoc-dev/workspaces/staging/tmpx_ratuqh",
+    "additional_metadata": "aux_image_metadata.json"
+  },
+  "reingest": {
+    "type": "fits_image",
+    "locator": 1
+  },
+  "input_group": {
+    "science_products": [
+      {
+        "locator": "uid://evla/calibration/ea93dae5-3495-47fa-887d-4be2852f5f14"
+      }
+    ]
+  },
+  "output_group": {
+    "science_products": [
+      {
+        "type": "fits_image",
+        "filename": "16B-069.MJD57713.51329133102.J1522+3934_sci.K_band.cont.I.pbcor.tt0.fits",
+        "ancillary_products": [
+          {
+            "type": "primary_beam",
+            "filename": "16B-069.MJD57713.51329133102.J1522+3934_sci.K_band.cont.I.pb.tt0.fits"
+          },
+          {
+            "type": "clean_mask",
+            "filename": "16B-069.MJD57713.51329133102.J1522+3934_sci.K_band.cont.I.mask.fits"
+          },
+          {
+            "type": "spectral_index",
+            "filename": "16B-069.MJD57713.51329133102.J1522+3934_sci.K_band.cont.I.alpha.error.fits"
+          },
+          {
+            "type": "spectral_index",
+            "filename": "16B-069.MJD57713.51329133102.J1522+3934_sci.K_band.cont.I.alpha.fits"
+          },
+          {
+            "type": "fits_image",
+            "filename": "16B-069.MJD57713.51329133102.J1522+3934_sci.K_band.cont.I.tt0.fits"
+          },
+          {
+            "type": "fits_image",
+            "filename": "16B-069.MJD57713.51329133102.J1522+3934_sci.K_band.cont.I.tt1.fits"
+          }
+        ]
+      },
+      {
+        "type": "fits_image",
+        "filename": "16B-069.MJD57713.51329133102.J1522+3934_sci.X_band.cont.I.pbcor.tt0.fits",
+        "ancillary_products": [
+          {
+            "type": "primary_beam",
+            "filename": "16B-069.MJD57713.51329133102.J1522+3934_sci.X_band.cont.I.pb.tt0.fits"
+          },
+          {
+            "type": "clean_mask",
+            "filename": "16B-069.MJD57713.51329133102.J1522+3934_sci.X_band.cont.I.mask.fits"
+          },
+          {
+            "type": "spectral_index",
+            "filename": "16B-069.MJD57713.51329133102.J1522+3934_sci.X_band.cont.I.alpha.error.fits"
+          },
+          {
+            "type": "spectral_index",
+            "filename": "16B-069.MJD57713.51329133102.J1522+3934_sci.X_band.cont.I.alpha.fits"
+          },
+          {
+            "type": "fits_image",
+            "filename": "16B-069.MJD57713.51329133102.J1522+3934_sci.X_band.cont.I.tt0.fits"
+          },
+          {
+            "type": "fits_image",
+            "filename": "16B-069.MJD57713.51329133102.J1522+3934_sci.X_band.cont.I.tt1.fits"
+          }
+        ]
+      }
+    ],
+    "ancillary_products": [
+      {
+        "type": "pipeline_weblog",
+        "filename": "weblog.tgz"
+      },
+      {
+        "type": "ingestion_artifacts",
+        "filename": "ingestion_artifacts_2021_08_04T01_57_08.564.tar"
+      },
+      {
+        "type": "pipeline_artifacts",
+        "filename": "pipeline_artifacts_2021_08_04T15_46_02.tar"
+      }
+    ]
+  }
+}
diff --git a/apps/cli/executables/pexable/ingest_envoy/test/examples/full_curation_evla_image_manifest2.json b/apps/cli/executables/pexable/ingest_envoy/test/examples/full_curation_evla_image_manifest2.json
new file mode 100644
index 0000000000000000000000000000000000000000..c1aa70d3c487e3bee11ba044ae1f48514419ea43
--- /dev/null
+++ b/apps/cli/executables/pexable/ingest_envoy/test/examples/full_curation_evla_image_manifest2.json
@@ -0,0 +1,95 @@
+{
+    "parameters": {
+        "telescope": "EVLA",
+        "ngas_ingest": false
+    },
+    "reingest": {
+        "type": "image",
+        "locator": 328918
+    },
+    "output_group": {
+        "science_products": [
+            {
+                "type": "fits_image",
+                "filename": "20B-377.MJD59909.70629903935.AT2019teq_sci.C_band.cont.I.pbcor.tt0.fits",
+                "ancillary_products": [
+                    {
+                        "type": "spectral_index",
+                        "filename": "20B-377.MJD59909.70629903935.AT2019teq_sci.C_band.cont.I.alpha.error.fits"
+                    },
+                    {
+                        "type": "primary_beam",
+                        "filename": "20B-377.MJD59909.70629903935.AT2019teq_sci.C_band.cont.I.pb.tt0.fits"
+                    },
+                    {
+                        "type": "fits_image",
+                        "filename": "20B-377.MJD59909.70629903935.AT2019teq_sci.C_band.cont.I.tt0.fits"
+                    },
+                    {
+                        "type": "spectral_index",
+                        "filename": "20B-377.MJD59909.70629903935.AT2019teq_sci.C_band.cont.I.alpha.fits"
+                    },
+                    {
+                        "type": "fits_image",
+                        "filename": "20B-377.MJD59909.70629903935.AT2019teq_sci.C_band.cont.I.tt1.fits"
+                    },
+                    {
+                        "type": "clean_mask",
+                        "filename": "20B-377.MJD59909.70629903935.AT2019teq_sci.C_band.cont.I.mask.fits"
+                    }
+                ]
+            },
+            {
+                "type": "fits_image",
+                "filename": "20B-377.MJD59909.70629903935.AT2019teq_sci.X_band.cont.I.pbcor.tt0.fits",
+                "ancillary_products": [
+                    {
+                        "type": "clean_mask",
+                        "filename": "20B-377.MJD59909.70629903935.AT2019teq_sci.X_band.cont.I.mask.fits"
+                    },
+                    {
+                        "type": "fits_image",
+                        "filename": "20B-377.MJD59909.70629903935.AT2019teq_sci.X_band.cont.I.tt1.fits"
+                    },
+                    {
+                        "type": "spectral_index",
+                        "filename": "20B-377.MJD59909.70629903935.AT2019teq_sci.X_band.cont.I.alpha.error.fits"
+                    },
+                    {
+                        "type": "spectral_index",
+                        "filename": "20B-377.MJD59909.70629903935.AT2019teq_sci.X_band.cont.I.alpha.fits"
+                    },
+                    {
+                        "type": "fits_image",
+                        "filename": "20B-377.MJD59909.70629903935.AT2019teq_sci.X_band.cont.I.tt0.fits"
+                    },
+                    {
+                        "type": "primary_beam",
+                        "filename": "20B-377.MJD59909.70629903935.AT2019teq_sci.X_band.cont.I.pb.tt0.fits"
+                    }
+                ]
+            }
+        ],
+        "ancillary_products": [
+            {
+                "type": "pipeline_weblog",
+                "filename": "weblog.tgz"
+            },
+            {
+                "type": "pipeline_artifacts",
+                "filename": "pipeline_artifacts_2022_12_09T19_08_32.tar"
+            },
+            {
+                "type": "ingestion_artifacts",
+                "filename": "ingestion_artifacts_2023_11_17T11_08_54.911.tar"
+            }
+        ]
+    },
+    "input_group": {
+        "science_products": [
+            {
+                "locator": "uid://evla/calibration/620195ad-1d11-49f4-be1f-3532092565c1"
+            }
+        ]
+    }
+}
\ No newline at end of file
diff --git a/apps/cli/executables/pexable/ingest_envoy/test/examples/image_manifest_tmpx_ratuqh.json b/apps/cli/executables/pexable/ingest_envoy/test/examples/image_manifest_tmpx_ratuqh.json
index 9e2309246f72b815cda711f4200db7e96350295c..3d7d616183f8fd4125468f1581404f4f8e0c0395 100644
--- a/apps/cli/executables/pexable/ingest_envoy/test/examples/image_manifest_tmpx_ratuqh.json
+++ b/apps/cli/executables/pexable/ingest_envoy/test/examples/image_manifest_tmpx_ratuqh.json
@@ -4,7 +4,6 @@
     "reingest": "false",
     "ngas_ingest": "true",
     "ingestion_path": "/lustre/aoc/cluster/pipeline/dsoc-dev/workspaces/staging/tmpx_ratuqh",
-    "calibrate": "false",
     "additional_metadata": "aux_image_metadata.json"
   },
   "input_group": {
diff --git a/apps/cli/executables/pexable/ingest_envoy/test/examples/partial_curation_evla_eb_manifest.json b/apps/cli/executables/pexable/ingest_envoy/test/examples/partial_curation_evla_eb_manifest.json
new file mode 100644
index 0000000000000000000000000000000000000000..1a61aa86f41c3ff02d38362ef89f07c6fa549ada
--- /dev/null
+++ b/apps/cli/executables/pexable/ingest_envoy/test/examples/partial_curation_evla_eb_manifest.json
@@ -0,0 +1,12 @@
+{
+  "parameters": {
+    "ngas_ingest": false,
+    "telescope": "EVLA",
+    "ingestion_path": "/lustre/aoc/cluster/pipeline/dsoc-prod/workspaces/spool/tmp8gfknlo9/19A-001/observation.54321.894327984569"
+  },
+  "reingest": {
+    "targets": ["subscans.dec"],
+    "locator": "uid://I/am/a/locator",
+    "type": "execution_block"
+  }
+}
diff --git a/apps/cli/executables/pexable/ingest_envoy/test/examples/partial_curation_evla_eb_manifest_no_curation_source.json b/apps/cli/executables/pexable/ingest_envoy/test/examples/partial_curation_evla_eb_manifest_no_curation_source.json
new file mode 100644
index 0000000000000000000000000000000000000000..15a4e020bd2ff6fa953725eaabb6cd39e3421899
--- /dev/null
+++ b/apps/cli/executables/pexable/ingest_envoy/test/examples/partial_curation_evla_eb_manifest_no_curation_source.json
@@ -0,0 +1,11 @@
+{
+  "parameters": {
+    "ngas_ingest": false,
+    "telescope": "EVLA"
+  },
+  "reingest": {
+    "targets": ["subscans.dec"],
+    "locator": "uid://I/am/a/locator",
+    "type": "execution_block"
+  }
+}
diff --git a/apps/cli/executables/pexable/ingest_envoy/test/input_files/test-full-curation-image-evla-metadata.json b/apps/cli/executables/pexable/ingest_envoy/test/input_files/test-full-curation-image-evla-metadata.json
new file mode 100644
index 0000000000000000000000000000000000000000..3fa470816f8c2634f6515bc875ef590cbd207433
--- /dev/null
+++ b/apps/cli/executables/pexable/ingest_envoy/test/input_files/test-full-curation-image-evla-metadata.json
@@ -0,0 +1,10 @@
+{
+  "product_type": "image",
+  "projectMetadata": {
+    "telescope": "EVLA",
+    "projectCode": "20B-377"
+  },
+  "product_group_id": 328918,
+  "file_list": "20B-377.MJD59909.70629903935.AT2019teq_sci.X_band.cont.I.mask.fits,20B-377.MJD59909.70629903935.AT2019teq_sci.X_band.cont.I.tt1.fits,20B-377.MJD59909.70629903935.AT2019teq_sci.X_band.cont.I.pbcor.tt0.fits,20B-377.MJD59909.70629903935.AT2019teq_sci.X_band.cont.I.pb.tt0.fits,20B-377.MJD59909.70629903935.AT2019teq_sci.X_band.cont.I.alpha.error.fits,20B-377.MJD59909.70629903935.AT2019teq_sci.X_band.cont.I.alpha.fits,20B-377.MJD59909.70629903935.AT2019teq_sci.X_band.cont.I.tt0.fits,20B-377.MJD59909.70629903935.AT2019teq_sci.C_band.cont.I.pb.tt0.fits,20B-377.MJD59909.70629903935.AT2019teq_sci.C_band.cont.I.alpha.error.fits,20B-377.MJD59909.70629903935.AT2019teq_sci.C_band.cont.I.pbcor.tt0.fits,20B-377.MJD59909.70629903935.AT2019teq_sci.C_band.cont.I.tt0.fits,20B-377.MJD59909.70629903935.AT2019teq_sci.C_band.cont.I.alpha.fits,20B-377.MJD59909.70629903935.AT2019teq_sci.C_band.cont.I.tt1.fits,20B-377.MJD59909.70629903935.AT2019teq_sci.C_band.cont.I.mask.fits,ingestion_artifacts_2022_12_14T06_22_03.874.tar,weblog.tgz,pipeline_artifacts_2022_12_09T19_08_32.tar,",
+  "input_group_locator": "uid://evla/calibration/620195ad-1d11-49f4-be1f-3532092565c1"
+}
diff --git a/apps/cli/executables/pexable/ingest_envoy/test/test_curator_manifest.py b/apps/cli/executables/pexable/ingest_envoy/test/test_curator_manifest.py
new file mode 100644
index 0000000000000000000000000000000000000000..bcfc6bdd352784d4532669428a93d36229b5ccdc
--- /dev/null
+++ b/apps/cli/executables/pexable/ingest_envoy/test/test_curator_manifest.py
@@ -0,0 +1,417 @@
+# Copyright (C) 2023 Associated Universities, Inc. Washington DC, USA.
+#
+# This file is part of NRAO Workspaces.
+#
+# Workspaces is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# Workspaces is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with Workspaces.  If not, see <https://www.gnu.org/licenses/>.
+#
+import json
+import shutil
+import tarfile
+from operator import itemgetter
+from pathlib import Path
+
+import pytest
+from conftest import (
+    ANCILLARY_PRODUCTS,
+    EVLA_EB_LOCATOR,
+    EVLA_EB_NAME,
+    EVLA_EB_PRODUCT_GROUP,
+    IMAGE_LOCATOR,
+    IMAGE_PRODUCT_GROUP,
+    find_example_manifest,
+    find_ingestion_artifacts_tar,
+    populate_fake_evla_eb_curator_source_path,
+    populate_fake_tmpx_ratuqh_ingest_path,
+)
+from ingest_envoy.ingestion_manifest import IngestionManifestBuilder
+from ingest_envoy.manifest_components import (
+    CURATOR_MANIFEST_FILENAME,
+    INGESTION_ARTIFACTS_NAME,
+    TARFILE_EXT,
+    WEBLOG_FILENAME,
+    IngestionManifestKey,
+    ParamsKey,
+    ReingestGroup,
+    ReingestKey,
+)
+from ingest_envoy.utilities import (
+    AncillaryProductType,
+    CuratorType,
+    IngestionManifestException,
+    ScienceProductType,
+    Telescope,
+)
+
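+# sort products by filename so manifest comparisons are order-insensitive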
+PRODUCT_SORT_KEY = itemgetter("filename")
+
+
+@pytest.mark.parametrize("use_alternate_manifest_destination", [True, False])
+def test_manifest_full_curation_eb_manifest(
+    use_alternate_manifest_destination: bool, ingest_path: Path, alternate_manifest_destination: Path
+):
+    """Test of manifest creation and serialization for full curation of an execution block
+
+    :param use_alternate_manifest_destination: whether to put the manifest somewhere other than curator's source directory
+    :param ingest_path: directory where curator's sources will be
+    :param alternate_manifest_destination: directory for the manifest when use_alternate_manifest_destination is True
+    """
+    eb_files = populate_fake_evla_eb_curator_source_path(ingest_path)
+
+    manifest_destination_dir = ingest_path
+    if use_alternate_manifest_destination:
+        manifest_destination_dir = alternate_manifest_destination
+    assert len(eb_files) == 1
+    eb_dir = eb_files[0]
+    actual_manifest, actual_manifest_file = IngestionManifestBuilder(
+        telescope=Telescope.EVLA.value,
+        manifest_source_dir=eb_dir,
+        sp_type=ScienceProductType.EXEC_BLOCK.value,
+        locator=EVLA_EB_LOCATOR,
+        filename=EVLA_EB_NAME,
+        curate=(CuratorType.FULL, None),
+        manifest_destination_dir=manifest_destination_dir,
+        product_group=EVLA_EB_PRODUCT_GROUP,
+    ).build()
+    assert actual_manifest_file.name == CURATOR_MANIFEST_FILENAME
+    assert actual_manifest_file.parent == manifest_destination_dir
+
+    # Check metadata
+    params = actual_manifest.parameters
+    assert params.ngas_ingest is False
+    assert params.telescope == Telescope.EVLA
+    assert params.staging_source_dir == eb_dir
+
+    assert actual_manifest.input_group
+    assert not actual_manifest.input_group.science_products
+
+    output_group = actual_manifest.output_group
+    assert output_group
+    assert len(output_group.science_products) == 1
+    assert output_group.science_products[0].filename == EVLA_EB_NAME
+    assert len(output_group.ancillary_products) == 1
+    assert INGESTION_ARTIFACTS_NAME in output_group.ancillary_products[0].filename
+
+    reingest = actual_manifest.reingest
+    assert reingest
+    assert reingest.targets is None
+    assert reingest.locator == EVLA_EB_PRODUCT_GROUP
+    assert reingest.product_type == ScienceProductType.EXEC_BLOCK
+
+    # Check actual file
+    with open(actual_manifest_file) as f:
+        actual_manifest_deser = json.load(f)
+    assert actual_manifest.to_dict() == actual_manifest_deser
+
+    # Check against expected manifest
+    expected_manifest_file = find_example_manifest("full_curation_evla_eb_manifest")
+    with open(expected_manifest_file) as f:
+        expected_manifest = json.load(f)
+
+    # ingestion_path depends on the ingest_path fixture, so ignore it
+    expected_params = expected_manifest["parameters"]
+    actual_params = actual_manifest_deser["parameters"]
+    assert actual_params["ngas_ingest"] == expected_params["ngas_ingest"]
+    assert actual_manifest_deser["reingest"] == expected_manifest["reingest"]
+    assert actual_manifest_deser.get("input_group") == expected_manifest.get("input_group")
+
+    expected_outgroup = expected_manifest["output_group"]
+    expected_osp = expected_outgroup["science_products"]
+    actual_outgroup = actual_manifest_deser["output_group"]
+    actual_osp = actual_outgroup["science_products"]
+    assert actual_osp == expected_osp
+
+    # the ingestion_artifacts' filename depends on current time, so ignore it
+    expected_aps = expected_outgroup["ancillary_products"]
+    actual_aps = actual_outgroup["ancillary_products"]
+    assert len(expected_aps) == len(actual_aps)
+    for expected_ap, actual_ap in zip(expected_aps, actual_aps):
+        assert expected_ap["type"] == actual_ap["type"]
+
+    # get ingestion artifacts
+    artifacts_file = [file for file in manifest_destination_dir.glob("ingestion_artifacts*.tar")][0]
+    with tarfile.open(artifacts_file, "r") as tar:
+        # confirm the tar contains exactly what we expect: just the manifest
+        members = tar.getmembers()
+
+    assert len(members) == 1
+    member = members[0]
+    mf_path = Path(member.name)
+    assert mf_path.name == CURATOR_MANIFEST_FILENAME
+
+
+@pytest.mark.parametrize("has_curation_source", [True, False])
+def test_manifest_partial_curation_eb(has_curation_source: bool, ingest_path: Path):
+    """Test full-curation manifest creation & serialization
+    :param ingest_path: directory where curator's sources will be
+    """
+    eb_files = populate_fake_evla_eb_curator_source_path(ingest_path)
+    assert len(eb_files) == 1
+    eb_dir = eb_files[0]
+
+    curation_source = eb_dir
+    expected_manifest_name = "partial_curation_evla_eb_manifest"
+    if not has_curation_source:
+        expected_manifest_name = "partial_curation_evla_eb_manifest_no_curation_source"
+        curation_source = None
+
+    actual_manifest, actual_manifest_file = IngestionManifestBuilder(
+        telescope=Telescope.EVLA.value,
+        manifest_source_dir=curation_source,
+        sp_type=ScienceProductType.EXEC_BLOCK.value,
+        locator=EVLA_EB_LOCATOR,
+        filename=EVLA_EB_NAME,
+        curate=(CuratorType.PARTIAL, ["subscans.dec"]),
+        manifest_destination_dir=ingest_path,
+    ).build()
+
+    # Check metadata
+    params = actual_manifest.parameters
+    assert params.ngas_ingest is False
+    assert params.telescope == Telescope.EVLA
+    if has_curation_source:
+        assert params.staging_source_dir == curation_source
+    else:
+        assert params.staging_source_dir is None
+    assert not actual_manifest.input_group
+    assert not actual_manifest.output_group
+    reingest = actual_manifest.reingest
+    assert reingest
+    assert reingest.targets is not None
+    assert reingest.locator == EVLA_EB_LOCATOR
+    assert reingest.product_type == ScienceProductType.EXEC_BLOCK
+    assert reingest.product_group is None
+
+    # Check that manifest file exists on disk
+    with open(actual_manifest_file) as f:
+        actual_manifest_deser = json.load(f)
+    assert actual_manifest.to_dict() == actual_manifest_deser
+
+    # Check against expected manifest
+    expected_manifest_file = find_example_manifest(expected_manifest_name)
+    with open(expected_manifest_file) as f:
+        expected_manifest = json.load(f)
+    assert actual_manifest_deser.get("input_group") == expected_manifest.get("input_group")
+    assert actual_manifest_deser.get("output_group") == expected_manifest.get("output_group")
+    assert actual_manifest_deser["reingest"] == expected_manifest["reingest"]
+
+    # Make sure there are no artifacts
+    artifacts_file = [file for file in ingest_path.glob("ingestion_artifacts*.tar")]
+    assert not artifacts_file
+
+
+@pytest.mark.parametrize("is_full_curation", [True, False])
+def test_curation_manifest_bad_no_manifest_destination_directory(is_full_curation: bool, ingest_path: Path):
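+    """The builder should raise an exception when given no directory to house the manifest."""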
+    curator_type = CuratorType.PARTIAL
+    target_list = ["subscans.ra"]
+    if is_full_curation:
+        curator_type = CuratorType.FULL
+        target_list = None
+    populate_fake_evla_eb_curator_source_path(ingest_path)
+    with pytest.raises(IngestionManifestException, match="directory to house the manifest"):
+        IngestionManifestBuilder(
+            telescope=Telescope.EVLA.value,
+            sp_type=ScienceProductType.EXEC_BLOCK.value,
+            locator=EVLA_EB_LOCATOR,
+            filename=EVLA_EB_NAME,
+            curate=(curator_type, target_list),
+            manifest_source_dir=None,
+            manifest_destination_dir=None,
+        )
+
+
+@pytest.mark.parametrize("use_file_list", [True, False])
+def test_manifest_full_curation_image(use_file_list: bool, ingest_path: Path, alternate_manifest_destination: Path):
+    """Test manifest creation and serialization for full curation of an image
+    :param ingest_path: directory where curator's sources will be
+    """
+    image_paths = populate_fake_tmpx_ratuqh_ingest_path(ingest_path, is_for_curation_test=True)
+
+    curation_source = ingest_path
+    file_list = None
+    manifest_destination_dir = ingest_path
+    if use_file_list:
+        shutil.rmtree(ingest_path)
+        manifest_destination_dir = alternate_manifest_destination
+        file_list = [image_path.name for image_path in image_paths]
+        curation_source = None
+
+    actual_manifest, actual_manifest_file = IngestionManifestBuilder(
+        file_list=file_list,
+        manifest_destination_dir=manifest_destination_dir,
+        # product_group=IMAGE_PRODUCT_GROUP,
+        input_group_locator=IMAGE_LOCATOR,
+        manifest_source_dir=curation_source,
+        telescope=Telescope.EVLA.value,
+        sp_type=ScienceProductType.IMAGE.value,
+        locator=IMAGE_PRODUCT_GROUP,
+        curate=(CuratorType.FULL, None),
+    ).build()
+    assert actual_manifest_file.name == CURATOR_MANIFEST_FILENAME
+
+    # Check metadata
+    mf_json = actual_manifest.to_dict()
+    keys = ["parameters", "input_group", "output_group", "reingest"]
+    assert len(mf_json) == len(keys)
+    for key in keys:
+        assert key in mf_json.keys()
+    params = actual_manifest.parameters
+    assert params.ngas_ingest is False
+    assert params.telescope == Telescope.EVLA
+    if use_file_list:
+        assert params.staging_source_dir is None
+    else:
+        assert params.staging_source_dir == ingest_path
+    reingest = actual_manifest.reingest
+    assert reingest
+    assert reingest.targets is None
+    assert reingest.locator == IMAGE_PRODUCT_GROUP
+    # assert reingest.product_group == IMAGE_PRODUCT_GROUP
+    assert reingest.product_type == ScienceProductType.IMAGE
+    assert actual_manifest.input_group
+    assert len(actual_manifest.input_group.science_products) == 1
+
+    """
+    The ancillary_products section of the manifest we build
+    should match the one in the example.
+    """
+    output_group = actual_manifest.output_group
+    assert output_group
+    aps = output_group.ancillary_products
+    assert aps
+    assert len(aps) == len(ANCILLARY_PRODUCTS)
+
+    # Check contents of ancillary_products
+    weblog_candidates = [ap for ap in aps if ap.filename == WEBLOG_FILENAME]
+    assert len(weblog_candidates) == 1
+    assert weblog_candidates[0].type == AncillaryProductType.PIPELINE_WEBLOG
+
+    ingest_artifacts_tar = find_ingestion_artifacts_tar(manifest_destination_dir)
+    if use_file_list:
+        # no tar on disk: curation ran from a file list, and the source directory is gone
+        assert ingest_artifacts_tar is None
+    else:
+        # We are running with a real directory and there should be an artifacts tar available
+        ingest_artifacts_candidates = [ap for ap in aps if ap.filename == ingest_artifacts_tar.name]
+        assert len(ingest_artifacts_candidates) == 1
+        assert ingest_artifacts_candidates[0].type == AncillaryProductType.INGESTION_ARTIFACTS
+
+    # Inspect the manifest's JSON dict
+    """
+    The output_group section of the manifest we build
+    should match the one in the example:
+    * a "science_products" section containing two science products comprising "type", "filename",
+    and six ancillary products on each science product
+    * an "ancillary products" section comprising three ancillary products
+    """
+    mf_json = actual_manifest.to_dict()
+    if use_file_list:
+        assert ParamsKey.INGESTION_PATH.value not in mf_json[IngestionManifestKey.PARAMETERS.value].keys()
+    og_json = mf_json[IngestionManifestKey.OUTPUT_GROUP.value]
+    assert len(og_json[IngestionManifestKey.SCIENCE_PRODUCTS.value]) == 2
+
+    for sp_json in og_json[IngestionManifestKey.SCIENCE_PRODUCTS.value]:
+        assert len(sp_json) == 3
+        assert set(sp_json.keys()) == {IngestionManifestKey.ANCILLARY_PRODUCTS.value, "type", "filename"}
+        # ...and the ancillary products belonging to each science product...
+        sp_ap_jsons = sp_json[IngestionManifestKey.ANCILLARY_PRODUCTS.value]
+        assert len(sp_ap_jsons) == 6
+
+    # ... and ancillary products twisting in the wind all by themselves
+    ap_jsons = og_json[IngestionManifestKey.ANCILLARY_PRODUCTS.value]
+    assert len(ap_jsons) == 3
+
+    # expect the weblog and the pipeline artifacts tar by name; the ingestion artifacts tar's name is timestamped
+    filenames_found = []
+    for ap_json in ap_jsons:
+        filename = ap_json["filename"]
+        if filename.endswith(TARFILE_EXT):
+            if filename.startswith(AncillaryProductType.PIPELINE_ARTIFACTS.value):
+                filenames_found.append(filename)
+        elif filename == WEBLOG_FILENAME:
+            filenames_found.append(filename)
+
+    assert len(filenames_found) == 2
+
+    # Check that manifest file exists on disk
+    with open(actual_manifest_file) as f:
+        actual_manifest_deser = json.load(f)
+    assert actual_manifest.to_dict() == actual_manifest_deser
+
+    # Check against expected manifest
+    expected_manifest_file = find_example_manifest("full_curation_evla_image_manifest")
+    with open(expected_manifest_file) as f:
+        expected_manifest = json.load(f)
+
+    assert actual_manifest_deser.keys() == expected_manifest.keys()
+    # ingestion_path depends on the ingest_path fixture, so ignore it
+    expected_params = expected_manifest["parameters"]
+    actual_params = actual_manifest_deser["parameters"]
+    assert actual_params["ngas_ingest"] == expected_params["ngas_ingest"]
+    assert expected_manifest["input_group"] == actual_manifest_deser["input_group"]
+
+    expected_outgroup = expected_manifest["output_group"]
+    expected_osps = expected_outgroup["science_products"]
+    actual_outgroup = actual_manifest_deser["output_group"]
+    actual_osps = actual_outgroup["science_products"]
+    assert len(actual_osps) == len(expected_osps)
+    for actual_osp, expected_osp in zip(
+        sorted(actual_osps, key=PRODUCT_SORT_KEY), sorted(expected_osps, key=PRODUCT_SORT_KEY)
+    ):
+        assert actual_osp["type"] == expected_osp["type"]
+        assert actual_osp["filename"] == expected_osp["filename"]
+        assert sorted(actual_osp["ancillary_products"], key=PRODUCT_SORT_KEY) == sorted(
+            expected_osp["ancillary_products"], key=PRODUCT_SORT_KEY
+        )
+
+    # the ingestion_artifacts' filename depends on current time, so ignore it
+    expected_aps = expected_outgroup["ancillary_products"]
+    actual_aps = actual_outgroup["ancillary_products"]
+    assert len(expected_aps) == len(actual_aps)
+    for expected_ap, actual_ap in zip(
+        sorted(expected_aps, key=PRODUCT_SORT_KEY), sorted(actual_aps, key=PRODUCT_SORT_KEY)
+    ):
+        assert expected_ap["type"] == actual_ap["type"]
+        # if expected_ap["type"] != AncillaryProductType.INGESTION_ARTIFACTS.value:
+        #     assert expected_ap["filename"] == actual_ap["filename"]
+
+
+def test_reingest_block_json_well_formed_partial_curation():
+    """
+    Make sure our ReingestGroup makes nice JSON with targets and locator, and without product_group.
+    """
+    reingest = ReingestGroup(
+        product_type=ScienceProductType.EXEC_BLOCK, locator=EVLA_EB_LOCATOR, targets=["subscans.ra", "subscans.dec"]
+    )
+    reingest_json = reingest.to_dict()
+    # Ensure that None fields are absent
+    assert ReingestKey.PRODUCT_GROUP.value not in reingest_json.keys()
+    # if we can dump it, it's good
+    json.dumps(reingest_json)
+
+
+def test_reingest_block_json_well_formed_full_curation():
+    """
+    Make sure our ReingestGroup makes nice JSON without targets and with product_group instead of locator.
+
+    Curator requires the "targets" field to be None, not [], in order to trigger full curation.
+    """
+    reingest = ReingestGroup(product_type=ScienceProductType.EXEC_BLOCK, product_group=EVLA_EB_PRODUCT_GROUP)
+    reingest_json = reingest.to_dict()
+    # Ensure that None fields are absent
+    assert ReingestKey.TARGETS.value not in reingest_json.keys()
+    assert ReingestKey.LOCATOR.value not in reingest_json.keys()
+    # if we can dump it, it's good
+    json.dumps(reingest_json)
diff --git a/apps/cli/executables/pexable/ingest_envoy/test/test_evla_cal_manifest.py b/apps/cli/executables/pexable/ingest_envoy/test/test_evla_cal_manifest.py
index 7816d5a0f3e80789b408f2eb2b84a9bddd9ee7bc..d79c6f6f8c47c1b7b4596f9a828a64545e6aa3d2 100644
--- a/apps/cli/executables/pexable/ingest_envoy/test/test_evla_cal_manifest.py
+++ b/apps/cli/executables/pexable/ingest_envoy/test/test_evla_cal_manifest.py
@@ -80,7 +80,7 @@ def test_filters_cal_input_files(ingest_path: Path):
     locator = "uid://evla/calibration/twinkle-twinkle-little-quasar"
     manifest, _ = IngestionManifestBuilder(
         telescope=Telescope.EVLA.value,
-        staging_source_dir=ingest_path,
+        manifest_source_dir=ingest_path,
         sp_type=ScienceProductType.EVLA_CAL.value,
         locator=locator,
     ).build()
@@ -89,7 +89,6 @@ def test_filters_cal_input_files(ingest_path: Path):
 
     assert manifest.locator == locator
     params = manifest.parameters
-    assert not hasattr(params, "calibrate")
 
     input_group = manifest.input_group
     assert len(input_group.science_products) == 1
@@ -121,7 +120,7 @@ def test_writes_expected_evla_cal_output_files(ingest_path: Path):
     populate_fake_evla_cal_ingest_path(ingest_path)
     manifest_file, manifest = IngestionManifestBuilder(
         telescope=Telescope.EVLA.value,
-        staging_source_dir=ingest_path,
+        manifest_source_dir=ingest_path,
         locator="uid://evla/calibration/fee-fi-fo-fum-acdf23",
         sp_type=ScienceProductType.EVLA_CAL.value,
     ).build()
@@ -163,7 +162,6 @@ def test_params_json_well_formed():
     params = ManifestParameters(
         telescope=Telescope.EVLA,
         ngas_ingest=False,
-        calibrate=False,
         staging_source_dir=Path("/home/mchammer/evla/parallel-prod"),
     )
 
@@ -225,7 +223,7 @@ def test_output_group_well_formed():
 
     :return:
     """
-    osp = OutputScienceProduct(product_type=ScienceProductType.EVLA_CAL, filename="im_a_lil_calibration.tar")
+    osp = OutputScienceProduct(type=ScienceProductType.EVLA_CAL, filename="im_a_lil_calibration.tar")
     ap1 = AncillaryProduct(type=AncillaryProductType.PIPELINE_ARTIFACTS, filename="without_feathers.tar")
     ap2 = AncillaryProduct(type=AncillaryProductType.PIPELINE_ARTIFACTS, filename="with_feathers.tar")
 
@@ -261,7 +259,7 @@ def test_ingestion_artifacts_tar_filename_built_just_once(ingest_path: Path):
     ) as mock:
         # build the manifest
         IngestionManifestBuilder(
-            staging_source_dir=ingest_path,
+            manifest_source_dir=ingest_path,
             sp_type=ScienceProductType.EVLA_CAL.value,
             locator="uid://evla/calibration/are-we-there-yet",
             telescope=Telescope.EVLA.value,
@@ -283,7 +281,7 @@ def test_ingestion_artifacts_tar_correct(ingest_path: Path):
     locator = "uid://evla/calibration/3dfa528b-9870-46c9-a200-131dbac701cc"
     # if you build it, they will come
     IngestionManifestBuilder(
-        staging_source_dir=ingest_path,
+        manifest_source_dir=ingest_path,
         sp_type=ScienceProductType.EVLA_CAL.value,
         locator=locator,
         telescope=Telescope.EVLA.value,
@@ -320,7 +318,7 @@ def test_evla_cal_manifest_matches_example(ingest_path: Path):
     populate_fake_evla_cal_ingest_path(ingest_path)
 
     builder = IngestionManifestBuilder(
-        staging_source_dir=ingest_path,
+        manifest_source_dir=ingest_path,
         telescope=Telescope.EVLA.value,
         sp_type=ScienceProductType.EVLA_CAL.value,
         locator="uid://evla/execblock/i-just-play-an-eb-on-teevee",
@@ -335,7 +333,6 @@ def test_evla_cal_manifest_matches_example(ingest_path: Path):
     expected_params = expected_json["parameters"]
     actual_params = manifest.parameters.to_dict()
 
-    assert "calibrate" not in actual_params.keys()
     assert manifest.input_group.to_dict() == expected_json["input_group"]
 
     expected_outgroup = expected_json["output_group"]
@@ -376,7 +373,7 @@ def test_evla_cal_final_manifest_finds_init_weblog(ingest_path: Path):
     assert len(weblogs) == 2
 
     builder = IngestionManifestBuilder(
-        staging_source_dir=ingest_path,
+        manifest_source_dir=ingest_path,
         telescope=Telescope.EVLA.value,
         sp_type=ScienceProductType.EVLA_CAL.value,
         locator="uid://evla/execblock/50bb85af-ce52-49d8-b9d8-9221bfce939d",
@@ -419,7 +416,7 @@ def test_evla_cal_final_manifest_matches_example(ingest_path: Path):
     populate_fake_final_evla_cal_ingest_path(ingest_path)
 
     builder = IngestionManifestBuilder(
-        staging_source_dir=ingest_path,
+        manifest_source_dir=ingest_path,
         telescope=Telescope.EVLA.value,
         sp_type=ScienceProductType.EVLA_CAL.value,
         locator="uid://evla/execblock/some-nonsense-not-in-db",
@@ -434,7 +431,6 @@ def test_evla_cal_final_manifest_matches_example(ingest_path: Path):
     expected_params = expected_json["parameters"]
     actual_params = manifest.parameters.to_dict()
 
-    assert "calibrate" not in actual_params.keys()
     assert manifest.input_group.to_dict() == expected_json["input_group"]
 
     expected_outgroup = expected_json["output_group"]
diff --git a/apps/cli/executables/pexable/ingest_envoy/test/test_img_manifest_example.py b/apps/cli/executables/pexable/ingest_envoy/test/test_img_manifest_example.py
index 1bdac66167c96ff96a16d6ee202e298d1268565f..fbfd454559e916c04fa4aa9ae5864c23ae5bea96 100644
--- a/apps/cli/executables/pexable/ingest_envoy/test/test_img_manifest_example.py
+++ b/apps/cli/executables/pexable/ingest_envoy/test/test_img_manifest_example.py
@@ -19,11 +19,18 @@
 /lustre/aoc/cluster/pipeline/dsoc-dev/workspaces/staging/tmpx_ratuqh"""
 
 import json
-import shutil
 import tarfile
 from pathlib import Path
 
 import pytest
+from conftest import (
+    ADDITIONAL_METADATA_FILENAME,
+    ANCILLARY_PRODUCTS,
+    WEBLOG_ANCILLARY,
+    find_ingestion_artifacts_tar,
+    ingest_path,
+    populate_fake_tmpx_ratuqh_ingest_path,
+)
 from ingest_envoy.ingestion_manifest import IngestionManifestBuilder
 from ingest_envoy.manifest_components import (
     MANIFEST_FILENAME,
@@ -34,83 +41,102 @@ from ingest_envoy.manifest_components import (
 from ingest_envoy.schema import AbstractTextFile
 from ingest_envoy.utilities import AncillaryProductType, ScienceProductType, Telescope
 
-from conftest import (
-    ADDITIONAL_METADATA_FILENAME,
-    ANCILLARY_PRODUCTS,
-    ingest_path,
-    populate_fake_tmpx_ratuqh_ingest_path,
-)
-
 # pylint: disable=E0401, E0402, W0621
 
 
 # pylint: disable=R1721
 
 
-def test_manifest_picks_up_ing_artifact(ingest_path: Path):
-    """
-    There's an ingestion artifacts tar in the ingest path,
-    and it should show up among the ancillary products.
-
-    :return:
-    """
+def test_image_manifest_creation(ingest_path: Path):
+    """Test manifest creation for an image"""
 
     populate_fake_tmpx_ratuqh_ingest_path(ingest_path)
+    manifest, manifest_file = build_tmpx_ratuqh_image_manifest(ingest_path)
 
-    _, manifest_file = build_tmpx_ratuqh_image_manifest(ingest_path)
-
-    # make sure that tar really is there
-    candidates = [file for file in ingest_path.glob("ingestion_artifacts*.tar")]
-    assert len(candidates) > 0
-    ing_arties = candidates[0]
-
-    with open(manifest_file, "r") as infile:
-        manifest_contents = infile.read()
-        assert ing_arties.name in manifest_contents
-
-    shutil.rmtree(ingest_path)
-
-
-def test_addl_metadata_not_at_bottom_of_manifest(ingest_path: Path):
-    """
-    Manifest should have only parameters, input group, and output group
-    :param ingest_path:
-    :return:
-    """
-    populate_fake_tmpx_ratuqh_ingest_path(ingest_path)
-    manifest, _ = build_tmpx_ratuqh_image_manifest(ingest_path)
+    # Check that top-level groups match expectations
     mf_json = manifest.to_dict()
     keys = ["parameters", "input_group", "output_group"]
     assert len(mf_json) == len(keys)
     for key in keys:
         assert key in mf_json.keys()
 
-
-def test_manifest_picks_up_pip_artifact(ingest_path: Path):
     """
-    There's an pipeline artifacts tar in the ingest path,
-    and it should show up among the ancillary products.
-
-    :return:
+    The ancillary_products section of the manifest we build
+    should match the one in the example.
     """
-    populate_fake_tmpx_ratuqh_ingest_path(ingest_path)
-    manifest, _ = build_tmpx_ratuqh_image_manifest(ingest_path)
+    output_group = manifest.output_group
+    aps = output_group.ancillary_products
+    assert len(aps) == len(ANCILLARY_PRODUCTS)
+
+    # Check contents of ancillary_products
+    weblog_candidates = [ap for ap in manifest.output_group.ancillary_products if ap.filename == WEBLOG_FILENAME]
+    assert len(weblog_candidates) == 1
+    assert weblog_candidates[0].type == AncillaryProductType.PIPELINE_WEBLOG
+
+    ingest_artifacts_tar = find_ingestion_artifacts_tar(ingest_path)
+    ingest_artifacts_candidates = [
+        ap for ap in manifest.output_group.ancillary_products if ap.filename == ingest_artifacts_tar.name
+    ]
+    assert len(ingest_artifacts_candidates) == 1
+    assert ingest_artifacts_candidates[0].type == AncillaryProductType.INGESTION_ARTIFACTS
+
     maybe_pips = [file for file in ingest_path.glob(AncillaryProductType.PIPELINE_ARTIFACTS.value + "*.tar")]
+    assert len(maybe_pips) == 1
     pip_artie = maybe_pips[0]
+    pipeline_artifacts_candidates = [
+        ap for ap in manifest.output_group.ancillary_products if ap.filename == pip_artie.name
+    ]
+    assert len(pipeline_artifacts_candidates) == 1
+    assert pipeline_artifacts_candidates[0].type == AncillaryProductType.PIPELINE_ARTIFACTS
 
+    # Inspect the manifest's JSON dict
+    """
+    The output_group section of the manifest we build
+    should match the one in the example:
+    * a "science_products" section containing two science products comprising "type", "filename",
+    and six ancillary products on each science product
+    * an "ancillary products" section comprising three ancillary products
+    """
     mf_json = manifest.to_dict()
     og_json = mf_json[IngestionManifestKey.OUTPUT_GROUP.value]
+    assert len(og_json[IngestionManifestKey.SCIENCE_PRODUCTS.value]) == 2
+
+    for sp_json in og_json[IngestionManifestKey.SCIENCE_PRODUCTS.value]:
+        assert len(sp_json) == 3
+        assert set(sp_json.keys()) == {IngestionManifestKey.ANCILLARY_PRODUCTS.value, "type", "filename"}
+        # ...and the ancillary products belonging to each science product
+        sp_ap_jsons = sp_json[IngestionManifestKey.ANCILLARY_PRODUCTS.value]
+        assert len(sp_ap_jsons) == 6
 
+    # ... and ancillary products twisting in the wind all by themselves
     ap_jsons = og_json[IngestionManifestKey.ANCILLARY_PRODUCTS.value]
-    found = False
+    assert len(ap_jsons) == 3
+
+    # there should be a weblog, the ingestion artifacts tar, and the pipeline artifacts tar
+    filenames_found = []
     for ap_json in ap_jsons:
-        if ap_json["filename"] == pip_artie.name:
-            found = True
-            break
+        filename = ap_json["filename"]
+        if filename.endswith(TARFILE_EXT):
+            if filename.startswith(AncillaryProductType.INGESTION_ARTIFACTS.value) or filename.startswith(
+                AncillaryProductType.PIPELINE_ARTIFACTS.value
+            ):
+                filenames_found.append(filename)
+        elif filename == WEBLOG_FILENAME:
+            filenames_found.append(filename)
 
-    assert found
+    assert len(filenames_found) == 3
 
-    shutil.rmtree(ingest_path)
+    """
+    The ingestion_artifacts tar should contain ONLY
+    the ingestion manifest and the additional metadata file
+    """
+    addl_md = manifest.parameters.additional_metadata
+    ing_arties_tar = find_ingestion_artifacts_tar(ingest_path)
+    with tarfile.open(ing_arties_tar, "r") as tar:
+        members = tar.getmembers()
+        assert len(members) == 2
+        for member in members:
+            assert member.name.endswith(MANIFEST_FILENAME) or member.name.endswith(addl_md.filename)
 
 
 def test_catches_invalid_sp_type():
@@ -125,7 +151,7 @@ def test_catches_invalid_sp_type():
 
     # we expect this to work fine
     IngestionManifestBuilder(
-        staging_source_dir=Path(),
+        manifest_source_dir=Path(),
         telescope=Telescope.EVLA.value,
         sp_type=ScienceProductType.IMAGE.value,
         locator="somewhere-over-the-rainbow",
@@ -134,7 +160,7 @@ def test_catches_invalid_sp_type():
     # this, however, should fail
     with pytest.raises(ValueError):
         IngestionManifestBuilder(
-            staging_source_dir=Path(),
+            manifest_source_dir=Path(),
             telescope=Telescope.EVLA.value,
             sp_type="model_built_out_of_playdoh",
             locator="somewhere-over-the-rainbow",
@@ -154,7 +180,7 @@ def test_catches_invalid_telescope():
 
     # we expect this to work fine
     IngestionManifestBuilder(
-        staging_source_dir=Path(),
+        manifest_source_dir=Path(),
         telescope=Telescope.EVLA.value,
         sp_type=ScienceProductType.IMAGE.value,
         locator="ill-have-what-shes-having",
@@ -163,7 +189,7 @@ def test_catches_invalid_telescope():
     # this, however, should go belly-up
     with pytest.raises(ValueError):
         IngestionManifestBuilder(
-            staging_source_dir=Path(),
+            manifest_source_dir=Path(),
             telescope="nathans_mars_telescope",
             sp_type=ScienceProductType.IMAGE.value,
             locator="ill-have-what-shes-having",
@@ -171,128 +197,6 @@ def test_catches_invalid_telescope():
         )
 
 
-def test_ancillary_products_rendered_correctly(ingest_path: Path):
-    """
-    The ancillary_products section of the manifest we build
-    should match the one in the example.
-
-    :param ingest_path: staging source dir
-    :return:
-    """
-    populate_fake_tmpx_ratuqh_ingest_path(ingest_path)
-
-    manifest, _ = build_tmpx_ratuqh_image_manifest(ingest_path)
-
-    output_group = manifest.output_group
-    aps = output_group.ancillary_products
-    assert len(aps) == len(ANCILLARY_PRODUCTS)
-    ap_data = []
-
-    for ap in aps:
-        ap_data.append({"filename": ap.filename, "json": ap.to_dict()})
-    assert len(ap_data) == len(aps)
-
-    # make sure all the ancillary products were created...
-    filename_count = 0
-    for product in ANCILLARY_PRODUCTS:
-        # (It won't find the ingest artifact tar yet, because we didn't populate
-        # the ingest path with it; it's produced during manifest creation.)
-
-        # ...and that each one's JSON is well formed.
-        for properties in ap_data:
-            if properties["filename"] == product.filename:
-                filename_count += 1
-                # If there's a class `repr` in there rather than JSON-serializable text,
-                # dump will fail
-                json.dumps(properties["json"])
-
-    ingest_artifacts_tar = find_ingestion_artifacts_tar(ingest_path)
-    assert ingest_artifacts_tar
-    filename_count += 1
-
-    assert filename_count == len(ANCILLARY_PRODUCTS)
-
-    shutil.rmtree(ingest_path)
-
-
-def test_output_science_products_rendered_correctly(ingest_path: Path):
-    """
-    The output_group section of the manifest we build
-    should match the one in the example:
-    * a "science_products" section containing two science products comprising "type", "filename",
-    and six ancillary products on each science product
-    * an "ancillary products" section comprising three ancillary products
-
-    :param ingest_path:
-    :return:
-    """
-    populate_fake_tmpx_ratuqh_ingest_path(ingest_path)
-    manifest, _ = build_tmpx_ratuqh_image_manifest(ingest_path)
-    mf_json = manifest.to_dict()
-    og_json = mf_json[IngestionManifestKey.OUTPUT_GROUP.value]
-    print(og_json)
-
-    # there should be the first science product...
-    sp_json = og_json[IngestionManifestKey.SCIENCE_PRODUCTS.value][0]
-    assert len(sp_json) == 3
-    for key in (IngestionManifestKey.ANCILLARY_PRODUCTS.value, "type", "filename"):
-        assert key in sp_json.keys()
-
-    # and ancillary products belonging to the first science product...
-    sp_ap_jsons = sp_json[IngestionManifestKey.ANCILLARY_PRODUCTS.value]
-    assert len(sp_ap_jsons) == 6
-
-    # then a second science product...
-    sp_json = og_json[IngestionManifestKey.SCIENCE_PRODUCTS.value][1]
-    assert len(sp_json) == 3
-    for key in (IngestionManifestKey.ANCILLARY_PRODUCTS.value, "type", "filename"):
-        assert key in sp_json.keys()
-
-    # and ancillary products belonging to the second science product...
-    sp_ap_jsons = sp_json[IngestionManifestKey.ANCILLARY_PRODUCTS.value]
-    assert len(sp_ap_jsons) == 6
-
-    # ... and ancillary products twisting in the wind all by themselves
-    ap_jsons = og_json[IngestionManifestKey.ANCILLARY_PRODUCTS.value]
-    assert len(ap_jsons) == 3
-
-    # there should be a weblog, the ingestion artifacts tar, and the pipeline artifacts tar
-    filenames_found = []
-    for ap_json in ap_jsons:
-        filename = ap_json["filename"]
-        if filename.endswith(TARFILE_EXT):
-            if filename.startswith(AncillaryProductType.INGESTION_ARTIFACTS.value) or filename.startswith(
-                AncillaryProductType.PIPELINE_ARTIFACTS.value
-            ):
-                filenames_found.append(filename)
-        elif filename == WEBLOG_FILENAME:
-            filenames_found.append(filename)
-
-    assert len(filenames_found) == 3
-
-    shutil.rmtree(ingest_path)
-
-
-def test_ing_artifacts_tar_has_only_what_it_should(ingest_path: Path):
-    """
-    The ingestion_artifacts tar should contain ONLY
-    the ingestion manifest and the additional metadata file
-
-    :return:
-    """
-    populate_fake_tmpx_ratuqh_ingest_path(ingest_path)
-    manifest, _ = build_tmpx_ratuqh_image_manifest(ingest_path)
-    addl_md = manifest.parameters.additional_metadata
-    ing_arties_tar = find_ingestion_artifacts_tar(ingest_path)
-    with tarfile.open(ing_arties_tar, "r") as tar:
-        members = tar.getmembers()
-        assert len(members) == 2
-        for member in members:
-            assert member.name.endswith(MANIFEST_FILENAME) or member.name.endswith(addl_md.filename)
-
-    shutil.rmtree(ingest_path)
-
-
 # -----------------------------
 #  U  T  I  L  I  T  I  E  S
 # -----------------------------
@@ -308,25 +212,9 @@ def build_tmpx_ratuqh_image_manifest(staging_source_dir: Path):
     additional_metadata = AbstractTextFile(filename=ADDITIONAL_METADATA_FILENAME, content="")
 
     return IngestionManifestBuilder(
-        staging_source_dir=staging_source_dir,
+        manifest_source_dir=staging_source_dir,
         additional_metadata=additional_metadata,
         telescope=Telescope.EVLA.value,
         sp_type=ScienceProductType.IMAGE.value,
         locator="uid://evla/calibration/ea93dae5-3495-47fa-887d-4be2852f5f14",
     ).build()
-
-
-def find_ingestion_artifacts_tar(staging_source_dir: Path):
-    """
-    There should be an ingestion artifacts tar after manifest creation.
-
-    :param staging_source_dir:
-    :return:
-    """
-    ing_artifacts_tars = [
-        file
-        for file in staging_source_dir.iterdir()
-        if file.name.startswith(AncillaryProductType.INGESTION_ARTIFACTS.value) and file.name.endswith(TARFILE_EXT)
-    ]
-    assert len(ing_artifacts_tars) == 1
-    return ing_artifacts_tars[0]
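
Note: `find_ingestion_artifacts_tar` is deleted here because it moved into `conftest` — see the new import at the top of this file. Going by the removed lines above, the shared helper is presumably unchanged:

```python
from pathlib import Path

from ingest_envoy.manifest_components import TARFILE_EXT
from ingest_envoy.utilities import AncillaryProductType


def find_ingestion_artifacts_tar(staging_source_dir: Path) -> Path:
    """Return the single ingestion-artifacts tar produced during manifest creation."""
    ing_artifacts_tars = [
        file
        for file in staging_source_dir.iterdir()
        if file.name.startswith(AncillaryProductType.INGESTION_ARTIFACTS.value)
        and file.name.endswith(TARFILE_EXT)
    ]
    assert len(ing_artifacts_tars) == 1
    return ing_artifacts_tars[0]
```
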
diff --git a/apps/cli/executables/pexable/ingest_envoy/test/test_manifest_builder_entry_points.py b/apps/cli/executables/pexable/ingest_envoy/test/test_manifest_builder_entry_points.py
index 41d0aa3cb2dd4813f10a7f46709c14a9db37809e..4281c679d095341e60699c3127d759b81634f465 100644
--- a/apps/cli/executables/pexable/ingest_envoy/test/test_manifest_builder_entry_points.py
+++ b/apps/cli/executables/pexable/ingest_envoy/test/test_manifest_builder_entry_points.py
@@ -20,18 +20,9 @@
 import logging
 
 # pylint: disable=E0401, E0402, R1721, W0611, W0621
-import shutil
 import sys
 from pathlib import Path
 
-from ingest_envoy.ingestion_manifest import (
-    IngestionManifest,
-    IngestionManifestBuilder,
-    find_manifest,
-)
-from ingest_envoy.manifest_components import INGESTION_ARTIFACTS_NAME, TARFILE_EXT
-from ingest_envoy.utilities import ScienceProductType, Telescope
-
 from conftest import (
     ANCILLARY_PRODUCTS,
     OUTPUT_GROUP_SCIENCE_PRODUCT_ANCILLARIES_K,
@@ -40,6 +31,13 @@ from conftest import (
     populate_fake_evla_cal_ingest_path,
     populate_fake_tmpx_ratuqh_ingest_path,
 )
+from ingest_envoy.ingestion_manifest import (
+    IngestionManifest,
+    IngestionManifestBuilder,
+    find_manifest,
+)
+from ingest_envoy.manifest_components import INGESTION_ARTIFACTS_NAME, TARFILE_EXT
+from ingest_envoy.utilities import ScienceProductType, Telescope
 
 logger = logging.getLogger(IngestionManifest.__name__)
 logger.setLevel(logging.INFO)
@@ -62,7 +60,7 @@ def test_entry_point_for_evla_cal(ingest_path: Path):
         telescope=Telescope.EVLA.value,
         locator=locator,
         sp_type=ScienceProductType.EVLA_CAL.value,
-        staging_source_dir=ingest_path,
+        manifest_source_dir=ingest_path,
     )
     builder.build()
     ingestion_files = [file for file in ingest_path.iterdir()]
@@ -96,8 +94,12 @@ def test_entry_point_for_image(ingest_path: Path):
     # we should be starting out with various image manifest input files
     # and CASA byproducts, a random file, and -not- the image ingestion
     # manifest yet to be created
-    expected_file_count_before = len(ANCILLARY_PRODUCTS) + len(OUTPUT_GROUP_SCIENCE_PRODUCT_ANCILLARIES_K) +\
-                                 len(OUTPUT_GROUP_SCIENCE_PRODUCT_ANCILLARIES_X) + 2
+    expected_file_count_before = (
+        len(ANCILLARY_PRODUCTS)
+        + len(OUTPUT_GROUP_SCIENCE_PRODUCT_ANCILLARIES_K)
+        + len(OUTPUT_GROUP_SCIENCE_PRODUCT_ANCILLARIES_X)
+        + 2
+    )
     ingestion_files_before = [file for file in ingest_path.iterdir()]
     assert len(ingestion_files_before) == expected_file_count_before
 
@@ -105,7 +107,7 @@ def test_entry_point_for_image(ingest_path: Path):
         telescope=Telescope.EVLA.value,
         locator=locator,
         sp_type=ScienceProductType.IMAGE.value,
-        staging_source_dir=ingest_path,
+        manifest_source_dir=ingest_path,
     ).build()
 
     # there should be one ingestion manifest....
@@ -123,4 +125,3 @@ def test_entry_point_for_image(ingest_path: Path):
                 print(f">>> {file.name} present after manifest build")
 
     assert len(ingestion_files_after) == expected_file_count_before + 2
-    shutil.rmtree(ingest_path)
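
The explicit `shutil.rmtree(ingest_path)` cleanup disappears throughout these tests, which suggests the shared `ingest_path` fixture in `conftest` now hands out a per-test temporary directory that pytest reclaims on its own. A minimal sketch of such a fixture — an assumption, since `conftest` itself is not shown in this diff:

```python
from pathlib import Path

import pytest


@pytest.fixture(scope="function")
def ingest_path(tmp_path: Path) -> Path:
    """Hand each test a throwaway staging directory."""
    # tmp_path lives under pytest's base temp dir, which pytest prunes
    # automatically, so tests no longer need shutil.rmtree() of their own
    staging = tmp_path / "ingestion"
    staging.mkdir()
    return staging
```
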
diff --git a/apps/cli/executables/pexable/ingest_envoy/test/test_solicitor.py b/apps/cli/executables/pexable/ingest_envoy/test/test_solicitor.py
index e2b6a53462e912a557f53fba9ac9889427261075..9cd8e1e718d432b69412e320e4b2cb84e020e77d 100644
--- a/apps/cli/executables/pexable/ingest_envoy/test/test_solicitor.py
+++ b/apps/cli/executables/pexable/ingest_envoy/test/test_solicitor.py
@@ -23,21 +23,22 @@ from unittest.mock import patch
 
 import pytest
 from ingest_envoy.solicitor import Solicitor
-from ingest_envoy.utilities import IngestType
+from ingest_envoy.utilities import CuratorType, IngestType
 
 SOLICIT_WORKFLOWS_PATCH = "ingest_envoy.solicitor.Solicitor.solicit_contents"
-filename = "test/input_files/test-metadata.json"
-image_filename = "test/input_files/test-image-metadata.json"
+SOLICITOR_URLS = ["http://capability:3457", "http://workflow:3456"]
 
 
 @pytest.fixture(scope="function")
 def solicitor() -> Solicitor:
-    return Solicitor(IngestType.CAL, ["http://capability:3457", "http://workflow:3456"], filename=filename)
+    filename = "test/input_files/test-metadata.json"
+    return Solicitor(IngestType.CAL, SOLICITOR_URLS, filename=filename)
 
 
 @pytest.fixture(scope="function")
 def image_solicitor() -> Solicitor:
-    return Solicitor(IngestType.IMG, ["http://capability:3457", "http://workflow:3456"], filename=image_filename)
+    image_filename = "test/input_files/test-image-metadata.json"
+    return Solicitor(IngestType.IMG, SOLICITOR_URLS, filename=image_filename)
 
 
 expected_metadata = {
@@ -59,7 +60,7 @@ expected_metadata = {
 
 class TestSolicitor:
     def test_solicit_contents(self, solicitor: Solicitor):
-        metadata = solicitor.solicit_contents(filename)
+        metadata = solicitor.solicit_contents(solicitor.filename)
         assert metadata == expected_metadata
 
     def test_solicit_workflow_directory_name(self, solicitor: Solicitor):
@@ -109,3 +110,28 @@ class TestSolicitor:
         with patch("ingest_envoy.solicitor.Solicitor.solicit_initial_directory_name", return_value=initial_version_dir):
             parameters = image_solicitor.solicit_parameters()
             assert parameters == metadata
+
+    @pytest.mark.skip("file_list parameter tests need tweaking")
+    def test_solicit_parameters_full_curation_image_file_list(self):
+        filename = "test/input_files/test-full-curation-image-evla-metadata.json"
+        expected_parameters = {
+            "telescope": "EVLA",
+            "project": "20B-377",
+            "product_group_id": 328918,
+            "product_type": "image",
+            "curation_source": None,
+            "target_list": None,
+            "spl": None,
+            "input_group_locator": "uid://evla/calibration/620195ad-1d11-49f4-be1f-3532092565c1",
+            "file_list": "20B-377.MJD59909.70629903935.AT2019teq_sci.X_band.cont.I.mask.fits,20B-377.MJD59909.70629903935.AT2019teq_sci.X_band.cont.I.tt1.fits,20B-377.MJD59909.70629903935.AT2019teq_sci.X_band.cont.I.pbcor.tt0.fits,20B-377.MJD59909.70629903935.AT2019teq_sci.X_band.cont.I.pb.tt0.fits,20B-377.MJD59909.70629903935.AT2019teq_sci.X_band.cont.I.alpha.error.fits,20B-377.MJD59909.70629903935.AT2019teq_sci.X_band.cont.I.alpha.fits,20B-377.MJD59909.70629903935.AT2019teq_sci.X_band.cont.I.tt0.fits,20B-377.MJD59909.70629903935.AT2019teq_sci.C_band.cont.I.pb.tt0.fits,20B-377.MJD59909.70629903935.AT2019teq_sci.C_band.cont.I.alpha.error.fits,20B-377.MJD59909.70629903935.AT2019teq_sci.C_band.cont.I.pbcor.tt0.fits,20B-377.MJD59909.70629903935.AT2019teq_sci.C_band.cont.I.tt0.fits,20B-377.MJD59909.70629903935.AT2019teq_sci.C_band.cont.I.alpha.fits,20B-377.MJD59909.70629903935.AT2019teq_sci.C_band.cont.I.tt1.fits,20B-377.MJD59909.70629903935.AT2019teq_sci.C_band.cont.I.mask.fits,ingestion_artifacts_2022_12_14T06_22_03.874.tar,weblog.tgz,pipeline_artifacts_2022_12_09T19_08_32.tar,",
+        }
+        full_curation_image_files_list_solicitor = Solicitor(CuratorType.FULL, SOLICITOR_URLS, filename)
+        actual_parameters = full_curation_image_files_list_solicitor.solicit_parameters()
+        assert len(actual_parameters.keys()) == len(expected_parameters.keys())
+        assert set(actual_parameters.keys()) == set(expected_parameters.keys())
+        for key, value in expected_parameters.items():
+            if key != "file_list":
+                assert value == actual_parameters[key]
+            else:
+                assert len(value) == len(actual_parameters[key])
+                assert set(value) == set(actual_parameters[key])
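
The skip reason ("file_list parameter tests need tweaking") may come down to the final branch: `file_list` is a single comma-separated string, so `set(value) == set(actual_parameters[key])` compares sets of *characters*, not filenames, and `len(value)` compares string lengths. A hypothetical fix (helper name is illustrative) would split before comparing:

```python
def split_file_list(file_list: str) -> set[str]:
    """Split a comma-separated file_list, dropping the trailing empty entry."""
    return {name for name in file_list.split(",") if name}


# in the test body, compare filenames rather than characters:
#     assert split_file_list(value) == split_file_list(actual_parameters[key])
```
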
diff --git a/apps/cli/executables/pexable/mediator/pyproject.toml b/apps/cli/executables/pexable/mediator/pyproject.toml
index 0dc0968e14779ae8720d575b08cabb5a3ffdbcf6..1203d0a3977af3f7022d5bba18bd4bc91912b7b5 100644
--- a/apps/cli/executables/pexable/mediator/pyproject.toml
+++ b/apps/cli/executables/pexable/mediator/pyproject.toml
@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "ssa_mediator"
-version = "2.8.2.2"
+version = "2.8.2.3rc1"
 description = "Mediator: the Workspaces intervention utility"
 authors = ["DMS SSA <dms-ssa@nrao.edu>"]
 license = "GPL3+"
diff --git a/apps/cli/executables/pexable/mediator/system_mediator/__init__.py b/apps/cli/executables/pexable/mediator/system_mediator/__init__.py
index e51cae533d6dade781e3579303bc71afc1f4226f..9bdb4c6936f757f6f92e65a8a94b161e10373f57 100644
--- a/apps/cli/executables/pexable/mediator/system_mediator/__init__.py
+++ b/apps/cli/executables/pexable/mediator/system_mediator/__init__.py
@@ -18,4 +18,4 @@
 """
 Mediator: the Workspaces intervention utility
 """
-__version__ = "2.8.2.2"
+__version__ = "2.8.2.3rc1"
diff --git a/apps/cli/executables/pexable/null/null/__init__.py b/apps/cli/executables/pexable/null/null/__init__.py
index 6a56e6d2db78f33fd7bee004f935969d9979e4ba..32fd06ce2114c79785dd03cdc5d1cd07cedc30ca 100644
--- a/apps/cli/executables/pexable/null/null/__init__.py
+++ b/apps/cli/executables/pexable/null/null/__init__.py
@@ -18,4 +18,4 @@
 """
 This is the null executable, a baseline test of the functionality of the Workspaces system.
 """
-__version__ = "2.8.2.2"
+__version__ = "2.8.2.3rc1"
diff --git a/apps/cli/executables/pexable/null/pyproject.toml b/apps/cli/executables/pexable/null/pyproject.toml
index 4ef09de00135bbee8698fd2caabaeb4e27b30693..de840291e3ec52304f3f467a88aa56044230e6be 100644
--- a/apps/cli/executables/pexable/null/pyproject.toml
+++ b/apps/cli/executables/pexable/null/pyproject.toml
@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "ssa_null"
-version = "2.8.2.2"
+version = "2.8.2.3rc1"
 description = "This is the null executable, a baseline test of the functionality of the Workspaces system."
 authors = ["DMS SSA <dms-ssa@nrao.edu>"]
 license = "GPL3+"
diff --git a/apps/cli/executables/pexable/productfetcher/productfetcher/__init__.py b/apps/cli/executables/pexable/productfetcher/productfetcher/__init__.py
index 95798c7e2d6335e4424b525dbfe6d6bd4393f3c8..8a12b90896947c2f8ff1821803de9a162f3f7a19 100644
--- a/apps/cli/executables/pexable/productfetcher/productfetcher/__init__.py
+++ b/apps/cli/executables/pexable/productfetcher/productfetcher/__init__.py
@@ -18,4 +18,4 @@
 """
 Product fetcher: retrieve products from NGAS and other places for the archive and place them on disk
 """
-__version__ = "2.8.2.2"
+__version__ = "2.8.2.3rc1"
diff --git a/apps/cli/executables/pexable/productfetcher/pyproject.toml b/apps/cli/executables/pexable/productfetcher/pyproject.toml
index 3c485cae8f2ef91bc5028f0d36d7ce086f3cc7af..41eba059fe80db91587d8b06729bb323f1f30866 100644
--- a/apps/cli/executables/pexable/productfetcher/pyproject.toml
+++ b/apps/cli/executables/pexable/productfetcher/pyproject.toml
@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "productfetcher"
-version = "2.8.2.2"
+version = "2.8.2.3rc1"
 description = "Product fetcher: retrieve products from NGAS and other places for the archive and place them on disk"
 authors = ["DMS SSA <dms-ssa@nrao.edu>"]
 license = "GPL3+"
diff --git a/apps/cli/executables/pexable/update_stage/pyproject.toml b/apps/cli/executables/pexable/update_stage/pyproject.toml
index 15b634e67c1b0eee95af1058a7f57816ec8bea95..317557a2bdb1c2ab0cd92fdbd4a55cee26cf8f65 100644
--- a/apps/cli/executables/pexable/update_stage/pyproject.toml
+++ b/apps/cli/executables/pexable/update_stage/pyproject.toml
@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "ssa_update_stage"
-version = "2.8.2.2"
+version = "2.8.2.3rc1"
 description = "Update stage: pass status information back to workspaces over the HT Chirp protocol"
 authors = ["DMS SSA <dms-ssa@nrao.edu>"]
 license = "GPL3+"
diff --git a/apps/cli/executables/pexable/update_stage/update_stage/__init__.py b/apps/cli/executables/pexable/update_stage/update_stage/__init__.py
index 233e2ae6aa2412efa9829d3018dea6743ce0e600..efc2330e5b8f502196dd6947a3a47f6ba64af253 100644
--- a/apps/cli/executables/pexable/update_stage/update_stage/__init__.py
+++ b/apps/cli/executables/pexable/update_stage/update_stage/__init__.py
@@ -18,4 +18,4 @@
 """
 Update stage: pass status information back to workspaces over the HT Chirp protocol
 """
-__version__ = "2.8.2.2"
+__version__ = "2.8.2.3rc1"
diff --git a/apps/cli/executables/pexable/vela/pyproject.toml b/apps/cli/executables/pexable/vela/pyproject.toml
index 0fc0c3e351b4a129658127e94230293e63d4926f..f78a550d604b36eca07642b2aa80ea587717fe0c 100644
--- a/apps/cli/executables/pexable/vela/pyproject.toml
+++ b/apps/cli/executables/pexable/vela/pyproject.toml
@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "ssa_vela"
-version = "2.8.2.2"
+version = "2.8.2.3rc1"
 description = "Workspaces CASA functionality bridge"
 authors = ["DMS SSA <dms-ssa@nrao.edu>"]
 license = "GPL3+"
diff --git a/apps/cli/executables/pexable/vela/vela/__init__.py b/apps/cli/executables/pexable/vela/vela/__init__.py
index 086f33d878bb35b29a08f2e1a351b25a78e87b99..f598851ae8f0b4cd7348668e43bfb899af241df0 100644
--- a/apps/cli/executables/pexable/vela/vela/__init__.py
+++ b/apps/cli/executables/pexable/vela/vela/__init__.py
@@ -18,4 +18,4 @@
 """
 Workspaces CASA functionality bridge
 """
-__version__ = "2.8.2.2"
+__version__ = "2.8.2.3rc1"
diff --git a/apps/cli/executables/pexable/wf_inspector/pyproject.toml b/apps/cli/executables/pexable/wf_inspector/pyproject.toml
index 8ced242ef94890cb8436135827be60f4135fba34..a937054feb8ffdded5ee119d10ac5a5bc99857f3 100644
--- a/apps/cli/executables/pexable/wf_inspector/pyproject.toml
+++ b/apps/cli/executables/pexable/wf_inspector/pyproject.toml
@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "wf_inspector"
-version = "2.8.2.2"
+version = "2.8.2.3rc1"
 description = "Command-line script that wraps the functionality of `docker exec -it` to enter our workflow Docker container"
 authors = ["DMS SSA <dms-ssa@nrao.edu>"]
 license = "GPL3+"
diff --git a/apps/cli/executables/pexable/wf_inspector/wf_inspector/__init__.py b/apps/cli/executables/pexable/wf_inspector/wf_inspector/__init__.py
index 3faf86ab2dad1a416505d3eb84d7dcb214d338e1..893c8d443dc8d76b5c0e92941d40ddf351421c8b 100644
--- a/apps/cli/executables/pexable/wf_inspector/wf_inspector/__init__.py
+++ b/apps/cli/executables/pexable/wf_inspector/wf_inspector/__init__.py
@@ -16,4 +16,4 @@
 # You should have received a copy of the GNU General Public License
 # along with Workspaces.  If not, see <https://www.gnu.org/licenses/>.
 """wf_inspector: access a running workflow via ssh"""
-__version__ = "2.8.2.2"
+__version__ = "2.8.2.3rc1"
diff --git a/apps/cli/executables/pexable/ws_annihilator/poetry.lock b/apps/cli/executables/pexable/ws_annihilator/poetry.lock
index d3a398f130fe2b1f112588b1fb24cd4b64b57b4b..649444960f0af2a8303b52fde68d3b035e672423 100644
--- a/apps/cli/executables/pexable/ws_annihilator/poetry.lock
+++ b/apps/cli/executables/pexable/ws_annihilator/poetry.lock
@@ -135,83 +135,6 @@ files = [
 [package.extras]
 test = ["pytest (>=6)"]
 
-[[package]]
-name = "greenlet"
-version = "2.0.2"
-description = "Lightweight in-process concurrent programming"
-optional = false
-python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*"
-files = [
-    {file = "greenlet-2.0.2-cp27-cp27m-macosx_10_14_x86_64.whl", hash = "sha256:bdfea8c661e80d3c1c99ad7c3ff74e6e87184895bbaca6ee8cc61209f8b9b85d"},
-    {file = "greenlet-2.0.2-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:9d14b83fab60d5e8abe587d51c75b252bcc21683f24699ada8fb275d7712f5a9"},
-    {file = "greenlet-2.0.2-cp27-cp27m-win32.whl", hash = "sha256:6c3acb79b0bfd4fe733dff8bc62695283b57949ebcca05ae5c129eb606ff2d74"},
-    {file = "greenlet-2.0.2-cp27-cp27m-win_amd64.whl", hash = "sha256:283737e0da3f08bd637b5ad058507e578dd462db259f7f6e4c5c365ba4ee9343"},
-    {file = "greenlet-2.0.2-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:d27ec7509b9c18b6d73f2f5ede2622441de812e7b1a80bbd446cb0633bd3d5ae"},
-    {file = "greenlet-2.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d967650d3f56af314b72df7089d96cda1083a7fc2da05b375d2bc48c82ab3f3c"},
-    {file = "greenlet-2.0.2-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:30bcf80dda7f15ac77ba5af2b961bdd9dbc77fd4ac6105cee85b0d0a5fcf74df"},
-    {file = "greenlet-2.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:26fbfce90728d82bc9e6c38ea4d038cba20b7faf8a0ca53a9c07b67318d46088"},
-    {file = "greenlet-2.0.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9190f09060ea4debddd24665d6804b995a9c122ef5917ab26e1566dcc712ceeb"},
-    {file = "greenlet-2.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d75209eed723105f9596807495d58d10b3470fa6732dd6756595e89925ce2470"},
-    {file = "greenlet-2.0.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:3a51c9751078733d88e013587b108f1b7a1fb106d402fb390740f002b6f6551a"},
-    {file = "greenlet-2.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:76ae285c8104046b3a7f06b42f29c7b73f77683df18c49ab5af7983994c2dd91"},
-    {file = "greenlet-2.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:2d4686f195e32d36b4d7cf2d166857dbd0ee9f3d20ae349b6bf8afc8485b3645"},
-    {file = "greenlet-2.0.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:c4302695ad8027363e96311df24ee28978162cdcdd2006476c43970b384a244c"},
-    {file = "greenlet-2.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d4606a527e30548153be1a9f155f4e283d109ffba663a15856089fb55f933e47"},
-    {file = "greenlet-2.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c48f54ef8e05f04d6eff74b8233f6063cb1ed960243eacc474ee73a2ea8573ca"},
-    {file = "greenlet-2.0.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a1846f1b999e78e13837c93c778dcfc3365902cfb8d1bdb7dd73ead37059f0d0"},
-    {file = "greenlet-2.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a06ad5312349fec0ab944664b01d26f8d1f05009566339ac6f63f56589bc1a2"},
-    {file = "greenlet-2.0.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:eff4eb9b7eb3e4d0cae3d28c283dc16d9bed6b193c2e1ace3ed86ce48ea8df19"},
-    {file = "greenlet-2.0.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:5454276c07d27a740c5892f4907c86327b632127dd9abec42ee62e12427ff7e3"},
-    {file = "greenlet-2.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:7cafd1208fdbe93b67c7086876f061f660cfddc44f404279c1585bbf3cdc64c5"},
-    {file = "greenlet-2.0.2-cp35-cp35m-macosx_10_14_x86_64.whl", hash = "sha256:910841381caba4f744a44bf81bfd573c94e10b3045ee00de0cbf436fe50673a6"},
-    {file = "greenlet-2.0.2-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:18a7f18b82b52ee85322d7a7874e676f34ab319b9f8cce5de06067384aa8ff43"},
-    {file = "greenlet-2.0.2-cp35-cp35m-win32.whl", hash = "sha256:03a8f4f3430c3b3ff8d10a2a86028c660355ab637cee9333d63d66b56f09d52a"},
-    {file = "greenlet-2.0.2-cp35-cp35m-win_amd64.whl", hash = "sha256:4b58adb399c4d61d912c4c331984d60eb66565175cdf4a34792cd9600f21b394"},
-    {file = "greenlet-2.0.2-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:703f18f3fda276b9a916f0934d2fb6d989bf0b4fb5a64825260eb9bfd52d78f0"},
-    {file = "greenlet-2.0.2-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:32e5b64b148966d9cccc2c8d35a671409e45f195864560829f395a54226408d3"},
-    {file = "greenlet-2.0.2-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2dd11f291565a81d71dab10b7033395b7a3a5456e637cf997a6f33ebdf06f8db"},
-    {file = "greenlet-2.0.2-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e0f72c9ddb8cd28532185f54cc1453f2c16fb417a08b53a855c4e6a418edd099"},
-    {file = "greenlet-2.0.2-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cd021c754b162c0fb55ad5d6b9d960db667faad0fa2ff25bb6e1301b0b6e6a75"},
-    {file = "greenlet-2.0.2-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:3c9b12575734155d0c09d6c3e10dbd81665d5c18e1a7c6597df72fd05990c8cf"},
-    {file = "greenlet-2.0.2-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:b9ec052b06a0524f0e35bd8790686a1da006bd911dd1ef7d50b77bfbad74e292"},
-    {file = "greenlet-2.0.2-cp36-cp36m-win32.whl", hash = "sha256:dbfcfc0218093a19c252ca8eb9aee3d29cfdcb586df21049b9d777fd32c14fd9"},
-    {file = "greenlet-2.0.2-cp36-cp36m-win_amd64.whl", hash = "sha256:9f35ec95538f50292f6d8f2c9c9f8a3c6540bbfec21c9e5b4b751e0a7c20864f"},
-    {file = "greenlet-2.0.2-cp37-cp37m-macosx_10_15_x86_64.whl", hash = "sha256:d5508f0b173e6aa47273bdc0a0b5ba055b59662ba7c7ee5119528f466585526b"},
-    {file = "greenlet-2.0.2-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:f82d4d717d8ef19188687aa32b8363e96062911e63ba22a0cff7802a8e58e5f1"},
-    {file = "greenlet-2.0.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c9c59a2120b55788e800d82dfa99b9e156ff8f2227f07c5e3012a45a399620b7"},
-    {file = "greenlet-2.0.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2780572ec463d44c1d3ae850239508dbeb9fed38e294c68d19a24d925d9223ca"},
-    {file = "greenlet-2.0.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:937e9020b514ceedb9c830c55d5c9872abc90f4b5862f89c0887033ae33c6f73"},
-    {file = "greenlet-2.0.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:36abbf031e1c0f79dd5d596bfaf8e921c41df2bdf54ee1eed921ce1f52999a86"},
-    {file = "greenlet-2.0.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:18e98fb3de7dba1c0a852731c3070cf022d14f0d68b4c87a19cc1016f3bb8b33"},
-    {file = "greenlet-2.0.2-cp37-cp37m-win32.whl", hash = "sha256:3f6ea9bd35eb450837a3d80e77b517ea5bc56b4647f5502cd28de13675ee12f7"},
-    {file = "greenlet-2.0.2-cp37-cp37m-win_amd64.whl", hash = "sha256:7492e2b7bd7c9b9916388d9df23fa49d9b88ac0640db0a5b4ecc2b653bf451e3"},
-    {file = "greenlet-2.0.2-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:b864ba53912b6c3ab6bcb2beb19f19edd01a6bfcbdfe1f37ddd1778abfe75a30"},
-    {file = "greenlet-2.0.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:1087300cf9700bbf455b1b97e24db18f2f77b55302a68272c56209d5587c12d1"},
-    {file = "greenlet-2.0.2-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:ba2956617f1c42598a308a84c6cf021a90ff3862eddafd20c3333d50f0edb45b"},
-    {file = "greenlet-2.0.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fc3a569657468b6f3fb60587e48356fe512c1754ca05a564f11366ac9e306526"},
-    {file = "greenlet-2.0.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8eab883b3b2a38cc1e050819ef06a7e6344d4a990d24d45bc6f2cf959045a45b"},
-    {file = "greenlet-2.0.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:acd2162a36d3de67ee896c43effcd5ee3de247eb00354db411feb025aa319857"},
-    {file = "greenlet-2.0.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:0bf60faf0bc2468089bdc5edd10555bab6e85152191df713e2ab1fcc86382b5a"},
-    {file = "greenlet-2.0.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b0ef99cdbe2b682b9ccbb964743a6aca37905fda5e0452e5ee239b1654d37f2a"},
-    {file = "greenlet-2.0.2-cp38-cp38-win32.whl", hash = "sha256:b80f600eddddce72320dbbc8e3784d16bd3fb7b517e82476d8da921f27d4b249"},
-    {file = "greenlet-2.0.2-cp38-cp38-win_amd64.whl", hash = "sha256:4d2e11331fc0c02b6e84b0d28ece3a36e0548ee1a1ce9ddde03752d9b79bba40"},
-    {file = "greenlet-2.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:8512a0c38cfd4e66a858ddd1b17705587900dd760c6003998e9472b77b56d417"},
-    {file = "greenlet-2.0.2-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:88d9ab96491d38a5ab7c56dd7a3cc37d83336ecc564e4e8816dbed12e5aaefc8"},
-    {file = "greenlet-2.0.2-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:561091a7be172ab497a3527602d467e2b3fbe75f9e783d8b8ce403fa414f71a6"},
-    {file = "greenlet-2.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:971ce5e14dc5e73715755d0ca2975ac88cfdaefcaab078a284fea6cfabf866df"},
-    {file = "greenlet-2.0.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:be4ed120b52ae4d974aa40215fcdfde9194d63541c7ded40ee12eb4dda57b76b"},
-    {file = "greenlet-2.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:94c817e84245513926588caf1152e3b559ff794d505555211ca041f032abbb6b"},
-    {file = "greenlet-2.0.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:1a819eef4b0e0b96bb0d98d797bef17dc1b4a10e8d7446be32d1da33e095dbb8"},
-    {file = "greenlet-2.0.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:7efde645ca1cc441d6dc4b48c0f7101e8d86b54c8530141b09fd31cef5149ec9"},
-    {file = "greenlet-2.0.2-cp39-cp39-win32.whl", hash = "sha256:ea9872c80c132f4663822dd2a08d404073a5a9b5ba6155bea72fb2a79d1093b5"},
-    {file = "greenlet-2.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:db1a39669102a1d8d12b57de2bb7e2ec9066a6f2b3da35ae511ff93b01b5d564"},
-    {file = "greenlet-2.0.2.tar.gz", hash = "sha256:e7c8dc13af7db097bed64a051d2dd49e9f0af495c26995c00a9ee842690d34c0"},
-]
-
-[package.extras]
-docs = ["Sphinx", "docutils (<0.18)"]
-test = ["objgraph", "psutil"]
-
 [[package]]
 name = "idna"
 version = "3.4"
@@ -260,87 +183,6 @@ files = [
 dev = ["pre-commit", "tox"]
 testing = ["pytest", "pytest-benchmark"]
 
-[[package]]
-name = "psycopg2-binary"
-version = "2.9.9"
-description = "psycopg2 - Python-PostgreSQL Database Adapter"
-optional = false
-python-versions = ">=3.7"
-files = [
-    {file = "psycopg2-binary-2.9.9.tar.gz", hash = "sha256:7f01846810177d829c7692f1f5ada8096762d9172af1b1a28d4ab5b77c923c1c"},
-    {file = "psycopg2_binary-2.9.9-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c2470da5418b76232f02a2fcd2229537bb2d5a7096674ce61859c3229f2eb202"},
-    {file = "psycopg2_binary-2.9.9-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c6af2a6d4b7ee9615cbb162b0738f6e1fd1f5c3eda7e5da17861eacf4c717ea7"},
-    {file = "psycopg2_binary-2.9.9-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:75723c3c0fbbf34350b46a3199eb50638ab22a0228f93fb472ef4d9becc2382b"},
-    {file = "psycopg2_binary-2.9.9-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:83791a65b51ad6ee6cf0845634859d69a038ea9b03d7b26e703f94c7e93dbcf9"},
-    {file = "psycopg2_binary-2.9.9-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0ef4854e82c09e84cc63084a9e4ccd6d9b154f1dbdd283efb92ecd0b5e2b8c84"},
-    {file = "psycopg2_binary-2.9.9-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ed1184ab8f113e8d660ce49a56390ca181f2981066acc27cf637d5c1e10ce46e"},
-    {file = "psycopg2_binary-2.9.9-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d2997c458c690ec2bc6b0b7ecbafd02b029b7b4283078d3b32a852a7ce3ddd98"},
-    {file = "psycopg2_binary-2.9.9-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:b58b4710c7f4161b5e9dcbe73bb7c62d65670a87df7bcce9e1faaad43e715245"},
-    {file = "psycopg2_binary-2.9.9-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:0c009475ee389757e6e34611d75f6e4f05f0cf5ebb76c6037508318e1a1e0d7e"},
-    {file = "psycopg2_binary-2.9.9-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8dbf6d1bc73f1d04ec1734bae3b4fb0ee3cb2a493d35ede9badbeb901fb40f6f"},
-    {file = "psycopg2_binary-2.9.9-cp310-cp310-win32.whl", hash = "sha256:3f78fd71c4f43a13d342be74ebbc0666fe1f555b8837eb113cb7416856c79682"},
-    {file = "psycopg2_binary-2.9.9-cp310-cp310-win_amd64.whl", hash = "sha256:876801744b0dee379e4e3c38b76fc89f88834bb15bf92ee07d94acd06ec890a0"},
-    {file = "psycopg2_binary-2.9.9-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ee825e70b1a209475622f7f7b776785bd68f34af6e7a46e2e42f27b659b5bc26"},
-    {file = "psycopg2_binary-2.9.9-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1ea665f8ce695bcc37a90ee52de7a7980be5161375d42a0b6c6abedbf0d81f0f"},
-    {file = "psycopg2_binary-2.9.9-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:143072318f793f53819048fdfe30c321890af0c3ec7cb1dfc9cc87aa88241de2"},
-    {file = "psycopg2_binary-2.9.9-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c332c8d69fb64979ebf76613c66b985414927a40f8defa16cf1bc028b7b0a7b0"},
-    {file = "psycopg2_binary-2.9.9-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f7fc5a5acafb7d6ccca13bfa8c90f8c51f13d8fb87d95656d3950f0158d3ce53"},
-    {file = "psycopg2_binary-2.9.9-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:977646e05232579d2e7b9c59e21dbe5261f403a88417f6a6512e70d3f8a046be"},
-    {file = "psycopg2_binary-2.9.9-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:b6356793b84728d9d50ead16ab43c187673831e9d4019013f1402c41b1db9b27"},
-    {file = "psycopg2_binary-2.9.9-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:bc7bb56d04601d443f24094e9e31ae6deec9ccb23581f75343feebaf30423359"},
-    {file = "psycopg2_binary-2.9.9-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:77853062a2c45be16fd6b8d6de2a99278ee1d985a7bd8b103e97e41c034006d2"},
-    {file = "psycopg2_binary-2.9.9-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:78151aa3ec21dccd5cdef6c74c3e73386dcdfaf19bced944169697d7ac7482fc"},
-    {file = "psycopg2_binary-2.9.9-cp311-cp311-win32.whl", hash = "sha256:dc4926288b2a3e9fd7b50dc6a1909a13bbdadfc67d93f3374d984e56f885579d"},
-    {file = "psycopg2_binary-2.9.9-cp311-cp311-win_amd64.whl", hash = "sha256:b76bedd166805480ab069612119ea636f5ab8f8771e640ae103e05a4aae3e417"},
-    {file = "psycopg2_binary-2.9.9-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:8532fd6e6e2dc57bcb3bc90b079c60de896d2128c5d9d6f24a63875a95a088cf"},
-    {file = "psycopg2_binary-2.9.9-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b0605eaed3eb239e87df0d5e3c6489daae3f7388d455d0c0b4df899519c6a38d"},
-    {file = "psycopg2_binary-2.9.9-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f8544b092a29a6ddd72f3556a9fcf249ec412e10ad28be6a0c0d948924f2212"},
-    {file = "psycopg2_binary-2.9.9-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2d423c8d8a3c82d08fe8af900ad5b613ce3632a1249fd6a223941d0735fce493"},
-    {file = "psycopg2_binary-2.9.9-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2e5afae772c00980525f6d6ecf7cbca55676296b580c0e6abb407f15f3706996"},
-    {file = "psycopg2_binary-2.9.9-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6e6f98446430fdf41bd36d4faa6cb409f5140c1c2cf58ce0bbdaf16af7d3f119"},
-    {file = "psycopg2_binary-2.9.9-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:c77e3d1862452565875eb31bdb45ac62502feabbd53429fdc39a1cc341d681ba"},
-    {file = "psycopg2_binary-2.9.9-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:cb16c65dcb648d0a43a2521f2f0a2300f40639f6f8c1ecbc662141e4e3e1ee07"},
-    {file = "psycopg2_binary-2.9.9-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:911dda9c487075abd54e644ccdf5e5c16773470a6a5d3826fda76699410066fb"},
-    {file = "psycopg2_binary-2.9.9-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:57fede879f08d23c85140a360c6a77709113efd1c993923c59fde17aa27599fe"},
-    {file = "psycopg2_binary-2.9.9-cp312-cp312-win32.whl", hash = "sha256:64cf30263844fa208851ebb13b0732ce674d8ec6a0c86a4e160495d299ba3c93"},
-    {file = "psycopg2_binary-2.9.9-cp312-cp312-win_amd64.whl", hash = "sha256:81ff62668af011f9a48787564ab7eded4e9fb17a4a6a74af5ffa6a457400d2ab"},
-    {file = "psycopg2_binary-2.9.9-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:2293b001e319ab0d869d660a704942c9e2cce19745262a8aba2115ef41a0a42a"},
-    {file = "psycopg2_binary-2.9.9-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:03ef7df18daf2c4c07e2695e8cfd5ee7f748a1d54d802330985a78d2a5a6dca9"},
-    {file = "psycopg2_binary-2.9.9-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a602ea5aff39bb9fac6308e9c9d82b9a35c2bf288e184a816002c9fae930b77"},
-    {file = "psycopg2_binary-2.9.9-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8359bf4791968c5a78c56103702000105501adb557f3cf772b2c207284273984"},
-    {file = "psycopg2_binary-2.9.9-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:275ff571376626195ab95a746e6a04c7df8ea34638b99fc11160de91f2fef503"},
-    {file = "psycopg2_binary-2.9.9-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:f9b5571d33660d5009a8b3c25dc1db560206e2d2f89d3df1cb32d72c0d117d52"},
-    {file = "psycopg2_binary-2.9.9-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:420f9bbf47a02616e8554e825208cb947969451978dceb77f95ad09c37791dae"},
-    {file = "psycopg2_binary-2.9.9-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:4154ad09dac630a0f13f37b583eae260c6aa885d67dfbccb5b02c33f31a6d420"},
-    {file = "psycopg2_binary-2.9.9-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:a148c5d507bb9b4f2030a2025c545fccb0e1ef317393eaba42e7eabd28eb6041"},
-    {file = "psycopg2_binary-2.9.9-cp37-cp37m-win32.whl", hash = "sha256:68fc1f1ba168724771e38bee37d940d2865cb0f562380a1fb1ffb428b75cb692"},
-    {file = "psycopg2_binary-2.9.9-cp37-cp37m-win_amd64.whl", hash = "sha256:281309265596e388ef483250db3640e5f414168c5a67e9c665cafce9492eda2f"},
-    {file = "psycopg2_binary-2.9.9-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:60989127da422b74a04345096c10d416c2b41bd7bf2a380eb541059e4e999980"},
-    {file = "psycopg2_binary-2.9.9-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:246b123cc54bb5361588acc54218c8c9fb73068bf227a4a531d8ed56fa3ca7d6"},
-    {file = "psycopg2_binary-2.9.9-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:34eccd14566f8fe14b2b95bb13b11572f7c7d5c36da61caf414d23b91fcc5d94"},
-    {file = "psycopg2_binary-2.9.9-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:18d0ef97766055fec15b5de2c06dd8e7654705ce3e5e5eed3b6651a1d2a9a152"},
-    {file = "psycopg2_binary-2.9.9-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d3f82c171b4ccd83bbaf35aa05e44e690113bd4f3b7b6cc54d2219b132f3ae55"},
-    {file = "psycopg2_binary-2.9.9-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ead20f7913a9c1e894aebe47cccf9dc834e1618b7aa96155d2091a626e59c972"},
-    {file = "psycopg2_binary-2.9.9-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:ca49a8119c6cbd77375ae303b0cfd8c11f011abbbd64601167ecca18a87e7cdd"},
-    {file = "psycopg2_binary-2.9.9-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:323ba25b92454adb36fa425dc5cf6f8f19f78948cbad2e7bc6cdf7b0d7982e59"},
-    {file = "psycopg2_binary-2.9.9-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:1236ed0952fbd919c100bc839eaa4a39ebc397ed1c08a97fc45fee2a595aa1b3"},
-    {file = "psycopg2_binary-2.9.9-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:729177eaf0aefca0994ce4cffe96ad3c75e377c7b6f4efa59ebf003b6d398716"},
-    {file = "psycopg2_binary-2.9.9-cp38-cp38-win32.whl", hash = "sha256:804d99b24ad523a1fe18cc707bf741670332f7c7412e9d49cb5eab67e886b9b5"},
-    {file = "psycopg2_binary-2.9.9-cp38-cp38-win_amd64.whl", hash = "sha256:a6cdcc3ede532f4a4b96000b6362099591ab4a3e913d70bcbac2b56c872446f7"},
-    {file = "psycopg2_binary-2.9.9-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:72dffbd8b4194858d0941062a9766f8297e8868e1dd07a7b36212aaa90f49472"},
-    {file = "psycopg2_binary-2.9.9-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:30dcc86377618a4c8f3b72418df92e77be4254d8f89f14b8e8f57d6d43603c0f"},
-    {file = "psycopg2_binary-2.9.9-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:31a34c508c003a4347d389a9e6fcc2307cc2150eb516462a7a17512130de109e"},
-    {file = "psycopg2_binary-2.9.9-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:15208be1c50b99203fe88d15695f22a5bed95ab3f84354c494bcb1d08557df67"},
-    {file = "psycopg2_binary-2.9.9-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1873aade94b74715be2246321c8650cabf5a0d098a95bab81145ffffa4c13876"},
-    {file = "psycopg2_binary-2.9.9-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a58c98a7e9c021f357348867f537017057c2ed7f77337fd914d0bedb35dace7"},
-    {file = "psycopg2_binary-2.9.9-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:4686818798f9194d03c9129a4d9a702d9e113a89cb03bffe08c6cf799e053291"},
-    {file = "psycopg2_binary-2.9.9-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:ebdc36bea43063116f0486869652cb2ed7032dbc59fbcb4445c4862b5c1ecf7f"},
-    {file = "psycopg2_binary-2.9.9-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:ca08decd2697fdea0aea364b370b1249d47336aec935f87b8bbfd7da5b2ee9c1"},
-    {file = "psycopg2_binary-2.9.9-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ac05fb791acf5e1a3e39402641827780fe44d27e72567a000412c648a85ba860"},
-    {file = "psycopg2_binary-2.9.9-cp39-cp39-win32.whl", hash = "sha256:9dba73be7305b399924709b91682299794887cbbd88e38226ed9f6712eabee90"},
-    {file = "psycopg2_binary-2.9.9-cp39-cp39-win_amd64.whl", hash = "sha256:f7ae5d65ccfbebdfa761585228eb4d0df3a8b15cfb53bd953e713e09fbb12957"},
-]
-
 [[package]]
 name = "pycapo"
 version = "0.3.1"
@@ -395,87 +237,6 @@ urllib3 = ">=1.21.1,<3"
 socks = ["PySocks (>=1.5.6,!=1.5.7)"]
 use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"]
 
-[[package]]
-name = "sqlalchemy"
-version = "1.4.49"
-description = "Database Abstraction Library"
-optional = false
-python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7"
-files = [
-    {file = "SQLAlchemy-1.4.49-cp27-cp27m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:2e126cf98b7fd38f1e33c64484406b78e937b1a280e078ef558b95bf5b6895f6"},
-    {file = "SQLAlchemy-1.4.49-cp27-cp27mu-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:03db81b89fe7ef3857b4a00b63dedd632d6183d4ea5a31c5d8a92e000a41fc71"},
-    {file = "SQLAlchemy-1.4.49-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:95b9df9afd680b7a3b13b38adf6e3a38995da5e162cc7524ef08e3be4e5ed3e1"},
-    {file = "SQLAlchemy-1.4.49-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a63e43bf3f668c11bb0444ce6e809c1227b8f067ca1068898f3008a273f52b09"},
-    {file = "SQLAlchemy-1.4.49-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ca46de16650d143a928d10842939dab208e8d8c3a9a8757600cae9b7c579c5cd"},
-    {file = "SQLAlchemy-1.4.49-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f835c050ebaa4e48b18403bed2c0fda986525896efd76c245bdd4db995e51a4c"},
-    {file = "SQLAlchemy-1.4.49-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9c21b172dfb22e0db303ff6419451f0cac891d2e911bb9fbf8003d717f1bcf91"},
-    {file = "SQLAlchemy-1.4.49-cp310-cp310-win32.whl", hash = "sha256:5fb1ebdfc8373b5a291485757bd6431de8d7ed42c27439f543c81f6c8febd729"},
-    {file = "SQLAlchemy-1.4.49-cp310-cp310-win_amd64.whl", hash = "sha256:f8a65990c9c490f4651b5c02abccc9f113a7f56fa482031ac8cb88b70bc8ccaa"},
-    {file = "SQLAlchemy-1.4.49-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:8923dfdf24d5aa8a3adb59723f54118dd4fe62cf59ed0d0d65d940579c1170a4"},
-    {file = "SQLAlchemy-1.4.49-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a9ab2c507a7a439f13ca4499db6d3f50423d1d65dc9b5ed897e70941d9e135b0"},
-    {file = "SQLAlchemy-1.4.49-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5debe7d49b8acf1f3035317e63d9ec8d5e4d904c6e75a2a9246a119f5f2fdf3d"},
-    {file = "SQLAlchemy-1.4.49-cp311-cp311-win32.whl", hash = "sha256:82b08e82da3756765c2e75f327b9bf6b0f043c9c3925fb95fb51e1567fa4ee87"},
-    {file = "SQLAlchemy-1.4.49-cp311-cp311-win_amd64.whl", hash = "sha256:171e04eeb5d1c0d96a544caf982621a1711d078dbc5c96f11d6469169bd003f1"},
-    {file = "SQLAlchemy-1.4.49-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:f23755c384c2969ca2f7667a83f7c5648fcf8b62a3f2bbd883d805454964a800"},
-    {file = "SQLAlchemy-1.4.49-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8396e896e08e37032e87e7fbf4a15f431aa878c286dc7f79e616c2feacdb366c"},
-    {file = "SQLAlchemy-1.4.49-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:66da9627cfcc43bbdebd47bfe0145bb662041472393c03b7802253993b6b7c90"},
-    {file = "SQLAlchemy-1.4.49-cp312-cp312-win32.whl", hash = "sha256:9a06e046ffeb8a484279e54bda0a5abfd9675f594a2e38ef3133d7e4d75b6214"},
-    {file = "SQLAlchemy-1.4.49-cp312-cp312-win_amd64.whl", hash = "sha256:7cf8b90ad84ad3a45098b1c9f56f2b161601e4670827d6b892ea0e884569bd1d"},
-    {file = "SQLAlchemy-1.4.49-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:36e58f8c4fe43984384e3fbe6341ac99b6b4e083de2fe838f0fdb91cebe9e9cb"},
-    {file = "SQLAlchemy-1.4.49-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b31e67ff419013f99ad6f8fc73ee19ea31585e1e9fe773744c0f3ce58c039c30"},
-    {file = "SQLAlchemy-1.4.49-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ebc22807a7e161c0d8f3da34018ab7c97ef6223578fcdd99b1d3e7ed1100a5db"},
-    {file = "SQLAlchemy-1.4.49-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:c14b29d9e1529f99efd550cd04dbb6db6ba5d690abb96d52de2bff4ed518bc95"},
-    {file = "SQLAlchemy-1.4.49-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c40f3470e084d31247aea228aa1c39bbc0904c2b9ccbf5d3cfa2ea2dac06f26d"},
-    {file = "SQLAlchemy-1.4.49-cp36-cp36m-win32.whl", hash = "sha256:706bfa02157b97c136547c406f263e4c6274a7b061b3eb9742915dd774bbc264"},
-    {file = "SQLAlchemy-1.4.49-cp36-cp36m-win_amd64.whl", hash = "sha256:a7f7b5c07ae5c0cfd24c2db86071fb2a3d947da7bd487e359cc91e67ac1c6d2e"},
-    {file = "SQLAlchemy-1.4.49-cp37-cp37m-macosx_11_0_x86_64.whl", hash = "sha256:4afbbf5ef41ac18e02c8dc1f86c04b22b7a2125f2a030e25bbb4aff31abb224b"},
-    {file = "SQLAlchemy-1.4.49-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:24e300c0c2147484a002b175f4e1361f102e82c345bf263242f0449672a4bccf"},
-    {file = "SQLAlchemy-1.4.49-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:393cd06c3b00b57f5421e2133e088df9cabcececcea180327e43b937b5a7caa5"},
-    {file = "SQLAlchemy-1.4.49-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:201de072b818f8ad55c80d18d1a788729cccf9be6d9dc3b9d8613b053cd4836d"},
-    {file = "SQLAlchemy-1.4.49-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7653ed6817c710d0c95558232aba799307d14ae084cc9b1f4c389157ec50df5c"},
-    {file = "SQLAlchemy-1.4.49-cp37-cp37m-win32.whl", hash = "sha256:647e0b309cb4512b1f1b78471fdaf72921b6fa6e750b9f891e09c6e2f0e5326f"},
-    {file = "SQLAlchemy-1.4.49-cp37-cp37m-win_amd64.whl", hash = "sha256:ab73ed1a05ff539afc4a7f8cf371764cdf79768ecb7d2ec691e3ff89abbc541e"},
-    {file = "SQLAlchemy-1.4.49-cp38-cp38-macosx_11_0_x86_64.whl", hash = "sha256:37ce517c011560d68f1ffb28af65d7e06f873f191eb3a73af5671e9c3fada08a"},
-    {file = "SQLAlchemy-1.4.49-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a1878ce508edea4a879015ab5215546c444233881301e97ca16fe251e89f1c55"},
-    {file = "SQLAlchemy-1.4.49-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95ab792ca493891d7a45a077e35b418f68435efb3e1706cb8155e20e86a9013c"},
-    {file = "SQLAlchemy-1.4.49-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:0e8e608983e6f85d0852ca61f97e521b62e67969e6e640fe6c6b575d4db68557"},
-    {file = "SQLAlchemy-1.4.49-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ccf956da45290df6e809ea12c54c02ace7f8ff4d765d6d3dfb3655ee876ce58d"},
-    {file = "SQLAlchemy-1.4.49-cp38-cp38-win32.whl", hash = "sha256:f167c8175ab908ce48bd6550679cc6ea20ae169379e73c7720a28f89e53aa532"},
-    {file = "SQLAlchemy-1.4.49-cp38-cp38-win_amd64.whl", hash = "sha256:45806315aae81a0c202752558f0df52b42d11dd7ba0097bf71e253b4215f34f4"},
-    {file = "SQLAlchemy-1.4.49-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:b6d0c4b15d65087738a6e22e0ff461b407533ff65a73b818089efc8eb2b3e1de"},
-    {file = "SQLAlchemy-1.4.49-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a843e34abfd4c797018fd8d00ffffa99fd5184c421f190b6ca99def4087689bd"},
-    {file = "SQLAlchemy-1.4.49-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:738d7321212941ab19ba2acf02a68b8ee64987b248ffa2101630e8fccb549e0d"},
-    {file = "SQLAlchemy-1.4.49-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:1c890421651b45a681181301b3497e4d57c0d01dc001e10438a40e9a9c25ee77"},
-    {file = "SQLAlchemy-1.4.49-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d26f280b8f0a8f497bc10573849ad6dc62e671d2468826e5c748d04ed9e670d5"},
-    {file = "SQLAlchemy-1.4.49-cp39-cp39-win32.whl", hash = "sha256:ec2268de67f73b43320383947e74700e95c6770d0c68c4e615e9897e46296294"},
-    {file = "SQLAlchemy-1.4.49-cp39-cp39-win_amd64.whl", hash = "sha256:bbdf16372859b8ed3f4d05f925a984771cd2abd18bd187042f24be4886c2a15f"},
-    {file = "SQLAlchemy-1.4.49.tar.gz", hash = "sha256:06ff25cbae30c396c4b7737464f2a7fc37a67b7da409993b182b024cec80aed9"},
-]
-
-[package.dependencies]
-greenlet = {version = "!=0.4.17", markers = "python_version >= \"3\" and (platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\")"}
-
-[package.extras]
-aiomysql = ["aiomysql", "greenlet (!=0.4.17)"]
-aiosqlite = ["aiosqlite", "greenlet (!=0.4.17)", "typing-extensions (!=3.10.0.1)"]
-asyncio = ["greenlet (!=0.4.17)"]
-asyncmy = ["asyncmy (>=0.2.3,!=0.2.4)", "greenlet (!=0.4.17)"]
-mariadb-connector = ["mariadb (>=1.0.1,!=1.1.2)"]
-mssql = ["pyodbc"]
-mssql-pymssql = ["pymssql"]
-mssql-pyodbc = ["pyodbc"]
-mypy = ["mypy (>=0.910)", "sqlalchemy2-stubs"]
-mysql = ["mysqlclient (>=1.4.0)", "mysqlclient (>=1.4.0,<2)"]
-mysql-connector = ["mysql-connector-python"]
-oracle = ["cx-oracle (>=7)", "cx-oracle (>=7,<8)"]
-postgresql = ["psycopg2 (>=2.7)"]
-postgresql-asyncpg = ["asyncpg", "greenlet (!=0.4.17)"]
-postgresql-pg8000 = ["pg8000 (>=1.16.6,!=1.29.0)"]
-postgresql-psycopg2binary = ["psycopg2-binary"]
-postgresql-psycopg2cffi = ["psycopg2cffi"]
-pymysql = ["pymysql", "pymysql (<1)"]
-sqlcipher = ["sqlcipher3-binary"]
-
 [[package]]
 name = "tomli"
 version = "2.0.1"
@@ -507,4 +268,4 @@ zstd = ["zstandard (>=0.18.0)"]
 [metadata]
 lock-version = "2.0"
 python-versions = "~3.10"
-content-hash = "00ad0278e98b61feb9dba1771ef02a57c16f88a7c9753168bc94e6d004a38e04"
+content-hash = "ea0262b4dbfe0e33f62b9d65eec274c696b67430dfad092a73f784820f8cfeed"
diff --git a/apps/cli/executables/pexable/ws_annihilator/pyproject.toml b/apps/cli/executables/pexable/ws_annihilator/pyproject.toml
index 93fbe469c9919f7396ad5c59895c9ed451024c4f..cb5b8cfd825050698a7831e9a5553334d3708ddd 100644
--- a/apps/cli/executables/pexable/ws_annihilator/pyproject.toml
+++ b/apps/cli/executables/pexable/ws_annihilator/pyproject.toml
@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "ws_annihilator"
-version = "2.8.2.2"
+version = "2.8.2.3rc1"
 description = "Workspaces Directory Annihilator; Clean up generated products from lustre!"
 authors = ["DMS SSA <dms-ssa@nrao.edu>"]
 license = "GPL3+"
@@ -10,9 +10,6 @@ readme = "README.md"
 python = "~3.10"
 requests = "2.31.0"
 pycapo = "0.3.1"
-psycopg2-binary = "2.9.9"
-sqlalchemy = "1.4.49"
-greenlet = "2.0.2"
 
 [tool.poetry.group.test.dependencies]
 pytest = "7.4.2"
diff --git a/apps/cli/executables/pexable/ws_annihilator/ws_annihilator/__init__.py b/apps/cli/executables/pexable/ws_annihilator/ws_annihilator/__init__.py
index 9959d428d293166c7eb4bb85d118215e349d70b5..cd738431c5a05aa1dd91c7c7d49a46b3a4d9e2ca 100644
--- a/apps/cli/executables/pexable/ws_annihilator/ws_annihilator/__init__.py
+++ b/apps/cli/executables/pexable/ws_annihilator/ws_annihilator/__init__.py
@@ -18,4 +18,4 @@
 """
 Workspaces Directory Annihilator; Clean up generated products from lustre!
 """
-__version__ = "2.8.2.2"
+__version__ = "2.8.2.3rc1"
diff --git a/apps/cli/executables/pexable/ws_metrics/pyproject.toml b/apps/cli/executables/pexable/ws_metrics/pyproject.toml
index 730ff66313c17617020fef842f61dd1e6fe7360c..2b9ac8ef9fd90fd5f79980502e12c3dc8df54ccb 100644
--- a/apps/cli/executables/pexable/ws_metrics/pyproject.toml
+++ b/apps/cli/executables/pexable/ws_metrics/pyproject.toml
@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "ws_metrics"
-version = "2.8.2.2"
+version = "2.8.2.3rc1"
 description = "Workspaces metrics reporter for users outside of SSA."
 authors = ["DMS SSA <dms-ssa@nrao.edu>"]
 license = "GPL3+"
diff --git a/apps/cli/executables/pexable/ws_metrics/ws_metrics/__init__.py b/apps/cli/executables/pexable/ws_metrics/ws_metrics/__init__.py
index cf663b45ed9c1859df58af2820828b0da036f8d6..0f2d42562a9caac9ddc426344cf8929d8478e0a4 100644
--- a/apps/cli/executables/pexable/ws_metrics/ws_metrics/__init__.py
+++ b/apps/cli/executables/pexable/ws_metrics/ws_metrics/__init__.py
@@ -18,4 +18,4 @@
 """
 Workspaces metrics reporter for users outside of SSA.
 """
-__version__ = "2.8.2.2"
+__version__ = "2.8.2.3rc1"
diff --git a/apps/cli/executables/wf_framework/ingest_requirements/calibration-table-collector.sh b/apps/cli/executables/wf_framework/ingest_requirements/calibration-table-collector.sh
index a342c963fe873d6d3540d56d5cee90106440dfc8..1a61c4478e3b5d46e6742c08829dbcab599d204a 100755
--- a/apps/cli/executables/wf_framework/ingest_requirements/calibration-table-collector.sh
+++ b/apps/cli/executables/wf_framework/ingest_requirements/calibration-table-collector.sh
@@ -51,10 +51,13 @@ CAL_PROCESSING_DATETIME=$1;shift
 # version 1 location, if applicable (optional argument)
 INITIAL_DIR=${1:-default}
 
+# Assuming pycapo is in the same directory as this script
+CUR_DIR=$(dirname "$(realpath "$0")")
+
 # Get the spool, staging and storage paths from CAPO
-SPOOL_DIR=$(/lustre/aoc/cluster/pipeline/"${CAPO_PROFILE}"/workspaces/sbin/pycapo -P "${CAPO_PROFILE}" -q edu.nrao.workspaces.ProcessingSettings.rootDirectory)
-STAGING_DIR=$(/lustre/aoc/cluster/pipeline/"${CAPO_PROFILE}"/workspaces/sbin/pycapo  -P "${CAPO_PROFILE}" -q edu.nrao.workspaces.IngestionSettings.stagingDirectory)
-STORAGE_DIR=$(/lustre/aoc/cluster/pipeline/"${CAPO_PROFILE}"/workspaces/sbin/pycapo  -P "${CAPO_PROFILE}" -q edu.nrao.workspaces.IngestionSettings.storageDirectory)
+SPOOL_DIR=$("${CUR_DIR}"/pycapo -P "${CAPO_PROFILE}" -q edu.nrao.workspaces.ProcessingSettings.rootDirectory)
+STAGING_DIR=$("${CUR_DIR}"/pycapo -P "${CAPO_PROFILE}" -q edu.nrao.workspaces.IngestionSettings.stagingDirectory)
+STORAGE_DIR=$("${CUR_DIR}"/pycapo -P "${CAPO_PROFILE}" -q edu.nrao.workspaces.IngestionSettings.storageDirectory)
 
 # File name for output tar of calibration products
 #
diff --git a/apps/cli/executables/wf_framework/ingest_requirements/image-product-collector.sh b/apps/cli/executables/wf_framework/ingest_requirements/image-product-collector.sh
index 17a972c59010a0941bde304427a4a914d66055fb..1790a3d65099cbaa0576be39d479c85b2aa7af6b 100644
--- a/apps/cli/executables/wf_framework/ingest_requirements/image-product-collector.sh
+++ b/apps/cli/executables/wf_framework/ingest_requirements/image-product-collector.sh
@@ -45,7 +45,10 @@ WORKFLOW_DIR=$1;shift
 STAGE_DIR=$1;shift
 FILENAME=$1;shift
 
-SPOOL_DIR=$(/lustre/aoc/cluster/pipeline/"${CAPO_PROFILE}"/workspaces/sbin/pycapo -P "${CAPO_PROFILE}" -q edu.nrao.workspaces.ProcessingSettings.rootDirectory)
+# Assuming pycapo is in the same directory as this script
+CUR_DIR=$(dirname "$(realpath "$0")")
+
+SPOOL_DIR=$(${CUR_DIR}/pycapo -P "${CAPO_PROFILE}" -q edu.nrao.workspaces.ProcessingSettings.rootDirectory)
 SOURCE_DIR=${SPOOL_DIR}/${WORKFLOW_DIR}/products
 
 
diff --git a/apps/cli/utilities/aat_wrest/aat_wrest/__init__.py b/apps/cli/utilities/aat_wrest/aat_wrest/__init__.py
index 10cd7cfd9d0e9099094c6e7a9f80606ea548493b..831285115d2b5932564bb7c74717c6672d71b28a 100644
--- a/apps/cli/utilities/aat_wrest/aat_wrest/__init__.py
+++ b/apps/cli/utilities/aat_wrest/aat_wrest/__init__.py
@@ -18,4 +18,4 @@
 """
 AAT Wrest: Workspaces-to-Archive metadata retriever
 """
-__version__ = "2.8.2.2"
+__version__ = "2.8.2.3rc1"
diff --git a/apps/cli/utilities/aat_wrest/aat_wrest/metadata_wrester.py b/apps/cli/utilities/aat_wrest/aat_wrest/metadata_wrester.py
index 5eeda69e8f0b684b14a47ed09edd573e25cdb1a7..17a14906f3efc0723b00e84f762ee499ad26f946 100644
--- a/apps/cli/utilities/aat_wrest/aat_wrest/metadata_wrester.py
+++ b/apps/cli/utilities/aat_wrest/aat_wrest/metadata_wrester.py
@@ -357,30 +357,120 @@ class WrestWorkflowMetadata:
         """
         query = """
         SELECT sp.science_product_type,
-               eb.telescope,
-               eb.project_code
+               COALESCE(eb.telescope, im.telescope) AS telescope,
+               spp.project_code
         FROM science_products sp
-        JOIN execution_blocks eb ON sp.science_product_locator = eb.science_product_locator
+        JOIN science_products_projects spp ON sp.science_product_locator = spp.science_product_locator
+        LEFT JOIN execution_blocks eb ON sp.science_product_locator = eb.science_product_locator
+        LEFT JOIN images im ON sp.science_product_locator = im.science_product_locator
         WHERE sp.science_product_locator = %(spl)s;
         """
         make_json = {}
         try:
             cursor = self.conn.cursor()
-            cursor.execute(query, {"spl": self.spl[0]})
+            spl = self.spl[0]
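+            # a purely numeric argument is a product group id; resolve it to a locator first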
+            if spl.isdigit():
+                spl_query = """
+                SELECT science_product_locator
+                FROM science_products_product_groups
+                WHERE product_group_id = %(pg_id)s;
+                """
+                cursor.execute(spl_query, {"pg_id": self.spl[0]})
+                data = cursor.fetchall()
+                if data:
+                    spl = data[0][0]
+                else:
+                    self.logger.error(f"ERROR: Failed to determine SPL from product group id, got {data}")
+                    return make_json
+
+            cursor.execute(query, {"spl": spl})
             data = cursor.fetchall()
             if data:
-                make_json = json.dumps(
-                    {
-                        "product_type": str(data[0][0]).lower().replace(' ', '_'),
-                        "telescope": data[0][1],
-                        "projectCode": data[0][2]
-                    }
-                )
+                make_json = {
+                    "product_type": str(data[0][0]).lower().replace(" ", "_"),
+                    "telescope": data[0][1],
+                    "projectCode": data[0][2],
+                }
+
             else:
                 self.logger.error(
                     f"ERROR: aat-wrest query returned no results!"
-                    f" The database appears to be missing information for spl id {self.spl[0]}!"
+                    f" The database appears to be missing information for spl id {spl}!"
                 )
         finally:
             self.conn.close()
         return make_json
+
+    def wrest_curator_products(self) -> dict:
+        """
+        Given a locator or product group ID, return the product filenames associated with it
+
+        :return: dict containing the product list
+        """
+        make_json = {}
+        product_group_id = ""
+        try:
+            cursor = self.conn.cursor()
+            if self.spl[0].isdigit():
+                product_group_id = self.spl[0]
+            else:
+                # We were given an SPL, need to get the associated product group ID
+                prod_id_query = """
+                SELECT product_group_id
+                FROM science_products_product_groups
+                WHERE science_product_locator = %(spl)s;
+                """
+                cursor.execute(prod_id_query, {"spl": self.spl[0]})
+                data = cursor.fetchall()
+                if data:
+                    product_group_id = data[0][0]
+                else:
+                    self.logger.error(f"ERROR: Failed to fetch product group id from SPL, got {data}")
+
+            files_query = """
+            WITH product_locators AS (
+                SELECT sp.science_product_locator,
+                       atspl.ancillary_product_locator
+                FROM science_products_product_groups sp
+                JOIN ancillary_to_science_product_locators atspl ON sp.science_product_locator = atspl.science_product_locator
+                WHERE sp.product_group_id = %(pg_id)s
+            ), filegroups AS (
+                SELECT ap2.filegroup_id
+                FROM ancillary_products ap2
+                JOIN product_locators pl ON ap2.ancillary_product_locator = pl.ancillary_product_locator
+            )
+            SELECT external_name
+            FROM science_products sp2
+            JOIN product_locators pl ON pl.science_product_locator = sp2.science_product_locator
+            UNION
+            SELECT filename
+            FROM files f
+            JOIN filegroups fg ON f.filegroup = fg.filegroup_id
+            """
+            if product_group_id != "":
+                cursor.execute(files_query, {"pg_id": product_group_id})
+                data = cursor.fetchall()
+                if data:
+                    # This should have all the ancillary and science product names
+                    # It returns from the query as a list of tuples, so it must be flattened
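+                    # e.g. [("weblog.tgz",), ("products.tar",)] becomes ["weblog.tgz", "products.tar"]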
+                    make_json = {"file_list": list(sum(data, ()))}
+                else:
+                    self.logger.error(f"ERROR: Failed to fetch products from product ID {product_group_id}")
+            else:
+                self.logger.error("ERROR: Failed to get product group ID")
+
+            input_group_query = """
+            SELECT DISTINCT s.science_product_locator
+            FROM product_groups pg2
+            JOIN science_products_product_groups sppg on pg2.product_group_id = sppg.product_group_id
+            JOIN science_products_product_groups s on pg2.parent_product_group_id = s.product_group_id
+            WHERE sppg.product_group_id = %(pg_id)s;
+            """
+            cursor.execute(input_group_query, {"pg_id": product_group_id})
+            input_data = cursor.fetchall()
+            if input_data:
+                input_info = {"input_group_locator": input_data[0][0]}
+                make_json = {**make_json, **input_info}
+        finally:
+            self.conn.close()
+        return make_json
diff --git a/apps/cli/utilities/aat_wrest/aat_wrest/wrest.py b/apps/cli/utilities/aat_wrest/aat_wrest/wrest.py
index 8de40dcc8c0eb4ad7ee3ffa55952f7c571b98c78..289fa40640dc5ac4f03d50756a3b60c9cb6a9133 100644
--- a/apps/cli/utilities/aat_wrest/aat_wrest/wrest.py
+++ b/apps/cli/utilities/aat_wrest/aat_wrest/wrest.py
@@ -114,6 +114,16 @@ def parser() -> argparse.ArgumentParser:
         required=False,
         help="Find the product information necessary to run curator on the provided product locator",
     )
+    arg_parser.add_argument(
+        "--curator_products",
+        nargs=1,
+        action="store",
+        required=False,
+        help=(
+            "Find the product information necessary to run curator or ingest on the provided product locator or "
+            "product group id"
+        ),
+    )
     return arg_parser
 
 
@@ -148,7 +158,13 @@ def determine_wrester(connection: MDDBConnector, args: argparse.Namespace):
     elif args.product:
         data = WrestWorkflowMetadata(connection, spl=args.product).wrest_product_info()
     elif args.curator:
-        data = WrestWorkflowMetadata(connection, spl=args.curator).wrest_curator()
+        data_dict = WrestWorkflowMetadata(connection, spl=args.curator).wrest_curator()
+        data = json.dumps(data_dict)
+    elif args.curator_products:
+        general_data = WrestWorkflowMetadata(connection, spl=args.curator_products).wrest_curator()
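+        # wrest_curator() closes its connection in a finally block, so a second connection is needed for the product query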
+        connection2 = MDDBConnector()
+        product_data = WrestWorkflowMetadata(connection2, spl=args.curator_products).wrest_curator_products()
+        data = json.dumps({**general_data, **product_data})
     else:
         data = None
 
diff --git a/apps/cli/utilities/aat_wrest/pyproject.toml b/apps/cli/utilities/aat_wrest/pyproject.toml
index c8cbc724e7ba73a980ec283ab0692f54f0696075..7a62252e2ad1ccef86c67c709ba6ec10ef346df1 100644
--- a/apps/cli/utilities/aat_wrest/pyproject.toml
+++ b/apps/cli/utilities/aat_wrest/pyproject.toml
@@ -4,7 +4,7 @@ build-backend = "poetry.core.masonry.api"
 
 [tool.poetry]
 name = "aat_wrest"
-version = "2.8.2.2"
+version = "2.8.2.3rc1"
 description = "AAT Wrest: Workspaces-to-Archive metadata retriever"
 authors = ["DMS SSA <dms-ssa@nrao.edu>"]
 license = "GPL3+"
diff --git a/apps/cli/utilities/aat_wrest/test/test_aat_wrest.py b/apps/cli/utilities/aat_wrest/test/test_aat_wrest.py
index 2df25da7257b5f100b36a8513a6c3d04d7d7f5bc..f972512104b48708ec9ec89a250745236c32b39d 100644
--- a/apps/cli/utilities/aat_wrest/test/test_aat_wrest.py
+++ b/apps/cli/utilities/aat_wrest/test/test_aat_wrest.py
@@ -52,6 +52,11 @@ args_carta = argparse.Namespace(
 )
 result_carta = '{"imageName": "VLASS1.1.ql.T29t04.J094850+743000.10.2048.v5"}'
 
+args_curator = argparse.Namespace(
+    curator="uid://evla/calibration/71d1f00c-a381-4688-9d5c-f528f43452f7", carta=None, stdcals=None, stdimg=None
+)
+result_curator = '{"file_list": ["weblog.tgz", "18B-265_2019_12_10_T01_24_58.690.tar", "ingestion_artifacts_2019_12_13_T23_31_42.382.tar"]}'
+
 
 def mock_wrester(args: argparse.Namespace) -> WrestWorkflowMetadata:
     with patch("psycopg2.connect") as mock_connect:
@@ -61,11 +66,14 @@ def mock_wrester(args: argparse.Namespace) -> WrestWorkflowMetadata:
             return WrestWorkflowMetadata(connection=mock_connect, sdm_id=args.stdimg)
         if args.carta is not None:
             return WrestWorkflowMetadata(connection=mock_connect, spl=args.carta)
+        if args.curator is not None:
+            return WrestWorkflowMetadata(connection=mock_connect, spl=args.curator)
 
 
 cal_wrester = mock_wrester(args_cal)
 img_wrester = mock_wrester(args_image)
 carta_wrester = mock_wrester(args_carta)
+curator_wrester = mock_wrester(args_curator)
 
 
 class TestAatWrest:
@@ -114,3 +122,17 @@ class TestAatWrest:
         assert args_carta.carta == "uid://evla/image/3d3db489-9331-4e61-aa80-002bc2989b1e"
         value = carta_wrester.wrest_image_name()
         assert value == '{"imageName": "VLASS1.1.ql.T29t04.J094850+743000.10.2048.v5"}'
+
+    @patch("json.dumps", MagicMock(return_value=result_curator))
+    def test_wrest_curator_products(self):
+        curator_wrester.conn.cursor.return_value.fetchall.return_value = [
+            "weblog.tgz",
+            "18B-265_2019_12_10_T01_24_58.690.tar",
+            "ingestion_artifacts_2019_12_13_T23_31_42.382.tar",
+        ]
+        assert args_curator.curator == "uid://evla/calibration/71d1f00c-a381-4688-9d5c-f528f43452f7"
+        value = curator_wrester.wrest_curator_products()
+        assert (
+            value
+            == '{"file_list": ["weblog.tgz", "18B-265_2019_12_10_T01_24_58.690.tar", "ingestion_artifacts_2019_12_13_T23_31_42.382.tar"]}'
+        )
diff --git a/apps/cli/utilities/contacts_wrest/contacts_wrest/__init__.py b/apps/cli/utilities/contacts_wrest/contacts_wrest/__init__.py
index 7e16fe9e73d43f5d65d8c86be27c6d2949a9c595..fbd3c3af367113bce04fa05316d78960ec57803d 100644
--- a/apps/cli/utilities/contacts_wrest/contacts_wrest/__init__.py
+++ b/apps/cli/utilities/contacts_wrest/contacts_wrest/__init__.py
@@ -18,4 +18,4 @@
 """
 Contact information wrester
 """
-__version__ = "2.8.2.2"
+__version__ = "2.8.2.3rc1"
diff --git a/apps/cli/utilities/contacts_wrest/pyproject.toml b/apps/cli/utilities/contacts_wrest/pyproject.toml
index 786cbb0eeac5252ebd3deeb9ac254dbb39e674b7..4e76f0f28a831db9f78c95af0ee8cb5e06c44d2e 100644
--- a/apps/cli/utilities/contacts_wrest/pyproject.toml
+++ b/apps/cli/utilities/contacts_wrest/pyproject.toml
@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "contacts_wrest"
-version = "2.8.2.2"
+version = "2.8.2.3rc1"
 description = "Contact information wrester"
 authors = ["DMS SSA <dms-ssa@nrao.edu>"]
 license = "GPL3+"
diff --git a/apps/cli/utilities/core_sampler/core_sampler/__init__.py b/apps/cli/utilities/core_sampler/core_sampler/__init__.py
index a6dd012c0f83f0e964d1b2f24e4c57ff615f5116..1aa7b49b0da2558aba14f8e88440fdd3f9916243 100644
--- a/apps/cli/utilities/core_sampler/core_sampler/__init__.py
+++ b/apps/cli/utilities/core_sampler/core_sampler/__init__.py
@@ -18,4 +18,4 @@
 """
 Workspaces database core sampler
 """
-__version__ = "2.8.2.2"
+__version__ = "2.8.2.3rc1"
diff --git a/apps/cli/utilities/core_sampler/core_sampler/core_sampler.py b/apps/cli/utilities/core_sampler/core_sampler/core_sampler.py
index f3e2c93289b1466af5f029a0f2c72ee1d6cff14c..e40e895a7f6e4362e723b72472c15e53759d6db5 100644
--- a/apps/cli/utilities/core_sampler/core_sampler/core_sampler.py
+++ b/apps/cli/utilities/core_sampler/core_sampler/core_sampler.py
@@ -79,6 +79,9 @@ class MDDBConnector:
     def cursor(self):
         return self.connection.cursor(cursor_factory=extras.RealDictCursor)
 
+    def commit(self):
+        self.connection.commit()
+
     def close(self):
         self.connection.close()
 
@@ -136,6 +139,104 @@ class CoreSampler:
         self.writer.close()
         return self.writer.bottom()
 
+    def sample_product_group(self, product_group_id: str) -> RowWriter:
+        """
+        Get metadata from the archive database for a certain product group, including individual product information
+
+        :param product_group_id: group id of interest
+        :return: the collected output of the sampler's RowWriter
+        """
+        # get the top level product group information
+        product_groups = self.table("product_groups")
+        product_group_rows = product_groups.fetch({"product_group_id": product_group_id})
+        self.save(product_group_rows)
+
+        # the product group table doesn't directly connect to the science products table via a key,
+        # so the sampler can't find them by default; grab them manually
+        products = self.table("science_products")
+        finder = ProductFinder(self.connection)
+        science_product_locators = finder.find_product_locators(product_group_id)
+        science_product_rows = products.fetch_from_many({"science_product_locator": science_product_locators})
+        self.save(science_product_rows)
+
+        # ancillary products can be linked either by group id or by an SPL, but never both; the previous code grabs
+        # ancillaries when they are associated with a science product, so grab the ones connected by the group id
+        ancillary_products = self.table("ancillary_products")
+        finder = AncillaryProductFinder(self.connection)
+        ancillary_product_locators = finder.find_ancillary_product_locators(product_group_id, science_product_locators)
+        ancillary_rows = ancillary_products.fetch_from_many({"ancillary_product_locator": ancillary_product_locators})
+
+        # ancillary_products may already have been visited via science products; clear the
+        # visited set so these rows still get saved
+        self.visited.clear()
+        self.save(ancillary_rows)
+
+        # the science products table doesn't directly connect to the projects table via a key,
+        # so the sampler can't find them by default; grab the project code entries manually
+        products_projects = self.table("science_products_projects")
+        finder = ProductsProjectsFinder(self.connection)
+        project_codes = finder.find_project_codes_for_product(science_product_locators)
+        products_projects_rows = products_projects.fetch_from_many(
+            {"project_code": project_codes, "science_product_locator": science_product_locators})
+        self.save(products_projects_rows)
+
+        # finally, grab the files associated with the entries
+        filegroup_ids = [row["filegroup_id"] for row in science_product_rows] + \
+                        [row["filegroup_id"] for row in ancillary_rows]
+        filegroups = self.table("filegroups")
+        filegroup_rows = filegroups.fetch_from_many({"filegroup_id": filegroup_ids})
+        self.save(filegroup_rows)
+
+        self.writer.close()
+        return self.writer.bottom()
+
+    def remove_product_group(self, product_group_id: str):
+        """
+        Remove metadata from the archive database for a certain product group, including individual product information
+        comprising the group
+
+        :param product_group_id: group id of interest
+        :return:
+        """
+        # get the top level product group information
+        projects = self.table("product_groups")
+        requested_product_group_rows = projects.fetch({"product_group_id": product_group_id})
+
+        # the product group table doesn't directly connect to the science products table via a key,
+        # so the sampler can't find them by default; grab them manually
+        products = self.table("science_products")
+        finder = ProductFinder(self.connection)
+        science_product_locators = finder.find_product_locators(product_group_id)
+        requested_product_rows = products.fetch_from_many({"science_product_locator": science_product_locators})
+
+        # ancillary products can be linked either by group id or by an SPL, but never both; the previous code grabs
+        # ancillaries when they are associated with a science product, so override that with all ancillaries
+        ancillary_products = self.table("ancillary_products")
+        finder = AncillaryProductFinder(self.connection)
+        ancillary_product_locators = finder.find_ancillary_product_locators(product_group_id, science_product_locators)
+        requested_ancillary_rows = ancillary_products.fetch_from_many({"ancillary_product_locator": ancillary_product_locators})
+
+        # the science products table doesn't directly connect to the projects table via a key,
+        # so the sampler can't find them by default; grab the project code entries manually
+        products_projects = self.table("science_products_projects")
+        finder = ProductsProjectsFinder(self.connection)
+        project_codes = finder.find_project_codes_for_product(science_product_locators)
+        requested_project_product_rows = products_projects.fetch_from_many(
+            {"project_code": project_codes, "science_product_locator": science_product_locators})
+
+        # Create one dictionary pairing table names to row sets
+        rows_dict = self.create_row_dict(requested_product_group_rows, {})
+        rows_dict = self.create_row_dict(requested_product_rows, rows_dict)
+        rows_dict = self.create_row_dict(requested_ancillary_rows, rows_dict)
+        rows_dict = self.create_row_dict(requested_project_product_rows, rows_dict)
+
+        # the correct table deletion order is not simply the reverse of the topological ordering, so set it explicitly
+        deletion_order = ["ancillary_products", "science_products_projects", "science_products_product_groups",
+                          "product_groups", "images", "rh_locators", "science_products"]
+
+        self.delete_from_product_group(deletion_order, rows_dict)
+        self.writer.close()
+
     def sample_project(self, project_code: str) -> RowWriter:
         """
         Get project metadata from the archive database.
@@ -186,6 +287,45 @@ class CoreSampler:
             if len(more) > 0:
                 self.save(more)
 
+    def delete_from_product_group(self, deletion_order, rows_dict: dict):
+        """
+        Fetch the related rows and delete starting from the provided deletion order.
+
+        :param deletion_order:  list of strings representing the table order in which entries should be deleted
+        :param rows_dict:  dictionary mapping table name to row set containing all relevant entries for the table
+        """
+        # Delete rows for each table in the deletion order, starting from the top of the list
+        for table_name in deletion_order:
+            if table_name in rows_dict.keys():
+                rows_dict[table_name].delete()
+            else:
+                print(f"{table_name} not present in dictionary for this product group, ignoring...")
+
+        # only commit the deletes if everything was removed without error
+        self.connection.commit()
+
+    def create_row_dict(self, starting_rowset, rowset_dict):
+        """
+        Build out a dictionary pairing table names to row set objects, accumulating by table all
+        rows related to the given seed rows.
+
+        :param starting_rowset:  the seed rows to start from
+        :param rowset_dict:  the dictionary to add to
+        :return:  the updated dictionary
+        """
+        # when traversing from the bottom, multiple tables can connect to the same table; capture all rows
+        if starting_rowset.table.name in rowset_dict.keys():
+            rowset_dict[starting_rowset.table.name].combine_rows(starting_rowset)
+        else:
+            rowset_dict[starting_rowset.table.name] = starting_rowset
+
+        # traverse the other tables related to this one, if necessary
+        for relation in starting_rowset.relations():
+            more = relation.fetch_related_to(starting_rowset)
+            if len(more) > 0:
+                rowset_dict = self.create_row_dict(more, rowset_dict)
+
+        return rowset_dict
+
     def table(self, name: str) -> Table:
         """
         Return a Table with the given name.
@@ -204,6 +344,62 @@ class CoreSampler:
         rows.write_to(self.writer)
 
 
+class ProductsProjectsFinder:
+    """Looks up project codes for a list of spls"""
+
+    def __init__(self, connection: MDDBConnector):
+        self.connection = connection
+
+    def find_project_codes_for_product(self, spls: list[str]) -> list[str]:
+        cursor = self.connection.cursor()
+        sql = """
+            SELECT DISTINCT project_code FROM science_products_projects
+            WHERE science_product_locator IN %(spls)s
+        """
+        cursor.execute(sql, {"spls": tuple(spls)})
+        data = cursor.fetchall()
+
+        if len(data) != 1:
+            raise Exception(f"Expected exactly one project code for the product group, found {len(data)}.")
+
+        # All spls will have the same project code, but return a list with a length matching the spls list
+        return [data[0]["project_code"]] * len(spls)
+
+
+class ProductFinder:
+    """Looks up science product locators for a product group id"""
+
+    def __init__(self, connection: MDDBConnector):
+        self.connection = connection
+
+    def find_product_locators(self, product_group_id: str) -> list[str]:
+        cursor = self.connection.cursor()
+        sql = """
+            SELECT science_product_locator FROM science_products_product_groups
+            WHERE product_group_id=%(product_group_id)s
+        """
+        cursor.execute(sql, {"product_group_id": product_group_id})
+        data = cursor.fetchall()
+        return [item["science_product_locator"] for item in data]
+
+
+class AncillaryProductFinder:
+    """Looks up ancillary product locators for a product group id and list of spls"""
+
+    def __init__(self, connection: MDDBConnector):
+        self.connection = connection
+
+    def find_ancillary_product_locators(self, product_group_id: str, spls: list[str]) -> list[str]:
+        cursor = self.connection.cursor()
+        sql = """
+            SELECT ancillary_product_locator FROM ancillary_products
+            WHERE product_group_id=%(product_group_id)s
+            OR science_product_locator IN %(spls)s
+        """
+        cursor.execute(sql, {"product_group_id": product_group_id, "spls": tuple(spls)})
+        data = cursor.fetchall()
+        return [item["ancillary_product_locator"] for item in data]
+
+
 class ExecBlockFinder:
     """Looks up execution block ID for an SDM"""
 
@@ -211,6 +407,12 @@ class ExecBlockFinder:
         self.connection = connection
 
     def find_eb_id(self, sdm_name: str) -> int:
+        """
+        Find an execution block id for an sdm
+
+        :param sdm_name:
+        :return:
+        """
         cursor = self.connection.cursor()
         sql = """
 SELECT execution_block_id FROM execution_blocks
@@ -227,6 +429,12 @@ def main():
     group.add_argument(
         "-p", "--project_code", type=str, nargs=1, help="Project code from which to start core sampling", action="store"
     )
+    group.add_argument(
+        "-s", "--science_product_locator", type=str, nargs=1, help="Product locator id from which to start core sampling", action="store"
+    )
+    group.add_argument(
+        "-g", "--product_group_id", type=str, nargs=1, help="Product group id from which to start core sampling", action="store"
+    )
     group.add_argument(
         "-e",
         "--sdm_name",
@@ -242,6 +450,10 @@ def main():
     try:
         if ns.project_code:
             sampler.sample_project(ns.project_code[0])
+        elif ns.science_product_locator:
+            sampler.sample_science_product(ns.science_product_locator[0])
+        elif ns.product_group_id:
+            sampler.sample_product_group(ns.product_group_id[0])
         elif ns.sdm_name:
             sampler.sample_eb(ns.sdm_name[0])
     finally:
diff --git a/apps/cli/utilities/core_sampler/core_sampler/database.py b/apps/cli/utilities/core_sampler/core_sampler/database.py
index d059b0bb1d53caba9d40a88dd9680d568781addb..f5663d72b0a031b5cb721838416118be90d92bb3 100644
--- a/apps/cli/utilities/core_sampler/core_sampler/database.py
+++ b/apps/cli/utilities/core_sampler/core_sampler/database.py
@@ -51,6 +51,13 @@ class PGTable(Table):
         # 4. Manufacture the result
         return PGRowSet(self, self.cursor.fetchall())
 
+    def fetch_from_many(self, primary_keys: Dict[str, Any]) -> RowSet:
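+        # Build one ANY() clause per primary key column so a single query covers every requested
+        # value, e.g. "filegroup_id = ANY(%(filegroup_id)s)" with the value lists bound as parameters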
+        primary_key_columns = self.primary_key_columns()
+        pkeys = {column_name: primary_keys[column_name] for column_name in primary_key_columns}
+        whereclause = " AND ".join(f"{name} = ANY(%({name})s)" for name in pkeys.keys())
+        self.cursor.execute(f"SELECT * FROM {self.name} WHERE {whereclause}", pkeys)
+        return PGRowSet(self, self.cursor.fetchall())
+
     def primary_key_columns(self):
         self.cursor.execute(
             """SELECT c.column_name
@@ -230,6 +237,21 @@ class PGTable(Table):
                     kcu.constraint_name
                 order by kcu.table_name"""
 
+    def delete(self, rows: RowSet):
+        # build one DELETE per row, matching on every non-empty column value
+        all_statements = []
+        for row in rows:
+            whereclause = " AND ".join(
+                f"{column_name} = '{row[column_name]}'"
+                for column_name in row
+                if row[column_name] is not None and row[column_name] != ""
+            )
+            all_statements.append(f"DELETE FROM {self.name} WHERE {whereclause}")
+
+        # Remove duplicate statements and execute
+        all_statements = set(all_statements)
+        print(f"deleting {len(all_statements)} rows for {self.name} table")
+        for statement in all_statements:
+            self.cursor.execute(statement)
+
     def __eq__(self, other):
         return self.name == other.name
 
@@ -263,6 +285,12 @@ class PGRowSet(RowSet):
     def write_to(self, writer: RowWriter):
         writer.write_rows(self.table, self.rows)
 
+    def delete(self):
+        self.table.delete(self)
+
+    def combine_rows(self, other):
+        self.rows = self.rows + other.rows
+
     def __iter__(self) -> Iterable[Dict]:
         return iter(self.rows)
 
diff --git a/apps/cli/utilities/core_sampler/core_sampler/interfaces.py b/apps/cli/utilities/core_sampler/core_sampler/interfaces.py
index fd56dcaf3e9e3d8a50efaa0e903d08e06d3b5263..8d9a3e256745e27ffbb0ebf03ec9f45b719b39f4 100644
--- a/apps/cli/utilities/core_sampler/core_sampler/interfaces.py
+++ b/apps/cli/utilities/core_sampler/core_sampler/interfaces.py
@@ -45,6 +45,17 @@ class Table(ABC):
         """
         pass
 
+    @abc.abstractmethod
+    def fetch_from_many(self, primary_keys: Dict[str, Any]) -> "RowSet":
+        """
+        Fetch all rows whose primary key values appear in the given lists, collected in a RowSet.
+
+        :param primary_keys:  mapping of primary key column names to lists of values to look up
+        :return:  a RowSet containing every matching row
+        """
+        pass
+
     @abc.abstractmethod
     def primary_key_columns(self) -> List[str]:
         """
@@ -115,6 +126,24 @@ class RowSet(ABC):
         """
         pass
 
+    @abc.abstractmethod
+    def delete(self):
+        """
+        Remove the database entries for the table corresponding to the given rows.
+
+        :return:
+        """
+        pass
+
+    @abc.abstractmethod
+    def combine_rows(self, other):
+        """
+        Combine the rows of this RowSet with the rows of another RowSet.
+
+        :return:
+        """
+        pass
+
     @abc.abstractmethod
     def __iter__(self) -> Iterable[Dict]:
         pass
diff --git a/apps/cli/utilities/core_sampler/core_sampler/row_writer.py b/apps/cli/utilities/core_sampler/core_sampler/row_writer.py
index 36b2ac889279b61e944b51fd581c0d500449cae7..cf49de0dbdab983d51fd93b49ece2772b1e68a5f 100644
--- a/apps/cli/utilities/core_sampler/core_sampler/row_writer.py
+++ b/apps/cli/utilities/core_sampler/core_sampler/row_writer.py
@@ -16,6 +16,7 @@
 # You should have received a copy of the GNU General Public License
 # along with Workspaces.  If not, see <https://www.gnu.org/licenses/>.
 import datetime
+import decimal
 from itertools import chain
 from typing import Dict, List
 
@@ -43,7 +44,7 @@ class PostgresCopyRowWriter(RowWriter):
             return "\\N"
         elif isinstance(value, str):
             return value
-        elif isinstance(value, int) or isinstance(value, float):
+        elif isinstance(value, (int, float, decimal.Decimal)):
             return str(value)
         elif isinstance(value, datetime.date):
             return value.isoformat()
diff --git a/apps/cli/utilities/core_sampler/pyproject.toml b/apps/cli/utilities/core_sampler/pyproject.toml
index fc40856bc542dcb21f5f957e009144262f870a2b..427edd575f0d61c236eb126f5ab2d5f682d045d2 100644
--- a/apps/cli/utilities/core_sampler/pyproject.toml
+++ b/apps/cli/utilities/core_sampler/pyproject.toml
@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "core_sampler"
-version = "2.8.2.2"
+version = "2.8.2.3rc1"
 description = "Workspaces database core sampler"
 authors = ["DMS SSA <dms-ssa@nrao.edu>"]
 license = "GPL3+"
diff --git a/apps/cli/utilities/infrastructure_testing_scripts/delete_evla_eb_metadata.sql b/apps/cli/utilities/infrastructure_testing_scripts/delete_evla_eb_metadata.sql
new file mode 100644
index 0000000000000000000000000000000000000000..ef3887ccf4ecfffec5e7ffebeb2cdf911a4f4bcf
--- /dev/null
+++ b/apps/cli/utilities/infrastructure_testing_scripts/delete_evla_eb_metadata.sql
@@ -0,0 +1,42 @@
+-- To use: Install the below two functions, and then run the latter on an EB you want to delete, like:
+-- 		select delete_metadata_for_evla_eb('15A-397.sb31019491.eb31020561.57236.7198700463');
+
+create or replace function get_info_for_evla_eb_deletion(eb_fileset_id varchar, out science_product_locator VARCHAR, out filegroup_ids int[], out product_group_id int, out execution_block_id int, out observation_configuration_ids int[]) as $$
+declare ancillary_fg_ids int[];
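+-- each OUT parameter is filled in by the assignments below; the function returns them together as one record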
+BEGIN
+science_product_locator := science_products.science_product_locator from science_products where external_name = eb_fileset_id;
+filegroup_ids[1] := filegroup_id from science_products where external_name = eb_fileset_id;
+product_group_id := science_products_product_groups.product_group_id FROM science_products_product_groups WHERE science_products_product_groups.science_product_locator=get_info_for_evla_eb_deletion.science_product_locator;
+ancillary_fg_ids := array(select filegroup_id FROM ancillary_products WHERE ancillary_products.science_product_locator=get_info_for_evla_eb_deletion.science_product_locator OR ancillary_products.product_group_id=get_info_for_evla_eb_deletion.product_group_id);
+filegroup_ids := filegroup_ids || ancillary_fg_ids;
+execution_block_id := execution_blocks.execution_block_id FROM execution_blocks WHERE execution_blocks.science_product_locator=get_info_for_evla_eb_deletion.science_product_locator;
+observation_configuration_ids := array(select observation_configuration_id FROM observation_configurations WHERE observation_configurations.execution_block_id=get_info_for_evla_eb_deletion.execution_block_id);
+return;
+end;
+$$ Language plpgsql;
+
+create or replace function delete_metadata_for_evla_eb(eb_fileset_id varchar) returns void as $$
+declare
+eb_info record;
+begin
+eb_info := get_info_for_evla_eb_deletion(eb_fileset_id);
+delete FROM ancillary_products WHERE science_product_locator=eb_info.science_product_locator OR product_group_id=eb_info.product_group_id;
+delete FROM science_products_product_groups WHERE science_product_locator=eb_info.science_product_locator;
+delete FROM product_groups WHERE product_group_id=eb_info.product_group_id;
+delete FROM science_products_projects WHERE science_product_locator=eb_info.science_product_locator;
+delete FROM science_product_comments WHERE science_product_locator=eb_info.science_product_locator;
+delete FROM rh_locators WHERE locator=eb_info.science_product_locator;
+delete from subscans where observation_configuration_id = any (eb_info.observation_configuration_ids);
+delete from data_descriptions where observation_configuration_id = any (eb_info.observation_configuration_ids);
+delete FROM observation_configurations WHERE execution_block_id = eb_info.execution_block_id;
+delete FROM spectral_windows WHERE execution_block_id = eb_info.execution_block_id;
+delete FROM execution_blocks WHERE science_product_locator=eb_info.science_product_locator;
+delete FROM science_products WHERE science_product_locator=eb_info.science_product_locator;
+delete FROM files WHERE filegroup = any (eb_info.filegroup_ids);
+delete FROM filegroups WHERE filegroup_id = any (eb_info.filegroup_ids);
+return;
+end;
+$$ Language plpgsql;
+
+-- select delete_metadata_for_evla_eb('15A-397.sb31019491.eb31020561.57236.7198700463');
+-- select get_info_for_evla_eb_deletion('15A-397.sb31019491.eb31020561.57236.7198700463');
diff --git a/apps/cli/utilities/wf_monitor/poetry.lock b/apps/cli/utilities/wf_monitor/poetry.lock
index 41f83b33c927499cd1e503b74b6da4faf276b53d..dfe1be5abbef91ab0d1db4bcdef8c0a26a2a1170 100644
--- a/apps/cli/utilities/wf_monitor/poetry.lock
+++ b/apps/cli/utilities/wf_monitor/poetry.lock
@@ -383,7 +383,7 @@ tests = ["pytest", "pytz", "simplejson"]
 
 [[package]]
 name = "messaging"
-version = "2.8.2.2"
+version = "2.8.2.3rc1"
 description = "SSA Messaging is an AMQP-based asynchronous messaging system based on passing simple Python objects as JSON."
 optional = false
 python-versions = "~3.10"
@@ -706,7 +706,7 @@ files = [
 
 [[package]]
 name = "workspaces"
-version = "2.8.2.2"
+version = "2.8.2.3rc1"
 description = "SSA Workspaces shared library"
 optional = false
 python-versions = "~3.10"
diff --git a/apps/cli/utilities/wf_monitor/pyproject.toml b/apps/cli/utilities/wf_monitor/pyproject.toml
index e65edea4a8e515b8542ba800a5604177fe52b85e..ff493116e21f2769306467aceb670df742b29734 100644
--- a/apps/cli/utilities/wf_monitor/pyproject.toml
+++ b/apps/cli/utilities/wf_monitor/pyproject.toml
@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "wf_monitor"
-version = "2.8.2.2"
+version = "2.8.2.3rc1"
 description = "Workflow monitor that reads in HTCondor logs and translates them into AMQP events"
 authors = ["DMS SSA <dms-ssa@nrao.edu>"]
 license = "GPL3+"
diff --git a/apps/cli/utilities/wf_monitor/wf_monitor/__init__.py b/apps/cli/utilities/wf_monitor/wf_monitor/__init__.py
index ad8766b08664ef7d751a23a23b10abcb8f927e3e..e4c76039a8746948822452177ce77325cf9f0320 100644
--- a/apps/cli/utilities/wf_monitor/wf_monitor/__init__.py
+++ b/apps/cli/utilities/wf_monitor/wf_monitor/__init__.py
@@ -18,4 +18,4 @@
 """
 Workflow monitor that reads in HTCondor logs and translates them into AMQP events
 """
-__version__ = "2.8.2.2"
+__version__ = "2.8.2.3rc1"
diff --git a/apps/cli/utilities/wf_monitor/wf_monitor/monitor.py b/apps/cli/utilities/wf_monitor/wf_monitor/monitor.py
index 372e18cf826b91c6e225af82c827f4e1f8288e0b..8bd65fd6d56c31e9225d0fee1abf438787ef72c6 100644
--- a/apps/cli/utilities/wf_monitor/wf_monitor/monitor.py
+++ b/apps/cli/utilities/wf_monitor/wf_monitor/monitor.py
@@ -157,7 +157,7 @@ def is_final_stage(body: str) -> bool:
     :return: boolean for final stage
     """
 
-    dag_node = r"DAG Node: (?P<dagnode>[A-Z]{4,})"
+    dag_node = r"DAG Node: (?P<dagnode>[A-Z]{1,})"
 
     dagnode = re.search(dag_node, body)
     logger.info(f"current DAG stage: {dagnode.group('dagnode')}")
@@ -321,10 +321,10 @@ class WorkflowMonitor:
                                 # determine current DAG stage from event, if final, set exit condition for DAG
                                 if self.is_dag and event["type"] == WorkflowStatusMessages.EXECUTING.value:
                                     job_count += 1
-                                    is_final_node = is_final_stage(event["condor_metadata"]["log"])
                                     # DAG is still running, change message to 'continue'
-                                    if not is_final_node:
-                                        event["type"] = WorkflowStatusMessages.CONTINUE.value
+                                    event["type"] = WorkflowStatusMessages.CONTINUE.value
+                                    # set is final node flag if necessary
+                                    is_final_node = is_final_stage(event["condor_metadata"]["log"])
 
                                 send_message(event, self.message_router)
                                 # Empty contents to prepare for next event
diff --git a/apps/web/src/app/workspaces/components/capability-request/components/request-operations/request-operations.component.ts b/apps/web/src/app/workspaces/components/capability-request/components/request-operations/request-operations.component.ts
index 145f49fc7e411dc93ea3142ebb1dac350adbbc4c..80a1cc5e6cdac7c26203ee5808029c5260732676 100644
--- a/apps/web/src/app/workspaces/components/capability-request/components/request-operations/request-operations.component.ts
+++ b/apps/web/src/app/workspaces/components/capability-request/components/request-operations/request-operations.component.ts
@@ -48,8 +48,9 @@ export class RequestOperationsComponent implements OnInit {
   @Output() cartaLaunched: EventEmitter<any> = new EventEmitter<any>();
   public cartaResponse: any;
   public hasBeenClicked: boolean = false;
-  public userEmail: string;
-  private defaultCC: string;
+  public cartaEmail: string;
+  public defaultCC: string;
+  public emailParams: any;
 
   // Observer for submitting capability request objects (returns a capability execution)
   public submitRequestObserver = {
@@ -66,7 +67,7 @@ export class RequestOperationsComponent implements OnInit {
     next: (resp: any) => {
       this.cartaResponse = resp;
       this.hasBeenClicked = true;
-      // this.cartaLaunched.emit({ type: "carta", email: this.userEmail });
+      // this.cartaLaunched.emit({ type: "carta", email: this.cartaEmail });
     },
     error: (error) => {
       console.log(error);
@@ -111,7 +112,7 @@ export class RequestOperationsComponent implements OnInit {
 
   launchCarta($event) {
 
-    let email = this.userEmail != null ? this.userEmail : "n/a"
+    let email = this.cartaEmail != null ? this.cartaEmail : "n/a"
     let params = {
       need_data: false,
       notify_ready: true,
@@ -183,21 +184,47 @@ export class RequestOperationsComponent implements OnInit {
     return this.capabilitiesService.getAnalystEmail().subscribe(getAnalystEmailObserver);
   }
 
+  public loadEmailParams() {
+    // destination_email falls back to null when no user email is available
+    const buildParams = (email: string | null) => ({
+      "destination_email": email,
+      "version": this.selectedVersion,
+      "workflow_metadata": this.selectedVersion.workflow_metadata
+    });
+
+    const getEmailObserver = {
+      next: (response) => {
+        this.cartaEmail = response.resp ? response.resp : null;
+        this.emailParams = buildParams(this.cartaEmail);
+      },
+      error: (error) => {
+        console.error("Failed to load destination email:", error);
+        this.cartaEmail = null;
+        this.emailParams = buildParams(null);
+      }
+    };
+
+    return this.capabilityRequestService
+      .getUserEmail(this.capabilityRequest.id, this.selectedVersion.version_number)
+      .subscribe(getEmailObserver);
+  }
+
   public getEmailParams() {
-    return {
-      "destination_email": this.selectedVersion.parameters.user_email,
-      "version": this.selectedVersion,
-      "workflow_metadata": this.selectedVersion.workflow_metadata
-    }
+    return this.emailParams;
   }
 
   ngOnInit(): void {
-    if (this.selectedVersion.parameters.user_email) {
-      this.userEmail = this.selectedVersion.parameters.user_email.toString();
-    } else {
-      this.userEmail = null;
-    }
-
     this.loadDefaultCC();
+    this.loadEmailParams();
   }
 }
diff --git a/apps/web/src/app/workspaces/components/editor/editor.component.html b/apps/web/src/app/workspaces/components/editor/editor.component.html
index 7632636af55241cd2559ff6fe6bec3644ead5ea7..3c2cb74e7e79794c9b4c73876ff59457badb3ddf 100644
--- a/apps/web/src/app/workspaces/components/editor/editor.component.html
+++ b/apps/web/src/app/workspaces/components/editor/editor.component.html
@@ -31,7 +31,7 @@
     <div class="modal-footer">
         <button type="button" class="btn btn-outline-secondary mr-auto" (click)="undoEditChanges()">Revert Changes</button>
         <button type="button" class="btn btn-secondary" (click)="modal.close('exit')">Cancel</button>
-        <button type="button" class="btn btn-primary" mdbBtn (click)="this.newEditEvent.emit(this.editedData)">Save</button>
+        <button type="button" class="btn btn-primary" mdbBtn (click)="saveEditChanges()">Save</button>
         <button type="button" class="btn btn-primary" mdbBtn (click)="modal.close('save-and-close')">Save and Close</button>
     </div>
 </ng-template>
diff --git a/apps/web/src/app/workspaces/components/editor/editor.component.ts b/apps/web/src/app/workspaces/components/editor/editor.component.ts
index 83209fe827a622dfb9f223892c179445c763b474..fa33ee1e4d43fc15e4ad8e2c06baae8b1cbc3bdc 100644
--- a/apps/web/src/app/workspaces/components/editor/editor.component.ts
+++ b/apps/web/src/app/workspaces/components/editor/editor.component.ts
@@ -52,8 +52,8 @@ export class EditorComponent implements OnInit {
     this.modalService.open(content, { ariaLabelledBy: "modal-title", centered: true, size: "lg" }).result.then(
       (result) => {
         if (result === "save-and-close" && this.editedData) {
-          // "Save" button clicked; emit edited data to parent component
-          this.newEditEvent.emit(this.editedData)
+          // "Save and Close" button clicked; emit edited data to parent component and then exit the form
+          this.saveEditChanges();
           this.toggleEditorOpen()
         } else {
           // Form was exited by clicking out of it or pressing ESC
@@ -83,6 +83,15 @@ export class EditorComponent implements OnInit {
     this.editedData = this._textToEdit
   }
 
+  public saveEditChanges() {
+    if (this.editedData) {
+      // "Save" or "Save and Close" button clicked; emit edited data to parent component
+      this.newEditEvent.emit(this.editedData);
+    } else {
+      alert("No changes were saved");
+    }
+  }
+
   public edit(changes: string) {
     this.editedData = changes
     this._textToEdit = this.editedData
diff --git a/apps/web/src/app/workspaces/workspaces.component.html b/apps/web/src/app/workspaces/workspaces.component.html
index 5c5f013fb3d44db96acd094a65f298b6bb698f73..93ff62a364b605eea5238d69ac2bdb0c5277eaf8 100644
--- a/apps/web/src/app/workspaces/workspaces.component.html
+++ b/apps/web/src/app/workspaces/workspaces.component.html
@@ -1,2 +1,3 @@
 <app-ws-header></app-ws-header>
+
 <router-outlet></router-outlet>
diff --git a/apps/web/src/app/workspaces/ws-home/ws-home.component.ts b/apps/web/src/app/workspaces/ws-home/ws-home.component.ts
index e12f7d0ab6bd911fe37d1ad3b0bc474779b95c40..7675269f687da50291231ec9e4d4462fadec2b53 100644
--- a/apps/web/src/app/workspaces/ws-home/ws-home.component.ts
+++ b/apps/web/src/app/workspaces/ws-home/ws-home.component.ts
@@ -29,6 +29,7 @@ import {CapabilityRequest} from "../model/capability-request";
 })
 export class WsHomeComponent implements OnInit {
   public productLocator: string;
+  public productGroupId: number;
   public calProductLocator: string;
   public userEmail: string;
   public inputFileList: FileList;
@@ -128,21 +129,37 @@ export class WsHomeComponent implements OnInit {
    * - User email
    */
   LaunchCuratorCapabilityOnClick(curatorType: string): void {
-    this.launchCapability('curator', {
-      curator_type: curatorType,
-      product_locator: this.productLocator,
-      data_location: this.dataLocation,
-      target_list: this.targetList,
-      user_email: this.userEmail,
-    });
+    let parameters;
+
+    if (curatorType == "full"){
+      parameters = {
+        curator_type: curatorType,
+        product_group_id: this.productGroupId,
+        data_src: this.dataLocation,
+        user_email: this.userEmail,
+      }
+    } else {
+      parameters = {
+        curator_type: curatorType,
+        product_locator: this.productLocator,
+        data_src: this.dataLocation,
+        target_list: this.targetList,
+        user_email: this.userEmail,
+      }
+    }
+    this.launchCapability('curator', parameters);
   }
 
   /**
    * method that sets the user input Science Product Locator for the download capability
    * @param spl the Science Product Locator to download
    */
-  setProductLocator(spl: string): void {
-    this.productLocator = spl;
+  setProductLocator(spl: string | number): void {
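+    // Numeric input is interpreted as a product group id, anything else as a locator string.
+    // Note: Number("0") and Number("") are falsy, so such inputs fall through to the locator branch.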
+    if (Number(spl)) {
+      this.productGroupId = Number(spl);
+    } else {
+      this.productLocator = String(spl);
+    }
   }
 
   /**
diff --git a/ci/push-package.template.yml b/ci/push-package.template.yml
index 7624b922f92e8a9a01018bbac9b529636ba67746..185f4f7c1b5581f18c4429de4d5ababa037dbb85 100644
--- a/ci/push-package.template.yml
+++ b/ci/push-package.template.yml
@@ -16,7 +16,7 @@
       - pip install pex
       - pex ${PIP_NAME}==${RELEASE_VERSION} -c ${PACKAGE_NAME} -o ./${PACKAGE_NAME} -i https://gitlab.nrao.edu/api/v4/projects/621/packages/pypi/simple --python-shebang /home/ssa/bin/python3.10 --disable-cache
       - echo "Releasing PEX to sbin area - ${PACKAGE_NAME}"
-      - scp ${PACKAGE_NAME} root@shipman.aoc.nrao.edu:/lustre/aoc/cluster/pipeline/dsoc-${DEPLOY_ENV}/workspaces/sbin/
+      - scp -O ${PACKAGE_NAME} root@shipman.aoc.nrao.edu:/lustre/aoc/cluster/pipeline/dsoc-${DEPLOY_ENV}/workspaces/sbin/
     rules:
       - if: '$CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH || $CI_COMMIT_BRANCH =~ /(^[0-9](\.[0-9])*)-DEVELOPMENT/'
         variables:
@@ -24,7 +24,11 @@
         changes:
           - apps/cli/executables/pexable/${PACKAGE_NAME}/**/*
       # Uncomment for pipeline testing only
-      # - if: '$CI_PIPELINE_SOURCE == "merge_request_event"'
+#      - if: '$CI_PIPELINE_SOURCE == "merge_request_event"'
+#        variables:
+#          DEPLOY_ENV: "dev"
+#        changes:
+#          - apps/cli/executables/pexable/${PACKAGE_NAME}/**/*
       - if: '$CI_COMMIT_TAG =~ /^end-of-sprint-[0-9]+/'
         variables:
           # override DEPLOY_ENV
diff --git a/docker.properties b/docker.properties
index 348435c2527eafeccb26ac07eacc9957d4e012df..e8c4faa91bb681faf9574faa3da63adcbe764a30 100644
--- a/docker.properties
+++ b/docker.properties
@@ -24,7 +24,7 @@ edu.nrao.workspaces.CapabilitySettings.externalServiceUrl = http://capability:34
 edu.nrao.workspaces.ProcessingSettings.useCasa = false
 edu.nrao.workspaces.ProcessingSettings.rootDirectory =  /lustre/aoc/cluster/pipeline/docker/workspaces/spool
 edu.nrao.workspaces.ProcessingSettings.scriptLocation = /lustre/aoc/cluster/pipeline/docker/workspaces/sbin
-edu.nrao.workspaces.ProcessingSettings.ramInGb = 0.21G
+edu.nrao.workspaces.ProcessingSettings.ramInGb = 0.2G
 edu.nrao.workspaces.ProcessingSettings.CasaVersion.vlass = /home/casa/packages/pipeline/casa-6.1.3-3-pipeline-2021.1.1.32
 
 edu.nrao.archive.workflow.config.CasaVersions.homeForReprocessing = /home/casa/packages/pipeline/current
diff --git a/services/capability/capability/__init__.py b/services/capability/capability/__init__.py
index 2e35f0e17fa9a94f9b6f4527ef5e9f70135a933b..987e0746386364d311b3a27cccc3e85e722ac33f 100644
--- a/services/capability/capability/__init__.py
+++ b/services/capability/capability/__init__.py
@@ -18,4 +18,4 @@
 """
 Capability: the Workspaces Capability Service
 """
-__version__ = "2.8.2.2"
+__version__ = "2.8.2.3rc1"
diff --git a/services/capability/capability/routes.py b/services/capability/capability/routes.py
index d451b766d66919fb7d9d13607da1d480acfd03f0..c07ed069b4c6595929015561e22c6da9360684d3 100644
--- a/services/capability/capability/routes.py
+++ b/services/capability/capability/routes.py
@@ -117,6 +117,7 @@ def capability_request_routes(config: Configurator):
         pattern="capability/{capability_name}/request/create-and-submit",
         request_method="POST",
     )
+
     config.add_route(name="close_capability_request", pattern=f"{request_url}/close", request_method="POST")
 
     version_url = request_url + "/version/{version}"
diff --git a/services/capability/capability/views/capability_version.py b/services/capability/capability/views/capability_version.py
index 9c696b7c30c9a620669a001d603e52e134fb415a..7af1184e0f7f62dc510e1fe0b56e8bd41dbef565 100644
--- a/services/capability/capability/views/capability_version.py
+++ b/services/capability/capability/views/capability_version.py
@@ -297,20 +297,28 @@ def add_or_update_file(request: Request) -> Response:
 
     if capability_request:
         if capability_request.current_version:
-            versionFile = request.capability_info.lookup_version_file(
-                request.matchdict["capability_request_id"],
-                capability_request.current_version.version_number,
-                request.matchdict["filename"],
-            )
-            if versionFile:
-                versionFile.content = request.body
-                file = request.capability_info.save_edited_version_file(versionFile)
-                return file
+            # Prevent empty request bodies from clearing out existing file content
+            if request.body.strip():
+                version_file = request.capability_info.lookup_version_file(
+                    request.matchdict["capability_request_id"],
+                    capability_request.current_version.version_number,
+                    request.matchdict["filename"],
+                )
+                if version_file:
+                    version_file.content = request.body
+                    return request.capability_info.save_edited_version_file(version_file)
+                else:
+                    # Add the file if it doesn't exist:
+                    return request.capability_info.save_version_file(
+                        capability_request.current_version, request.matchdict["filename"], request.body
+                    )
             else:
-                # Add the file if it doesn't exist:
-                return request.capability_info.save_version_file(
-                    capability_request.current_version, request.matchdict["filename"], request.body
+                no_content_msg = (
+                    f"No content to save in file {request.matchdict['filename']} "
+                    f"for capability request with ID {request.matchdict['capability_request_id']}."
                 )
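+            # 412 Precondition Failed tells the client the request body must contain content to save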
+            return HTTPPreconditionFailed(no_content_msg)
         else:
             no_versions_msg = (
                 f"Capability request with ID {request.matchdict['capability_request_id']} has no versions."
diff --git a/services/capability/poetry.lock b/services/capability/poetry.lock
index 6200ab9f2e34d6c2d6477eea24f5c72c373489a6..eaba29551cf09f71b3214098682368a785970e2d 100644
--- a/services/capability/poetry.lock
+++ b/services/capability/poetry.lock
@@ -2,7 +2,7 @@
 
 [[package]]
 name = "aat-wrest"
-version = "2.8.2.2"
+version = "2.8.2.3rc1"
 description = "AAT Wrest: Workspaces-to-Archive metadata retriever"
 optional = false
 python-versions = "~3.10"
@@ -630,7 +630,7 @@ tests = ["pytest", "pytz", "simplejson"]
 
 [[package]]
 name = "messaging"
-version = "2.8.2.2"
+version = "2.8.2.3rc1"
 description = "SSA Messaging is an AMQP-based asynchronous messaging system based on passing simple Python objects as JSON."
 optional = false
 python-versions = "~3.10"
@@ -1341,7 +1341,7 @@ testing = ["coverage", "pytest (>=3.1.0)", "pytest-cov", "pytest-xdist"]
 
 [[package]]
 name = "workspaces"
-version = "2.8.2.2"
+version = "2.8.2.3rc1"
 description = "SSA Workspaces shared library"
 optional = false
 python-versions = "~3.10"
diff --git a/services/capability/pyproject.toml b/services/capability/pyproject.toml
index 901af25c44f398f1b868561113e6161e7026733b..71a58ce33adf4c582f4dc0090fa6283c2489c844 100644
--- a/services/capability/pyproject.toml
+++ b/services/capability/pyproject.toml
@@ -4,7 +4,7 @@ build-backend = "poetry.core.masonry.api"
 
 [tool.poetry]
 name = "capability"
-version = "2.8.2.2"
+version = "2.8.2.3rc1"
 description = "Capability: the Workspaces Capability Service"
 authors = ["DMS SSA <dms-ssa@nrao.edu>"]
 license = "GPL3+"
diff --git a/services/notification/notification/__init__.py b/services/notification/notification/__init__.py
index 7896e637345efff82bbf0f84fca49b2aa7f4b359..13b7c3b048969ce81f7964fb1dba211f01805e5f 100644
--- a/services/notification/notification/__init__.py
+++ b/services/notification/notification/__init__.py
@@ -18,4 +18,4 @@
 """
 The Workspaces notification service
 """
-__version__ = "2.8.2.2"
+__version__ = "2.8.2.3rc1"
diff --git a/services/notification/poetry.lock b/services/notification/poetry.lock
index b648ee2ad436d3732965883bbfacc339b0c2f3a2..6ae9cf7739ef3429f5b9f90c9cc5ab98f1252e04 100644
--- a/services/notification/poetry.lock
+++ b/services/notification/poetry.lock
@@ -1154,7 +1154,7 @@ testing = ["coverage", "pytest (>=3.1.0)", "pytest-cov", "pytest-xdist"]
 
 [[package]]
 name = "workspaces"
-version = "2.8.2.2"
+version = "2.8.2.3rc1"
 description = "SSA Workspaces shared library"
 optional = false
 python-versions = "~3.10"
diff --git a/services/notification/pyproject.toml b/services/notification/pyproject.toml
index c10ac8c583f1bd88d5e84f09122821ab8d362c0b..4abd4fd805c07e76e19700215a37a92fc443ab9b 100644
--- a/services/notification/pyproject.toml
+++ b/services/notification/pyproject.toml
@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "notification"
-version = "2.8.2.2"
+version = "2.8.2.3rc1"
 description = "The SSA notification service"
 authors = ["DMS SSA <dms-ssa@nrao.edu>"]
 license = "GPL3+"
diff --git a/services/workflow/gitlab-requirements.txt b/services/workflow/gitlab-requirements.txt
index ac6d393f59af43b75ebe2dd851abebc1c8d22b43..79dd0e9fc7d740545a5240849d72b054b5a967f6 100644
--- a/services/workflow/gitlab-requirements.txt
+++ b/services/workflow/gitlab-requirements.txt
@@ -1,14 +1,14 @@
 # Pex requirements pulled from gitlab
-carta-envoy==2.8.2.2
-casa-envoy==2.8.2.2
-conveyor==2.8.2.2
-ssa-deliver==2.8.2.2
-ingest-envoy==2.8.2.2
-ssa-mediator==2.8.2.2
-ssa-null==2.8.2.2
-productfetcher==2.8.2.2
-ssa-update-stage==2.8.2.2
-ssa-vela==2.8.2.2
-wf-inspector==2.8.2.2
-ws-annihilator==2.8.2.2
-ws-metrics==2.8.2.2
+carta-envoy==2.8.2.3rc1
+casa-envoy==2.8.2.3rc1
+conveyor==2.8.2.3rc1
+ssa-deliver==2.8.2.3rc1
+ingest-envoy==2.8.2.3rc1
+ssa-mediator==2.8.2.3rc1
+ssa-null==2.8.2.3rc1
+productfetcher==2.8.2.3rc1
+ssa-update-stage==2.8.2.3rc1
+ssa-vela==2.8.2.3rc1
+wf-inspector==2.8.2.3rc1
+ws-annihilator==2.8.2.3rc1
+ws-metrics==2.8.2.3rc1
diff --git a/services/workflow/poetry.lock b/services/workflow/poetry.lock
index 23ece2348845fee2a5fdacf2735a170957416171..66499981c5f9222d77d35b39de5eb6b0852603ec 100644
--- a/services/workflow/poetry.lock
+++ b/services/workflow/poetry.lock
@@ -503,7 +503,7 @@ tests = ["pytest", "pytz", "simplejson"]
 
 [[package]]
 name = "messaging"
-version = "2.8.2.2"
+version = "2.8.2.3rc1"
 description = "SSA Messaging is an AMQP-based asynchronous messaging system based on passing simple Python objects as JSON."
 optional = false
 python-versions = "~3.10"
@@ -1104,7 +1104,7 @@ testing = ["coverage", "pytest (>=3.1.0)", "pytest-cov", "pytest-xdist"]
 
 [[package]]
 name = "workspaces"
-version = "2.8.2.2"
+version = "2.8.2.3rc1"
 description = "SSA Workspaces shared library"
 optional = false
 python-versions = "~3.10"
diff --git a/services/workflow/pyproject.toml b/services/workflow/pyproject.toml
index e8590c2195ad95a3edd32059ea8dd32bb6b0147c..9f595569d0e8c6c66a01f17e3f10a9660abb280b 100644
--- a/services/workflow/pyproject.toml
+++ b/services/workflow/pyproject.toml
@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "workflow"
-version = "2.8.2.2"
+version = "2.8.2.3rc1"
 description = "Workflow: the Workspaces Workflow Service"
 authors = ["DMS SSA <dms-ssa@nrao.edu>"]
 license = "GPL3+"
diff --git a/services/workflow/workflow/__init__.py b/services/workflow/workflow/__init__.py
index a18080fc22e23da22ae0806ab53ab33cf9644507..ba83a4e4e450f255c617a6aaf6e4dda42f0b96a9 100644
--- a/services/workflow/workflow/__init__.py
+++ b/services/workflow/workflow/__init__.py
@@ -18,4 +18,4 @@
 """
 Workflow: the Workspaces Workflow Service
 """
-__version__ = "2.8.2.2"
+__version__ = "2.8.2.3rc1"
diff --git a/services/workflow/workflow/server.py b/services/workflow/workflow/server.py
index 3186bd678c295ae068c86448bb6504e8e8d73927..a6725959f9a80190bbc9668298f67d4c6fd231b8 100644
--- a/services/workflow/workflow/server.py
+++ b/services/workflow/workflow/server.py
@@ -377,7 +377,6 @@ class WorkflowRequestRestService:
         Create a new workflow request from the name/arguments supplied, then submit the request.
 
         Audience: front-end and CLI
-
         :return:
         """
         self.request.context = self.create_workflow_request()
@@ -515,6 +514,7 @@ class WorkflowRequestRestService:
             additional = body["telescope"]
 
         self.request.workflows.message_archive(identifier, msg_type, additional)
+
         return Response(
             status_code=http.HTTPStatus.OK,
             body=f"SUCCESS for identifier '{identifier}': Sent '{msg_type}' to AAT for this workflow's SDM",
@@ -829,6 +829,7 @@ def main(global_config, **settings):
             "/workflows/{name}/requests/{request_id}/submit",
             factory=lookup_request,
         )
+        # Kick off the ingestion follow-on workflow
         config.add_route(
             "ingest_workflow_result",
             "/workflows/{name}/requests/{request_id}/ingest",
@@ -885,11 +886,13 @@ def main(global_config, **settings):
             factory=lookup_request,
         )
 
+        # Route for retrieving the list of requests eligible for cleanup
         config.add_route(
             "list_stale_requests",
             "/workflows/requests/stale/{days}",
         )
 
+        # Route for marking requests as cleaned by the annihilator
         config.add_route(
             "set_request_cleaned",
             "/workflows/requests/cleaned",
diff --git a/shared/messaging/messaging/__init__.py b/shared/messaging/messaging/__init__.py
index 7801c9e5a89ee444686f2611aff518fb4a709503..8c839b741d11f23325f32effc53ceb516d41f53a 100644
--- a/shared/messaging/messaging/__init__.py
+++ b/shared/messaging/messaging/__init__.py
@@ -19,4 +19,4 @@
 SSA Messaging is an AMQP-based asynchronous messaging system based on passing simple Python objects as JSON.
 """
 
-__version__ = "2.8.2.2"
+__version__ = "2.8.2.3rc1"
diff --git a/shared/messaging/pyproject.toml b/shared/messaging/pyproject.toml
index 2a16445964de1496d8c1f65d11485ee187417c2a..a48329512df453c89ad84692ec460d708badd3f8 100644
--- a/shared/messaging/pyproject.toml
+++ b/shared/messaging/pyproject.toml
@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "messaging"
-version = "2.8.2.2"
+version = "2.8.2.3rc1"
 description = "SSA Messaging is an AMQP-based asynchronous messaging system based on passing simple Python objects as JSON."
 authors = ["DMS SSA <dms-ssa@nrao.edu>"]
 license = "GPL3+"
diff --git a/shared/workspaces/alembic/versions/008590dd66fd_.py b/shared/workspaces/alembic/versions/008590dd66fd_.py
new file mode 100644
index 0000000000000000000000000000000000000000..3fd12a16301d1a36c33d71fb91a646dbc56c0d20
--- /dev/null
+++ b/shared/workspaces/alembic/versions/008590dd66fd_.py
@@ -0,0 +1,24 @@
+"""empty message
+
+Revision ID: 008590dd66fd
+Revises: f2a76d224984, c9d8e14ae603
+Create Date: 2024-02-14 14:16:25.168901
+
+"""
+from alembic import op
+
+
+# revision identifiers, used by Alembic.
+revision = '008590dd66fd'
+down_revision = ('f2a76d224984', 'c9d8e14ae603')
+branch_labels = None
+depends_on = None
+
+
+def upgrade():
+    pass
+
+
+def downgrade():
+    pass
diff --git a/shared/workspaces/alembic/versions/1c435b5d7a8d_change_curator_wf_metadata_json_data_.py b/shared/workspaces/alembic/versions/1c435b5d7a8d_change_curator_wf_metadata_json_data_.py
new file mode 100644
index 0000000000000000000000000000000000000000..8891045aa39aeddc4eeee335e7370f2a7e8b77f6
--- /dev/null
+++ b/shared/workspaces/alembic/versions/1c435b5d7a8d_change_curator_wf_metadata_json_data_.py
@@ -0,0 +1,82 @@
+# Copyright (C) 2023 Associated Universities, Inc. Washington DC, USA.
+#
+# This file is part of NRAO Workspaces.
+#
+# Workspaces is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# Workspaces is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with Workspaces.  If not, see <https://www.gnu.org/licenses/>.
+#
+"""change curator-wf metadata.json data_location to data_src
+
+The data_location key is used by the workflow engine for other purposes; curator needs an isolated key for its data source.
+
+Revision ID: 1c435b5d7a8d
+Revises: 49b7c8150e72
+Create Date: 2024-01-02 08:57:57.131486
+
+"""
+import sqlalchemy as sa
+from alembic import op
+
+# revision identifiers, used by Alembic.
+revision = "1c435b5d7a8d"
+down_revision = "49b7c8150e72"
+branch_labels = None
+depends_on = None
+
+old_json = """{
+  "product_locator": "{{product_locator}}",{{#data_location}}
+  "data_location": "{{data_location}}",{{/data_location}}
+  "product_type": "{{product_type}}",{{#target_list}}
+  "target_list": ["{{target_list}}"],{{/target_list}}{{#product_group_id}}
+  "product_group_id": {{product_group_id}},{{/product_group_id}}{{#input_group_locator}}
+  "input_group_locator": "{{input_group_locator}}",{{/input_group_locator}}
+  "projectMetadata": {
+    "telescope": "{{telescope}}",
+    "projectCode": "{{projectCode}}"
+  }{{#file_list}},
+  "file_list": {{file_list}}{{/file_list}}
+}
+"""
+
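+# NB: the rendered metadata.json key remains "data_location"; only the Mustache section and variable are renamed to data_src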
+new_json = """{
+  "product_locator": "{{product_locator}}",{{#data_src}}
+  "data_location": "{{data_src}}",{{/data_src}}
+  "product_type": "{{product_type}}",{{#target_list}}
+  "target_list": ["{{target_list}}"],{{/target_list}}{{#product_group_id}}
+  "product_group_id": {{product_group_id}},{{/product_group_id}}{{#input_group_locator}}
+  "input_group_locator": "{{input_group_locator}}",{{/input_group_locator}}
+  "projectMetadata": {
+    "telescope": "{{telescope}}",
+    "projectCode": "{{projectCode}}"
+  }{{#file_list}},
+  "file_list": {{file_list}}{{/file_list}}
+}
+"""
+
+
+def upgrade():
+    op.execute(
+        f"""
+        UPDATE workflow_templates
+        SET content=E'{new_json}' WHERE filename='metadata.json' AND workflow_name='curator'
+        """
+    )
+
+
+def downgrade():
+    op.execute(
+        f"""
+        UPDATE workflow_templates
+        SET content=E'{old_json}' WHERE filename='metadata.json' AND workflow_name='curator'
+        """
+    )
diff --git a/shared/workspaces/alembic/versions/5eaf550162d3_add_full_curation.py b/shared/workspaces/alembic/versions/5eaf550162d3_add_full_curation.py
new file mode 100644
index 0000000000000000000000000000000000000000..7a309e94055c987a2c63fc1e53a5c0bd7fa4ff69
--- /dev/null
+++ b/shared/workspaces/alembic/versions/5eaf550162d3_add_full_curation.py
@@ -0,0 +1,75 @@
+# Copyright (C) 2023 Associated Universities, Inc. Washington DC, USA.
+#
+# This file is part of NRAO Workspaces.
+#
+# Workspaces is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# Workspaces is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with Workspaces.  If not, see <https://www.gnu.org/licenses/>.
+#
+"""add full curation
+
+Revision ID: 5eaf550162d3
+Revises: 8aa23551a0fa
+Create Date: 2023-11-09 09:51:43.268832
+
+"""
+from alembic import op
+
+# revision identifiers, used by Alembic.
+revision = "5eaf550162d3"
+down_revision = "8aa23551a0fa"
+branch_labels = None
+depends_on = None
+
+
+old_json = """{
+  "product_locator": "{{product_locator}}",{{#data_location}}
+  "data_location": "{{data_location}}",{{/data_location}}
+  "product_type": "{{product_type}}",{{#target_list}}
+  "target_list": ["{{target_list}}"],{{/target_list}}
+  "projectMetadata": {
+    "telescope": "{{telescope}}",
+    "projectCode": "{{projectCode}}"
+  }
+}
+"""
+
+new_json = """{
+  "product_locator": "{{product_locator}}",{{#data_location}}
+  "data_location": "{{data_location}}",{{/data_location}}
+  "product_type": "{{product_type}}",{{#target_list}}
+  "target_list": ["{{target_list}}"],{{/target_list}}
+  "projectMetadata": {
+    "telescope": "{{telescope}}",
+    "projectCode": "{{projectCode}}"
+  }{{#file_list}},
+"file_list": {{file_list}}{{/file_list}}
+}
+"""
+
+
+def upgrade():
+    op.execute(
+        f"""
+        UPDATE workflow_templates
+        SET content=E'{new_json}' WHERE filename='metadata.json' AND workflow_name='curator'
+        """
+    )
+
+
+def downgrade():
+    op.execute(
+        f"""
+        UPDATE workflow_templates
+        SET content=E'{old_json}' WHERE filename='metadata.json' AND workflow_name='curator'
+        """
+    )
diff --git a/shared/workspaces/alembic/versions/8aa23551a0fa_lustre_lock_ingests.py b/shared/workspaces/alembic/versions/8aa23551a0fa_lustre_lock_ingests.py
index 81b0a6ab897d0c09bf48c36fe59b81889c2ca551..1f327334b82ee5d628d8366e13c31d2a5143453f 100644
--- a/shared/workspaces/alembic/versions/8aa23551a0fa_lustre_lock_ingests.py
+++ b/shared/workspaces/alembic/versions/8aa23551a0fa_lustre_lock_ingests.py
@@ -68,7 +68,7 @@ queue
 ingest_cal_sh_new = """#!/bin/sh
 set -o errexit
 
-SBIN_PATH = /lustre/aoc/cluster/pipeline/$ENV(CAPO_PROFILE)/workspaces/sbin
+SBIN_PATH=/lustre/aoc/cluster/pipeline/$CAPO_PROFILE/workspaces/sbin
 
 ${SBIN_PATH}/conveyor --retrieve $1
 ${SBIN_PATH}/ingest_envoy --calibration $1
@@ -130,7 +130,7 @@ queue
 ingest_image_sh_new = """#!/bin/sh
 set -o errexit
 
-SBIN_PATH = /lustre/aoc/cluster/pipeline/$ENV(CAPO_PROFILE)/workspaces/sbin
+SBIN_PATH=/lustre/aoc/cluster/pipeline/$CAPO_PROFILE/workspaces/sbin
 
 ${SBIN_PATH}/conveyor --retrieve-img $1
 ${SBIN_PATH}/ingest_envoy --image $1
@@ -191,7 +191,7 @@ queue
 ingest_seci_sh_new = """#!/bin/sh
 set -o errexit
 
-SBIN_PATH = /lustre/aoc/cluster/pipeline/$ENV(CAPO_PROFILE)/workspaces/sbin
+SBIN_PATH=/lustre/aoc/cluster/pipeline/$CAPO_PROFILE/workspaces/sbin
 
 ${SBIN_PATH}/ingest_envoy --seci $1 $2
 
diff --git a/shared/workspaces/alembic/versions/91091612b6d0_mark4_workflow_renaming.py b/shared/workspaces/alembic/versions/91091612b6d0_mark4_workflow_renaming.py
index e1f5b12a2fb7487a9c0d2f96d6e2153ae2288c02..8a1e7d659a0c83d20d57d54db1c17ee7217accb4 100644
--- a/shared/workspaces/alembic/versions/91091612b6d0_mark4_workflow_renaming.py
+++ b/shared/workspaces/alembic/versions/91091612b6d0_mark4_workflow_renaming.py
@@ -1,3 +1,20 @@
+# Copyright (C) 2023 Associated Universities, Inc. Washington DC, USA.
+#
+# This file is part of NRAO Workspaces.
+#
+# Workspaces is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# Workspaces is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with Workspaces.  If not, see <https://www.gnu.org/licenses/>.
+#
 """mark4 workflow renaming
 
 Revision ID: 91091612b6d0
@@ -5,9 +22,8 @@ Revises: d20ceed949b3
 Create Date: 2023-09-22 13:34:54.054114
 
 """
-from alembic import op
 import sqlalchemy as sa
-
+from alembic import op
 
 # revision identifiers, used by Alembic.
 revision = "91091612b6d0"
@@ -57,8 +73,8 @@ failed=$1.failed
 
 SBIN_PATH=/lustre/aoc/cluster/pipeline/$CAPO_PROFILE/workspaces/sbin
 
-# Are we running live NGAS ingestion or testing only? check what we're feeding the manifests
-ISLIVE=$($SBIN_PATH/pycapo -q archive-ingestion.ingestNGAS) 
+# Are we running live NGAS ingestion or testing only? check what we\\'re feeding the manifests
+ISLIVE=$($SBIN_PATH/pycapo -q archive-ingestion.ingestNGAS)
 
 # Get NGAS hosts and set up variables to randomly select one
 NGASHOSTSTR=$($SBIN_PATH/pycapo -q archive-ingestion.NGASHosts)
diff --git a/shared/workspaces/alembic/versions/c9d8e14ae603_remove_internal_notes_default.py b/shared/workspaces/alembic/versions/c9d8e14ae603_remove_internal_notes_default.py
new file mode 100644
index 0000000000000000000000000000000000000000..c26a78d95af5e381205ef6be1367644d339ad08a
--- /dev/null
+++ b/shared/workspaces/alembic/versions/c9d8e14ae603_remove_internal_notes_default.py
@@ -0,0 +1,42 @@
+# Copyright (C) 2023 Associated Universities, Inc. Washington DC, USA.
+#
+# This file is part of NRAO Workspaces.
+#
+# Workspaces is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# Workspaces is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with Workspaces.  If not, see <https://www.gnu.org/licenses/>.
+#
+"""remove internal_notes default
+
+Let the capability service set the initial value for capability_versions.internal_notes
+
+Revision ID: c9d8e14ae603
+Revises: 1c435b5d7a8d
+Create Date: 2024-02-09 14:08:53.042890
+
+"""
+import sqlalchemy as sa
+from alembic import op
+
+# revision identifiers, used by Alembic.
+revision = "c9d8e14ae603"
+down_revision = "1c435b5d7a8d"
+branch_labels = None
+depends_on = None
+
+
+def upgrade():
+    op.execute("""alter table capability_versions alter column internal_notes drop default;""")
+
+
+def downgrade():
+    op.execute("""alter table capability_versions alter column internal_notes set default 'Created';""")
diff --git a/shared/workspaces/alembic/versions/e680d469ec4e_update_curator_metadata2.py b/shared/workspaces/alembic/versions/e680d469ec4e_update_curator_metadata2.py
new file mode 100644
index 0000000000000000000000000000000000000000..da67fe8a9825434b39b3af49e0f15549578c51ca
--- /dev/null
+++ b/shared/workspaces/alembic/versions/e680d469ec4e_update_curator_metadata2.py
@@ -0,0 +1,55 @@
+"""update_curator_metadata2
+
+Revision ID: e680d469ec4e
+Revises: f49af22c5c0e
+Create Date: 2024-03-12 15:05:15.771787
+
+"""
+from pathlib import Path
+
+from alembic import op
+
+
+# revision identifiers, used by Alembic.
+revision = "e680d469ec4e"
+down_revision = "f49af22c5c0e"
+branch_labels = None
+depends_on = None
+
+old_content = """{
+  "product_locator": "{{product_locator}}",{{#data_location}}
+  "data_location": "{{data_location}}",{{/data_location}}
+  "product_type": "{{product_type}}",{{#target_list}}
+  "target_list": ["{{target_list}}"],{{/target_list}}{{#product_group_id}}
+  "product_group_id": {{product_group_id}},{{/product_group_id}}{{#input_group_locator}}
+  "input_group_locator": "{{input_group_locator}}",{{/input_group_locator}}
+  "projectMetadata": {
+    "telescope": "{{telescope}}",
+    "projectCode": "{{projectCode}}"
+  }{{#file_list}},
+  "file_list": {{file_list}}{{/file_list}}
+  }
+"""
+
+# Resolve the template relative to this migration file rather than the caller's working directory
+new_content = (Path(__file__).parent / "templates" / "curator" / "metadata_2.8.2.3.txt").read_text()
+
+
+def upgrade():
+    op.execute(
+        f"""
+        UPDATE workflow_templates
+        SET content=E'{new_content}'
+        WHERE filename='metadata.json' AND workflow_name='curator'
+        """
+    )
+
+
+def downgrade():
+    op.execute(
+        f"""
+        UPDATE workflow_templates
+        SET content=E'{old_content}'
+        WHERE filename='metadata.json' AND workflow_name='curator'
+        """
+    )
diff --git a/shared/workspaces/alembic/versions/f2a76d224984_add_product_group_id_to_the_curator_.py b/shared/workspaces/alembic/versions/f2a76d224984_add_product_group_id_to_the_curator_.py
new file mode 100644
index 0000000000000000000000000000000000000000..6a12077378b9664a8ee78b1751cae7eca8b453c1
--- /dev/null
+++ b/shared/workspaces/alembic/versions/f2a76d224984_add_product_group_id_to_the_curator_.py
@@ -0,0 +1,60 @@
+"""Add product_group_id to the curator workflow metadata.json
+
+Revision ID: f2a76d224984
+Revises: 5eaf550162d3
+Create Date: 2023-11-15 16:42:50.445896
+
+"""
+from alembic import op
+
+# revision identifiers, used by Alembic.
+revision = "f2a76d224984"
+down_revision = "5eaf550162d3"
+branch_labels = None
+depends_on = None
+
+old_json = """{
+  "product_locator": "{{product_locator}}",{{#data_location}}
+  "data_location": "{{data_location}}",{{/data_location}}
+  "product_type": "{{product_type}}",{{#target_list}}
+  "target_list": ["{{target_list}}"],{{/target_list}}
+  "projectMetadata": {
+    "telescope": "{{telescope}}",
+    "projectCode": "{{projectCode}}"
+  }{{#file_list}},
+  "file_list": {{file_list}}{{/file_list}}
+  }
+"""
+
+new_json = """{
+  "product_locator": "{{product_locator}}",{{#data_location}}
+  "data_location": "{{data_location}}",{{/data_location}}
+  "product_type": "{{product_type}}",{{#target_list}}
+  "target_list": ["{{target_list}}"],{{/target_list}}{{#product_group_id}}
+  "product_group_id": {{product_group_id}},{{/product_group_id}}{{#input_group_locator}}
+  "input_group_locator": "{{input_group_locator}}",{{/input_group_locator}}
+  "projectMetadata": {
+    "telescope": "{{telescope}}",
+    "projectCode": "{{projectCode}}"
+  }{{#file_list}},
+  "file_list": {{file_list}}{{/file_list}}
+  }
+"""
+
+
+def upgrade():
+    op.execute(
+        f"""
+        UPDATE workflow_templates
+        SET content=E'{new_json}' WHERE filename='metadata.json' AND workflow_name='curator'
+        """
+    )
+
+
+def downgrade():
+    op.execute(
+        f"""
+        UPDATE workflow_templates
+        SET content=E'{old_json}' WHERE filename='metadata.json' AND workflow_name='curator'
+        """
+    )
diff --git a/shared/workspaces/alembic/versions/f49af22c5c0e_fix_null_dag_logs.py b/shared/workspaces/alembic/versions/f49af22c5c0e_fix_null_dag_logs.py
new file mode 100644
index 0000000000000000000000000000000000000000..89cf79d28096579200231899fce7473bab25b9c9
--- /dev/null
+++ b/shared/workspaces/alembic/versions/f49af22c5c0e_fix_null_dag_logs.py
@@ -0,0 +1,40 @@
+"""fix null dag logs
+
+Revision ID: f49af22c5c0e
+Revises: 008590dd66fd
+Create Date: 2024-02-20 14:32:15.149922
+
+"""
+from alembic import op
+
+
+# revision identifiers, used by Alembic.
+revision = "f49af22c5c0e"
+down_revision = "008590dd66fd"
+branch_labels = None
+depends_on = None
+
+
+old_log = """log = null_dag.$(jobname).log"""
+
+new_log = """log = condor.log"""
+
+
+def upgrade():
+    op.execute(
+        f"""
+        UPDATE workflow_templates
+        SET content = replace(convert_from(content, 'utf8'),E'{old_log}', E'{new_log}' )::bytea
+        WHERE filename like 'null_dag_%.condor';
+        """
+    )
+
+
+def downgrade():
+    op.execute(
+        f"""
+        UPDATE workflow_templates
+        SET content = replace(convert_from(content, 'utf8'),E'{new_log}', E'{old_log}' )::bytea
+        WHERE filename like 'null_dag_%.condor';
+        """
+    )
diff --git a/shared/workspaces/alembic/versions/templates/curator/metadata_2.8.2.3.txt b/shared/workspaces/alembic/versions/templates/curator/metadata_2.8.2.3.txt
new file mode 100644
index 0000000000000000000000000000000000000000..f09e51c29f736515c1dc20adf6312cb06c00d218
--- /dev/null
+++ b/shared/workspaces/alembic/versions/templates/curator/metadata_2.8.2.3.txt
@@ -0,0 +1,13 @@
+{
+  {{#product_locator}}"product_locator": "{{product_locator}}",{{/product_locator}}{{#data_src}}
+  "data_location": "{{data_src}}",{{/data_src}}
+  "product_type": "{{product_type}}",{{#target_list}}
+  "target_list": ["{{target_list}}"],{{/target_list}}{{#product_group_id}}
+  "product_group_id": {{product_group_id}},{{/product_group_id}}{{#input_group_locator}}
+  "input_group_locator": "{{input_group_locator}}",{{/input_group_locator}}
+  "projectMetadata": {
+    "telescope": "{{telescope}}",
+    "projectCode": "{{projectCode}}"
+  },
+  "file_list": "{{#file_list}}{{.}},{{/file_list}}"
+}
\ No newline at end of file
diff --git a/shared/workspaces/pyproject.toml b/shared/workspaces/pyproject.toml
index c212f1df42f134b1315331016b020b0d08972b54..111f50179270da62932b039b70ce5c42fbbfcac2 100644
--- a/shared/workspaces/pyproject.toml
+++ b/shared/workspaces/pyproject.toml
@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "workspaces"
-version = "2.8.2.2"
+version = "2.8.2.3rc1"
 description = "SSA Workspaces shared library"
 authors = ["DMS SSA <dms-ssa@nrao.edu>"]
 license = "GPL3+"
diff --git a/shared/workspaces/workspaces/__init__.py b/shared/workspaces/workspaces/__init__.py
index 8ddfb27aaea4382aea33c6a76660db8e94599ad7..78186a00c47e3fbeaa74f06a48a07a2d2d647f38 100644
--- a/shared/workspaces/workspaces/__init__.py
+++ b/shared/workspaces/workspaces/__init__.py
@@ -18,4 +18,4 @@
 """
 SSA Workspaces shared library
 """
-__version__ = "2.8.2.2"
+__version__ = "2.8.2.3rc1"
diff --git a/shared/workspaces/workspaces/capability/schema.py b/shared/workspaces/workspaces/capability/schema.py
index 80ab9f89df45d67ed3863b4d0aafd9715933e0f2..a1abefc5e7ca4b850425aa50130f73295cc8d184 100644
--- a/shared/workspaces/workspaces/capability/schema.py
+++ b/shared/workspaces/workspaces/capability/schema.py
@@ -34,8 +34,8 @@ import pendulum
 import requests
 import sqlalchemy as sa
 from pycapo import CapoConfig
-from sqlalchemy.ext.orderinglist import ordering_list
 from sqlalchemy.ext.mutable import MutableDict
+from sqlalchemy.ext.orderinglist import ordering_list
 from sqlalchemy.orm import registry, relationship
 
 from workspaces.capability.enums import (
@@ -1337,7 +1337,7 @@ class CapabilityRequest(JSONSerializable):
         self.determine_state()
 
         # are we loading files?
-        hide_files = 'hide_files' in kwargs and kwargs['hide_files']
+        hide_files = "hide_files" in kwargs and kwargs["hide_files"]
 
         return {
             "type": self.__class__.__name__,
@@ -1397,9 +1397,7 @@ class CapabilityVersion(JSONSerializable):
     files = relationship("CapabilityVersionFile", back_populates="version")
     capability_name = sa.Column("capability_name", sa.String, sa.ForeignKey(CAPABILITY_NAME_FK))
     capability = relationship(Capability)
-    internal_notes = sa.Column(
-        "internal_notes", sa.String, default=f"Version 1: {datetime.datetime.now().strftime('%Y-%m-%dT%H:%M:%S')}\n"
-    )
+    internal_notes = sa.Column("internal_notes", sa.String)
 
     @property
     def current_execution(self) -> CapabilityExecution:
@@ -1407,7 +1405,7 @@ class CapabilityVersion(JSONSerializable):
 
     # Pyramid support method: must accept a "request" argument that is unused by us
     def __json__(self, request=None, **kwargs) -> dict:
-        hide_files = 'hide_files' in kwargs and kwargs['hide_files']
+        hide_files = "hide_files" in kwargs and kwargs["hide_files"]
         return {
             "type": self.__class__.__name__,
             "capability_request_id": self.capability_request_id,
diff --git a/shared/workspaces/workspaces/capability/services/capability_info.py b/shared/workspaces/workspaces/capability/services/capability_info.py
index a38a91f3a1fb48fe84536c2846a516b0fba0fb5c..879f656e4ea29f948d3473f9fd1a0fb1d11ac34a 100644
--- a/shared/workspaces/workspaces/capability/services/capability_info.py
+++ b/shared/workspaces/workspaces/capability/services/capability_info.py
@@ -28,7 +28,7 @@ from typing import Dict, List, Optional
 import transaction
 from sqlalchemy import desc, text
 from sqlalchemy.ext.declarative import declarative_base
-from sqlalchemy.orm import Session, selectinload, defer
+from sqlalchemy.orm import Session, defer, selectinload
 
 from workspaces.capability.enums import CapabilityRequestState, QueueState
 from workspaces.capability.helpers_interfaces import ParameterIF
@@ -301,14 +301,21 @@ class CapabilityInfo:
                     for key in metadata:
                         parameters["metadata"][key] = metadata[key]
 
-                if "product_locator" not in parameters:
+                if "product_locator" not in parameters and "product_group_id" not in parameters:
                     parameters["product_locator"] = metadata["spl"]
 
         request = CapabilityRequest(
             state=CapabilityRequestState.Created.name,
             capability=capability,
             # a trick here is to ensure that we always have a first version, with the original parameters
-            versions=[CapabilityVersion(version_number=1, parameters=parameters, capability_name=capability_name)],
+            versions=[
+                CapabilityVersion(
+                    version_number=1,
+                    parameters=parameters,
+                    capability_name=capability_name,
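+                    # Seed the initial note here now that the database default on internal_notes is gone (c9d8e14ae603)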
+                    internal_notes=f"Version 1: {datetime.now().strftime('%Y-%m-%dT%H:%M:%S')}\n",
+                )
+            ],
         )
         self.save_entity(request)
 
diff --git a/shared/workspaces/workspaces/workflow/services/interfaces.py b/shared/workspaces/workspaces/workflow/services/interfaces.py
index b75a8e6a5bf2315e72453f8aacca21958528e8a4..136186dab8f50548d5bf48c37ff884041c303edf 100644
--- a/shared/workspaces/workspaces/workflow/services/interfaces.py
+++ b/shared/workspaces/workspaces/workflow/services/interfaces.py
@@ -180,3 +180,12 @@ class WorkflowInfoIF(ABC):
         :return:
         """
         pass
+
+    def refresh_request(self, request: WorkflowRequest) -> WorkflowRequest:
+        """
+        Refresh an in-use request object with current data
+
+        :param request: the request to be refreshed
+        :return: the updated WorkflowRequest object
+        """
+        pass
diff --git a/shared/workspaces/workspaces/workflow/services/workflow_info.py b/shared/workspaces/workspaces/workflow/services/workflow_info.py
index aedb5a3b508106062b37a12e8a601e61997de315..9d4f6aa0d7153aeb5188fbd60eea44f54a07d982 100644
--- a/shared/workspaces/workspaces/workflow/services/workflow_info.py
+++ b/shared/workspaces/workspaces/workflow/services/workflow_info.py
@@ -280,3 +280,13 @@ class WorkflowInfo(WorkflowInfoIF):
         """
         request.update_cleaned(update_flag)
         self.save_request(request)
+
+    def refresh_request(self, request: WorkflowRequest) -> WorkflowRequest:
+        """
+        Ensure we are operating on an up-to-date object
+
+        :param request: the possibly stale request to refresh
+        :return: updated WorkflowRequest object
+        """
+        # merge() reattaches the instance to the current session so subsequent updates are tracked
+        return self.session.merge(request)
diff --git a/shared/workspaces/workspaces/workflow/services/workflow_service.py b/shared/workspaces/workspaces/workflow/services/workflow_service.py
index 6fc1016190a5777b8b93190fffe7a44e6bcafe70..fb6dda4d41d1cab8eb2c4997017801c0419b23fa 100644
--- a/shared/workspaces/workspaces/workflow/services/workflow_service.py
+++ b/shared/workspaces/workspaces/workflow/services/workflow_service.py
@@ -33,14 +33,12 @@ from typing import Dict, List, Union
 
 import requests
 import transaction
-
 from messaging.messenger import MessageSender
 from messaging.router import Router, on_message
 from pycapo import CapoConfig
 from requests import Response
 
 from workspaces.system.schema import AbstractFile
-from workspaces.workflow.services.remote_processing_service import CapoInjector
 from workspaces.workflow.enum import WorkflowRequestState
 from workspaces.workflow.message_architect import (
     ArchiveMessageArchitect,
@@ -50,6 +48,7 @@ from workspaces.workflow.schema import Workflow, WorkflowRequest, WorkflowReques
 from workspaces.workflow.services.interfaces import WorkflowInfoIF, WorkflowServiceIF
 from workspaces.workflow.services.monitor import WorkflowMonitor
 from workspaces.workflow.services.recovery import MonitorRecover
+from workspaces.workflow.services.remote_processing_service import CapoInjector
 
 logger = logging.getLogger(__name__)
 logger.setLevel(logging.DEBUG)
@@ -463,7 +462,12 @@ class WorkflowService(WorkflowServiceIF):
 
     def _get_wrester_args(self, wf_request: WorkflowRequest) -> List | WorkflowRequest:
         name = wf_request.workflow_name
-        argument = wf_request.argument["product_locator"] if "product_locator" in wf_request.argument else None
+        if "product_locator" in wf_request.argument:
+            argument = wf_request.argument["product_locator"]
+        elif "product_group_id" in wf_request.argument:
+            argument = str(wf_request.argument["product_group_id"])
+        else:
+            argument = None
         argument2 = []
 
         def in_name(name_element: str) -> bool:
@@ -504,7 +508,7 @@ class WorkflowService(WorkflowServiceIF):
                 )
                 argument = eb
         elif in_name("curator"):
-            wrest_type = "--curator"
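+            # Full curation wrests metadata for an entire product group, so it needs the curator_products wrester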
+            wrest_type = "--curator_products" if wf_request.argument["curator_type"].lower() == "full" else "--curator"
         else:
             logger.info(f"No wrester found for workflow {name}. Does it actually require metadata?")
             return wf_request
@@ -945,10 +949,12 @@ class WorkflowMessageHandler:
                     self.send_external_event("update", **tack_on)
 
         elif message["type"] == "workflow-failed":
+            request = self.info.refresh_request(request)
             status = WorkflowRequestState.Error.name
             self._post_workflow_cleanup(message, request, "failed")
 
         elif message["type"] == "workflow-aborted":
+            request = self.info.refresh_request(request)
             status = WorkflowRequestState.Failed.name
             self._post_workflow_cleanup(message, request, "failed")
 
@@ -1010,6 +1016,7 @@ class WorkflowMessageHandler:
             self.messenger.send_message(**iterations_msg)
 
         elif message["type"] == "workflow-complete":
+            request = self.info.refresh_request(request)
             status = WorkflowRequestState.Complete.name
             self._post_workflow_cleanup(message, request, "complete")