diff --git a/apps/__init__.py b/apps/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/apps/cli/__init__.py b/apps/cli/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/apps/cli/executables/datafetcher/setup.py b/apps/cli/executables/datafetcher/setup.py
index 84415da77099fcd47dcf8120dd31be7b8a1f0b1c..0da3167676195341f42bd23934f59a0d9d86c6fb 100644
--- a/apps/cli/executables/datafetcher/setup.py
+++ b/apps/cli/executables/datafetcher/setup.py
@@ -2,7 +2,6 @@
 # -*- coding: utf-8 -*-
 
 from pathlib import Path
-
 from setuptools import setup
 
 VERSION = open('_version.py').readlines()[-1].split()[-1].strip("\"'")
@@ -21,6 +20,7 @@ requires = [
 tests_require = [
     'pytest>=5.4,<6.0'
 ]
+
 setup(
     name=Path().absolute().name,
     version=VERSION,
@@ -34,7 +34,7 @@ setup(
     tests_require=tests_require,
     keywords=[],
     packages=['datafetcher'],
-    package_dir={'':'src'},
+    package_dir={'': 'src'},
     classifiers=[
         'Programming Language :: Python :: 3.8'
     ],
diff --git a/apps/cli/executables/datafetcher/test/Dockerfile b/apps/cli/executables/datafetcher/test/Dockerfile
index 5ddc445a1d7abf529847978aa99845c8a6a1f9bb..895c9740b14b469c096eff559c92bd0036f6159e 100644
--- a/apps/cli/executables/datafetcher/test/Dockerfile
+++ b/apps/cli/executables/datafetcher/test/Dockerfile
@@ -15,7 +15,7 @@ FROM continuumio/miniconda3:latest
 
 COPY environment.yml .
 
-ENV PATH $HOME/miniconda3/bin/conda:$PATH
+ENV PATH $HOME/miniconda3/condabin/conda:$PATH
 
 # docker_build.sh should have copied environment.yml from data/;
 # it will be used in the command below
@@ -34,7 +34,6 @@ RUN ["conda", "run", "-n", "data", "python", "setup.py", "develop"]
 # we'll need a Capo profile
 ENV CAPO_PROFILE local
 ENV CAPO_PATH test/
-# ENV PATH $PATH:..
 
 # finally, run the tests. be verbose. log stuff.
 # (for more detailed output, use "-vv" and/or "--log-level=DEBUG";
diff --git a/apps/cli/executables/datafetcher/test/docker-build.sh b/apps/cli/executables/datafetcher/test/docker-build.sh
index 1888c1fb8e1a5861126f32fdd3a74303de0c267a..484146963dd8b85a13b0639b808ee28cc216ab36 100755
--- a/apps/cli/executables/datafetcher/test/docker-build.sh
+++ b/apps/cli/executables/datafetcher/test/docker-build.sh
@@ -60,6 +60,7 @@ cp ${SOURCE} ${NEW_FILE}
 # remove extended attributes, which would cause Capo to balk
 /usr/bin/xattr -c ${NEW_FILE}
 
+## where the magic happens
 if [ "${USE_CACHE}" == 1 ]
 then
   echo '>>>> Using cache, if possible'
diff --git a/apps/cli/executables/datafetcher/tox.ini b/apps/cli/executables/datafetcher/tox.ini
index df64e93966468dc698889e3ec0ad79b39e1677fd..d79ea386ba0dab52544c11a5aab11da66c355ab3 100644
--- a/apps/cli/executables/datafetcher/tox.ini
+++ b/apps/cli/executables/datafetcher/tox.ini
@@ -2,7 +2,7 @@
 # https://tox.readthedocs.io/en/latest/
 
 [tox]
-envlist = py27, py34, py35, py36, py37
+envlist = py27, py34, py35, py36, py37, py38
 skip_missing_interpreters = True
 
 [testenv]
diff --git a/apps/cli/utilities/__init__.py b/apps/cli/utilities/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/apps/cli/utilities/s_code_project_updater/setup.py b/apps/cli/utilities/s_code_project_updater/setup.py
index 36ee1407fe690184083a1b1eeb8d63cc6fa2a9be..2427961859e8a06eaee55fb0771025e1f8cdaecb 100644
--- a/apps/cli/utilities/s_code_project_updater/setup.py
+++ b/apps/cli/utilities/s_code_project_updater/setup.py
@@ -17,6 +17,8 @@ setup(
     url='TBD',
     license="GPL",
     install_requires=['pycapo', 'pymygdala', 'schema', 'sqlalchemy', 'support'],
+    tests_require=['pytest-mock'],
+    requires=['sqlalchemy', 'mysqldb'],
     keywords=[],
     packages=['s_code_project_updater'],
     package_dir={'':'src'},
diff --git a/apps/cli/utilities/s_code_project_updater/src/s_code_project_updater/__init__.py b/apps/cli/utilities/s_code_project_updater/src/s_code_project_updater/__init__.py
index e69de29bb2d1d6434b8b29ae775ad8c2e48c5391..57122f286f27d1a64669e2cbf2932a7088245955 100644
--- a/apps/cli/utilities/s_code_project_updater/src/s_code_project_updater/__init__.py
+++ b/apps/cli/utilities/s_code_project_updater/src/s_code_project_updater/__init__.py
@@ -0,0 +1,10 @@
+from enum import Enum
+
+
+class Telescope(Enum):
+    ''' all telescopes we expect to find in execution_blocks '''
+    ALMA = 'ALMA'
+    EVLA = 'EVLA'
+    VLA  = 'VLA'
+    VLBA = 'VLBA'
+    GBT  = 'GBT'
diff --git a/apps/cli/utilities/s_code_project_updater/src/s_code_project_updater/commands.py b/apps/cli/utilities/s_code_project_updater/src/s_code_project_updater/commands.py
index 576b3a55f471d58c0091ca1fc1d43c18c013cd08..901eaad404031424e9c3d63eacb83e46d8ae6cc3 100644
--- a/apps/cli/utilities/s_code_project_updater/src/s_code_project_updater/commands.py
+++ b/apps/cli/utilities/s_code_project_updater/src/s_code_project_updater/commands.py
@@ -3,20 +3,22 @@ A module for updating properties (title, abstract, PI and coI) of a provided pro
 """
 import argparse as ap
 import logging
-import sys, os
+import os
+import sys
 import warnings
 from typing import List
 
+from pymygdala import LogHandler, SendNRAOEvent
+from s_code_project_updater import Telescope
+from schema.model import Author, Project
+from schema.pstmodel import Person, UserAuthentication
 from sqlalchemy import exc as sa_exc, asc, desc
-
-from ._version import ___version___ as version
 from support.capo import get_my_capo_config
 from support.logging import get_console_logger, LOG_MESSAGE_FORMATTER
-from pymygdala import LogHandler, SendNRAOEvent
+
+from schema import ArchiveDBSession, ExecutionBlock
+from ._version import ___version___ as version
 from .project_fetcher import ArchiveProjectFetcher
-from schema import ArchiveDBSession, ScienceProduct
-from schema.model import Author, Project
-from schema.pstmodel import Person, UserAuthentication
 
 _APPLICATION_NAME = 's_code_project_updater'
 _LOG = get_console_logger(_APPLICATION_NAME, logging.DEBUG)
@@ -32,18 +34,6 @@ _EPILOG = """Return values:
 5: Update failed"""
 
 
-def scode_project_from_args(namespace):
-
-    ns_dict = namespace.__dict__
-    project_code = ns_dict['project']
-    title = ns_dict['title']
-    abstract = ns_dict['abstract']
-    investigators = ns_dict['investigators']
-
-    new_project = ArchiveProject(project_code=project_code, title=title, abstract=abstract, author_pst_ids=investigators)
-    return new_project
-
-
 class ScodeProjectUpdater:
     """
     A class to bundle the operations involved with updating a project in the archive.
@@ -57,7 +47,26 @@ class ScodeProjectUpdater:
         :param kwargs: the command line arguments or namespace with the arguments to the parser
         """
         self._make_parser()
-        self.args = self.parser.parse_args(**kwargs)
+        try:
+            _LOG.warning('parsing....')
+            self.args = self.parser.parse_args(**kwargs)
+            _LOG.warning('parsed')
+
+        except Exception as exc:
+            _LOG.error(f'parser threw {exc}')
+            self.exit_with_error('Capo profile and project code are '
+                                 'required', 2)
+
+        # at a minimum, Capo profile and project code are required
+        if not self.args.profile or not self.args.project:
+            if not self.args.profile and not self.args.project:
+                self.exit_with_error('Capo profile and project code are '
+                                     'required', 2)
+            if not self.args.profile:
+                self.exit_with_error('Capo profile not specified', 2)
+            if not self.args.project:
+                self.exit_with_error('project code not specified', 2)
+
         args_dict = self.args.__dict__
 
         if args_dict['dry']:
@@ -66,15 +75,17 @@ class ScodeProjectUpdater:
             self.is_dry = False
 
         if not args_dict['investigators'] and not args_dict['title'] and not args_dict['abstract']:
-            self.get_minimal_args(args_dict)
-            return
+            self.set_minimum_properties_from_args(args_dict)
+            # return
 
+        self.project_code = args_dict['project']
         self.stored_project = None
         _LOG.debug(f'{self.args}')
 
-        self.sc_project = scode_project_from_args(self.args)
-
-        self.capo_config = get_my_capo_config(profile=self.args.profile)
+        try:
+            self.capo_config = get_my_capo_config(profile=self.args.profile)
+        except Exception as exc:
+            self.exit_with_error(f'Capo configuration error: {exc}', 1)
         try:
             self.archive_context = ArchiveDBSession('SDM', profile=self.capo_config.profile)
             self.pst_context = ArchiveDBSession('PST', profile=self.capo_config.profile)
@@ -88,7 +99,25 @@ class ScodeProjectUpdater:
                                  'in the archive.  There should be only one PI and any number of '
                                  'unique CoIs on a project.', 2)
 
-    def get_minimal_args(self, args):
+
+    def scode_project_from_args(self, args: ap.Namespace):
+        ''' turn the command-line arguments into a project '''
+
+        ns_dict = args.__dict__
+        project_code = ns_dict['project']
+        title = ns_dict['title']
+        abstract = ns_dict['abstract']
+        investigators = ns_dict['investigators']
+
+        new_project = ArchiveProject(project_code=project_code,
+                                     title=title,
+                                     abstract=abstract,
+                                     author_pst_ids=investigators)
+        return new_project
+
+
+    def set_minimum_properties_from_args(self, args):
+        ''' basic info needed for a fetch '''
         self.project_code = args['project']
         self.profile = args['profile']
         self.is_dry = True
@@ -120,12 +149,6 @@ class ScodeProjectUpdater:
                                  'and what the project would look like after the changes.')
         self.parser = result
 
-    def capture_error(self, msg, code):
-        self.error_message = msg
-        self.code = code
-        _LOG.error(f'error message received: {self.error_message}; code = {self.code}')
-        return self.code, self.error_message
-
     def exit_with_error(self, msg, code):
         """
         On discovering we have an unresolvable condition the prevents us from proceeding with the
@@ -134,7 +157,9 @@ class ScodeProjectUpdater:
         :param code: the exit code to accompany the error message
         :return: None
         """
-        self.capture_error(msg, code)
+        self.error_message = msg
+        self.code = code
+        _LOG.error(f'error message received: {self.error_message}; code = {self.code}')
         _LOG.error(msg)
         self.parser.print_help()
         sys.exit(code)
@@ -192,8 +217,9 @@ class ScodeProjectUpdater:
         and person_id
         :return: None
         """
-        old_investigators = self.get_projects_current_investigators()
-        # if any of the new investigators already exists, use the old author_id rather than making a new author
+
+        # if any of the new investigators already exists,
+        # use the old author_id rather than making a new author
         is_pi = True
         num_expected = len(new_investigators)
         num_changed  = 0
@@ -220,7 +246,7 @@ class ScodeProjectUpdater:
 
         if num_changed < num_expected:
             _LOG.error(f'{num_changed} of {num_expected} investigators were NOT set')
-            raise Exception('incomplete investigator update')
+            raise UpdateException('incomplete investigator update')
 
     def print_project(self):
         """
@@ -239,7 +265,6 @@ class ScodeProjectUpdater:
         # we want the PI's pst_person_id followed by the CoIs' pst_person_ids in numeric order
         pi = investigator_list[0]
         if pi.pst_person_id is not None:
-            self.is_alma = False
             coi_pst_ids = [int(coi.pst_person_id) for coi in investigator_list[1:]]
             coi_pst_ids = sorted(coi_pst_ids)
             author_pst_ids = [int(pi.pst_person_id)]
@@ -247,8 +272,6 @@ class ScodeProjectUpdater:
             authors_to_print = [str(id) for id in author_pst_ids]
             id_list = ' '.join(authors_to_print)
             output.append(f'Authors: {id_list}')
-        else:
-            self.is_alma = True
 
         return output
 
@@ -258,10 +281,19 @@ class ScodeProjectUpdater:
         except AttributeError:
             return False
 
-    def update_project(self):
-        if self.is_fetch_only():
-            fetcher = ArchiveProjectFetcher(self.profile)
+    def update_project(self) -> Project:
+        '''
+            Where the magic happens: update aspects of the project
+            (including authors) according to the arguments passed in
+        :return:
+        '''
+        fetcher = ArchiveProjectFetcher(self.args.profile)
+        try:
             self.project = fetcher.fetch_project(self.project_code)
+        except AttributeError:
+            self.exit_with_error(f'project code "{self.project_code}" not '
+                                 f'found', 3)
+        if self.is_fetch_only():
             output = fetcher.build_project_info()
             try:
                 [_LOG.info(line) for line in output]
@@ -286,17 +318,12 @@ class ScodeProjectUpdater:
             if self.stored_project is None:
                 self.exit_with_error('No project found for the project_code provided', 3)
 
-            # is this an ALMA project?
-            self.product = self.archive_context.session.query(ScienceProduct) \
-                .filter(ScienceProduct.project == self.stored_project) \
-                .first()
-            external_system = self.product.external_system
-            if str(external_system).startswith("ALMA"):
-                raise ValueError(f'{self.stored_project.project_code} is an ALMA project; update not permitted')
+            if self.is_alma():
+                self.exit_with_error(f'{self.stored_project.project_code} '
+                                 f'is an ALMA project; update not permitted', 2)
 
             if self.args.investigators:
                 proposed_investigators = self.get_pst_users(self.args.investigators)
-                self.sc_project.investigators = proposed_investigators
                 if len(proposed_investigators) == 0 or \
                         not len(self.args.investigators) == len(proposed_investigators):
                     self.exit_with_error('One or more of the investigators you entered was not '
@@ -306,28 +333,41 @@ class ScodeProjectUpdater:
 
             if self.args.title:
                 self.stored_project.title = self.args.title
-                self.sc_project.title = self.args.title
             if self.args.abstract:
                 self.stored_project.abstract = self.args.abstract
-                self.sc_project.abstract = self.args.abstract
 
             if not self.args.dry:
                 if not self.is_fetch_only():
                     self.archive_context.session.commit()
-                    _LOG.info(f'Changes committed')
+                    _LOG.info('Changes committed')
             elif not self.is_fetch_only():
-                _LOG.info(f'Successful dry run; this would have updated the project')
+                _LOG.info('Successful dry run; this would have updated the project')
 
             self.print_project()
             return self.stored_project
 
+    def is_alma(self):
+        ''' is this an alma project? '''
+        with warnings.catch_warnings():
+            # Suppress SQLAlchemy warnings
+            warnings.simplefilter("ignore", category=sa_exc.SAWarning)
+
+            exec_block = self.archive_context.session.query(ExecutionBlock) \
+                .filter(ExecutionBlock.project_code == self.project_code) \
+                .filter(ExecutionBlock.telescope == Telescope.ALMA.value) \
+                .first()
+            return exec_block is not None
+
+
+
     def reindex_project(self):
         """
         If we are not performing a dry run, and have made it this far without error, then we
         re-index the project so the updates will show up in the profile-mapped archive.
         :return: None
         """
-        if not self.args.dry:
+        if not self.args.dry and not self.is_fetch_only() \
+                and '_TEST_PROJECT' not in self.project_code:
             _LOG.info(f'Re-indexing project {self.args.project} to make changes available....')
             # Set up a LogHandler to record the fact we just made a change to this project.
             # We're adding it here, instead of earlier, because nothing we log earlier should be
@@ -371,7 +411,6 @@ class ArchiveProject:
         self.title = title
         self.abstract = abstract
         self.investigators = author_pst_ids
-        self.is_alma = None
 
         options = []
         options.append('-C')
@@ -386,12 +425,9 @@ class ArchiveProject:
         options.append('--investigators')
         self.options = options
 
-    def set_alma(self, is_alma):
-        self.is_alma = is_alma
-
-    def make_args(self, isDry):
+    def make_args(self, is_dry):
         args = []
-        if isDry:
+        if is_dry:
             args.append('-d')
 
         args.append('-C')
@@ -411,12 +447,6 @@ class ArchiveProject:
 
         return args
 
-    @staticmethod
-    def from_schema_project(project: Project, is_alma: bool):
-        to_return = ArchiveProject(project.project_code, project.title, project.abstract, project.authors)
-        to_return.set_alma(is_alma)
-        return to_return
-
     def is_arg(self, arg):
         return arg in self.options
 
@@ -439,6 +469,43 @@ class ArchiveProject:
         return new_project.investigators
 
 
+class UpdateArgParser:
+    ''' Command-line argument parser for ScodeProjectUpdater '''
+    def __init__(self):
+        self.parser = self._make_parser()
+
+    def _make_parser(self):
+        parser = ap.ArgumentParser(description=_DESCRIPTION.format(version),
+                                   formatter_class=ap.RawTextHelpFormatter,
+                                   epilog=_EPILOG)
+        parser.add_argument('-C', '--project', action='store',
+                            help='project_code to update')
+        parser.add_argument('-P', '--profile', action='store',
+                            help='profile name to use, e.g. test, production')
+        parser.add_argument('-T', '--title', action='store',
+                            help='a quoted string for the new title for the project')
+        parser.add_argument('-A', '--abstract', action='store',
+                            help='a quoted string for the new abstract for the project')
+        parser.add_argument('-I', '--investigators', action='store', type=int, nargs='+',
+                            help='a PST ID, or list of PST IDs, of investigators for the project, '
+                                 'as an unquoted integer or space separated integer list.  The '
+                                 'first ID in the list will be added as the PI and all subsequent '
+                                 'IDs will be added as CoIs.')
+        parser.add_argument('-d', '--dry', action='store_true',
+                            help='perform a dry run, going through the motions, but not committing '
+                                 'changes and not performing a re-index of the project.  This may '
+                                 'be useful because it will print the current state of the project '
+                                 'and what the project would look like after the changes.')
+        return parser
+
+    def parse_args(self, **kwargs):
+        return self.parser.parse_args(kwargs)
+
+
+class UpdateException(Exception):
+    ''' throw this if there is trouble during the update '''
+    pass
+
 def main(**kwargs):
     """
     The script's main entry point.
@@ -454,5 +521,3 @@ def main(**kwargs):
 
 if __name__ == '__main__':
     main()
-
-
diff --git a/apps/cli/utilities/s_code_project_updater/src/s_code_project_updater/project_fetcher.py b/apps/cli/utilities/s_code_project_updater/src/s_code_project_updater/project_fetcher.py
index b66d24241c1ca3af45214782ff968dcfeb512357..639c0c3040f2febc42f49316462262b70113f318 100644
--- a/apps/cli/utilities/s_code_project_updater/src/s_code_project_updater/project_fetcher.py
+++ b/apps/cli/utilities/s_code_project_updater/src/s_code_project_updater/project_fetcher.py
@@ -2,10 +2,14 @@ import logging
 import sys
 import warnings
 
+from sqlalchemy import exc as sa_exc, asc, desc
+
+from schema import ArchiveDBSession, create_session, ExecutionBlock
+from schema.model import Project, Author
 from support.capo import get_my_capo_config
 from support.logging import get_console_logger
-from schema import ArchiveDBSession, Project, Author, ScienceProduct
-from sqlalchemy import exc as sa_exc, asc, desc
+
+from . import Telescope
 
 _APPLICATION_NAME = 'project_fetcher'
 _LOG = get_console_logger(_APPLICATION_NAME, logging.DEBUG)
@@ -18,50 +22,58 @@ class ArchiveProjectFetcher:
     def __init__(self, profile):
         self.capo_config = get_my_capo_config(profile=profile)
         try:
-            self.archive_context = ArchiveDBSession('SDM', profile=self.capo_config.profile)
+            self.archive_context = create_session('SDM',
+                                                  profile=self.capo_config.profile)
             self.pst_context = ArchiveDBSession('PST', profile=self.capo_config.profile)
         except KeyError as k_ex:
             _LOG.error(f'An error occurred while creating a db context: {k_ex}')
             sys.exit(1)
 
-    def fetch_project(self, project_code):
-        with warnings.catch_warnings(), self.archive_context, self.pst_context:
+    def __exit__(self, exc_type, exc_val, exc_tb):
+        self.archive_context.close()
+        self.pst_context.session.close()
+
+    def fetch_project(self, project_code: str):
+
+        with warnings.catch_warnings():
             # Suppress SQLAlchemy warnings
             warnings.simplefilter("ignore", category=sa_exc.SAWarning)
             """
             Return the project specified by the input arguments, if it exists.
             :return: the first Project we found with the project code passed in
             """
-            self.project = self.archive_context.session.query(Project) \
+            self.project = self.archive_context.query(Project) \
                 .filter(Project.project_code == project_code) \
                 .first()
 
-            self.abstract = self.project.abstract
+            if self.project is None:
+                raise AttributeError(f'project {project_code} not found')
 
-            self.is_alma = self._is_alma()
-            self.authors = self._get_investigators()
-            self.detachable_author_list = self._get_detached_authors()
+            self.abstract = self.project.abstract
+            # self.is_alma = self._is_alma()
 
-            self.project_info = self.build_project_info()
             return self.project
 
     def build_project_info(self):
+        ''' assemble the output '''
         output = []
         output.append(f'Title: {self.project.title}')
         output.append(f'Abstract: {self.project.abstract}')
         if self._is_alma():
             return
 
-        investigator_list = self.authors
-        # investigator_list = self._get_investigators()
-        # We want the PI's pst_person_id followed by the CoIs' pst_person_ids in numeric order.
+        investigator_list = self.project.authors
+        if not investigator_list:
+            raise ValueError(f'no authors found for {self.project.project_code}')
+
+        # We want the PI's pst_person_id followed by the CoIs'
+        # pst_person_ids in numeric order.
         # ALMA authors, however, do not have pst_person_ids
-        pi = investigator_list[0]
+        project_pi = investigator_list[0]
         coi_pst_ids = [int(coi.pst_person_id) for coi in investigator_list[1:]]
-        # TODO: should not need to sort; query does that
         coi_pst_ids = sorted(coi_pst_ids)
 
-        author_pst_ids = [int(pi.pst_person_id)]
+        author_pst_ids = [int(project_pi.pst_person_id)]
         [author_pst_ids.append(id) for id in coi_pst_ids]
         authors_to_print = [str(id) for id in author_pst_ids]
         id_list = ' '.join(authors_to_print)
@@ -71,58 +83,30 @@ class ArchiveProjectFetcher:
 
     def _get_investigators(self):
         """
-         Get a list of investigators associated with this project, with PI(s) as first element(s) of the list
+         Get a list of investigators associated with this project,
+         with PI(s) as first element(s) of the list
 
-         :return: a list of investigators associated with the project code passed in, ordered with
-         the PI(s) first
+         :return: a list of investigators associated with the project code passed in,
+         ordered with the PI(s) first
         """
-        investigators_list = self.archive_context.session.query(Author) \
-            .filter(Author.project == self.project) \
-            .order_by(desc(Author.is_pi), asc(Author.pst_person_id)) \
-            .all()
+        with warnings.catch_warnings():
+            # Suppress SQLAlchemy warnings
+            warnings.simplefilter("ignore", category=sa_exc.SAWarning)
+
+            investigators_list = self.archive_context.query(Author) \
+                .filter(Author.project == self.project) \
+                .order_by(desc(Author.is_pi), asc(Author.pst_person_id)) \
+                .all()
         return investigators_list
 
     def _is_alma(self):
-        with warnings.catch_warnings(), self.archive_context, self.pst_context:
+        ''' is this an alma project? '''
+        with warnings.catch_warnings():
             # Suppress SQLAlchemy warnings
             warnings.simplefilter("ignore", category=sa_exc.SAWarning)
-            self.product = self.archive_context.session.query(ScienceProduct)\
-                .filter(ScienceProduct.project == self.project)\
-                .first()
-            external_system = self.product.external_system
-            return str(external_system).startswith("ALMA")
-
-    def _get_detached_authors(self):
-        return [DetachedAuthor(author) for author in self.authors]
-
-if __name__ == '__main__':
-    fetched = ArchiveProjectFetcher("nmtest").fetch_project('SK0442')
-    authors = fetched.authors
-    assert 4 == len(fetched.authors)
-    _LOG.debug("looks ok")
-
-
-class DetachedAuthor:
 
-    def __init__(self, author:Author):
-        self.author_id     = author.author_id
-        self.username      = author.username
-        self.firstname     = author.firstname
-        self.lastname      = author.lastname
-        self.pst_person_id = author.pst_person_id
-        self.is_pi         = author.is_pi
-        # self.project_code  = author.project_code
-
-    def __eq__(self, other):
-        if type(other) is not type(self):
-            return False
-
-        if other.author_id == self.author_id:
-            if other.pst_person_id == self.pst_person_id:
-                if other.lastname == self.lastname:
-                    return other.firstname == self.firstname
-
-        return False
-
-    def __repr__(self):
-        return f'{self.firstname} {self.lastname}: {self.author_id}, {self.pst_person_id}'
\ No newline at end of file
+            exec_block = self.archive_context.query(ExecutionBlock) \
+                .filter(ExecutionBlock.project == self.project) \
+                .filter(ExecutionBlock.telescope == Telescope.ALMA.value) \
+                .first()
+            return exec_block is not None
diff --git a/apps/cli/utilities/s_code_project_updater/src/s_code_project_updater/test_projects.py b/apps/cli/utilities/s_code_project_updater/src/s_code_project_updater/test_projects.py
deleted file mode 100644
index 27b8f2014cfe9c56c450ab0c9bc125312dcb9ca8..0000000000000000000000000000000000000000
--- a/apps/cli/utilities/s_code_project_updater/src/s_code_project_updater/test_projects.py
+++ /dev/null
@@ -1,205 +0,0 @@
-from abc import ABC
-from typing import List
-
-from .commands import ArchiveProject
-from schema import Author
-
-
-class AbstractTestProject(ABC, ArchiveProject):
-
-    def __init__(self, project_code: str, title: str, abstract: str, authors: List, is_alma: bool):
-        self.project_code = project_code
-        self.title = title
-        self.abstract = abstract
-        self.authors = authors
-        self.is_alma = is_alma
-        # self.author_pst_ids = self._get_author_pst_ids()
-
-    def _get_author_pst_ids(self):
-        # we want the PI's pst_person_id followed by the CoIs' pst_person_ids in numeric order
-        pi = self.authors[0]
-        coi_pst_ids = [int(coi.pst_person_id) for coi in self.authors[1:]]
-        coi_pst_ids = sorted(coi_pst_ids)
-
-        author_pst_ids = [int(pi.pst_person_id)]
-        [author_pst_ids.append(id) for id in coi_pst_ids]
-        return [str(id) for id in author_pst_ids]
-
-    def as_sc_project(self):
-        author_pst_ids = [str(author.pst_person_id) for author in self.authors]
-        return ArchiveProject(self.project_code, self.title, self.abstract, author_pst_ids)
-
-class ScodeTestProject(AbstractTestProject):
-
-    def __init__(self):
-        self.project_code = 'SK0442'
-        self.title = 'Cool Sky Stuff'
-        self.abstract = 'Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ' \
-                        'ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco ' \
-                        'laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in reprehenderit in ' \
-                        'voluptate velit esse cillum dolore eu fugiat nulla pariatur. Excepteur sint occaecat cupidatat non ' \
-                        'proident, sunt in culpa qui officia deserunt mollit anim id est laborum.'
-        self.authors = [
-            Author(author_id=65409,
-                   project=self.project_code,
-                   username='srandall',
-                   firstname='Scott',
-                   lastname='Randall',
-                   pst_person_id='4686',
-                   is_pi=True),
-
-            Author(author_id=65410,
-                   project=self.project_code,
-                   username='s.giacintucci',
-                   firstname='Simona',
-                   lastname='Giacintucci',
-                   pst_person_id='317',
-                   is_pi=False),
-
-            Author(author_id=65411,
-                   project=self.project_code,
-                   username='esch44',
-                   firstname='Emma',
-                   lastname='Schwartzman',
-                   pst_person_id='11991',
-                   is_pi=False),
-
-            Author(author_id=65412,
-                   project=self.project_code,
-                   username='tclarke',
-                   firstname='Tracy',
-                   lastname='Clarke',
-                   pst_person_id='341',
-                   is_pi=False),
-
-        ]
-
-        self.is_alma = False
-        self.author_pst_ids = self._get_author_pst_ids()
-
-        super(AbstractTestProject, self).__init__(self.project_code, self.title, self.abstract, self.author_pst_ids)
-
-class ScienceTestProject(AbstractTestProject):
-
-    def __init__(self):
-        self.project_code = '13B-014'
-        self.title = 'The Comprehensive VLA Survey for Black Holes in Globular Clusters'
-        self.abstract = 'Spurred by our surprising VLA discovery of the first black holes in Milky Way ' \
-                        'globular clusters, we propose an ambitious survey for both stellar-mass and ' \
-                        'intermediate-mass black holes in globular clusters. ' \
-                        'With well-defined selection criteria, our sample will allow the first statistical ' \
-                        'determination of the presence of black holes in clusters. This survey will make an ' \
-                        'immediate impact in a number of fields, including black hole demographics, ' \
-                        'accretion physics, gravitational wave predictions, and globular cluster evolution.'
-        self.authors = [
-
-            Author(author_id=8749,
-                   project=self.project_code,
-                   username='jstrader',
-                   firstname='Jay',
-                   lastname='Strader',
-                   pst_person_id='4064',
-                   is_pi=True),
-
-            Author(author_id=8743,
-                   project=self.project_code,
-                   username='jcamj',
-                   firstname='James',
-                   lastname='Miller-Jones',
-                   pst_person_id='490',
-                   is_pi=False),
-
-            Author(author_id=8744,
-                   project=self.project_code,
-                   username='chomiuk',
-                   firstname='Laura',
-                   lastname='Chomiuk',
-                   pst_person_id='701',
-                   is_pi=False),
-
-            Author(author_id=8745,
-                   project=self.project_code,
-                   username='gsivakoff',
-                   firstname='Gregory',
-                   lastname='Sivakoff',
-                   pst_person_id='834',
-                   is_pi=False),
-
-            Author(author_id=8746,
-                   project=self.project_code,
-                   username='tjmaccarone',
-                   firstname='Thomas',
-                   lastname='Maccarone',
-                   pst_person_id='887',
-                   is_pi=False),
-
-            Author(author_id=8747,
-                   project=self.project_code,
-                   username='anilseth',
-                   firstname='Anil',
-                   lastname='Setn',
-                   pst_person_id='1197',
-                   is_pi=False),
-
-            Author(author_id=8748,
-                   project=self.project_code,
-                   username='Craig Heinke',
-                   firstname='Craig',
-                   lastname='Heinke',
-                   pst_person_id='3729',
-                   is_pi=False),
-
-            Author(author_id=8750,
-                   project=self.project_code,
-                   username='evanoyola',
-                   firstname='Eva',
-                   lastname='Noyola',
-                   pst_person_id='5532',
-                   is_pi=False),
-
-        ]
-
-        self.is_alma = False
-        self.author_pst_ids = self._get_author_pst_ids()
-
-        super(AbstractTestProject, self).__init__(self.project_code, self.title, self.abstract, self.author_pst_ids)
-
-
-class AlmaTestProject(AbstractTestProject):
-
-    def __init__(self):
-        self.project_code = '2012.1.00060.S'
-        self.title = "Testing Schmidt's Conjecture in NGC 300: Bridging the Gap between Galactic and Extragalactic Star Formation"
-        self.abstract = "Understanding the physical factors that control the conversion of interstellar gas into stars " \
-                        "is of fundamental importance for both developing a predictive physical theory of star formation and understanding the evolution of galaxies from the earliest epochs of cosmic history to the present time. An important aspect of this question is the study of empirical relations that connect the star formation rate in a given region to local properties of the interstellar medium. An important example is the Schmidt-Kennicutt (KS) law for galaxies that relates the surface densities of the star formation rate and the surface densities of interstellar gas in a non-linear fashion. However, it is also known that there is a linear correlation between the total SFR in galaxies and the mass of dense molecular gas as traced by the high excitation HCN molecule. Contrary to the KS relation, this scaling relation suggests that the total SFR depends simply on the total amount of dense molecular gas in a star forming system. Recently, we have begun to test these scaling relations in the Galactic neighborhood where star formation rates can be much better constrained. We found that for local clouds the total SFR scales most directly, and linearly, with the total mass of high extinction (and dense) molecular gas. Furthermore, we found this linear scaling law between SFR and dense gas to extend and extrapolate directly and smoothly to external galaxies. Moreover, our observations also demonstrate that a KS type relation does not exist for molecular clouds in the Galactic neighborhood. This is a direct consequence of a well known scaling law between the mass and size of molecular clouds, Larson's third law. Overall, our results indicate that a linear scaling law, in which the total amount of dense gas controls the SFR, is the fundamental physical relation that connects star formation across the vast scales from individual GMCs to entire galaxies. Critical testing of these ideas require resolved observations of GMCs in external galaxies. 
Here we propose to use ALMA to evaluate star formation scaling laws in a nearby galaxy where we can obtain resolved observations of individual GMCs. This allows us to obtain observations of a larger sample of GMCs than is accessible in the Galactic neighborhood. An extensive APEX survey of HII regions in the nearby galaxy NGC 300 has provided us with a sample of 36 star-forming regions with CO(2-1) detections and 42 upper limits. We are currently working on obtaining star formation rates for these regions from multi-wavelength ancillary data including our Herschel observations. We propose to use ALMA's unequalled capabilities to obtain snapshot observations of 40 selected regions in CO(2-1) in order to make resolved measurements of cloud structure to obtain sizes and virial masses. As a pilot project, we also propose to observe the brightest subsample in HCN(1-0) as a dense-gas tracer. Our proposed ALMA CO observations will enable us to to test Larson's scaling laws in an external galaxy and to evaluate which formulation of the Schmidt law is the most meaningful and appropriate to apply to spiral galaxies, and in doing so refine Schmidt's original conjecture of a scaling relation between the rate of star formation and gas density."
-        self.authors = [
-
-            Author(author_id=37200,
-                   project=self.project_code,
-                   username='clada',
-                   firstname='Charles',
-                   lastname='Lada',
-                   pst_person_id=None,
-                   is_pi=True),
-
-            Author(author_id=37201,
-                   project=self.project_code,
-                   username='jforbrich',
-                   firstname='Jan',
-                   lastname='Forbrich',
-                   pst_person_id=None,
-                   is_pi=False),
-
-            Author(author_id=37202,
-                   project=self.project_code,
-                   username='cfaesi',
-                   firstname='Christopher',
-                   lastname='Faesi',
-                   pst_person_id=None,
-                   is_pi=False),
-
-        ]
-        self.is_alma = True
-        super(AbstractTestProject, self).__init__(self.project_code, self.title, self.abstract, None)
-
-
diff --git a/apps/cli/utilities/s_code_project_updater/src/s_code_project_updater/updater_tests.py b/apps/cli/utilities/s_code_project_updater/src/s_code_project_updater/updater_tests.py
deleted file mode 100644
index 4bbe4cba361c9ff61fc49cdf0f82aaca7636c38c..0000000000000000000000000000000000000000
--- a/apps/cli/utilities/s_code_project_updater/src/s_code_project_updater/updater_tests.py
+++ /dev/null
@@ -1,272 +0,0 @@
-import logging
-import os
-import unittest
-import warnings
-
-from support.logging import get_console_logger
-from .commands import ArchiveProject, ScodeProjectUpdater
-from .project_fetcher import ArchiveProjectFetcher
-from .test_projects import ScienceTestProject, ScodeTestProject, AlmaTestProject
-from schema import Author, ArchiveDBSession
-from sqlalchemy import exc as sa_exc
-
-_LOG = get_console_logger("scode_project_updater_tests", logging.DEBUG)
-
-class UpdaterTestCase(unittest.TestCase):
-
-    @classmethod
-    def setUp(self) -> None:
-        self.profile = os.environ['CAPO_PROFILE']
-
-    def restore_scode_project_to_original_state(self):
-        self.archive_context = ArchiveDBSession('SDM', profile=self.profile)
-        self.pst_context = ArchiveDBSession('PST', profile=self.profile)
-
-        scode_project = ScodeTestProject()
-        # start by restoring the title and abstract; authors will need special treatment
-        # args = scp.make_args(False)
-        args = ['-C', scode_project.project_code, '-P', self.profile, '-T', scode_project.title, '-A', scode_project.abstract]
-        project = ScodeProjectUpdater(args=args).update_project()
-
-        with warnings.catch_warnings(), self.archive_context, self.pst_context:
-            # Suppress SQLAlchemy warnings
-            warnings.simplefilter("ignore", category=sa_exc.SAWarning)
-
-            # clear existing investigators
-            investigators_list = self.archive_context.session.query(Author) \
-                .filter(Author.project_code == scode_project.project_code) \
-                .all()
-            for inv in investigators_list:
-                self.archive_context.session.delete(inv)
-            self.archive_context.session.commit()
-
-            # insert the canonical ones
-            canonical_authors = scode_project.authors
-            for author in canonical_authors:
-                author.project = project
-                self.archive_context.session.add(author)
-            self.archive_context.session.commit()
-
-        # confirm restoration
-        fetcher = ArchiveProjectFetcher(self.profile)
-        restored_project = fetcher.fetch_project(scode_project.project_code)
-        self.assertEqual(scode_project.title, restored_project.title)
-        self.assertEqual(scode_project.abstract, restored_project.abstract)
-
-        restored_authors = fetcher.detachable_author_list
-        self.assertEqual(len(scode_project.authors), len(restored_authors))
-
-        pi_found = False
-        for author in restored_authors:
-            if author.username == 'srandall':
-                self.assertTrue(author.is_pi, 'author is pi')
-                self.assertEqual(65409, author.author_id, 'expecting author_id 65409')
-                self.assertEqual('4686', author.pst_person_id, 'expecting pst_person_id 4686')
-                pi_found = True
-            else:
-                self.assertFalse(author.is_pi, 'author is pi')
-                self.assertNotEqual(65409, author.author_id, 'expecting author_id not 65409')
-                self.assertTrue(author.pst_person_id in ('317','341','11991'), "expecting pst_person_ids 317, 341, 11991")
-
-        self.assertTrue(pi_found)
-
-    def restore_science_project_to_original_state(self):
-        science_project = ScienceTestProject()
-        self.archive_context = ArchiveDBSession('SDM', profile=self.profile)
-        self.pst_context = ArchiveDBSession('PST', profile=self.profile)
-
-        # start by restoring the title and abstract; authors will need special treatment
-        # args = scp.make_args(False)
-        args = ['-C', science_project.project_code, '-P', self.profile, '-T', science_project.title, '-A', science_project.abstract]
-        project = ScodeProjectUpdater(args=args).update_project()
-
-        with warnings.catch_warnings(), self.archive_context, self.pst_context:
-            # Suppress SQLAlchemy warnings
-            warnings.simplefilter("ignore", category=sa_exc.SAWarning)
-            # clear existing investigators
-            investigators_list = self.archive_context.session.query(Author) \
-                .filter(Author.project_code == science_project.project_code) \
-                .all()
-            for inv in investigators_list:
-                self.archive_context.session.delete(inv)
-            self.archive_context.session.commit()
-
-            # insert the canonical ones
-            canonical_authors = science_project.authors
-            for author in canonical_authors:
-                author.project = project
-                self.archive_context.session.add(author)
-            self.archive_context.session.commit()
-
-        # confirm restoration
-        fetcher = ArchiveProjectFetcher(self.profile)
-        restored_project = fetcher.fetch_project(science_project.project_code)
-        self.assertEqual(science_project.title, restored_project.title)
-        self.assertEqual(science_project.abstract, restored_project.abstract)
-        restored_authors = fetcher.detachable_author_list
-        self.assertEqual(len(science_project.authors), len(restored_authors))
-
-        pi_found = False
-        for author in restored_authors:
-            if author.username == 'jstrader':
-                self.assertTrue(author.is_pi, 'author is pi')
-                self.assertEqual(8749, author.author_id, 'expecting author_id 8749')
-                self.assertEqual('4064', author.pst_person_id, 'expecting pst_person_id 4064')
-                pi_found = True
-            else:
-                self.assertFalse(author.is_pi, 'author is pi')
-                self.assertNotEqual(8749, author.pst_person_id, 'expecting author_id not 8749')
-                self.assertTrue(8742 < int(author.author_id) < 8751, 'expecting pst_person_id between 8743 and 8750')
-
-        self.assertTrue(pi_found)
-
-    def test_alma_project_has_not_changed(self):
-        alma_test_project = AlmaTestProject()
-        fetcher = ArchiveProjectFetcher(self.profile)
-        fetched = fetcher.fetch_project(alma_test_project.project_code)
-        self.assertEqual(alma_test_project.title, fetched.title)
-        self.assertEqual(alma_test_project.abstract, fetched.abstract)
-        authors_list = fetcher.detachable_author_list
-        self.assertEqual(len(alma_test_project.authors), len(authors_list))
-
-        pi_found = False
-        for author in authors_list:
-            self.assertIsNone(author.pst_person_id, 'expecting no pst_person_id')
-            if author.username == 'clada':
-                self.assertTrue(author.is_pi, 'author is pi')
-                self.assertEqual(37200, author.author_id, 'expecting author_id 37200')
-                pi_found = True
-            else:
-                self.assertFalse(author.is_pi, 'author is pi')
-                self.assertNotEqual(37200, author.pst_person_id, 'expecting author_id not 8749')
-
-        self.assertTrue(pi_found)
-
-    def test_restores_scode_project_correctly(self):
-        self.restore_scode_project_to_original_state()
-
-    def test_restores_science_project_correctly(self):
-        self.restore_science_project_to_original_state()
-
-    def test_can_fetch_from_project_code(self):
-        scode_project = ScodeTestProject()
-        args = ['-C', scode_project.project_code, '-P', self.profile]
-        fetched = ScodeProjectUpdater(args=args).update_project()
-        self.assertEqual(scode_project.title, fetched.title)
-        self.assertEqual(scode_project.abstract, fetched.abstract)
-
-    def test_can_fetch_non_scode_project(self):
-        args = ['-C', '13B-014', '-P', self.profile]
-        fetched = ScodeProjectUpdater(args=args).update_project()
-        self.assertIsNotNone(fetched)
-
-    def test_can_modify_non_scode_project(self):
-        to_modify = ScienceTestProject()
-        authors = to_modify.authors.copy()
-        pst_ids = [author.pst_person_id for author in authors]
-        scp = ArchiveProject(to_modify.project_code, 'foo', to_modify.abstract, pst_ids)
-        scp_args = scp.make_args(False)
-
-        updater = ScodeProjectUpdater(args=scp_args)
-        updated_project = updater.update_project()
-
-        fetcher = ArchiveProjectFetcher(self.profile)
-        retrieved_project = fetcher.fetch_project(updated_project.project_code)
-        # title should have changed
-        self.assertEqual('foo', retrieved_project.title)
-
-        self.restore_science_project_to_original_state()
-
-    def test_no_update_with_dry_run(self):
-
-        self.restore_scode_project_to_original_state()
-        scode_project = ScodeTestProject()
-        authors = scode_project.authors.copy()
-        pst_ids = [author.pst_person_id for author in authors]
-        pst_ids.append(5654)
-
-        scp = ArchiveProject(scode_project.project_code, scode_project.title, scode_project.abstract, pst_ids)
-        scp_args = scp.make_args(True)
-
-        updater = ScodeProjectUpdater(args=scp_args)
-        updated_project = updater.update_project()
-
-        fetcher = ArchiveProjectFetcher(self.profile)
-        retrieved_project = fetcher.fetch_project(updated_project.project_code)
-        authors = retrieved_project.authors
-
-        self.assertEqual(len(scode_project.authors), len(authors))
-        for author in authors:
-            if author.username == 'srandall':
-                assert author.is_pi
-            else:
-                assert not author.is_pi
-
-            self.assertEqual(scode_project.title, retrieved_project.title)
-            self.assertEqual(scode_project.abstract, retrieved_project.abstract)
-
-    def test_setting_investigators_preserves_title_and_abstract(self):
-        scode_project = ScodeTestProject()
-        authors = scode_project.authors.copy()
-        pst_ids = [author.pst_person_id for author in authors]
-        pst_ids.append(5654)
-
-        scp = ArchiveProject(scode_project.project_code, scode_project.title, scode_project.abstract, pst_ids)
-        scp_args = scp.make_args(False)
-
-        updater = ScodeProjectUpdater(args=scp_args)
-        updater.update_project()
-
-        fetcher = ArchiveProjectFetcher(self.profile)
-        retrieved_project = fetcher.fetch_project(scp.project_code)
-
-        authors = fetcher.detachable_author_list
-        self.assertEqual(len(scode_project.authors) + 1, len(authors))
-        for author in authors:
-            if author.username == 'srandall':
-                assert author.is_pi
-            else:
-                assert not author.is_pi
-
-        self.assertEqual(scode_project.title, retrieved_project.title)
-        self.assertEqual(scode_project.abstract, fetcher.abstract)
-        self.restore_scode_project_to_original_state()
-
-    def test_alma_project_not_updated(self):
-        alma_project = AlmaTestProject()
-        args = alma_project.make_args(False)
-        try:
-            updater = ScodeProjectUpdater(args=args)
-            updater.update_project()
-        except Exception as exc:
-            _LOG.info(f'attempt to update ALMA project failed, as expected: {exc}')
-        self.test_alma_project_has_not_changed()
-
-    def test_output_is_desired_format(self):
-        scode_project = ScodeTestProject()
-        authors = scode_project.authors.copy()
-        pi = authors[0]
-        coi_pst_ids = [int(coi.pst_person_id) for coi in authors[1:]]
-        coi_pst_ids = sorted(coi_pst_ids)
-
-        author_pst_ids = [int(pi.pst_person_id)]
-        [author_pst_ids.append(id) for id in coi_pst_ids]
-        authors_to_print = [str(id) for id in author_pst_ids]
-        id_list = ' '.join(authors_to_print)
-
-        scp = ArchiveProject(scode_project.project_code, scode_project.title, scode_project.abstract, author_pst_ids)
-        scp_args = scp.make_args(True)
-        ScodeProjectUpdater(args=scp_args).update_project()
-
-        fetcher = ArchiveProjectFetcher(self.profile)
-        fetcher.fetch_project(scp.project_code)
-        output = fetcher.build_project_info()
-        self.assertEqual(3, len(output))
-        authors_line = output[2]
-        self.assertEqual(f'Authors: {id_list}', authors_line )
-
-
-UpdaterTestCase()
-
-if __name__ == '__main__':
-    unittest.main()
diff --git a/apps/cli/utilities/s_code_project_updater/test/__init__.py b/apps/cli/utilities/s_code_project_updater/test/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/apps/cli/utilities/s_code_project_updater/test/test_projects.py b/apps/cli/utilities/s_code_project_updater/test/test_projects.py
new file mode 100644
index 0000000000000000000000000000000000000000..c6b8e9e60db5111f15cf8f4240431f83da636f3f
--- /dev/null
+++ b/apps/cli/utilities/s_code_project_updater/test/test_projects.py
@@ -0,0 +1,205 @@
+''' fake projects to use in testing scode_project_updater '''
+import warnings
+
+from sqlalchemy import exc as sa_exc
+
+from shared.schema.src.schema import Author, Project
+
+
+class ScodeTestProject():
+    ''' fake s_code project '''
+    def __init__(self):
+        with warnings.catch_warnings():
+            # Suppress SQLAlchemy warnings
+            warnings.simplefilter("ignore", category=sa_exc.SAWarning)
+
+
+            self.project_code = 'SCODE_TEST_PROJECT'
+            self.project = Project (
+                project_code = self.project_code,
+                starttime = 58551.9380295139,
+                endtime = 58924.9589184028,
+                proprietary_duration = 365,
+                title = 'Copy of SK0442',
+                abstract = ' Lorem ipsum dolor sit amet, consectetur adipiscing ' \
+                           'elit, sed do eiusmod tempor incididunt ut labore et ' \
+                           'dolore magna aliqua. Ut enim ad minim veniam, quis ' \
+                           'nostrud exercitation ullamco laboris nisi ut aliquip ' \
+                           'ex ea commodo consequat. Duis aute irure dolor in ' \
+                           'reprehenderit in voluptate velit esse cillum dolore ' \
+                           'eu fugiat nulla pariatur. Excepteur sint occaecat ' \
+                           'cupidatat non proident, sunt in culpa qui officia ' \
+                           'deserunt mollit anim id est laborum.'
+            )
+            self.project.authors = [
+                Author(project_code=self.project_code,
+                       username='srandall',
+                       firstname='Scott',
+                       lastname='Randall',
+                       pst_person_id='4686',
+                       is_pi=True),
+
+                Author(project_code=self.project_code,
+                       username='s.giacintucci',
+                       firstname='Simona',
+                       lastname='Giacintucci',
+                       pst_person_id='317',
+                       is_pi=False),
+
+                Author(project_code=self.project_code,
+                       username='esch44',
+                       firstname='Emma',
+                       lastname='Schwartzman',
+                       pst_person_id='11991',
+                       is_pi=False),
+
+                Author(project_code=self.project_code,
+                       username='tclarke',
+                       firstname='Tracy',
+                       lastname='Clarke',
+                       pst_person_id='341',
+                       is_pi=False),
+
+            ]
+
+
+class ScienceTestProject():
+    ''' fake VLA science project '''
+    def __init__(self):
+        with warnings.catch_warnings():
+            # Suppress SQLAlchemy warnings
+            warnings.simplefilter("ignore", category=sa_exc.SAWarning)
+
+            self.project_code = 'VLA_TEST_PROJECT'
+            self.project = Project(
+                project_code = self.project_code,
+                starttime=56688.8103640046,
+                endtime=56810.4840497685,
+                title='Copy of 13B-014 for testing',
+                abstract='Spurred by our surprising VLA discovery of the first ' \
+                         'black holes in Milky Way ' \
+                         'globular clusters, we propose an ambitious survey for both stellar-mass and ' \
+                         'intermediate-mass black holes in globular clusters. ' \
+                         'With well-defined selection criteria, our sample will allow the first statistical ' \
+                         'determination of the presence of black holes in clusters. This survey will make an ' \
+                         'immediate impact in a number of fields, including black hole demographics, ' \
+                         'accretion physics, gravitational wave predictions, and globular cluster evolution.'
+            )
+            self.project.authors = [
+                Author(project_code=self.project_code,
+                       username='jstrader',
+                       firstname='Jay',
+                       lastname='Strader',
+                       pst_person_id='4064',
+                       is_pi=True),
+                Author(project_code=self.project_code,
+                       username='jcamj',
+                       firstname='James',
+                       lastname='Miller-Jones',
+                       pst_person_id='490',
+                       is_pi=False),
+                Author(project_code=self.project_code,
+                       username='chomiuk',
+                       firstname='Laura',
+                       lastname='Chomiuk',
+                       pst_person_id='701',
+                       is_pi=False),
+                Author(project_code=self.project_code,
+                       username='gsivakoff',
+                       firstname='Gregory',
+                       lastname='Sivakoff',
+                       pst_person_id='834',
+                       is_pi=False),
+                Author(project_code=self.project_code,
+                       username='tjmaccarone',
+                       firstname='Thomas',
+                       lastname='Maccarone',
+                       pst_person_id='887',
+                       is_pi=False),
+
+                Author(project_code=self.project_code,
+                       username='anilseth',
+                       firstname='Anil',
+                       lastname='Setn',
+                       pst_person_id='1197',
+                       is_pi=False),
+
+                Author(project_code=self.project_code,
+                       username='Craig Heinke',
+                       firstname='Craig',
+                       lastname='Heinke',
+                       pst_person_id='3729',
+                       is_pi=False),
+
+                Author(project_code=self.project_code,
+                       username='evanoyola',
+                       firstname='Eva',
+                       lastname='Noyola',
+                       pst_person_id='5532',
+                       is_pi=False),
+
+            ]
+
+class AlmaTestProject():
+    ''' fake ALMA project '''
+    def __init__(self):
+        with warnings.catch_warnings():
+            # Suppress SQLAlchemy warnings
+            warnings.simplefilter("ignore", category=sa_exc.SAWarning)
+
+            self.project_code = 'ALMA_TEST_PROJECT'
+            self.project = Project(
+                project_code=self.project_code,
+                starttime=56799.3877155556,
+                endtime=56799.4128683333,
+                proprietary_duration=365,
+                title='Copy of 2012.1.00060.S for testing',
+                abstract = "Understanding the physical factors that control the " \
+                           "conversion of interstellar gas into stars " \
+                           "is of fundamental importance for both developing a predictive physical theory of star formation and understanding the evolution of galaxies from the earliest epochs of cosmic history to the present time. An important aspect of this question is the study of empirical relations that connect the star formation rate in a given region to local properties of the interstellar medium. An important example is the Schmidt-Kennicutt (KS) law for galaxies that relates the surface densities of the star formation rate and the surface densities of interstellar gas in a non-linear fashion. However, it is also known that there is a linear correlation between the total SFR in galaxies and the mass of dense molecular gas as traced by the high excitation HCN molecule. Contrary to the KS relation, this scaling relation suggests that the total SFR depends simply on the total amount of dense molecular gas in a star forming system. Recently, we have begun to test these scaling relations in the Galactic neighborhood where star formation rates can be much better constrained. We found that for local clouds the total SFR scales most directly, and linearly, with the total mass of high extinction (and dense) molecular gas. Furthermore, we found this linear scaling law between SFR and dense gas to extend and extrapolate directly and smoothly to external galaxies. Moreover, our observations also demonstrate that a KS type relation does not exist for molecular clouds in the Galactic neighborhood. This is a direct consequence of a well known scaling law between the mass and size of molecular clouds, Larson's third law. Overall, our results indicate that a linear scaling law, in which the total amount of dense gas controls the SFR, is the fundamental physical relation that connects star formation across the vast scales from individual GMCs to entire galaxies. Critical testing of these ideas require resolved observations of GMCs in external galaxies. 
Here we propose to use ALMA to evaluate star formation scaling laws in a nearby galaxy where we can obtain resolved observations of individual GMCs. This allows us to obtain observations of a larger sample of GMCs than is accessible in the Galactic neighborhood. An extensive APEX survey of HII regions in the nearby galaxy NGC 300 has provided us with a sample of 36 star-forming regions with CO(2-1) detections and 42 upper limits. We are currently working on obtaining star formation rates for these regions from multi-wavelength ancillary data including our Herschel observations. We propose to use ALMA's unequalled capabilities to obtain snapshot observations of 40 selected regions in CO(2-1) in order to make resolved measurements of cloud structure to obtain sizes and virial masses. As a pilot project, we also propose to observe the brightest subsample in HCN(1-0) as a dense-gas tracer. Our proposed ALMA CO observations will enable us to to test Larson's scaling laws in an external galaxy and to evaluate which formulation of the Schmidt law is the most meaningful and appropriate to apply to spiral galaxies, and in doing so refine Schmidt's original conjecture of a scaling relation between the rate of star formation and gas density."
+            )
+            self.project.authors = [
+                Author(project_code=self.project_code,
+                       username='clada',
+                       firstname='Charles',
+                       lastname='Lada',
+                       pst_person_id=None,
+                       is_pi=True),
+
+                Author(project_code=self.project_code,
+                       username='jforbrich',
+                       firstname='Jan',
+                       lastname='Forbrich',
+                       pst_person_id=None,
+                       is_pi=False),
+                Author(project_code=self.project_code,
+                       username='cfaesi',
+                       firstname='Christopher',
+                       lastname='Faesi',
+                       pst_person_id=None,
+                       is_pi=False),
+            ]
+
+fake_projects = [ScodeTestProject().project,
+                 ScienceTestProject().project,
+                 AlmaTestProject().project]
+
+def get_test_project(project_code: str):
+    for project in fake_projects:
+        if project.project_code == project_code:
+            return project
+    return None
+
+def get_author_pst_ids(project: Project):
+    ''' build list of pst_person_ids for display;
+        we want the PI's pst_person_id followed by the CoIs' pst_person_ids
+        in numeric order
+    '''
+    project_pi = project.authors[0]
+    coi_pst_ids = [int(coi.pst_person_id) for coi in project.authors[1:]]
+    coi_pst_ids = sorted(coi_pst_ids)
+
+    author_pst_ids = [int(project_pi.pst_person_id)]
+    [author_pst_ids.append(id) for id in coi_pst_ids]
+    return [id for id in author_pst_ids]
+
diff --git a/apps/cli/utilities/s_code_project_updater/test/test_updater.py b/apps/cli/utilities/s_code_project_updater/test/test_updater.py
new file mode 100755
index 0000000000000000000000000000000000000000..aefa05cb64fb3d9802b4c4984e659d875c485bfe
--- /dev/null
+++ b/apps/cli/utilities/s_code_project_updater/test/test_updater.py
@@ -0,0 +1,512 @@
+import logging
+import os
+import subprocess
+import unittest
+import warnings
+
+import pytest
+from s_code_project_updater.commands import UpdateException, ScodeProjectUpdater
+from schema import create_session
+from schema.model import Project
+from schema.pstmodel import Session
+from sqlalchemy import exc as sa_exc
+from support.logging import get_console_logger
+
+from .test_projects import get_author_pst_ids, ScodeTestProject, \
+    ScienceTestProject, AlmaTestProject
+
+_LOG = get_console_logger("scode_project_updater_tests", logging.DEBUG)
+_UPDATE_COMMAND = 'update_sproj'
+PROFILE = 'local'
+
+class UpdaterTestCase(unittest.TestCase):
+
+    @classmethod
+    def setUpClass(cls) -> None:
+        # run once for the class: pin the Capo profile and cache the
+        # exit-code -> description legend used by failure messages
+        os.environ['CAPO_PROFILE'] = PROFILE
+        cls.return_values = build_updater_return_values()
+
+    @classmethod
+    def setUp(cls) -> None:
+        # runs before EVERY test: re-seed the fake projects so each test
+        # starts from a known database state.
+        # NOTE(review): declared as a classmethod (unconventional for
+        # setUp); cls is therefore passed explicitly to the helper.
+        cls.initialize_test_data(cls)
+
+    @classmethod
+    def tearDownClass(cls) -> None:
+        # run once after all tests: remove the fake projects from the DB
+        cls.remove_test_data(cls)
+
+    def test_dry_run_does_not_update(self):
+        ''' A --dry run with a new title must leave the project's title,
+            abstract, and author list untouched in the database.
+        '''
+        fake_project = ScodeTestProject().project
+        project_code = fake_project.project_code
+        session = create_session('SDM')
+        return_code = None
+        try:
+            new_title = 'this is the new title'
+            # sanity check: the requested title really differs
+            self.assertNotEqual(fake_project.title, new_title,
+                                f'new title should be {new_title}; got '
+                                f'{fake_project.title}')
+            args = [
+                '-C', project_code,
+                '-P', PROFILE,
+                '-T', new_title,
+                '--dry'
+                ]
+            try:
+                return_code = CommandLineUpdaterLauncher(args).run()
+            except Exception as exc:
+                text = self.return_values[return_code] if return_code else ''
+                pytest.fail(f'{exc} {text}')
+
+            if not return_code:
+                updated = self.get_project_from_db(session, project_code)
+                # nothing should have been updated
+                self.assertEqual(fake_project.title, updated.title,
+                                 f'expecting same title, but before is '
+                                 f'{fake_project.title} and after is {updated.title}')
+                self.assertEqual(fake_project.abstract, updated.abstract,
+                                 f'expecting same abstract, but before is '
+                                 f'{fake_project.abstract} and updated is {updated.abstract}')
+                self.assertEqual(len(fake_project.authors),
+                                     len(updated.authors),
+                                 f'expecting same number of authors, '
+                                 f'but before has {len(fake_project.authors)} '
+                                 f'and after has {len(updated.authors)}')
+            else:
+                pytest.fail(f'unexpected failure; return code ={return_code}')
+
+        except Exception as exc:
+            pytest.fail(f'{project_code}: {exc}')
+        finally:
+            session.close()
+
+    def test_project_code_only_fetches(self):
+        ''' Invoking the updater with only a project code (no new title,
+            abstract, or investigators) should display the project and
+            change nothing in the database.
+        '''
+        fake_project = ScodeTestProject().project
+        project_code = fake_project.project_code
+        args = [
+            '-C', project_code,
+            '-P', PROFILE,
+        ]
+        return_code = None
+        try:
+            return_code = CommandLineUpdaterLauncher(args).run()
+            if not return_code:
+                session = create_session('SDM')
+                try:
+                    updated = self.get_project_from_db(session, project_code)
+                    self.assertEqual(fake_project.title, updated.title,
+                                     f'expecting same title, but before is '
+                                     f'{fake_project.title} and after is {updated.title}')
+                    self.assertEqual(fake_project.abstract, updated.abstract,
+                                     f'expecting same abstract, but before is '
+                                     f'{fake_project.abstract} and updated is {updated.abstract}')
+                    self.assertEqual(len(fake_project.authors),
+                                     len(updated.authors),
+                                     f'expecting same number of authors, '
+                                     f'but before has {len(fake_project.authors)} '
+                                     f'and after has {len(updated.authors)}')
+                    # every original author must still be on the project
+                    count = 0
+                    for orig_author in fake_project.authors:
+                        for author in updated.authors:
+                            if author.username == orig_author.username:
+                                count += 1
+                                break
+                    self.assertEqual(len(fake_project.authors), count,
+                                     f'before and after projects should have '
+                                     f'same authors')
+                finally:
+                    session.close()
+        except Exception as exc:
+            text = self.return_values[return_code] if return_code else ''
+            pytest.fail(f'{exc} {text}')
+
+    def test_updates_abstract_only(self):
+        ''' Passing -A with a new abstract should change only the
+            abstract; title and authors must be untouched.
+        '''
+        fake_project = ScodeTestProject().project
+        project_code = fake_project.project_code
+        session = create_session('SDM')
+        new_abstract = "Well, here's another nice mess you've gotten us into, Ollie"
+        # sanity check: the requested abstract really differs
+        self.assertNotEqual(fake_project.abstract, new_abstract,
+                            f'expecting new abstract {new_abstract} but got {fake_project.abstract}')
+        args = [
+            '-C', project_code,
+            '-P', PROFILE,
+            '-A', new_abstract,
+        ]
+        return_code = None
+        try:
+            try:
+                return_code = CommandLineUpdaterLauncher(args).run()
+            except subprocess.TimeoutExpired as exp:
+                raise UpdateException(exp)
+            except Exception as exc:
+                text = self.return_values[return_code] if return_code else ''
+                pytest.fail(f'{exc} {text}')
+
+            if not return_code:
+                updated = self.get_project_from_db(session, project_code)
+                # only abstract should have been updated;
+                # all else should be same
+                self.assertEqual(fake_project.title, updated.title,
+                                 f'expecting same title, but before is '
+                                 f'{fake_project.title} and after is {updated.title}')
+                self.assertEqual(new_abstract, updated.abstract,
+                                 f'expecting same abstract, but before is '
+                                 f'{fake_project.abstract} and updated is {updated.abstract}')
+                self.assertEqual(len(fake_project.authors),
+                                 len(updated.authors))
+            else:
+                raise UpdateException()
+
+        except Exception as exc:
+            pytest.fail(f'{project_code}: {exc}')
+        finally:
+            session.close()
+
+    def test_updates_abstract_and_title(self):
+        fake_project = ScodeTestProject().project
+        project_code = fake_project.project_code
+        session = create_session('SDM')
+        new_abstract = "I think you ought to know I'm feeling very depressed"
+        new_title = 'A Survey of the Mattresses of Sqornshellous Zeta'
+        self.assertNotEqual(fake_project.abstract, new_abstract,
+                            f'expecting new abstract {new_abstract}, '
+                            f'but abstract was not changed from {fake_project.abstract}')
+        self.assertNotEqual(fake_project.title, new_title,
+                            f'expecting new title {new_title}, '
+                            f'but abstract was not changed from {fake_project.title}')
+        args = [
+            '-C', project_code,
+            '-P', PROFILE,
+            '-A', new_abstract,
+            '-T', new_title,
+        ]
+        return_code = None
+        try:
+            try:
+                return_code = CommandLineUpdaterLauncher(args).run()
+            except subprocess.TimeoutExpired as exp:
+                raise UpdateException(exp)
+            except Exception as exc:
+                text = self.return_values[return_code] if return_code else ''
+                pytest.fail(f'{exc} {text}')
+
+            if not return_code:
+                updated = self.get_project_from_db(session, project_code)
+                # abstract and title should have been updated;
+                # all else should be same
+                self.assertEqual(new_title, updated.title,
+                                 'title should  not have changed')
+                self.assertEqual(new_abstract, updated.abstract,
+                                 'abstract should not have changed')
+                self.assertEqual(len(fake_project.authors),
+                                 len(updated.authors),
+                                 'authors should not have changed')
+            else:
+                raise UpdateException()
+
+        except Exception as exc:
+            pytest.fail(f'{project_code}: {exc}')
+        finally:
+            session.close()
+
+    def test_adds_new_abstract_deletes_author(self):
+        fake_project = ScodeTestProject().project
+        project_code = fake_project.project_code
+        new_project = Project(project_code=project_code,
+                              title=fake_project.title,
+                              abstract=fake_project.abstract)
+        new_abstract = "First there is a mountain, then there is no " \
+                        "mountain, then there is"
+        self.assertNotEqual(new_abstract, fake_project.abstract)
+        new_project.abstract = new_abstract
+        original_authors = fake_project.authors.copy()
+        self.assertEqual(4, len(original_authors),
+                         'expected 4 authors before update')
+        last_author = original_authors[3]
+        new_authors = original_authors[:3]
+        self.assertEqual(len(original_authors) - 1, len(new_authors),
+                         f'expecting {len(original_authors) - 1} new authors, '
+                         f'but there are {len(new_authors)}')
+        new_project.authors = new_authors
+        args = [
+            '-C', project_code,
+            '-P', PROFILE,
+            '-A', new_abstract,
+            '-I',
+        ]
+        for id in get_author_pst_ids(new_project):
+            args.append(str(id))
+
+        return_code = None
+        session = create_session('SDM')
+        try:
+            try:
+                return_code = CommandLineUpdaterLauncher(args).run()
+            except subprocess.TimeoutExpired as exp:
+                raise UpdateException(exp)
+            except Exception as exc:
+                text = self.return_values[return_code] if return_code else ''
+                pytest.fail(f'{exc} {text}')
+
+            self.assertEqual(0, return_code, f'command should have succeeded '
+                                             f'but return code was {return_code}')
+
+            updated = self.get_project_from_db(session, project_code)
+            # last author should have been removed and the abstract changed;
+            # title should remain same
+            self.assertNotEqual(fake_project.abstract, updated.abstract,
+                                'abstract should have changed')
+            self.assertEqual(fake_project.title, updated.title,
+                             'title should not have changed')
+            expected = len(original_authors) - 1
+            actual = len(updated.authors)
+            self.assertEqual(expected, actual,
+                             'one author should have been removed')
+            authors_updated = last_author in updated.authors
+            self.assertFalse(authors_updated, 'THIS IS THE MESSAGE')
+            count = 0
+            for orig_author in original_authors[:3]:
+                for new_author in updated.authors:
+                    if new_author.username == orig_author.username:
+                        count += 1
+                        break
+            self.assertEqual(len(new_authors), count,
+                             f'expected {len(new_authors)} authors in '
+                             f'updated project; there were {count}')
+
+        except Exception as exc:
+            pytest.fail(f'{project_code}: {exc}')
+
+        finally:
+            session.close()
+
+    def test_output_is_as_expected(self):
+        ''' A fetch-only run should print the project's title, abstract,
+            and the authors' PST IDs (PI first) on stdout.
+        '''
+        fake_project = ScodeTestProject().project
+        project_code = fake_project.project_code
+        args = [
+            '-C', project_code,
+            '-P', PROFILE,
+        ]
+
+        runner = CommandLineUpdaterLauncher(args)
+        return_code = runner.run()
+        if return_code:
+            text = self.return_values[return_code]
+            pytest.fail(text)
+
+        # runner captured stdout (stderr merged in) during run()
+        stdout = runner.stdout
+        self.assertIsNotNone(stdout, 'program output is expected')
+        self.assertTrue('Title: ' + fake_project.title in stdout,
+                        'title should be in output')
+        self.assertTrue('Abstract: ' + fake_project.abstract in stdout,
+                        'abstract should be in output')
+        pst_ids = [str(id) for id in get_author_pst_ids(fake_project)]
+        pst_id_str = ' '.join(pst_ids)
+        self.assertTrue('Authors: ' + pst_id_str in stdout,
+                        f'output should have PST IDs {pst_ids}')
+
+    def test_copes_with_single_pi(self):
+        ''' Reducing the author list to a single PST ID (just a PI)
+            should be accepted by the updater.
+        '''
+        project = ScodeTestProject().project
+        args = ['-P', PROFILE, '-C', project.project_code, '-I', '4686']
+        return_code = CommandLineUpdaterLauncher(args=args).run()
+        self.assertEqual(0, return_code,
+                         'update to single author should succeed')
+
+    def test_alma_project_is_rejected(self):
+        project_code = '2018.A.00062.S'
+        args = ['-P', PROFILE, '-C', project_code,
+                '-T', 'Physics at High Angular Resolution in Nearby Galaxies: '
+                      'The Local Galaxy Inventory Continued']
+
+        with pytest.raises(SystemExit) as exc:
+            ScodeProjectUpdater(args=args).update_project()
+            self.assertEqual(2, exc.code, 'ALMA project should be rejected')
+
+    def test_errors_return_expected_codes(self):
+        ''' Exercise every documented failure mode and confirm the exit
+            codes match the legend in build_updater_return_values().
+        '''
+        # minimum required arguments -- profile & project -- omitted
+        return_code = CommandLineUpdaterLauncher([]).run()
+        self.assertEqual(return_code, 2,
+                         'expected return code 2 for no args')
+
+        project_code = ScodeTestProject().project.project_code
+
+        # update failure
+        result = FailingUpdater().update_project()
+        self.assertIsInstance(result, SystemExit)
+        self.assertEqual(5, result.code,
+                         'expecting return code 5 for update failure')
+
+        # profile not specified
+        args = ['-C', project_code,]
+        return_code = CommandLineUpdaterLauncher(args).run()
+        self.assertEqual(return_code, 2,
+                         'expecting return code 2 when profile not specified')
+
+        # project code not specified
+        args = ['-P', PROFILE]
+        self.assertEqual(CommandLineUpdaterLauncher(args).run(), 2,
+                         'expecting return code 2 when project not specified')
+
+        # profile value missing
+        args = ['-P', '-C', project_code]
+        return_code = CommandLineUpdaterLauncher(args).run()
+        self.assertEqual(return_code, 2,
+                         'expecting return code 2 for missing profile')
+
+        # project code missing
+        args = ['-P', PROFILE, '-C']
+        self.assertEqual(CommandLineUpdaterLauncher(args).run(), 2,
+                         'expecting return code 2 for missing project code')
+
+        # bad project code
+        args = ['-P', PROFILE, '-C', 'bogus']
+        self.assertEqual(CommandLineUpdaterLauncher(args).run(), 3,
+                         'expecting return code 3 for invalid project code')
+
+        # bad profile
+        args = ['-P', 'not_a_profile', '-C', project_code]
+        self.assertEqual(CommandLineUpdaterLauncher(args).run(), 1,
+                         'expecting return code 1 for invalid Capo profile')
+
+        # missing title as last argument
+        args = ['-P', PROFILE, '-C', project_code, '-T']
+        self.assertEqual(CommandLineUpdaterLauncher(args).run(), 2,
+                         'expecting return code 2 for missing title')
+
+        # missing title as first argument
+        args = [ '-T', '-P', PROFILE, '-C', project_code,]
+        self.assertEqual(CommandLineUpdaterLauncher(args).run(), 2,
+                         'expecting return code 2 for missing title')
+
+        # nonexistent investigator
+        args = ['-P', PROFILE, '-C', project_code, '-I', '-22']
+        self.assertEqual(CommandLineUpdaterLauncher(args).run(), 4,
+                         'expecting return code 4 for invalid investigator')
+
+
+    ### UTILITIES ###
+
+    def initialize_test_data(self):
+        ''' Seed the database with the fake projects, replacing any
+            leftover copies from a previous run; fail the test if the
+            counts don't add up.
+        '''
+        session = create_session('SDM')
+        num_commits = num_found = 0
+        try:
+            with warnings.catch_warnings():
+                # Suppress SQLAlchemy warnings
+                warnings.simplefilter("ignore", category=sa_exc.SAWarning)
+
+                fake_projects = [ScodeTestProject().project,
+                                 ScienceTestProject().project,
+                                 AlmaTestProject().project]
+                try:
+                    for fake_project in fake_projects:
+                        project_code = fake_project.project_code
+                        existing = session.query(Project) \
+                            .filter(Project.project_code ==
+                                    project_code) \
+                            .first()
+                        if existing is not None:
+                            # stale copy from an earlier run: replace it
+                            num_found += 1
+                            session.delete(existing)
+                            session.commit()
+                        session.add(fake_project)
+                        session.commit()
+                        num_commits += 1
+
+                    if num_commits < num_found:
+                        pytest.fail(f'{num_found} fake projects were found '
+                                    f'and deleted, but {num_commits} were '
+                                    f'added and committed')
+                except Exception as exc:
+                    pytest.fail(f'{exc}')
+        finally:
+            session.close()
+
+    def remove_test_data(self):
+        ''' Delete the fake projects from the database, if present;
+            deleting a project cascades to its authors (see the
+            relationship on schema.model.Project).
+        '''
+        session = create_session('SDM')
+        try:
+            with warnings.catch_warnings():
+                # Suppress SQLAlchemy warnings
+                warnings.simplefilter("ignore", category=sa_exc.SAWarning)
+
+                fake_projects = [ScodeTestProject().project,
+                                 ScienceTestProject().project,
+                                 AlmaTestProject().project]
+                for fake_project in fake_projects:
+                    project_code = fake_project.project_code
+                    existing = session.query(Project) \
+                        .filter(Project.project_code ==
+                                project_code) \
+                        .first()
+                    if existing is not None:
+                        session.delete(existing)
+                        session.commit()
+        except Exception as exc:
+            pytest.fail(f'{exc}')
+        finally:
+            session.close()
+
+    def get_project_from_db(self, session: Session, project_code: str):
+        ''' Fetch the project with the given code from the database,
+            or None if it does not exist.
+        '''
+        with warnings.catch_warnings():
+            # Suppress SQLAlchemy warnings
+            warnings.simplefilter("ignore", category=sa_exc.SAWarning)
+
+            return session.query(Project) \
+                .filter(Project.project_code == project_code) \
+                .first()
+
+class FailingUpdaterHelper:
+    ''' Helper meant to simulate an update failure.
+        NOTE(review): update_project is declared as a pytest fixture but is
+        never requested by any test here, and FailingUpdater defines its own
+        update_project -- this helper looks unused; confirm before removing.
+    '''
+
+    @pytest.fixture()
+    def update_project(self):
+        return SystemExit(5)
+
+class FailingUpdater:
+    ''' Stand-in updater whose update_project always reports failure
+        (exit code 5) without touching the database.
+    '''
+    def __init__(self):
+        self.helper = FailingUpdaterHelper()
+
+    def update_project(self):
+        # the SystemExit is returned, not raised, so callers can
+        # inspect .code directly
+        return SystemExit(5)
+
+
+class CommandLineUpdaterLauncher:
+
+    def __init__(self, args: list):
+        self.args = [_UPDATE_COMMAND]
+        for arg in args:
+            self.args.append(str(arg))
+        _LOG.info(f'{self.args}')
+
+    def run(self):
+        ''' launch updater from command line
+            @:returns directory listing
+        '''
+
+        args = self.args
+        try:
+            proc = subprocess.run(args,
+                                       stdout=subprocess.PIPE,
+                                       stderr=subprocess.STDOUT,
+                                       timeout=60,
+                                       check=False,
+                                       bufsize=1,
+                                       universal_newlines=True)
+            self.stdout = proc.stdout
+            return proc.returncode
+        except Exception as exc:
+            _LOG.error(f'{exc}')
+            if not isinstance(exc, subprocess.TimeoutExpired):
+                return exc.returncode
+            else:
+                raise
+
+def build_updater_return_values():
+    return {
+        1: 'error with capo configuration',
+        2: 'error with input parameters',
+        3: 'project not found',
+        4: 'investigator not found',
+        5: 'update failed',
+    }
+
+if __name__ == '__main__':
+    # allow running this module directly as a unittest script
+    unittest.main()
diff --git a/deploy.sh b/deploy.sh
index 9d2d00437e18b6acdea32e4b7be2b33ff47a23ea..7540346c3e19daf7f214bf632c590a217e3f8f5f 100755
--- a/deploy.sh
+++ b/deploy.sh
@@ -32,7 +32,7 @@ if [ "$VIRTUAL_ENV" = "" ]; then
     # Make sure we have an appropriate venv available here: 
     if [ ! -d venv ]; then
 	echo "No virtual environment detected, creating a basic one."
-	python3.6 -m venv venv
+	python3.8 -m venv venv
 	source ./venv/bin/activate
 	pip install --upgrade pip
 	pip install -r deployment/requirements.txt
diff --git a/deployment/requirements.txt b/deployment/requirements.txt
index 8df4ef3e9fe10a7d7672d7e2d498eb0f6f227765..9abc3fe110fc4f4b5c2ac5a8b7a78d1e497d1575 100644
--- a/deployment/requirements.txt
+++ b/deployment/requirements.txt
@@ -1,4 +1,4 @@
-pycapo==0.2.0
+pycapo==0.3.0
 fabric==2.4.0
 pyYAML==3.13
 requests==2.18.4
diff --git a/environment.yml b/environment.yml
index 134998cb21caf6ac9702692f6aaa61286cebe041..b0cc5db541dafcd6920ef7aaa455f841b8cae943 100644
--- a/environment.yml
+++ b/environment.yml
@@ -1,7 +1,7 @@
 name: data
 channels:
-  - https://builder.aoc.nrao.edu/conda
   - defaults
+  - http://builder.aoc.nrao.edu/conda
 dependencies:
   - alembic=1.4
   - astropy=4.0
@@ -24,9 +24,8 @@ dependencies:
   - pysftp=0.2.9
   - pytest=5.4
   - python=3.8
-  - requests>=2.23,<3.0
+  - requests=2.23
   - simplejson=3.17
   - sqlalchemy=1.3
   - tqdm=4.46
-  - waitress=1.4
-  - zc.buildout=2.13.2
\ No newline at end of file
+  - waitress=1.4
\ No newline at end of file
diff --git a/requirements.txt b/requirements.txt
index 7530ccea973076f4145e19d48abb271512796146..80cf286896647136d4ef50487e58c6d15f65bb0f 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -1,2 +1,2 @@
 pika==1.1.0
-pycapo==0.2.1post1
+pycapo==0.3.0
diff --git a/shared/schema/src/schema/__init__.py b/shared/schema/src/schema/__init__.py
index 9ef86007564c99b61f4b61268cfa35c9c87cce9e..41f4f739166fb7e3d20918e7b2b29919c8ec6321 100644
--- a/shared/schema/src/schema/__init__.py
+++ b/shared/schema/src/schema/__init__.py
@@ -1,5 +1,7 @@
 # publish our behavior-enhanced table classes
 import sqlalchemy
+from sqlalchemy.sql import ClauseElement
+
 from .model import *
 from sqlalchemy.orm import sessionmaker
 from pycapo import CapoConfig
@@ -47,6 +49,26 @@ def create_session(instrument, **kwargs):
     session = session_mkr()
     return session
 
+def create_model_instance(session, model, defaults=None, commit=True, **kwargs):
+    params = dict((k, v) for k, v in kwargs.items() if not isinstance(v, ClauseElement))
+    params.update(defaults or {})
+    instance = model(**params)
+    session.add(instance)
+    if commit:
+        session.commit()
+        # Need to read it back to get the new PK, etc.
+        session.refresh(instance)
+    return instance
+
+def get_or_create_model_instance(session, model, defaults=None, commit=True,
+                           **kwargs):
+    instance = session.query(model).filter_by(**kwargs).first()
+    if instance:
+        return instance, False
+    else:
+        instance = create_model_instance(session, model, defaults=defaults, commit=commit, **kwargs)
+        return instance, True
+
 
 class ArchiveDBSession:
     """A class to create context manager around an archive connection."""
diff --git a/shared/schema/src/schema/model.py b/shared/schema/src/schema/model.py
index dfa18bb132714475be3e355d100ff5043c857633..bc46a8f583248131eacd30da989bf3f355909b06 100644
--- a/shared/schema/src/schema/model.py
+++ b/shared/schema/src/schema/model.py
@@ -693,7 +693,8 @@ class Project(Base):
     proprietary_duration = Column(Float(53))
 
     science_products = relationship('ScienceProduct', secondary='science_products_projects')
-    authors = relationship('Author')
+    authors = relationship('Author', cascade="all,delete, delete-orphan",
+                           backref='parent')
     execution_blocks = relationship('ExecutionBlock')
     file_groups = relationship('Filegroup')
     alma_ouses = relationship('AlmaOus', backref='projects')