Skip to content
Snippets Groups Projects

Compare revisions

Changes are shown as if the source revision was being merged into the target revision. Learn more about comparing revisions.

Source

Select target project
No results found

Target

Select target project
  • ssa/workspaces
1 result
Show changes
Showing
with 136 additions and 159 deletions
......@@ -180,7 +180,7 @@ e2e:
variables:
ENV_HOST: workflow
before_script:
- docker build -t base-build:ci --target base-build -f apps/web/Dockerfile.dev .
- docker build -t base-build:ci --target base-build -f apps/web/Dockerfile --build-arg env=dev .
- docker build -t e2e:${CI_COMMIT_SHORT_SHA} -f apps/web/Dockerfile.ci .
script:
# setting env variables with .env https://docs.docker.com/compose/environment-variables/
......@@ -287,8 +287,8 @@ deploy:
script:
# Docker doesn't allow variable interpolation when declaring Docker Secret names
# This sed command finds and replaces "dsoc_ENV_secrets:" with "dsoc_${DEPLOY_ENV}_secrets:"
- sed -i "s/dsoc_ENV_secrets:/dsoc_${DEPLOY_ENV}_secrets:/g" docker-compose.dev.yml
- ENV=$DEPLOY_ENV TAG=$IMAGE_TAG DL_HOST=$DL_HOST ENV_HOST=$ENV_HOST docker stack deploy --compose-file docker-compose.dev.yml workspaces-${DEPLOY_ENV}
- sed -i "s/dsoc_ENV_secrets:/dsoc_${DEPLOY_ENV}_secrets:/g" docker-compose.yml
- ENV=$DEPLOY_ENV TAG=$IMAGE_TAG DL_HOST=$DL_HOST ENV_HOST=$ENV_HOST docker stack deploy --compose-file docker-compose.yml workspaces-${DEPLOY_ENV}
rules:
- if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH
variables:
......@@ -302,11 +302,15 @@ deploy:
DL_HOST: https://dl-dsoc-test.nrao.edu
# override ENV_HOST
ENV_HOST: ws-test.nrao.edu
# - if: '$CI_COMMIT_TAG =~ /^test_[0-9]+\.[0-9]+.[0-9]+$/'
# variables:
# IMAGE_TAG: $CI_COMMIT_TAG
# # override DEPLOY_ENV
# DEPLOY_ENV: "test"
- if: '$CI_COMMIT_TAG =~ /[0-9]+\.[0-9]+\.[0-9]+$/'
variables:
IMAGE_TAG: $CI_COMMIT_TAG
# override DEPLOY_ENV
DEPLOY_ENV: "prod"
# override DL_HOST
DL_HOST: https://dl-dsoc.nrao.edu
# override ENV_HOST
ENV_HOST: ws.nrao.edu
# e2e:
# stage: e2e-test
......
......@@ -7,7 +7,7 @@ all: dev check-build
# Check if local code will pass CI build
check-build: docker-base docker-dev-images-locally test-dev
# Run tests on Dockerfile.dev images
# Run tests on Dockerfile images
test-dev:
docker run nrao:workflow ./bin/run-tests.sh
docker run nrao:capability ./bin/run-tests.sh
......@@ -26,13 +26,13 @@ test_cp:
dev: cache docker-base
@echo "starting docker compose up in the background"
@echo "wait a few seconds and your environment should be done setting up"
docker compose up -d
docker compose -f docker-compose.local.yml up -d
alembic-update:
cd schema; \
env CAPO_PROFILE=local alembic upgrade head
# Build images from Dockerfile.dev
# Build images from Dockerfile
docker-dev-images-locally:
docker build -t nrao:workflow -f services/workflow/Dockerfile.local . --build-arg capo_env=docker
docker build -t nrao:capability -f services/capability/Dockerfile.local . --build-arg capo_env=docker
......@@ -53,7 +53,7 @@ db:
# Build docker images
build: docker-base db cache
docker compose build --no-cache
docker compose -f docker-compose.local.yml build --no-cache
# Generate HTML coverage report
coverage:
......@@ -69,7 +69,7 @@ coverage:
# Clean up environment
clean:
# Clean up Docker environment
docker-compose down || true
docker-compose -f docker-compose.local.yml down || true
# remove nrao: labeled containers
@docker container list --all | grep nrao: | awk '{ print $$1 }' | xargs docker container rm -f
# remove workspaces and nrao labeled images
......@@ -86,4 +86,3 @@ reallyclean: clean
.PHONY: docs
docs:
cd docs && make html
......@@ -21,7 +21,7 @@ logger.addHandler(logging.StreamHandler(sys.stdout))
def _get_settings(cwd: pathlib.Path, args: list):
use_casa = CapoConfig().getboolean("edu.nrao.archive.workspaces.ProcessingSettings.useCasa")
use_casa = CapoConfig().getboolean("edu.nrao.workspaces.ProcessingSettings.useCasa")
casa_home = (
CapoConfig().settings("edu.nrao.archive.workflow.config.CasaVersions").homeForReprocessing
)
......
......@@ -45,22 +45,24 @@ def arg_parser() -> argparse.ArgumentParser:
required=False,
help="deliver standard image results to analyst image QA area",
)
parser.add_argument(
"--retrieve-img",
nargs=1,
action="store",
required=False,
help="retrieve standard image results to original parent directory",
)
return parser
def _get_settings(filename: str) -> Dict[str, str]:
delivery_area = (
CapoConfig().settings("edu.nrao.archive.workspaces.DeliverySettings").ciplDelivery
)
weblog_cache_area = (
CapoConfig().settings("edu.nrao.archive.workspaces.DeliverySettings").cacheWeblogDirectory
)
image_qa_area = (
CapoConfig().settings("edu.nrao.archive.workspaces.DeliverySettings").standardImageDelivery
)
workspaces_lustre_root_dir = (
CapoConfig().settings("edu.nrao.archive.workspaces.ProcessingSettings").rootDirectory
)
delivery_properties = CapoConfig().settings("edu.nrao.workspaces.DeliverySettings")
processing_properties = CapoConfig().settings("edu.nrao.workspaces.ProcessingSettings")
delivery_area = delivery_properties.standardCalibrationDelivery
weblog_cache_area = delivery_properties.cacheWeblogDirectory
image_qa_area = delivery_properties.standardImageDelivery
workspaces_lustre_root_dir = processing_properties.rootDirectory
current_root_directory = str(pathlib.Path.cwd().parent)
current_subdirectory = str(pathlib.Path.cwd().stem)
......@@ -96,6 +98,10 @@ def main():
action = "Image Delivery"
settings = _get_settings(args.deliver_img[0])
conveyor = DeliveryConveyor(settings, action)
elif args.retrieve_img is not None:
action = "Image Retrieval"
settings = _get_settings(args.retrieve_img[0])
conveyor = RetrievalConveyor(settings, action)
conveyor.convey()
logger.info(f"Standard {action} is complete!")
......@@ -27,22 +27,28 @@ class RetrievalConveyor(ConveyorIF):
contents = glob.glob("*/", recursive=True)
self.break_symlinks(spool_path, contents)
self.move_subdirectories_to_spool(qa_path, spool_path, contents)
check = self.check_spool_contents(spool_path, contents)
if check:
Path(qa_path).rmdir()
if qa_path.exists():
self.move_subdirectories_to_spool(qa_path, spool_path, contents)
check = self.check_spool_contents(spool_path, contents)
if check:
Path(qa_path).rmdir()
else:
self.logger.info(
f"Directories from {qa_path} have already been retrieved! Continuing..."
)
def break_symlinks(self, spool_path: Path, dir_list: List[str]):
self.logger.info(
f"Breaking symlinks between qa2 and spool for directory {spool_path.stem}..."
)
for directory in dir_list:
directory = directory.strip("/")
if Path(spool_path / directory).is_symlink():
Path(spool_path / directory).unlink()
self.logger.info(f"Symlink for {directory} removed.")
self.logger.info(f'Symlink for "{directory}" removed.')
else:
self.logger.info(f"Directory {directory} is not a symlink! Skipping.")
self.logger.info(f'Directory "{directory}" is not a symlink! Skipping.')
def move_subdirectories_to_spool(self, qa_path: Path, spool_path: Path, dir_list: List[str]):
self.logger.info(f"Moving directories from qa2 to spool for directory {qa_path.stem}...")
......@@ -54,7 +60,11 @@ class RetrievalConveyor(ConveyorIF):
self.logger.info("Done.")
def determine_qa_directory(self) -> Path:
qa_area = self.settings["qa_delivery_area"]
if "Calibration" in self.action:
qa_area = self.settings["qa_delivery_area"]
else:
qa_area = self.settings["image_qa_area"]
wf_dir = self.settings["destination_subdir"]
return Path(qa_area + "/" + wf_dir)
......@@ -79,5 +89,5 @@ class RetrievalConveyor(ConveyorIF):
return False
def convey(self):
self.logger.info("RUNNING POST QA STANDARD CALIBRATION DIRECTORY RETRIEVAL!")
self.logger.info(f"RUNNING POST QA STANDARD {self.action.upper()}!")
self.retrieval()
......@@ -45,9 +45,6 @@ class TestRetrievalConveyor:
assert mock_chdir.call_count == 1
assert mock_glob.call_count == 1
assert symlinks.call_count == 1
assert move.call_count == 1
assert contents.call_count == 1
assert mock_rm.call_count == 1
@patch("pathlib.Path.is_symlink", return_value=True)
@patch("pathlib.Path.unlink")
......
......@@ -16,7 +16,7 @@ logger.addHandler(logging.StreamHandler(sys.stdout))
def _get_settings(filename: str) -> dict:
ingestion_settings = CapoConfig().settings("edu.nrao.archive.workspaces.IngestionSettings")
ingestion_settings = CapoConfig().settings("edu.nrao.workspaces.IngestionSettings")
staging_root_dir = ingestion_settings.stagingDirectory
storage_root_dir = ingestion_settings.storageDirectory
......
from __future__ import annotations
from pathlib import Path
"""
Adapted from shared/system to avoid including ssa-workspaces in pex
"""
class JSONSerializable:
def __json__(self, request=None) -> dict:
"""
Allows this object to be converted to JSON
:param request: this parameter is the active Pyramid request, if applicable (None otherwise)
:return: a dictionary which can be converted to JSON using json.dump
"""
pass
@classmethod
def from_json(cls, json: dict) -> any:
pass
class AbstractTextFile(JSONSerializable):
"""
Abstract text file is exactly that, an abstract concept of what a file is, to be
returned from various non-filesystem places.
"""
def __init__(self, filename: str, content: str):
self.filename, self.content = filename, content
def write_to(self, directory: Path):
(directory / self.filename).write_text(self.content)
@classmethod
def from_path(cls, path: Path) -> AbstractTextFile:
return cls(path.name, path.read_text())
......@@ -29,11 +29,6 @@ class TestIngestCalibrationLauncher:
assert mock_run.call_count == 2
assert mock_manifest.call_count == 1
@patch("subprocess.run")
def test_run_ingest(self, mock_run):
IngestCalibrationLauncher(parameters).run_ingest()
assert mock_run.call_count == 1
@pytest.mark.skip("Skip until manifest builder is complete")
@patch("ingest_envoy.ingestion_manifest.IngestionManifestBuilder.build")
@patch("subprocess.run")
......
......@@ -11,8 +11,6 @@ from pathlib import Path
# pylint: disable=E0401, E0402, R1721, W0621
import pytest
from ingest_envoy.ingestion_manifest import (
IngestionManifestBuilder,
find_manifest,
......@@ -52,8 +50,7 @@ logger.addHandler(logging.StreamHandler(sys.stdout))
FAKE_LOCATOR = "uid://evla/calibration/doo-wah-ditty-ditty-af123"
# TODO:
@pytest.mark.skip("FIXME")
def test_filters_cal_input_files(ingest_path: Path):
"""
We'll be getting calibration/image/eb, etc. science products from a directory under
......@@ -101,8 +98,6 @@ def test_filters_cal_input_files(ingest_path: Path):
shutil.rmtree(ingest_path)
# TODO:
@pytest.mark.skip("FIXME")
def test_writes_expected_output_files(ingest_path: Path):
"""
Did the manifest builder produce the manifest file, the weblog, and the science product tar?
......@@ -132,8 +127,6 @@ def test_writes_expected_output_files(ingest_path: Path):
shutil.rmtree(ingest_path)
# TODO:
@pytest.mark.skip("FIXME")
def test_params_json_well_formed():
"""
Make sure our ManifestParameters makes nice JSON
......@@ -153,17 +146,6 @@ def test_params_json_well_formed():
json.dumps(params_json)
@pytest.mark.skip("TODO")
def test_params_properly_formatted():
"""
TODO
:return:
"""
raise NotImplementedError
# TODO:
@pytest.mark.skip("FIXME")
def test_input_sp_well_formed():
"""
Make sure our InputScienceProduct makes nice JSON
......@@ -180,8 +162,6 @@ def test_input_sp_well_formed():
assert sp_in.to_json() == sp_dict
# TODO:
@pytest.mark.skip("FIXME or get rid of me")
def test_input_group_well_formed():
"""
Make sure our InputGroup makes nice JSON
......@@ -189,39 +169,17 @@ def test_input_group_well_formed():
:return:
"""
sp1 = InputScienceProduct(locator="uid://evla/execblock/coffee_heath_bar_crunch_7a23f")
sp1_json = sp1.to_json()
sp2 = InputScienceProduct(locator="uid://evla/execblock/mint_oreo_omg_omg")
sp2_json = sp2.to_json()
expected = {
IngestionManifestKey.INPUT_GROUP.value: {
IngestionManifestKey.SCIENCE_PRODUCTS.value: [sp1_json, sp2_json]
}
}
ingroup = InputGroup(science_products=[sp1, sp2])
actual = ingroup.to_json()
actual = actual[IngestionManifestKey.INPUT_GROUP.value]
expected = expected[IngestionManifestKey.INPUT_GROUP.value]
assert actual.keys() == expected.keys()
actual = actual[IngestionManifestKey.SCIENCE_PRODUCTS.value]
expected = expected[IngestionManifestKey.SCIENCE_PRODUCTS.value]
assert len(actual) == len(expected) == 2
marvin = actual[0]
trillian = expected[0]
for key, _ in marvin.items():
assert trillian[key] == marvin[key]
input_group = InputGroup(science_products=[sp1, sp2])
assert IngestionManifestKey.SCIENCE_PRODUCTS.value in input_group.to_json().keys()
marvin = actual[1]
trillian = expected[1]
for key, _ in marvin.items():
assert trillian[key] == marvin[key]
sps_found = input_group.science_products
assert len(sps_found) == 2
assert sp1 in sps_found
assert sp2 in sps_found
# TODO:
@pytest.mark.skip("FIXME")
def test_ancillary_product_well_formed():
"""
The JSON shouldn't contain empty fields
......@@ -235,70 +193,30 @@ def test_ancillary_product_well_formed():
assert actual == expected
# TODO:
@pytest.mark.skip("FIXME or get rid of me")
def test_output_group_well_formed():
"""
Make sure our OutputScienceProduct makes nice JSON
:return:
"""
sp1 = OutputScienceProduct(type=ScienceProductType.IMAGE, filename="see_no_evil.fits")
sp2 = OutputScienceProduct(type=ScienceProductType.IMAGE, filename="hear_no_evil.fits")
osp = OutputScienceProduct(
type=ScienceProductType.EVLA_CAL, filename="im_a_lil_calibration.tar"
)
ap1 = AncillaryProduct(
type=AncillaryProductType.PIPELINE_ARTIFACTS, filename="without_feathers.tar"
)
ap2 = AncillaryProduct(
type=AncillaryProductType.PIPELINE_ARTIFACTS, filename="with_feathers.tar"
)
opg = OutputGroup(science_products=[sp1, sp2], ancillary_products=[ap1, ap2])
opg_json = opg.to_json()
dumped = json.dumps(opg_json)
assert (
dumped == '{"output_group": '
'{"science_products": '
'[{"type": "image", "filename": "see_no_evil.fits"}, '
'{"type": "image", "filename": "hear_no_evil.fits"}], '
'"ancillary_products": [{"type": "pipeline_artifacts", '
'"filename": "without_feathers.tar"}, '
'{"type": "pipeline_artifacts", "filename": "with_feathers.tar"}]}}'
)
@pytest.mark.skip("TODO, or maybe not")
def test_input_group_properly_formatted():
"""
Does the InputGroup get written to the file in the expected format?
:return:
"""
sp1 = InputScienceProduct(locator="uid://evla/execblock/coffee_heath_bar_crunch_7a23f")
ingroup = InputGroup(science_products=[sp1])
ig_dict = json.loads(ingroup.to_json())
ig_text = json.dumps(ig_dict, indent=4)
expected = """
"input_group": {
"science_products": [
{
"locator": "uid://evla/execblock/coffee_heath_bar_crunch_7a23f"
}
]
},
"""
assert ig_text == expected
@pytest.mark.skip("TODO")
def test_output_group_properly_formatted():
"""
TODO
Does the OutputGroup get written to the file in the expected format?
expected_json = {
IngestionManifestKey.SCIENCE_PRODUCTS.value: [osp.to_json()],
IngestionManifestKey.ANCILLARY_PRODUCTS.value: [ap1.to_json(), ap2.to_json()],
}
opg = OutputGroup(science_products=[osp], ancillary_products=[ap1, ap2])
actual_json = opg.to_json()
:return:
"""
raise NotImplementedError
assert actual_json == expected_json
def test_evla_cal_manifest_matches_example(ingest_path: Path):
......
......@@ -223,12 +223,14 @@ class ServiceLocator(Locator):
self.spl = science_product_locator
def locate(self) -> LocationReport:
settings = CapoConfig().settings("edu.nrao.archive.datafetcher.DataFetcherSettings")
settings = CapoConfig().settings("edu.nrao.workspaces.ProductFetcherSettings")
# this is needed to prevent SSL errors when tests are run
# inside a Docker container
requests.packages.urllib3.util.ssl_.DEFAULT_CIPHERS += ":HIGH:!DH:!aNULL"
requests.Session().mount(settings.locatorServiceUrlPrefix, adapter=requests.adapters.HTTPAdapter())
requests.Session().mount(
settings.locatorServiceUrlPrefix, adapter=requests.adapters.HTTPAdapter()
)
try:
response = requests.get(settings.locatorServiceUrlPrefix, params={"locator": self.spl})
......
......@@ -32,9 +32,9 @@ SDM_ID=$1;shift
CAL_PROCESSING_DATETIME=$1;shift
# Get the spool, staging and storage paths from CAPO
SPOOL_DIR=$(./pycapo -P "${CAPO_PROFILE}" -q edu.nrao.archive.workspaces.ProcessingSettings.rootDirectory)
STAGING_DIR=$(./pycapo -P "${CAPO_PROFILE}" -q edu.nrao.archive.workspaces.IngestionSettings.stagingDirectory)
STORAGE_DIR=$(./pycapo -P "${CAPO_PROFILE}" -q edu.nrao.archive.workspaces.IngestionSettings.storageDirectory)
SPOOL_DIR=$(./pycapo -P "${CAPO_PROFILE}" -q edu.nrao.workspaces.ProcessingSettings.rootDirectory)
STAGING_DIR=$(./pycapo -P "${CAPO_PROFILE}" -q edu.nrao.workspaces.IngestionSettings.stagingDirectory)
STORAGE_DIR=$(./pycapo -P "${CAPO_PROFILE}" -q edu.nrao.workspaces.IngestionSettings.storageDirectory)
# File name for output tar of calibration products
#
......
......@@ -24,7 +24,7 @@ case $option in
;;
esac
WORKFLOW_SERVICE=$(capo -q edu.nrao.archive.workspaces.WorkflowSettings.serviceUrl)
WORKFLOW_SERVICE=$(capo -q edu.nrao.workspaces.WorkflowSettings.serviceUrl)
if [ "$action" = "ingest_cal" ]; then
curl -X POST $WORKFLOW_SERVICE/workflows/std_calibration/requests/$2/ingest
......
FROM ssa-containers.aoc.nrao.edu/ops/base:nodejs-14 as base-build
# Requies build-arg $env
# Build arg that sets environment; sets to "dev" if no build arg is given
ARG env=dev
ENV ENV=${env}
# Switch to vlapipe
USER vlapipe
......@@ -37,7 +42,7 @@ USER vlapipe
COPY --chown=vlapipe:vlapipe ./apps/web ./
# Build the angular app
RUN ./node_modules/.bin/ng build --configuration=dev --output-path=dist
RUN ./node_modules/.bin/ng build --configuration=${ENV} --output-path=dist
#
## NGINX section of multi-stage image
......
# base-build:ci references the first stage in app/web/Dockerfile.dev
# base-build:ci references the first stage in app/web/Dockerfile
# in which npm install has already been ran and app/web/ has been
# copied to /app in the image
# For e2e testing, we want to use this first stage as the base for our
# e2e container when we run e2e tests in the CI. We'll copy /app/ from
# e2e container when we run e2e tests in the CI. We'll copy /app/ from
# base-build:ci and copy it to /app of the trion/ng-cli-karma:11.2.7 image.
# Then we are able to run `ng e2e` with our Angular environment already setup.
# https://hub.docker.com/r/trion/ng-cli-karma/
......@@ -17,4 +17,3 @@ WORKDIR /app
COPY --from=web-base /home/vlapipe/app/ ./
CMD ./node_modules/protractor/bin/webdriver-manager update --versions.chrome 89.0.4389.114 && ng e2e --configuration=ci --webdriver-update=false
......@@ -36,7 +36,7 @@
"scripts": []
},
"configurations": {
"production": {
"prod": {
"fileReplacements": [
{
"replace": "src/environments/environment.ts",
......@@ -183,6 +183,9 @@
},
"test": {
"baseUrl": "https://ws-test.nrao.edu"
},
"prod": {
"baseUrl": "https://ws.nrao.edu"
}
}
},
......
export const environment = {
production: true,
// archive search services
apiUrl: 'https://webtest.aoc.nrao.edu/archive-services/',
apiUrl: 'https://webtest.aoc.nrao.edu/archive-service/',
// workspace services
workspacesUrl: 'https://ws-dev.nrao.edu/'
};
export const environment = {
production: true,
// archive search services
apiUrl: 'https://webtest.aoc.nrao.edu/archive-services/',
apiUrl: "https://data.nrao.edu/archive-service/",
// workspace services
workspacesUrl: 'http://localhost:3457/'
workspacesUrl: "http://ws.nrao.edu/",
};
export const environment = {
production: true,
// archive search services
apiUrl: 'https://webtest.aoc.nrao.edu/archive-services/',
apiUrl: 'https://webtest.aoc.nrao.edu/archive-service/',
// workspace services
workspacesUrl: 'http://shipman.aoc.nrao.edu:3457/'
};
......@@ -5,7 +5,7 @@
export const environment = {
production: false,
// archive search services
apiUrl: 'https://webtest.aoc.nrao.edu/archive-services/',
apiUrl: 'https://webtest.aoc.nrao.edu/archive-service/',
// workspace services
workspacesUrl: 'http://localhost:3457/'
};
......