Skip to content
Snippets Groups Projects

Compare revisions

Changes are shown as if the source revision was being merged into the target revision. Learn more about comparing revisions.

Source

Select target project
No results found

Target

Select target project
  • ssa/workspaces
1 result
Show changes
Commits on Source (3)
Showing
with 71 additions and 153 deletions
......@@ -49,6 +49,7 @@ build base image:
- if: '$CI_PIPELINE_SOURCE == "merge_request_event" && $CI_COMMIT_MESSAGE =~ /\A(?i)-ops/'
changes:
- Dockerfile.base
- docker.properties
# Push Base Image Stage
push base image:
......@@ -61,6 +62,7 @@ push base image:
- if: '$CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH && $CI_COMMIT_MESSAGE =~ /\A(?i)-ops/'
changes:
- Dockerfile.base
- docker.properties
# Run Schema
run schema:
......@@ -112,9 +114,6 @@ unit test dev workflow:
variables:
SERVICE_NAME: "workflow"
extends: .unit-test
artifacts:
paths:
- .coverage.${SERVICE_NAME}.${CI_COMMIT_SHORT_SHA}
needs:
- build dev workflow
......@@ -123,9 +122,6 @@ unit test dev capability:
variables:
SERVICE_NAME: "capability"
extends: .unit-test
artifacts:
paths:
- .coverage.${SERVICE_NAME}.${CI_COMMIT_SHORT_SHA}
needs:
- build dev capability
......@@ -134,9 +130,6 @@ unit test dev notification:
variables:
SERVICE_NAME: "notification"
extends: .unit-test
artifacts:
paths:
- .coverage.${SERVICE_NAME}.${CI_COMMIT_SHORT_SHA}
needs:
- build dev notification
......@@ -164,9 +157,7 @@ unit test coverage:
- unit test dev notification
rules:
- if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH
when: always
- if: '$CI_PIPELINE_SOURCE == "merge_request_event"'
when: always
# Push Stages
push dev workflow:
......@@ -246,7 +237,6 @@ pages:
expire_in: 2 weeks
rules:
- if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH
when: always
# Development
deploy dev:
......
......@@ -31,5 +31,5 @@ setup(
keywords=[],
packages=["datafetcher"],
classifiers=["Programming Language :: Python :: 3.8"],
entry_points={"console_scripts": ["datafetcher = datafetcher.commands:main"]},
entry_points={"console_scripts": ["datafetcher = datafetcher.datafetcher:main"]},
)
# datafetcher Dockerfile
#
# TO BUILD the docker image: -don't- "docker build" directly!
# use docker_build.sh:
# from apps/cli/executables/datafetcher,
#
# ./docker_build.sh datafetcher_test[:N]
#
# where 'datafetcher_test' is the image name and ':N' is the version.
# (If ':N' is omitted, version is 'latest' by default.)
# tag is not required for the build, but without it
# the container name is an unhelpful hexadecimal value.
FROM continuumio/miniconda3:latest
# docker_build.sh copies environment.yml into the build context root
COPY environment.yml .
# NOTE(review): $HOME is normally unset during "docker build", and
# .../bin/conda would be a file rather than a directory, so this PATH
# entry is likely a no-op — confirm whether it is needed at all.
ENV PATH $HOME/miniconda3/bin/conda:$PATH
# docker_build.sh should have copied environment.yml from data/;
# it will be used in the command below
RUN conda env update
# get what we'll need for the build
COPY . .
# get application files and tests
COPY src/ .
COPY test/ .
# install the application (editable/develop install into the "data" env)
RUN ["conda", "run", "-n", "data", "python", "setup.py", "develop"]
# we'll need a Capo profile; CAPO_PATH points at the directory holding
# the .properties file that docker_build.sh copied into test/
ENV CAPO_PROFILE local
ENV CAPO_PATH test/
# finally, run the tests. be verbose. log stuff.
# (for more detailed output, use "-vv" and/or "--log-level=DEBUG";
# to quit after first failure, use "-x")
ENTRYPOINT ["conda", "run", "-n", "data", "pytest", "-vv", "--log-level=DEBUG", "--showlocals", "test/"]
......@@ -24,7 +24,7 @@ def get_project_root() -> Path:
"""
my_path = Path(__file__)
path = my_path
while not path.name.endswith("workspaces") and not path.name.endswith("code"):
while not path.name.endswith("workspaces") and not path.name.endswith("packages"):
path = path.parent
return path
......@@ -53,7 +53,7 @@ from datafetcher.utilities import (
RetrievalMode,
)
TEST_PROFILE = "local"
TEST_PROFILE = "docker"
MISSING_SETTING = ReturnCode.MISSING_SETTING.value["code"]
MISSING_PROFILE = ReturnCode.MISSING_PROFILE.value["code"]
RUN_ALL = True
......
#!/bin/bash

# Building a Docker image in which to execute tests
# will require a copy of the local Capo properties
# file, which can be found at /home/casa/capo
# on boxes that can see /home, but which on boxes
# that can't is likely to be at ~/.capo for
# any given user. Find local.properties and
# copy it to our test directory. Dockerfiles
# do not support conditional logic; hence this script.
#
# Execute script from apps/executables/cli/datafetcher/
#
# Usage: ./docker_build.sh <container_name>[:N] [NO-CACHE]

FILENAME=local.properties
CONTAINER_NAME=$1;shift
CACHE_FLAG=$1;shift

# double quotes (not single) so that $0 is expanded in the message
USAGE="Usage: $0 <container_name> [NO-CACHE]"

if [[ -z "${CONTAINER_NAME}" ]]
then
    echo "${USAGE}"
    exit 1
fi

# No second argument -> use the build cache.
# NO-CACHE / NO_CACHE (any case, optional leading "--") -> disable it.
# Anything else is an error.
if [ -z "${CACHE_FLAG}" ]
then
    USE_CACHE=1
else
    shopt -s nocasematch
    if [[ "${CACHE_FLAG}" =~ ^(--)?NO[-_]CACHE$ ]]
    then
        USE_CACHE=0
    else
        echo "invalid cache flag: ${CACHE_FLAG}"
        echo "${USAGE}"
        exit 1
    fi
    shopt -u nocasematch
fi

# conda will need the environment.yml
export ENV_YML=environment.yml
export YML_DIR=../../../../
cp ${YML_DIR}${ENV_YML} ${ENV_YML}

# The preferred version of Capo .properties files is always
# the one at /home/casa/capo, -if- this is visible
# (i.e., NRAO internal system). If not (i.e., developer laptop),
# get the one in the user's .capo directory
if [ -e /home/casa/capo/${FILENAME} ]
then
    SOURCE=/home/casa/capo/${FILENAME}
elif [ -e ~/.capo/${FILENAME} ]
then
    SOURCE=~/.capo/${FILENAME}
else
    # double quotes so ${FILENAME} is expanded in the message
    echo "${FILENAME} not found!"
    exit 1
fi

NEW_FILE=./test/${FILENAME}
cp ${SOURCE} ${NEW_FILE}

# remove extended attributes, which would cause Capo to balk
/usr/bin/xattr -c ${NEW_FILE}

## where the magic happens
if [ "${USE_CACHE}" == 1 ]
then
    echo '>>>> Using cache, if possible'
    docker build . -f test/Dockerfile -t ${CONTAINER_NAME}
else
    echo '>>>> no cache'
    docker build . -f test/Dockerfile --no-cache -t ${CONTAINER_NAME}
fi

# now get rid of the properties file; containing sensitive info, it must NOT be saved or committed
rm -f ${NEW_FILE}

# get rid of the .yml, too
rm -f ${ENV_YML}

# to run the image: docker run ${CONTAINER_NAME}[:latest]
......@@ -66,12 +66,22 @@ def test_omitted_profile_returns_expected_code(make_tempdir, settings):
:return:
"""
# store existing CAPO_PROFILE
existing_capo_profile = os.environ["CAPO_PROFILE"]
# remove existing CAPO_PROFILE
os.environ["CAPO_PROFILE"] = ""
# omit --profile arg
args = ['--product-locator', settings.test_data['product_locator'],
'--output-dir', str(make_tempdir)]
return_code = launch_datafetcher(args, settings.capo_settings)
assert return_code == MISSING_PROFILE
# restore the existing CAPO_PROFILE
os.environ["CAPO_PROFILE"] = existing_capo_profile
@pytest.mark.skipif(not RUN_ALL, reason='debug')
def test_omitted_capo_value_returns_expected_code(make_tempdir, settings):
......
# HTCondor submit description: runs test.sh (in this same directory)
# with the single argument "hello".
executable = test.sh
arguments = hello
# queue one instance of the job
queue
\ No newline at end of file
#!/bin/sh
# Echo all command-line arguments. "$@" preserves each argument's
# boundaries and whitespace; the original unquoted $* would word-split
# and glob-expand them.
echo "$@"
......@@ -56,6 +56,16 @@ def test_job_stdout():
os.remove("null.error")
def test_shell_script_in_working_directory(caplog):
test_job = Job("test/localscript.condor")
test_job.execute()
with open("test.sh.out", "r") as f:
contents = f.read()
assert "hello" in contents
os.remove("test.sh.out")
os.remove("test.sh.error")
def test_job_stderr():
"""
Test that vulture writes correct output from stderr to a log file
......
......@@ -133,6 +133,7 @@ class Job:
:param write_log: Boolean that determines whether a log file will be written
"""
self.fields, self.command = self.parse(file)
self.job_directory = Path(file).resolve().parent
self.write_log = write_log
if self.write_log:
if self.fields.get("executable"):
......@@ -216,8 +217,19 @@ class Job:
# Execute
if self.fields.get("executable"):
# So there is a thing that can happen here, where the condor file refers to a script in the same directory
# HTCondor will happily execute such a thing, but using subprocess.run like this will not. So we have to
# detect if there is a file in the current directory with the same name as the executable, and if there is,
# we must prefix with "./" to get the right behavior.
if (self.job_directory / self.fields["executable"]).exists():
executable = str(
(self.job_directory / self.fields["executable"]).resolve()
)
else:
executable = self.fields["executable"]
process = subprocess.run(
[self.fields["executable"], self.fields["arguments"]],
[executable, self.fields["arguments"]],
capture_output=capture_output,
text=True,
)
......
......@@ -12,6 +12,4 @@
dotenv: build.env
rules:
- if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH
when: always
- if: '$CI_PIPELINE_SOURCE == "merge_request_event"'
when: always
\ No newline at end of file
- if: '$CI_PIPELINE_SOURCE == "merge_request_event"'
\ No newline at end of file
......@@ -11,6 +11,4 @@
- if: '$CI_PIPELINE_SOURCE == "merge_request_event" && $CI_COMMIT_MESSAGE =~ /\A(?i)-debug/'
when: never
- if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH
when: always
- if: '$CI_PIPELINE_SOURCE == "merge_request_event"'
when: always
\ No newline at end of file
- if: '$CI_PIPELINE_SOURCE == "merge_request_event"'
\ No newline at end of file
No preview for this file type
......@@ -6,5 +6,4 @@
- docker push ${NAME}:${CI_COMMIT_SHORT_SHA}
- docker push ${NAME}:${CI_DEFAULT_BRANCH}
rules:
- if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH
when: always
\ No newline at end of file
- if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH
\ No newline at end of file
.unit-test:
image: ${REGISTRY_URL}/${PROJECT_NAME}/${SERVICE_NAME}:${CI_COMMIT_SHORT_SHA}
script:
- IMAGE_NAME="${REGISTRY_URL}/${PROJECT_NAME}/${SERVICE_NAME}"
- TEST_CONTAINER="${SERVICE_NAME}-${CI_COMMIT_SHORT_SHA}"
- docker run --name "${TEST_CONTAINER}" ${IMAGE_NAME}:${CI_COMMIT_SHORT_SHA} ./bin/run-tests.sh -b
- docker cp "${TEST_CONTAINER}":/code/.coverage ./.coverage.${SERVICE_NAME}.${CI_COMMIT_SHORT_SHA}
- docker container stop "${TEST_CONTAINER}" && docker container rm "${TEST_CONTAINER}"
- cd /code && ./bin/run-tests.sh -b
- mv .coverage ${CI_PROJECT_DIR}/.coverage.${SERVICE_NAME}.${CI_COMMIT_SHORT_SHA}
artifacts:
paths:
- .coverage.${SERVICE_NAME}.${CI_COMMIT_SHORT_SHA}
rules:
- if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH
when: always
- if: '$CI_PIPELINE_SOURCE == "merge_request_event"'
when: always
\ No newline at end of file
- if: '$CI_PIPELINE_SOURCE == "merge_request_event"'
\ No newline at end of file
......@@ -14,3 +14,15 @@ edu.nrao.archive.configuration.AmqpServer.port = 5672
#
edu.nrao.archive.workspaces.CapabilitySettings.serviceUrl = http://capability:3457
edu.nrao.archive.workspaces.WorkflowSettings.serviceUrl = http://workflow:3456
#
# Data fetcher settings
#
edu.nrao.archive.workflow.config.DataFetcherSettings.ramInGb = 16
edu.nrao.archive.workflow.config.DataFetcherSettings.clusterTimeout = 01:00:00:00
edu.nrao.archive.datafetcher.DataFetcherSettings.locatorServiceUrlPrefix = https://webtest.aoc.nrao.edu/archiveServices/location?locator
edu.nrao.archive.datafetcher.DataFetcherSettings.defaultThreadsPerHost = 4
edu.nrao.archive.datafetcher.DataFetcherSettings.executionSite = local_test
edu.nrao.archive.datafetcher.DataFetcherSettings.downloadDirectory=/tmp/
edu.nrao.archive.workflow.config.RequestHandlerSettings.downloadDirectory=/tmp/
\ No newline at end of file
......@@ -5,16 +5,19 @@
-e ../packages/shared/channels
-e ../packages/shared/workspaces
-e ../packages/apps/cli/utilities/wf_monitor
-e ../packages/apps/cli/executables/datafetcher
-e ../packages/apps/cli/executables/null
-e ../packages/apps/cli/executables/vulture
-e ../packages/testing
pycapo == 0.3.0
psycopg2 >= 2.8.5,<3.0
pycapo == 0.3.1
pyramid == 1.10
pyramid_beaker == 0.8
pyramid_debugtoolbar == 4.5
pyramid_retry == 2.1.1
pyramid_tm == 2.2.1
pytest == 5.4.3
requests == 2.23
sqlalchemy == 1.3.23
waitress == 1.4
......