Skip to content
Snippets Groups Projects

Compare revisions

Changes are shown as if the source revision was being merged into the target revision. Learn more about comparing revisions.

Source

Select target project
No results found

Target

Select target project
  • ssa/workspaces
1 result
Show changes
Showing
with 489 additions and 382 deletions
# CI Build Template # CI Build Template
.build: .build:
variables:
CAPO_BUILD_ARG: "docker"
script: script:
- echo "Building branch or tag -- ${IMAGE_TAG}" - echo "Building branch or tag -- ${IMAGE_TAG}"
- NAME="${REGISTRY_URL}/${PROJECT_NAME}/${SERVICE_NAME}" - NAME="${REGISTRY_URL}/${PROJECT_NAME}/${SERVICE_NAME}"
- docker build -t ${NAME}:${CI_COMMIT_SHORT_SHA} -f ${PATH_PREFIX}${SERVICE_NAME}/Dockerfile.dev . --build-arg capo_env=${CAPO_BUILD_ARG} --build-arg TAGNAME=${CI_COMMIT_SHORT_SHA} - docker build -t ${NAME}:${CI_COMMIT_SHORT_SHA} -f ${PATH_PREFIX}${SERVICE_NAME}/Dockerfile . --build-arg env=${DEPLOY_ENV} --build-arg TAGNAME=${CI_COMMIT_SHORT_SHA}
- docker tag ${NAME}:${CI_COMMIT_SHORT_SHA} ${NAME}:${IMAGE_TAG} - docker tag ${NAME}:${CI_COMMIT_SHORT_SHA} ${NAME}:${IMAGE_TAG}
- echo "TAG=${IMAGE_TAG}" >> build.env - echo "TAG=${IMAGE_TAG}" >> build.env
artifacts: artifacts:
...@@ -15,16 +13,19 @@ ...@@ -15,16 +13,19 @@
- if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH - if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH
variables: variables:
IMAGE_TAG: $CI_COMMIT_BRANCH IMAGE_TAG: $CI_COMMIT_BRANCH
DEPLOY_ENV: "dev"
- if: '$CI_PIPELINE_SOURCE == "merge_request_event"' - if: '$CI_PIPELINE_SOURCE == "merge_request_event"'
variables: variables:
IMAGE_TAG: $CI_MERGE_REQUEST_SOURCE_BRANCH_NAME IMAGE_TAG: $CI_MERGE_REQUEST_SOURCE_BRANCH_NAME
DEPLOY_ENV: "dev"
- if: '$CI_PIPELINE_SOURCE == "merge_request_event"'
- if: '$CI_COMMIT_TAG =~ /^end-of-sprint-[0-9]+/' - if: '$CI_COMMIT_TAG =~ /^end-of-sprint-[0-9]+/'
variables: variables:
IMAGE_TAG: $CI_COMMIT_TAG IMAGE_TAG: $CI_COMMIT_TAG
# override DEPLOY_ENV # override DEPLOY_ENV
DEPLOY_ENV: "test" DEPLOY_ENV: "test"
# - if: '$CI_COMMIT_TAG =~ /^test_[0-9]+\.[0-9]+.[0-9]+$/' - if: '$CI_COMMIT_TAG =~ /[0-9]+\.[0-9]+\.[0-9]+$/'
# variables: variables:
# IMAGE_TAG: $CI_COMMIT_TAG IMAGE_TAG: $CI_COMMIT_TAG
# # override DEPLOY_ENV # override DEPLOY_ENV
# DEPLOY_ENV: "test" DEPLOY_ENV: "prod"
\ No newline at end of file
...@@ -12,7 +12,7 @@ ...@@ -12,7 +12,7 @@
- if: '$CI_COMMIT_TAG =~ /^end-of-sprint-[0-9]+/' - if: '$CI_COMMIT_TAG =~ /^end-of-sprint-[0-9]+/'
variables: variables:
IMAGE_TAG: $CI_COMMIT_TAG IMAGE_TAG: $CI_COMMIT_TAG
# - if: '$CI_COMMIT_TAG =~ /^test_[0-9]+\.[0-9]+.[0-9]+$/' - if: '$CI_COMMIT_TAG =~ /[0-9]+\.[0-9]+\.[0-9]+$/'
# variables: variables:
# IMAGE_TAG: $CI_COMMIT_TAG IMAGE_TAG: $CI_COMMIT_TAG
dependencies: [] dependencies: []
\ No newline at end of file
...@@ -10,4 +10,10 @@ ...@@ -10,4 +10,10 @@
rules: rules:
- if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH - if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH
- if: '$CI_PIPELINE_SOURCE == "merge_request_event"' - if: '$CI_PIPELINE_SOURCE == "merge_request_event"'
- if: '$CI_COMMIT_TAG =~ /^end-of-sprint-[0-9]+/'
variables:
IMAGE_TAG: $CI_COMMIT_TAG
- if: '$CI_COMMIT_TAG =~ /[0-9]+\.[0-9]+\.[0-9]+$/'
variables:
IMAGE_TAG: $CI_COMMIT_TAG
dependencies: [] dependencies: []
# HTCondor configuration for a submit host in the nmpost pool.
# Central manager (collector/negotiator) for this pool.
CONDOR_HOST = nmpost-cm.aoc.nrao.edu
# This machine only submits jobs; it is not an execute or central-manager node.
use ROLE : Submit
# Limit write and daemon-level access to machines in the AOC domain.
ALLOW_WRITE = *.aoc.nrao.edu
ALLOW_DAEMON = $(ALLOW_WRITE)
# one or more submit hosts has an external IP
PRIVATE_NETWORK_NAME = nrao.edu
# while not in production yet
CONDOR_ADMIN = krowe@nrao.edu
# send email if job errors
JOB_DEFAULT_NOTIFICATION = ERROR
# Differentiate from other pools if needed
PoolName = "nmpost"
# Set UID domain
UID_DOMAIN = aoc.nrao.edu
...@@ -22,24 +22,24 @@ command again unless you delete this image or modify the `Dockerfile.base` Docke ...@@ -22,24 +22,24 @@ command again unless you delete this image or modify the `Dockerfile.base` Docke
### Starting Workspaces ### Starting Workspaces
To start Workspaces as foreground process, run: To start Workspaces as foreground process, run:
```sh ```sh
docker-compose up docker compose -f docker-compose.local.yml up
``` ```
To start Workspaces as background process, run: To start Workspaces as background process, run:
```sh ```sh
docker-compose up -d docker compose -f docker-compose.local.yml up -d
``` ```
### Stopping Workspaces ### Stopping Workspaces
To stop Workspaces, run: To stop Workspaces, run:
```sh ```sh
docker-compose down docker compose -f docker-compose.local.yml down
``` ```
### Starting Individual Services ### Starting Individual Services
To start Workspaces as foreground process, run: To start Workspaces as foreground process, run:
```sh ```sh
docker-compose up <service_name> docker compose -f docker-compose.local.yml up <service_name>
``` ```
### List Containers ### List Containers
...@@ -71,7 +71,7 @@ docker exec -it <container_name> bash ...@@ -71,7 +71,7 @@ docker exec -it <container_name> bash
Using Docker Compose: Using Docker Compose:
```sh ```sh
docker-compose exec <service_name> bash docker compose -f docker-compose.local.yml exec <service_name> bash
``` ```
### Run a command on a Running Container ### Run a command on a Running Container
...@@ -82,5 +82,5 @@ docker exec <container_name> pwd ...@@ -82,5 +82,5 @@ docker exec <container_name> pwd
Using Docker Compose: Using Docker Compose:
```sh ```sh
docker-compose exec <service_name> pwd docker compose -f docker-compose.local.yml exec <service_name> pwd
``` ```
# Docker Swarm stack for the Workspaces system.
# ${TAG} selects the image tag to deploy; ${ENV} selects the target
# environment (used for secret names, CAPO profile, node placement, and
# lustre paths). Interpolation happens wherever this file is pre-processed
# before `docker stack deploy`.
version: '3.8'
services:
  # Workflow service: runs on the host network (needs direct access to
  # HTCondor/cluster ports) rather than a published-port mapping.
  workflow:
    image: ssa-containers.aoc.nrao.edu/workspaces/workflow:${TAG}
    networks:
      - host
    secrets:
      # CAPO properties file mounted at /run/secrets/dsoc-${ENV}.properties
      - source: dsoc_${ENV}_secrets
        target: dsoc-${ENV}.properties
    environment:
      CAPO_PATH: /run/secrets
      CAPO_PROFILE: dsoc-${ENV}
    deploy:
      placement:
        constraints:
          # Pin replicas to nodes labelled for this environment.
          - "node.labels.node_env==${ENV}"
      restart_policy:
        condition: on-failure
        max_attempts: 3
        window: 5s
      update_config:
        parallelism: 2
        delay: 5s
        order: stop-first
        failure_action: rollback
      rollback_config:
        # parallelism 0 = roll back all tasks at once.
        parallelism: 0
        order: stop-first
    volumes:
      # Shared cluster storage and tools must be present on the swarm nodes.
      - /lustre/aoc/cluster/pipeline/dsoc-${ENV}/downloads/nrao:/lustre/aoc/cluster/pipeline/dsoc-${ENV}/downloads/nrao
      - /lustre/aoc/cluster/pipeline/dsoc-${ENV}/workspaces:/lustre/aoc/cluster/pipeline/dsoc-${ENV}/workspaces
      - /home/ssa/bin/python3.8:/home/ssa/bin/python3.8
      - /home/casa/capo:/home/casa/capo
  # Capability service: REST API published directly on each node (mode: host).
  capability:
    image: ssa-containers.aoc.nrao.edu/workspaces/capability:${TAG}
    ports:
      - target: 3457
        published: 3457
        protocol: tcp
        mode: host
    secrets:
      - source: dsoc_${ENV}_secrets
        target: dsoc-${ENV}.properties
    environment:
      CAPO_PATH: /run/secrets
      CAPO_PROFILE: dsoc-${ENV}
    deploy:
      placement:
        constraints:
          - "node.labels.node_env==${ENV}"
      restart_policy:
        condition: on-failure
        max_attempts: 3
        window: 5s
      update_config:
        parallelism: 2
        delay: 5s
        order: stop-first
        failure_action: rollback
      rollback_config:
        parallelism: 0
        order: stop-first
  # Notification service: same deployment shape as capability, port 3458.
  notification:
    image: ssa-containers.aoc.nrao.edu/workspaces/notification:${TAG}
    ports:
      - target: 3458
        published: 3458
        protocol: tcp
        mode: host
    secrets:
      - source: dsoc_${ENV}_secrets
        target: dsoc-${ENV}.properties
    environment:
      CAPO_PATH: /run/secrets
      CAPO_PROFILE: dsoc-${ENV}
    deploy:
      placement:
        constraints:
          - "node.labels.node_env==${ENV}"
      restart_policy:
        condition: on-failure
        max_attempts: 3
        window: 5s
      update_config:
        parallelism: 2
        delay: 5s
        order: stop-first
        failure_action: rollback
      rollback_config:
        parallelism: 0
        order: stop-first
  # Web front end: container port 80 published as 4444 on each node.
  web:
    image: ssa-containers.aoc.nrao.edu/workspaces/web:${TAG}
    ports:
      - target: 80
        published: 4444
        protocol: tcp
        mode: host
    environment:
      DL_HOST: ${DL_HOST}
      ENV_HOST: ${ENV_HOST}
    deploy:
      placement:
        constraints:
          - "node.labels.node_env==${ENV}"
      restart_policy:
        condition: on-failure
        max_attempts: 3
        window: 5s
      update_config:
        parallelism: 2
        delay: 5s
        order: stop-first
        failure_action: rollback
      rollback_config:
        parallelism: 0
        order: stop-first
secrets:
  # NOTE(review): this key is the literal string 'dsoc_ENV_secrets', while the
  # services reference 'dsoc_${ENV}_secrets'. Compose interpolation applies to
  # values, not mapping keys — confirm the deploy tooling rewrites or resolves
  # this key as intended before relying on it.
  dsoc_ENV_secrets:
    external: true
networks:
  # Attach to the node's host network (pre-existing, not stack-managed).
  host:
    external: true
    name: host
# Local development stack for Workspaces (docker-compose.local.yml).
# Services are built from local Dockerfiles; source trees are bind-mounted
# into the containers so code changes are picked up without rebuilding.
version: '3.8'
services:
  # Reverse proxy fronting the stack on http://localhost:4444.
  nginx:
    image: nginx:1.19.7-alpine
    restart: always
    ports:
      - "4444:80"
    depends_on:
      - frontend
    command: nginx -g "daemon off;"
    volumes:
      - ./apps/web/ws-nginx.local.conf:/etc/nginx/conf.d/default.conf
      # Fake lustre tree so paths match the production filesystem layout.
      - ./lustre/:/lustre/aoc/cluster/pipeline/docker/workspaces
      - ./delivery_root:/tmp/delivery_root
  # PostgreSQL with the archive schema, exposed on host port 54322 to avoid
  # colliding with a locally installed postgres on 5432.
  db:
    image: ssa-containers.aoc.nrao.edu/ops/ci/db:workspaces
    restart: always
    ports:
      - "54322:5432"
    environment:
      POSTGRES_USER: archive
      POSTGRES_PASSWORD: docker
    command: postgres -c listen_addresses=*
  # RabbitMQ message broker; 15672 is the management UI.
  amqp:
    image: rabbitmq:3.8-management
    restart: always
    ports:
      - "15672:15672"
  # HTCondor central manager (collector/negotiator); 9618 is exposed to the
  # compose network only (no fixed host port).
  condor-cm:
    build:
      context: .
      dockerfile: ./config/htcondor/cm/Dockerfile.local
    ports:
      - 9618
  # HTCondor execute node; shares the delivery and lustre mounts with the
  # services that hand work to it.
  condor-execute:
    build:
      context: .
      dockerfile: ./config/htcondor/execute/Dockerfile.local
    volumes:
      - ./delivery_root:/tmp/delivery_root
      - ./lustre/aoc/cluster/pipeline/docker/workspaces:/lustre/aoc/cluster/pipeline/docker/workspaces
  # NGAS archive server used as the product store in local testing.
  ngas:
    build:
      context: .
      dockerfile: ./Dockerfile.ngas
    depends_on:
      - db
    ports:
      - "7777:7777"
    volumes:
      - ./local_ngas_root:/home/ngas/NGAS
  # One-shot container that applies Alembic migrations against db.
  schema:
    build:
      context: ./schema/
      dockerfile: Dockerfile.local
    depends_on:
      - db
    volumes:
      - ./schema:/code/schema
      - ./docker.properties:/home/ssa/capo/docker.properties
# Used for doing a DB migration on dsoc-dev
# Uncomment this and run the command `docker compose -f docker-compose.local.yml up schema-dev` and the migration will run
#  schema-dev:
#    build:
#      context: ./schema/
#      dockerfile: Dockerfile.local
#    command: ["./bin/run-migrations.sh", "dsoc-dev"]
#    depends_on:
#      - db
#    volumes:
#      - ./schema:/code/schema
#      - ~/.capo:/home/casa/capo
  # Workflow service; 3456 is its REST API, 9618 lets HTCondor reach it.
  workflow:
    build:
      context: .
      dockerfile: ./services/workflow/Dockerfile.local
    ports:
      - "3456:3456"
      - 9618
    depends_on:
      - schema
      - amqp
    volumes:
      - ./services/workflow:/code
      # Anonymous volumes below shadow generated .egg-info directories so the
      # host checkout is not polluted by in-container installs.
      - /code/ssa_workflow.egg-info
      - ./shared:/packages/shared
      - ./apps/cli:/packages/apps/cli
      - ./testing:/packages/testing
      - ./lustre/aoc/cluster/pipeline/docker/workspaces:/lustre/aoc/cluster/pipeline/docker/workspaces
      - ./delivery_root:/tmp/delivery_root
      - ~/.capo:/home/ssa/capo
      - ./docker.properties:/home/ssa/capo/docker.properties
      - /packages/shared/workspaces/ssa_workspaces.egg-info
      - /packages/shared/schema/ssa_schema.egg-info
      - /packages/shared/messaging/ssa_messaging.egg-info
      - /packages/apps/cli/utilities/wf_monitor/ssa_wf_monitor.egg-info
      - /packages/apps/cli/utilities/system_mediator/ssa_system_mediator.egg-info
      - /packages/apps/cli/utilities/ws_metrics/ssa_ws_metrics.egg-info
      - /packages/apps/cli/utilities/aat_wrest/ssa_aat_wrest.egg-info
      - /packages/apps/cli/executables/tmpdir_eraser/ssa_tmpdir_eraser.egg-info
      - /packages/apps/cli/executables/pexable/productfetcher/ssa_productfetcher.egg-info
      - /packages/apps/cli/executables/pexable/casa_envoy/ssa_casa_envoy.egg-info
      - /packages/apps/cli/executables/pexable/vela/ssa_vela.egg-info
      - /packages/apps/cli/executables/pexable/delivery/ssa_delivery.egg-info
      - /packages/apps/cli/executables/pexable/null/ssa_null.egg-info
  # Capability service on 3457; depends on the other back-end services.
  capability:
    build:
      context: .
      dockerfile: ./services/capability/Dockerfile.local
    ports:
      - "3457:3457"
    depends_on:
      - schema
      - workflow
      - notification
      - amqp
    volumes:
      - ./docker.properties:/home/ssa/capo/docker.properties
      - ./services/capability:/code
      - ./shared:/packages/shared
      - ./apps/cli:/packages/apps/cli
      - ./testing:/packages/testing
      # Anonymous volumes shadowing generated .egg-info directories.
      - /code/ssa_capability.egg-info
      - /packages/shared/workspaces/ssa_workspaces.egg-info
      - /packages/shared/schema/ssa_schema.egg-info
      - /packages/shared/messaging/ssa_messaging.egg-info
      - /packages/apps/cli/utilities/wf_monitor/ssa_wf_monitor.egg-info
      - /packages/apps/cli/utilities/system_mediator/ssa_system_mediator.egg-info
      - /packages/apps/cli/utilities/ws_metrics/ssa_ws_metrics.egg-info
      - /packages/apps/cli/utilities/aat_wrest/ssa_aat_wrest.egg-info
      - /packages/apps/cli/executables/pexable/vela/ssa_vela.egg-info
      - /packages/apps/cli/executables/tmpdir_eraser/ssa_tmpdir_eraser.egg-info
      - /packages/apps/cli/executables/pexable/productfetcher/ssa_productfetcher.egg-info
      - /packages/apps/cli/executables/pexable/casa_envoy/ssa_casa_envoy.egg-info
      - /packages/apps/cli/executables/pexable/delivery/ssa_delivery.egg-info
      - /packages/apps/cli/executables/pexable/null/ssa_null.egg-info
  # Notification service on 3458.
  notification:
    build:
      context: .
      dockerfile: ./services/notification/Dockerfile.local
    ports:
      - "3458:3458"
    depends_on:
      - schema
      - amqp
    volumes:
      - ./docker.properties:/home/ssa/capo/docker.properties
      - ./services/notification:/code
      - /code/ssa_notification.egg-info
      - ./shared:/packages/shared
      - ./apps/cli:/packages/apps/cli
      - ./testing:/packages/testing
      # Anonymous volumes shadowing generated .egg-info directories.
      - /packages/shared/workspaces/ssa_workspaces.egg-info
      - /packages/shared/schema/ssa_schema.egg-info
      - /packages/shared/messaging/ssa_messaging.egg-info
      - /packages/apps/cli/utilities/wf_monitor/ssa_wf_monitor.egg-info
      - /packages/apps/cli/utilities/system_mediator/ssa_system_mediator.egg-info
      - /packages/apps/cli/utilities/ws_metrics/ssa_ws_metrics.egg-info
      - /packages/apps/cli/utilities/aat_wrest/ssa_aat_wrest.egg-info
      - /packages/apps/cli/executables/pexable/vela/ssa_vela.egg-info
      - /packages/apps/cli/executables/tmpdir_eraser/ssa_tmpdir_eraser.egg-info
      - /packages/apps/cli/executables/pexable/productfetcher/ssa_productfetcher.egg-info
      - /packages/apps/cli/executables/pexable/casa_envoy/ssa_casa_envoy.egg-info
      - /packages/apps/cli/executables/pexable/delivery/ssa_delivery.egg-info
      - /packages/apps/cli/executables/pexable/null/ssa_null.egg-info
  # Angular dev server on 4200 (nginx proxies it at 4444).
  frontend:
    build:
      context: .
      dockerfile: ./apps/web/Dockerfile.local
    # init: true reaps zombie node processes spawned by the dev server.
    init: true
    ports:
      - "4200:4200"
    depends_on:
      - capability
      - workflow
    volumes:
      - ./apps/web:/code
      # Keep container-installed node_modules out of the host checkout.
      - /code/node_modules
version: '3.8' version: '3.8'
services: services:
nginx:
image: nginx:1.19.7-alpine
restart: always
ports:
- "4444:80"
depends_on:
- frontend
command: nginx -g "daemon off;"
volumes:
- ./apps/web/ws-nginx.local.conf:/etc/nginx/conf.d/default.conf
- ./lustre/:/lustre/aoc/cluster/pipeline/docker/workspaces
- ./delivery_root:/tmp/delivery_root
db:
image: ssa-containers.aoc.nrao.edu/ops/ci/db:workspaces
restart: always
ports:
- "54322:5432"
environment:
POSTGRES_USER: archive
POSTGRES_PASSWORD: docker
command: postgres -c listen_addresses=*
amqp:
image: rabbitmq:3.8-management
restart: always
ports:
- "15672:15672"
condor-cm:
build:
context: .
dockerfile: ./config/htcondor/cm/Dockerfile.local
ports:
- 9618
condor-execute:
build:
context: .
dockerfile: ./config/htcondor/execute/Dockerfile.local
volumes:
- ./delivery_root:/tmp/delivery_root
- ./lustre/aoc/cluster/pipeline/docker/workspaces:/lustre/aoc/cluster/pipeline/docker/workspaces
ngas:
build:
context: .
dockerfile: ./Dockerfile.ngas
depends_on:
- db
ports:
- "7777:7777"
volumes:
- ./local_ngas_root:/home/ngas/NGAS
schema:
build:
context: ./schema/
dockerfile: Dockerfile.local
depends_on:
- db
volumes:
- ./schema:/code/schema
- ./docker.properties:/home/ssa/capo/docker.properties
# Used for doing a DB migration on dsoc-dev
# Uncomment this and run the command `docker compose up schema-dev` and the migration will run
# schema-dev:
# build:
# context: ./schema/
# dockerfile: Dockerfile.local
# command: ["./bin/run-migrations.sh", "dsoc-dev"]
# depends_on:
# - db
# volumes:
# - ./schema:/code/schema
# - ~/.capo:/home/casa/capo
workflow: workflow:
build: image: ssa-containers.aoc.nrao.edu/workspaces/workflow:${TAG}
context: . networks:
dockerfile: ./services/workflow/Dockerfile.local - host
ports: secrets:
- "3456:3456" - source: dsoc_${ENV}_secrets
- 9618 target: dsoc-${ENV}.properties
depends_on: environment:
- schema CAPO_PATH: /run/secrets
- amqp CAPO_PROFILE: dsoc-${ENV}
deploy:
placement:
constraints:
- "node.labels.node_env==${ENV}"
restart_policy:
condition: on-failure
max_attempts: 3
window: 5s
update_config:
parallelism: 2
delay: 5s
order: stop-first
failure_action: rollback
rollback_config:
parallelism: 0
order: stop-first
volumes: volumes:
- ./services/workflow:/code - /lustre/aoc/cluster/pipeline/dsoc-${ENV}/downloads/nrao:/lustre/aoc/cluster/pipeline/dsoc-${ENV}/downloads/nrao
- /code/ssa_workflow.egg-info - /lustre/aoc/cluster/pipeline/dsoc-${ENV}/workspaces:/lustre/aoc/cluster/pipeline/dsoc-${ENV}/workspaces
- ./shared:/packages/shared - /home/ssa/bin/python3.8:/home/ssa/bin/python3.8
- ./apps/cli:/packages/apps/cli - /home/casa/capo:/home/casa/capo
- ./testing:/packages/testing
- ./lustre/aoc/cluster/pipeline/docker/workspaces:/lustre/aoc/cluster/pipeline/docker/workspaces
- ./delivery_root:/tmp/delivery_root
- ~/.capo:/home/ssa/capo
- ./docker.properties:/home/ssa/capo/docker.properties
- /packages/shared/workspaces/ssa_workspaces.egg-info
- /packages/shared/schema/ssa_schema.egg-info
- /packages/shared/messaging/ssa_messaging.egg-info
- /packages/apps/cli/utilities/wf_monitor/ssa_wf_monitor.egg-info
- /packages/apps/cli/utilities/system_mediator/ssa_system_mediator.egg-info
- /packages/apps/cli/utilities/ws_metrics/ssa_ws_metrics.egg-info
- /packages/apps/cli/utilities/aat_wrest/ssa_aat_wrest.egg-info
- /packages/apps/cli/executables/tmpdir_eraser/ssa_tmpdir_eraser.egg-info
- /packages/apps/cli/executables/pexable/productfetcher/ssa_productfetcher.egg-info
- /packages/apps/cli/executables/pexable/casa_envoy/ssa_casa_envoy.egg-info
- /packages/apps/cli/executables/pexable/vela/ssa_vela.egg-info
- /packages/apps/cli/executables/pexable/delivery/ssa_delivery.egg-info
- /packages/apps/cli/executables/pexable/null/ssa_null.egg-info
capability: capability:
build: image: ssa-containers.aoc.nrao.edu/workspaces/capability:${TAG}
context: .
dockerfile: ./services/capability/Dockerfile.local
ports: ports:
- "3457:3457" - target: 3457
depends_on: published: 3457
- schema protocol: tcp
- workflow mode: host
- notification secrets:
- amqp - source: dsoc_${ENV}_secrets
volumes: target: dsoc-${ENV}.properties
- ./docker.properties:/home/ssa/capo/docker.properties environment:
- ./services/capability:/code CAPO_PATH: /run/secrets
- ./shared:/packages/shared CAPO_PROFILE: dsoc-${ENV}
- ./apps/cli:/packages/apps/cli deploy:
- ./testing:/packages/testing placement:
- /code/ssa_capability.egg-info constraints:
- /packages/shared/workspaces/ssa_workspaces.egg-info - "node.labels.node_env==${ENV}"
- /packages/shared/schema/ssa_schema.egg-info restart_policy:
- /packages/shared/messaging/ssa_messaging.egg-info condition: on-failure
- /packages/apps/cli/utilities/wf_monitor/ssa_wf_monitor.egg-info max_attempts: 3
- /packages/apps/cli/utilities/system_mediator/ssa_system_mediator.egg-info window: 5s
- /packages/apps/cli/utilities/ws_metrics/ssa_ws_metrics.egg-info update_config:
- /packages/apps/cli/utilities/aat_wrest/ssa_aat_wrest.egg-info parallelism: 2
- /packages/apps/cli/executables/pexable/vela/ssa_vela.egg-info delay: 5s
- /packages/apps/cli/executables/tmpdir_eraser/ssa_tmpdir_eraser.egg-info order: stop-first
- /packages/apps/cli/executables/pexable/productfetcher/ssa_productfetcher.egg-info failure_action: rollback
- /packages/apps/cli/executables/pexable/casa_envoy/ssa_casa_envoy.egg-info rollback_config:
- /packages/apps/cli/executables/pexable/delivery/ssa_delivery.egg-info parallelism: 0
- /packages/apps/cli/executables/pexable/null/ssa_null.egg-info order: stop-first
notification: notification:
build: image: ssa-containers.aoc.nrao.edu/workspaces/notification:${TAG}
context: .
dockerfile: ./services/notification/Dockerfile.local
ports: ports:
- "3458:3458" - target: 3458
depends_on: published: 3458
- schema protocol: tcp
- amqp mode: host
volumes: secrets:
- ./docker.properties:/home/ssa/capo/docker.properties - source: dsoc_${ENV}_secrets
- ./services/notification:/code target: dsoc-${ENV}.properties
- /code/ssa_notification.egg-info environment:
- ./shared:/packages/shared CAPO_PATH: /run/secrets
- ./apps/cli:/packages/apps/cli CAPO_PROFILE: dsoc-${ENV}
- ./testing:/packages/testing deploy:
- /packages/shared/workspaces/ssa_workspaces.egg-info placement:
- /packages/shared/schema/ssa_schema.egg-info constraints:
- /packages/shared/messaging/ssa_messaging.egg-info - "node.labels.node_env==${ENV}"
- /packages/apps/cli/utilities/wf_monitor/ssa_wf_monitor.egg-info restart_policy:
- /packages/apps/cli/utilities/system_mediator/ssa_system_mediator.egg-info condition: on-failure
- /packages/apps/cli/utilities/ws_metrics/ssa_ws_metrics.egg-info max_attempts: 3
- /packages/apps/cli/utilities/aat_wrest/ssa_aat_wrest.egg-info window: 5s
- /packages/apps/cli/executables/pexable/vela/ssa_vela.egg-info update_config:
- /packages/apps/cli/executables/tmpdir_eraser/ssa_tmpdir_eraser.egg-info parallelism: 2
- /packages/apps/cli/executables/pexable/productfetcher/ssa_productfetcher.egg-info delay: 5s
- /packages/apps/cli/executables/pexable/casa_envoy/ssa_casa_envoy.egg-info order: stop-first
- /packages/apps/cli/executables/pexable/delivery/ssa_delivery.egg-info failure_action: rollback
- /packages/apps/cli/executables/pexable/null/ssa_null.egg-info rollback_config:
parallelism: 0
order: stop-first
frontend: web:
build: image: ssa-containers.aoc.nrao.edu/workspaces/web:${TAG}
context: .
dockerfile: ./apps/web/Dockerfile.local
init: true
ports: ports:
- "4200:4200" - target: 80
depends_on: published: 4444
- capability protocol: tcp
- workflow mode: host
volumes: environment:
- ./apps/web:/code DL_HOST: ${DL_HOST}
- /code/node_modules ENV_HOST: ${ENV_HOST}
deploy:
placement:
constraints:
- "node.labels.node_env==${ENV}"
restart_policy:
condition: on-failure
max_attempts: 3
window: 5s
update_config:
parallelism: 2
delay: 5s
order: stop-first
failure_action: rollback
rollback_config:
parallelism: 0
order: stop-first
secrets:
dsoc_ENV_secrets:
external: true
networks:
host:
external: true
name: host
...@@ -12,19 +12,19 @@ edu.nrao.archive.configuration.AmqpServer.port = 5672 ...@@ -12,19 +12,19 @@ edu.nrao.archive.configuration.AmqpServer.port = 5672
# #
# Workspaces settings # Workspaces settings
# #
edu.nrao.archive.workspaces.CapabilitySettings.serviceUrl = http://capability:3457 edu.nrao.workspaces.CapabilitySettings.serviceUrl = http://capability:3457
edu.nrao.archive.workspaces.WorkflowSettings.serviceUrl = http://workflow:3456 edu.nrao.workspaces.WorkflowSettings.serviceUrl = http://workflow:3456
edu.nrao.archive.workspaces.NotificationSettings.serviceUrl = http://notification:3458 edu.nrao.workspaces.NotificationSettings.serviceUrl = http://notification:3458
edu.nrao.archive.workspaces.UISettings.serviceUrl = http://localhost:4444/workspaces edu.nrao.workspaces.UISettings.serviceUrl = http://localhost:4444/workspaces
# #
# Processing Settings # Processing Settings
# #
edu.nrao.archive.workspaces.ProcessingSettings.useCasa = false edu.nrao.workspaces.ProcessingSettings.useCasa = false
edu.nrao.archive.workspaces.ProcessingSettings.rootDirectory = /lustre/aoc/cluster/pipeline/docker/workspaces/spool edu.nrao.workspaces.ProcessingSettings.rootDirectory = /lustre/aoc/cluster/pipeline/docker/workspaces/spool
edu.nrao.archive.workspaces.ProcessingSettings.scriptLocation = /packages/apps/cli/executables/wf_framework/sh edu.nrao.workspaces.ProcessingSettings.scriptLocation = /lustre/aoc/cluster/pipeline/docker/workspaces/sbin
edu.nrao.archive.workspaces.ProcessingSettings.ramInGb = 1G edu.nrao.workspaces.ProcessingSettings.ramInGb = 1G
edu.nrao.workspaces.ProcessingSettings.autoGenerateStandardCals = False
edu.nrao.archive.workflow.config.CasaVersions.homeForReprocessing = /home/casa/packages/pipeline/current edu.nrao.archive.workflow.config.CasaVersions.homeForReprocessing = /home/casa/packages/pipeline/current
...@@ -34,37 +34,31 @@ edu.nrao.archive.workflow.config.CasaVersions.homeForReprocessing = /home/casa/p ...@@ -34,37 +34,31 @@ edu.nrao.archive.workflow.config.CasaVersions.homeForReprocessing = /home/casa/p
edu.nrao.archive.workflow.config.DeliverySettings.nraoDownloadDirectory = /tmp/delivery_root edu.nrao.archive.workflow.config.DeliverySettings.nraoDownloadDirectory = /tmp/delivery_root
edu.nrao.archive.workflow.config.DeliverySettings.nraoDownloadUrl = http://localhost:4444/dl edu.nrao.archive.workflow.config.DeliverySettings.nraoDownloadUrl = http://localhost:4444/dl
# #
# Ingestion Settings # Ingestion Settings
# #
edu.nrao.archive.workspaces.IngestionSettings.stagingDirectory = /lustre/aoc/cluster/pipeline/docker/workspaces/staging edu.nrao.workspaces.IngestionSettings.stagingDirectory = /lustre/aoc/cluster/pipeline/docker/workspaces/staging
edu.nrao.archive.workspaces.IngestionSettings.storageDirectory = /lustre/aoc/cluster/pipeline/docker/workspaces/storage edu.nrao.workspaces.IngestionSettings.storageDirectory = /lustre/aoc/cluster/pipeline/docker/workspaces/storage
# #
# Standard Calibration Settings # Standard Calibration Settings
# #
edu.nrao.archive.workspaces.DeliverySettings.ciplDelivery = /lustre/aoc/cluster/pipeline/docker/workspaces/qa2 edu.nrao.workspaces.DeliverySettings.standardCalibrationDelivery = /lustre/aoc/cluster/pipeline/docker/workspaces/qa2
edu.nrao.archive.workspaces.DeliverySettings.cacheWeblogDirectory = /lustre/aoc/cluster/pipeline/docker/workspaces/cache/weblog edu.nrao.workspaces.DeliverySettings.cacheWeblogDirectory = /lustre/aoc/cluster/pipeline/docker/workspaces/cache/weblog
# #
# Standard Imaging Settings # Standard Imaging Settings
# #
edu.nrao.archive.workspaces.DeliverySettings.standardImageDelivery = /lustre/aoc/cluster/pipeline/docker/workspaces/image-qa edu.nrao.workspaces.DeliverySettings.standardImageDelivery = /lustre/aoc/cluster/pipeline/docker/workspaces/image-qa
# #
# Data fetcher settings # Product Fetcher settings
# #
edu.nrao.archive.workflow.config.DataFetcherSettings.ramInGb = 16 edu.nrao.workspaces.ProductFetcherSettings.locatorServiceUrlPrefix = https://webtest.aoc.nrao.edu/archive-service/location?locator
edu.nrao.archive.workflow.config.DataFetcherSettings.clusterTimeout = 01:00:00:00 edu.nrao.workspaces.ProductFetcherSettings.defaultThreadsPerHost = 4
edu.nrao.archive.datafetcher.DataFetcherSettings.locatorServiceUrlPrefix = https://webtest.aoc.nrao.edu/archive-services/location?locator edu.nrao.workspaces.ProductFetcherSettings.executionSite = local_test
edu.nrao.archive.datafetcher.DataFetcherSettings.defaultThreadsPerHost = 4
edu.nrao.archive.datafetcher.DataFetcherSettings.executionSite = local_test
edu.nrao.archive.datafetcher.DataFetcherSettings.downloadDirectory=/tmp/
edu.nrao.archive.workflow.config.RequestHandlerSettings.downloadDirectory=/tmp/
# #
# Notification Settings # Notification Settings
# #
edu.nrao.archive.workspaces.NotificationSettings.analystEmail = workspaces-analysts@listmgr.nrao.edu edu.nrao.workspaces.NotificationSettings.analystEmail = workspaces-analysts-test@listmgr.nrao.edu
...@@ -36,7 +36,7 @@ make clean ...@@ -36,7 +36,7 @@ make clean
make build make build
docker-compose up docker compose -f docker-compose.local.yml up
make setup # in a different terminal window make setup # in a different terminal window
...@@ -119,7 +119,7 @@ needing to access them via Docker Desktop or terminal. ...@@ -119,7 +119,7 @@ needing to access them via Docker Desktop or terminal.
|image2| |image2|
Now you should be able to run 'docker-compose up' by simply selecting Now you should be able to run 'docker compose up' by simply selecting
this configuration and clicking the green run arrow next to the drop this configuration and clicking the green run arrow next to the drop
down menu. down menu.
...@@ -151,7 +151,7 @@ create one now. ...@@ -151,7 +151,7 @@ create one now.
You should now be able to debug workspaces! To use this do the You should now be able to debug workspaces! To use this do the
following: following:
1. Make sure you've run docker-compose up and that your containers are 1. Make sure you've run docker compose up and that your containers are
running running
2. Set breakpoints at the lines you wish to investigate. 2. Set breakpoints at the lines you wish to investigate.
...@@ -168,7 +168,7 @@ relaunch the debugger to pick it up. ...@@ -168,7 +168,7 @@ relaunch the debugger to pick it up.
Also: Once you are done debugging, the debugger shuts down the attached Also: Once you are done debugging, the debugger shuts down the attached
container on exit. You will need to restart the container via container on exit. You will need to restart the container via
docker-compose up <service> docker compose -f docker-compose.local.yml up <service>
Debugging Workspaces Tests Debugging Workspaces Tests
^^^^^^^^^^^^^^^^^^^^^^^^^^ ^^^^^^^^^^^^^^^^^^^^^^^^^^
......
...@@ -8,4 +8,4 @@ where `$PROFILE` is `local` or `nmtest` or `nmprod` or whatever. ...@@ -8,4 +8,4 @@ where `$PROFILE` is `local` or `nmtest` or `nmprod` or whatever.
To upgrade in docker, run: To upgrade in docker, run:
docker-compose up schema docker compose -f docker-compose.local.yml up schema
\ No newline at end of file
...@@ -11,21 +11,24 @@ from pycapo import CapoConfig ...@@ -11,21 +11,24 @@ from pycapo import CapoConfig
# revision identifiers, used by Alembic. # revision identifiers, used by Alembic.
revision = '291edd508557' revision = "291edd508557"
down_revision = '16b40772e51a' down_revision = "16b40772e51a"
branch_labels = None branch_labels = None
depends_on = None depends_on = None
def get_script_location() -> str: def get_script_location() -> str:
return CapoConfig().settings("edu.nrao.archive.workspaces.ProcessingSettings").scriptLocation return CapoConfig().settings("edu.nrao.workspaces.ProcessingSettings").scriptLocation
def upgrade(): def upgrade():
path = get_script_location() path = get_script_location()
script_body = """#!/bin/sh script_body = (
"""#!/bin/sh
export PATH=$PATH:""" + path + """ export PATH=$PATH:"""
+ path
+ """
framework.sh -d . framework.sh -d .
cd rawdata/ cd rawdata/
...@@ -34,11 +37,14 @@ def upgrade(): ...@@ -34,11 +37,14 @@ def upgrade():
casa_envoy --standard-cal $3 $4 casa_envoy --standard-cal $3 $4
deliver -r ./products --prefix $2 deliver -r ./products --prefix $2
""" """
)
op.execute( op.execute(
""" """
INSERT INTO workflow_templates (filename, content, workflow_name) INSERT INTO workflow_templates (filename, content, workflow_name)
VALUES ('std_calibration.sh', E'""" + script_body + """' , 'std_calibration') VALUES ('std_calibration.sh', E'"""
+ script_body
+ """' , 'std_calibration')
""" """
) )
......
...@@ -36,7 +36,7 @@ queue ...@@ -36,7 +36,7 @@ queue
) )
script_contents = """#!/bin/sh script_contents = """#!/bin/sh
SCRIPTS_DIR=$(pycapo -q edu.nrao.archive.workspaces.ProcessingSettings.scriptLocation) SCRIPTS_DIR=$(pycapo -q edu.nrao.workspaces.ProcessingSettings.scriptLocation)
export PATH=$PATH:${SCRIPTS_DIR} export PATH=$PATH:${SCRIPTS_DIR}
...@@ -58,7 +58,7 @@ def downgrade(): ...@@ -58,7 +58,7 @@ def downgrade():
UPDATE workflow_templates UPDATE workflow_templates
SET content = SET content =
E'#!/bin/sh E'#!/bin/sh
SCRIPTS_DIR=$(pycapo -q edu.nrao.archive.workspaces.ProcessingSettings.scriptLocation) SCRIPTS_DIR=$(pycapo -q edu.nrao.workspaces.ProcessingSettings.scriptLocation)
export PATH=$PATH:${SCRIPTS_DIR} export PATH=$PATH:${SCRIPTS_DIR}
......
...@@ -56,7 +56,7 @@ queue ...@@ -56,7 +56,7 @@ queue
def downgrade(): def downgrade():
script_contents = """#!/bin/sh script_contents = """#!/bin/sh
SCRIPTS_DIR=$(pycapo -q edu.nrao.archive.workspaces.ProcessingSettings.scriptLocation) SCRIPTS_DIR=$(pycapo -q edu.nrao.workspaces.ProcessingSettings.scriptLocation)
export PATH=$PATH:${SCRIPTS_DIR} export PATH=$PATH:${SCRIPTS_DIR}
......
"""add ingest_img workflow
Revision ID: dcbfdfafe16c
Revises: f2e524e1e04d
Create Date: 2021-07-27 15:38:06.960178
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = "dcbfdfafe16c"
down_revision = "f2e524e1e04d"
branch_labels = None
depends_on = None
condor_content = """executable = ingest_image.sh
arguments = metadata.json
output = ingest.out
error = ingest.err
log = condor.log
SBIN_PATH = /lustre/aoc/cluster/pipeline/$ENV(CAPO_PROFILE)/workspaces/sbin
should_transfer_files = yes
transfer_input_files = $(SBIN_PATH)/pycapo, $(SBIN_PATH)/conveyor, $(SBIN_PATH)/ingest_envoy, $(SBIN_PATH)/ingest, $(SBIN_PATH)/image-product-collector.sh, ./metadata.json
getenv = True
environment = "CAPO_PATH=/home/casa/capo"
queue
"""
script_content = """#!/bin/sh
set -o errexit
./conveyor --retrieve-img $1
./ingest_envoy --image $1
"""
def upgrade():
    """
    Register the 'ingest_image' workflow and its two templates: the
    HTCondor submit description ('ingest_image.condor') and the driver
    shell script ('ingest_image.sh').
    """
    # Plain string literal: this statement interpolates nothing, so the
    # original f-prefix was unnecessary and has been dropped.
    op.execute(
        """
        INSERT INTO workflows (workflow_name)
        VALUES ('ingest_image')
        """
    )
    # The interpolated values are module-level constants (not user input);
    # E'...' is Postgres escape-string syntax for the multi-line bodies.
    op.execute(
        f"""
        INSERT INTO workflow_templates (filename, content, workflow_name)
        VALUES ('ingest_image.condor', E'{condor_content}', 'ingest_image')
        """
    )
    op.execute(
        f"""
        INSERT INTO workflow_templates (filename, content, workflow_name)
        VALUES ('ingest_image.sh', E'{script_content}', 'ingest_image')
        """
    )
def downgrade():
    """Remove the 'ingest_image' workflow and its templates."""
    # Same two deletes as before, in the same order: workflows first,
    # then workflow_templates.
    cleanup_statements = (
        """
        DELETE FROM workflows WHERE workflow_name='ingest_image'
        """,
        """
        DELETE FROM workflow_templates WHERE workflow_name='ingest_image'
        """,
    )
    for statement in cleanup_statements:
        op.execute(statement)
...@@ -10,8 +10,8 @@ import sqlalchemy as sa ...@@ -10,8 +10,8 @@ import sqlalchemy as sa
# revision identifiers, used by Alembic. # revision identifiers, used by Alembic.
revision = 'e4452ab33088' revision = "e4452ab33088"
down_revision = '291edd508557' down_revision = "291edd508557"
branch_labels = None branch_labels = None
depends_on = None depends_on = None
...@@ -22,7 +22,7 @@ def upgrade(): ...@@ -22,7 +22,7 @@ def upgrade():
UPDATE workflow_templates UPDATE workflow_templates
SET content = SET content =
E'#!/bin/sh E'#!/bin/sh
SCRIPTS_DIR=$(pycapo -q edu.nrao.archive.workspaces.ProcessingSettings.scriptLocation) SCRIPTS_DIR=$(pycapo -q edu.nrao.workspaces.ProcessingSettings.scriptLocation)
export PATH=$PATH:${SCRIPTS_DIR} export PATH=$PATH:${SCRIPTS_DIR}
...@@ -33,7 +33,8 @@ def upgrade(): ...@@ -33,7 +33,8 @@ def upgrade():
casa_envoy --standard-cal $3 $4 casa_envoy --standard-cal $3 $4
deliver-cipl.sh ${PWD##*/}' deliver-cipl.sh ${PWD##*/}'
WHERE filename = 'std_calibration.sh' WHERE filename = 'std_calibration.sh'
""") """
)
pass pass
......
...@@ -20,8 +20,8 @@ ENV PATH "${PATH}:/home/vlapipe/.local/bin" ...@@ -20,8 +20,8 @@ ENV PATH "${PATH}:/home/vlapipe/.local/bin"
RUN python setup.py develop --user RUN python setup.py develop --user
# Set Capo # Set Capo for build stage
ARG capo_env=dsoc-dev # Gets reset to proper environment's profile in the deploy stage
ENV CAPO_PROFILE $capo_env ENV CAPO_PROFILE docker
CMD ["pserve", "--reload", "development.ini"] CMD ["pserve", "--reload", "development.ini"]
...@@ -20,8 +20,8 @@ ENV PATH "${PATH}:/home/vlapipe/.local/bin" ...@@ -20,8 +20,8 @@ ENV PATH "${PATH}:/home/vlapipe/.local/bin"
RUN python setup.py develop --user RUN python setup.py develop --user
# Set Capo # Set Capo for build stage
ARG capo_env=dsoc-dev # Gets reset to proper environment's profile in the deploy stage
ENV CAPO_PROFILE $capo_env ENV CAPO_PROFILE docker
CMD ["pserve", "--reload", "development.ini"] CMD ["pserve", "--reload", "development.ini"]
...@@ -2,6 +2,10 @@ ...@@ -2,6 +2,10 @@
ARG TAGNAME="tmp" ARG TAGNAME="tmp"
FROM cache:${TAGNAME} FROM cache:${TAGNAME}
# Build arg that sets environment; sets to "dev" if no build arg is given
ARG env=dev
ENV ENV=${env}
USER root USER root
# HTCondor install # HTCondor install
...@@ -14,7 +18,7 @@ RUN sudo apt install -y htcondor ...@@ -14,7 +18,7 @@ RUN sudo apt install -y htcondor
# HTCondor setup # HTCondor setup
# Copy over HTCondor submit node config # Copy over HTCondor submit node config
COPY ./config/htcondor/submit/99-workspaces-submit.dev.conf /etc/condor/config.d/99-workspaces-submit.dev.conf COPY ./config/htcondor/submit/99-workspaces-submit.${ENV}.conf /etc/condor/config.d/99-workspaces-submit.${ENV}.conf
# Change working directory to /code # Change working directory to /code
WORKDIR /code WORKDIR /code
...@@ -34,10 +38,9 @@ ENV PATH "${PATH}:/home/vlapipe/.local/bin" ...@@ -34,10 +38,9 @@ ENV PATH "${PATH}:/home/vlapipe/.local/bin"
RUN python setup.py develop --user RUN python setup.py develop --user
# Set Capo # Set Capo for build stage
ARG capo_env=dsoc-dev # Gets reset to proper environment's profile in the deploy stage
ENV CAPO_PROFILE $capo_env ENV CAPO_PROFILE docker
ENV PATH $PATH:/lustre/aoc/pipeline/$CAPO_PROFILE/workflow
USER root USER root
......
...@@ -23,8 +23,8 @@ SPOOL_DIR=/lustre/aoc/cluster/pipeline/"$CAPO_PROFILE"/workspaces/spool ...@@ -23,8 +23,8 @@ SPOOL_DIR=/lustre/aoc/cluster/pipeline/"$CAPO_PROFILE"/workspaces/spool
QA_DIR=/lustre/aoc/cluster/pipeline/"$CAPO_PROFILE"/workspaces/qa2 QA_DIR=/lustre/aoc/cluster/pipeline/"$CAPO_PROFILE"/workspaces/qa2
WEBLOG_DIR=/lustre/aoc/cluster/pipeline/"$CAPO_PROFILE"/workspaces/cache WEBLOG_DIR=/lustre/aoc/cluster/pipeline/"$CAPO_PROFILE"/workspaces/cache
IMAGE_QA_DIR=/lustre/aoc/cluster/pipeline/"$CAPO_PROFILE"/workspaces/image-qa IMAGE_QA_DIR=/lustre/aoc/cluster/pipeline/"$CAPO_PROFILE"/workspaces/image-qa
STAGING_DIR=lustre/aoc/cluster/pipeline/"$CAPO_PROFILE"/workspaces/staging STAGING_DIR=/lustre/aoc/cluster/pipeline/"$CAPO_PROFILE"/workspaces/staging
STORAGE_DIR=lustre/aoc/cluster/pipeline/"$CAPO_PROFILE"/workspaces/storage STORAGE_DIR=/lustre/aoc/cluster/pipeline/"$CAPO_PROFILE"/workspaces/storage
mkdir -p "$SPOOL_DIR" mkdir -p "$SPOOL_DIR"
mkdir -p "$QA_DIR" mkdir -p "$QA_DIR"
mkdir -p "$WEBLOG_DIR" mkdir -p "$WEBLOG_DIR"
...@@ -34,9 +34,7 @@ mkdir -p "$STAGING_DIR" ...@@ -34,9 +34,7 @@ mkdir -p "$STAGING_DIR"
mkdir -p "$STORAGE_DIR" mkdir -p "$STORAGE_DIR"
# Copy wf_framework shell scripts to workflow dir # Copy wf_framework shell scripts to workflow dir
cp /packages/apps/cli/executables/wf_framework/sh/framework.sh "$WORKFLOW_DIR" cp -a /packages/apps/cli/executables/wf_framework/sh/. "$WORKFLOW_DIR"
cp /packages/apps/cli/executables/wf_framework/sh/calibration-table-collector.sh "$WORKFLOW_DIR"
cp /packages/apps/cli/executables/wf_framework/sh/ingest-request.sh "$WORKFLOW_DIR"
cp -R /packages/apps/cli/executables/wf_framework/casa_requirements/.matplotlib "$WORKFLOW_DIR" cp -R /packages/apps/cli/executables/wf_framework/casa_requirements/.matplotlib "$WORKFLOW_DIR"
......