Skip to content
Snippets Groups Projects

Compare revisions

Changes are shown as if the source revision was being merged into the target revision. Learn more about comparing revisions.

Source

Select target project
No results found

Target

Select target project
  • ssa/workspaces
1 result
Show changes
Showing
with 405 additions and 63 deletions
...@@ -81,15 +81,17 @@ class WrestWorkflowMetadata: ...@@ -81,15 +81,17 @@ class WrestWorkflowMetadata:
""" """
sql = f""" sql = f"""
SELECT science_product_locator as spl, SELECT e.science_product_locator as spl,
e.project_code as projectCode, e.project_code as projectCode,
p.title as title, p.title as title,
e.starttime as startTime, e.starttime as startTime,
(a.firstname || ' ' || a.lastname) as observer, (a.firstname || ' ' || a.lastname) as observer,
telescope as telescope telescope as telescope,
c.science_product_locator as cal_locator
FROM execution_blocks e FROM execution_blocks e
JOIN projects p on e.project_code = p.project_code JOIN projects p on e.project_code = p.project_code
JOIN authors a on p.project_code = a.project_code JOIN authors a on p.project_code = a.project_code
JOIN calibrations c on e.execution_block_id = c.execution_block_id
WHERE ngas_fileset_id = %(sdmId)s AND a.is_pi = true WHERE ngas_fileset_id = %(sdmId)s AND a.is_pi = true
""" """
make_json = {} make_json = {}
...@@ -106,6 +108,7 @@ class WrestWorkflowMetadata: ...@@ -106,6 +108,7 @@ class WrestWorkflowMetadata:
"startTime": data[0][3], "startTime": data[0][3],
"observer": data[0][4], "observer": data[0][4],
"telescope": data[0][5], "telescope": data[0][5],
"cal_locator": data[0][6],
"created_at": str( "created_at": str(
pendulum.now().in_timezone(TIME_ZONE).format(PENDULUM_FORMAT) pendulum.now().in_timezone(TIME_ZONE).format(PENDULUM_FORMAT)
), ),
...@@ -174,3 +177,45 @@ class WrestWorkflowMetadata: ...@@ -174,3 +177,45 @@ class WrestWorkflowMetadata:
finally: finally:
self.conn.close() self.conn.close()
return make_json return make_json
def wrest_aux_imaging_info(self) -> str:
    """
    Wrest the metadata required to run an auxiliary image ingestion for a
    single execution block, identified either by science product locator
    (``self.spl``, preferred when set) or by SDM / NGAS fileset id
    (``self.sdm_id``).

    :return: a JSON string with keys ``projectCode``, ``bands`` and
        ``configurations``; an empty dict if the query returned no rows
    """
    sql = """
        SELECT project_code as projectCode,
            band_code as bandCodes,
            configuration
        FROM execution_blocks e
    """
    # Bind the identifier as a query parameter instead of interpolating it
    # into the SQL string: the identifiers arrive from the command line, and
    # f-string interpolation would be open to SQL injection. This also
    # matches the %(name)s style used by the other wrest_* queries.
    if self.spl is not None:
        condition = "WHERE science_product_locator = %(ident)s"
        params = {"ident": self.spl}
    else:
        condition = "WHERE ngas_fileset_id = %(ident)s"
        params = {"ident": self.sdm_id}
    make_json = {}
    try:
        cursor = self.conn.cursor()
        cursor.execute(sql + condition, params)
        data = cursor.fetchall()
        if data:
            make_json = json.dumps(
                {
                    "projectCode": data[0][0],
                    "bands": data[0][1],
                    "configurations": data[0][2],
                }
            )
        else:
            self.logger.error(
                "ERROR: aat-wrest query returned no results!"
                f" The database appears to be missing information for sdm id {self.sdm_id} or spl id {self.spl}!"
            )
    finally:
        # The connection is single-use across this class: every wrester
        # closes it when done.
        self.conn.close()
    return make_json
...@@ -52,6 +52,14 @@ def parser() -> argparse.ArgumentParser: ...@@ -52,6 +52,14 @@ def parser() -> argparse.ArgumentParser:
required=False, required=False,
help="Find display metadata for observations by SDM id", help="Find display metadata for observations by SDM id",
) )
arg_parser.add_argument(
"-aux",
"--auxiliary-ingestion",
nargs=1,
action="store",
required=False,
help="Find auxiliary imaging metadata for ingestion",
)
return arg_parser return arg_parser
...@@ -66,6 +74,15 @@ def determine_wrester(connection: MDDBConnector, args: argparse.Namespace): ...@@ -66,6 +74,15 @@ def determine_wrester(connection: MDDBConnector, args: argparse.Namespace):
data = WrestObservationMetadata( data = WrestObservationMetadata(
connection, sdm_id=args.observation[0] connection, sdm_id=args.observation[0]
).wrest_observation_info() ).wrest_observation_info()
elif args.auxiliary_ingestion:
if str(args.auxiliary_ingestion[0]).startswith("uid"):
data = WrestWorkflowMetadata(
connection, spl=args.auxiliary_ingestion[0]
).wrest_aux_imaging_info()
else:
data = WrestWorkflowMetadata(
connection, sdm_id=args.auxiliary_ingestion[0]
).wrest_aux_imaging_info()
else: else:
data = None data = None
......
...@@ -22,6 +22,7 @@ ...@@ -22,6 +22,7 @@
"@ng-bootstrap/ng-bootstrap": "^4.2.1", "@ng-bootstrap/ng-bootstrap": "^4.2.1",
"bootstrap": "^4.3.1", "bootstrap": "^4.3.1",
"core-js": "^2.5.4", "core-js": "^2.5.4",
"dayjs": "^1.10.6",
"jquery": "^3.4.1", "jquery": "^3.4.1",
"ng-angular": "0.0.1", "ng-angular": "0.0.1",
"ng-bootstrap": "^0.46.0", "ng-bootstrap": "^0.46.0",
...@@ -37,7 +38,7 @@ ...@@ -37,7 +38,7 @@
"@angular/language-service": "~11.1.1", "@angular/language-service": "~11.1.1",
"@types/jasmine": "~2.8.8", "@types/jasmine": "~2.8.8",
"@types/jasminewd2": "~2.0.3", "@types/jasminewd2": "~2.0.3",
"@types/node": "~8.9.4", "@types/node": "^8.10.66",
"@typescript-eslint/eslint-plugin": "^4.21.0", "@typescript-eslint/eslint-plugin": "^4.21.0",
"@typescript-eslint/parser": "^4.21.0", "@typescript-eslint/parser": "^4.21.0",
"codelyzer": "^5.0.1", "codelyzer": "^5.0.1",
......
...@@ -16,16 +16,18 @@ ...@@ -16,16 +16,18 @@
</h5> </h5>
</div> </div>
<div id="created-and-updated-timestamps" class="row px-5"> <div id="created-and-updated-timestamps" class="row px-5 justify-content-between">
<div class="col text-left p-2"> <div class="col-4 text-left p-2">
<h5 id="created-time" class="timestamp"> <ng-template class="timestamp" #createdAtToolTip>{{capabilityRequest.created_at | date: "medium"}}</ng-template>
Created <strong>{{ capabilityRequest.created_at | date: "medium" }}</strong> <span id="created-time" class="timestamp" placement="top" [ngbTooltip]="createdAtToolTip">
</h5> Created <strong>{{ createdAt }}</strong>
</span>
</div> </div>
<div class="col text-right p-2"> <div class="col-4 text-right p-2">
<h5 id="last-updated-time" class="timestamp"> <ng-template class="timestamp" #updatedAtToolTip>{{capabilityRequest.updated_at | date: "medium"}}</ng-template>
Last Updated <strong>{{ capabilityRequest.updated_at | date: "medium" }}</strong> <span id="last-updated-time" class="timestamp" placement="top" [ngbTooltip]="updatedAtToolTip">
</h5> {{ capabilityRequest.state.toUpperCase() === 'COMPLETE' ? 'Completed' : 'Last Updated' }} <strong>{{ updatedAt }}</strong>
</span>
</div> </div>
</div> </div>
</div> </div>
import { Component, Input, OnInit } from "@angular/core"; import { Component, Input, OnInit } from "@angular/core";
import { CapabilityRequest } from "../../../../model/capability-request"; import { CapabilityRequest } from "../../../../model/capability-request";
import * as dayjs from 'dayjs';
import * as relativeTime from 'dayjs/plugin/relativeTime';
import * as localizedFormat from 'dayjs/plugin/localizedFormat';
@Component({ @Component({
selector: "app-request-header", selector: "app-request-header",
templateUrl: "./request-header.component.html", templateUrl: "./request-header.component.html",
...@@ -8,8 +10,28 @@ import { CapabilityRequest } from "../../../../model/capability-request"; ...@@ -8,8 +10,28 @@ import { CapabilityRequest } from "../../../../model/capability-request";
}) })
export class RequestHeaderComponent implements OnInit { export class RequestHeaderComponent implements OnInit {
@Input() capabilityRequest: CapabilityRequest; @Input() capabilityRequest: CapabilityRequest;
createdAt;
updatedAt;
constructor() {} constructor() {}
ngOnInit(): void {} ngOnInit(): void {
this.createdAt = this.formatDate(this.capabilityRequest.created_at)
this.updatedAt = this.formatDate(this.capabilityRequest.updated_at)
}
private formatDate(date): string {
  // Register the dayjs plugins needed for relative ("3 days ago") and
  // localized ("llll") formatting; extend() is idempotent, so invoking it
  // per call is safe.
  dayjs.extend(relativeTime);
  dayjs.extend(localizedFormat);

  const relative = dayjs(date).fromNow();
  if (this.isOlderThanOneWeek(date)) {
    // Older timestamps also get the absolute date/time for context.
    return `${relative} on ${dayjs(date).format('llll')}`;
  }
  return relative;
}
private isOlderThanOneWeek(date): boolean {
  // dayjs diff truncates toward zero, so any non-zero week difference means
  // the date is at least one full week away from now.
  return dayjs().diff(dayjs(date), "week") !== 0;
}
} }
import { NgModule } from "@angular/core"; import { NgModule } from "@angular/core";
import { CommonModule } from "@angular/common"; import { CommonModule } from "@angular/common";
import { NgbModule } from "@ng-bootstrap/ng-bootstrap";
import { WorkspacesRoutingModule } from "./workspaces-routing.module"; import { WorkspacesRoutingModule } from "./workspaces-routing.module";
import { WorkspacesComponent } from "./workspaces.component"; import { WorkspacesComponent } from "./workspaces.component";
import { FormsModule, ReactiveFormsModule } from "@angular/forms"; import { FormsModule, ReactiveFormsModule } from "@angular/forms";
...@@ -28,6 +29,6 @@ import { RequestOperationsComponent } from "./components/capability-request/comp ...@@ -28,6 +29,6 @@ import { RequestOperationsComponent } from "./components/capability-request/comp
FilesComponent, FilesComponent,
RequestOperationsComponent, RequestOperationsComponent,
], ],
imports: [CommonModule, WorkspacesRoutingModule, ReactiveFormsModule, FormsModule], imports: [CommonModule, NgbModule, WorkspacesRoutingModule, ReactiveFormsModule, FormsModule],
}) })
export class WorkspacesModule {} export class WorkspacesModule {}
...@@ -43,6 +43,7 @@ services: ...@@ -43,6 +43,7 @@ services:
- ./delivery_root:/tmp/delivery_root - ./delivery_root:/tmp/delivery_root
- ./lustre/aoc/cluster/pipeline/docker/workspaces:/lustre/aoc/cluster/pipeline/docker/workspaces - ./lustre/aoc/cluster/pipeline/docker/workspaces:/lustre/aoc/cluster/pipeline/docker/workspaces
# To enable NGAS container, run: `docker compose -f docker-compose.local.yml --profile ngas up`
ngas: ngas:
build: build:
context: . context: .
...@@ -53,6 +54,8 @@ services: ...@@ -53,6 +54,8 @@ services:
- "7777:7777" - "7777:7777"
volumes: volumes:
- ./local_ngas_root:/home/ngas/NGAS - ./local_ngas_root:/home/ngas/NGAS
profiles:
- ngas
schema: schema:
build: build:
...@@ -65,17 +68,19 @@ services: ...@@ -65,17 +68,19 @@ services:
- ./docker.properties:/home/ssa/capo/docker.properties - ./docker.properties:/home/ssa/capo/docker.properties
# Used for doing a DB migration on dsoc-dev # Used for doing a DB migration on dsoc-dev
# Uncomment this and run the command `docker compose -f docker-compose.local.yml up schema-dev` and the migration will run # Run the command `docker compose -f docker-compose.local.yml --profile schema-dev up schema-dev` and the migration will run
# schema-dev: schema-dev:
# build: build:
# context: ./schema/ context: ./schema/
# dockerfile: Dockerfile.local dockerfile: Dockerfile.local
# command: ["./bin/run-migrations.sh", "dsoc-dev"] command: ["./bin/run-migrations.sh", "dsoc-dev"]
# depends_on: depends_on:
# - db - db
# volumes: volumes:
# - ./schema:/code/schema - ./schema:/code/schema
# - ~/.capo:/home/casa/capo - ~/.capo:/home/casa/capo
profiles:
- schema-dev
workflow: workflow:
build: build:
......
"""add image ingestion templates
Revision ID: f0f6d7be45e3
Revises: dcbfdfafe16c
Create Date: 2021-07-28 14:24:40.213660
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = "f0f6d7be45e3"
down_revision = "dcbfdfafe16c"
branch_labels = None
depends_on = None
# Workflow template bodies installed/updated by this migration. The {{...}}
# placeholders are mustache-style markers filled in at workflow-request time
# (NOT Python format fields).

# Blank auxiliary-image metadata form: projectCode / bands / configurations
# are wrested from the metadata database; the remaining fields are supplied
# later in the ingestion process.
content = """{
"project_code": "{{projectCode}}",
"band_code": "{{bands}}",
"configurations": "{{configurations}}",
"starttime": null,
"endtime": null,
"exposure_time": null,
"rms_noise": null,
"image_tags": "",
"product_tags": "",
"collection_name": "",
"calibration_level": 2
}
"""
# New std_cms_imaging metadata template: adds "calProductLocator" (the
# calibration's science product locator) compared to old_metadata below.
metadata_content = """{
"fileSetIds": "{{sdmId}}",
"workflowName": "std_cms_imaging",
"systemId": "{{request_id}}",
"creationTime": "{{created_at}}",
"productLocator": "{{product_locator}}",
"calProductLocator": "{{cal_locator}}",
"projectMetadata": {
"projectCode": "{{projectCode}}",
"title": "{{title}}",
"telescope": "{{telescope}}",
"startTime": "{{startTime}}",
"observer": "{{observer}}"
},
"destinationDirectory": "{{root_directory}}/{{relative_path}}",
"calibrationSourceDirectory":"{{cms_path}}",
"cmsName":"{{sdmId}}.ms"
}
"""
# New HTCondor submit description for image ingestion: adds
# ./aux_image_metadata.json to transfer_input_files (cf. old_condor below).
condor_content = """executable = ingest_image.sh
arguments = metadata.json
output = ingest.out
error = ingest.err
log = condor.log
SBIN_PATH = /lustre/aoc/cluster/pipeline/$ENV(CAPO_PROFILE)/workspaces/sbin
should_transfer_files = yes
transfer_input_files = $(SBIN_PATH)/pycapo, $(SBIN_PATH)/conveyor, $(SBIN_PATH)/ingest_envoy, $(SBIN_PATH)/ingest, $(SBIN_PATH)/image-product-collector.sh, ./metadata.json, ./aux_image_metadata.json
getenv = True
environment = "CAPO_PATH=/home/casa/capo"
queue
"""
# Pre-migration metadata template (no calProductLocator), kept verbatim so
# downgrade() can restore it.
old_metadata = """{
"fileSetIds": "{{sdmId}}",
"workflowName": "std_cms_imaging",
"systemId": "{{request_id}}",
"creationTime": "{{created_at}}",
"productLocator": "{{product_locator}}",
"projectMetadata": {
"projectCode": "{{projectCode}}",
"title": "{{title}}",
"telescope": "{{telescope}}",
"startTime": "{{startTime}}",
"observer": "{{observer}}"
},
"destinationDirectory": "{{root_directory}}/{{relative_path}}",
"calibrationSourceDirectory":"{{cms_path}}",
"cmsName":"{{sdmId}}.ms"
}
"""
# Pre-migration condor template (no aux_image_metadata.json transfer), kept
# verbatim so downgrade() can restore it.
old_condor = """executable = ingest_image.sh
arguments = metadata.json
output = ingest.out
error = ingest.err
log = condor.log
SBIN_PATH = /lustre/aoc/cluster/pipeline/$ENV(CAPO_PROFILE)/workspaces/sbin
should_transfer_files = yes
transfer_input_files = $(SBIN_PATH)/pycapo, $(SBIN_PATH)/conveyor, $(SBIN_PATH)/ingest_envoy, $(SBIN_PATH)/ingest, $(SBIN_PATH)/image-product-collector.sh, ./metadata.json
getenv = True
environment = "CAPO_PATH=/home/casa/capo"
queue
"""
def upgrade():
    """
    Install the auxiliary-image metadata template and repoint the imaging
    metadata/condor templates at versions that carry the calibration product
    locator and transfer aux_image_metadata.json to the execution host.
    """
    # E'...' is PostgreSQL escape-string syntax, which lets the multi-line
    # template bodies be embedded directly in the SQL literal.
    op.execute(
        f"""
        INSERT INTO workflow_templates (filename, content, workflow_name)
        VALUES ('aux_image_metadata.json', E'{content}', 'ingest_image')
        """
    )
    op.execute(
        f"""
        UPDATE workflow_templates
        SET content=E'{metadata_content}' WHERE filename='metadata.json' AND workflow_name = 'std_cms_imaging'
        """
    )
    op.execute(
        f"""
        UPDATE workflow_templates
        SET content=E'{condor_content}' WHERE filename='ingest_image.condor'
        """
    )
def downgrade():
    """
    Reverse upgrade(): delete the auxiliary-image template and restore the
    previous (pre-calibration-locator) imaging metadata/condor templates.
    """
    op.execute(
        """
        DELETE FROM workflow_templates WHERE filename='aux_image_metadata.json' AND workflow_name='ingest_image'
        """
    )
    # E'...' is PostgreSQL escape-string syntax for the embedded template body.
    op.execute(
        f"""
        UPDATE workflow_templates
        SET content=E'{old_metadata}' WHERE filename='metadata.json' AND workflow_name = 'std_cms_imaging'
        """
    )
    op.execute(
        f"""
        UPDATE workflow_templates
        SET content=E'{old_condor}' WHERE filename='ingest_image.condor'
        """
    )
...@@ -2,6 +2,10 @@ ...@@ -2,6 +2,10 @@
ARG TAGNAME="tmp" ARG TAGNAME="tmp"
FROM cache:${TAGNAME} FROM cache:${TAGNAME}
# Build arg that sets environment; sets to "dev" if no build arg is given
ARG env=dev
ENV ENV=${env}
# Change working directory to /code # Change working directory to /code
WORKDIR /code WORKDIR /code
...@@ -24,4 +28,4 @@ RUN python setup.py develop --user ...@@ -24,4 +28,4 @@ RUN python setup.py develop --user
# Gets reset to proper environment's profile in the deploy stage # Gets reset to proper environment's profile in the deploy stage
ENV CAPO_PROFILE docker ENV CAPO_PROFILE docker
CMD ["pserve", "--reload", "development.ini"] CMD ["pserve", "--reload", "${ENV}.ini"]
...@@ -14,4 +14,4 @@ RUN python setup.py develop --user ...@@ -14,4 +14,4 @@ RUN python setup.py develop --user
ARG capo_env=docker ARG capo_env=docker
ENV CAPO_PROFILE $capo_env ENV CAPO_PROFILE $capo_env
CMD ["pserve", "--reload", "development.ini"] CMD ["pserve", "--reload", "dev.ini"]
...@@ -8,10 +8,9 @@ Then run: ...@@ -8,10 +8,9 @@ Then run:
Then you should be able to run this with: Then you should be able to run this with:
env CAPO_PROFILE=local pserve --reload development.ini env CAPO_PROFILE=local pserve --reload dev.ini
Once there, try the following URLs: Once there, try the following URLs:
- http://0.0.0.0:3456/capabilities - http://0.0.0.0:3456/capabilities
- http://0.0.0.0:3456/capability/request?capability=null - http://0.0.0.0:3456/capability/request?capability=null
#! /bin/bash
#
# Capability Service development entrypoint.
# FOR USE WITH DOCKER DEVELOPMENT ENVIRONMENT ONLY.

# Fail fast: abort on the first failing command, including inside pipelines.
set -e -o pipefail

# Install Python dependencies, then the service itself in develop mode.
pip install -r requirements.txt
python setup.py develop

# Run the development server with auto-reload.
pserve --reload development.ini
...@@ -19,4 +19,41 @@ pyramid.includes = pyramid_tm ...@@ -19,4 +19,41 @@ pyramid.includes = pyramid_tm
[server:main] [server:main]
use = egg:waitress#main use = egg:waitress#main
listen = 0.0.0.0:6543 listen = 0.0.0.0:3457
###
# logging configuration
# https://docs.pylonsproject.org/projects/pyramid/en/latest/narr/logging.html
###
[loggers]
keys = root, capability, sqlalchemy
[handlers]
keys = console
[formatters]
keys = generic
[logger_root]
level = WARN
handlers = console
[logger_capability]
level = WARN
handlers =
qualname = capability
[logger_sqlalchemy]
level = WARN
handlers =
qualname = sqlalchemy.engine
[handler_console]
class = StreamHandler
args = (sys.stderr,)
level = NOTSET
formatter = generic
[formatter_generic]
format = %(asctime)s %(levelname)-5.5s [%(name)s:%(lineno)s][%(threadName)s] %(message)s
...@@ -2,6 +2,10 @@ ...@@ -2,6 +2,10 @@
ARG TAGNAME="tmp" ARG TAGNAME="tmp"
FROM cache:${TAGNAME} FROM cache:${TAGNAME}
# Build arg that sets environment; sets to "dev" if no build arg is given
ARG env=dev
ENV ENV=${env}
# Change working directory to /code # Change working directory to /code
WORKDIR /code WORKDIR /code
...@@ -24,4 +28,4 @@ RUN python setup.py develop --user ...@@ -24,4 +28,4 @@ RUN python setup.py develop --user
# Gets reset to proper environment's profile in the deploy stage # Gets reset to proper environment's profile in the deploy stage
ENV CAPO_PROFILE docker ENV CAPO_PROFILE docker
CMD ["pserve", "--reload", "development.ini"] CMD ["pserve", "--reload", "${ENV}.ini"]
...@@ -14,4 +14,4 @@ RUN python setup.py develop --user ...@@ -14,4 +14,4 @@ RUN python setup.py develop --user
ARG capo_env=docker ARG capo_env=docker
ENV CAPO_PROFILE $capo_env ENV CAPO_PROFILE $capo_env
CMD ["pserve", "--reload", "development.ini"] CMD ["pserve", "--reload", "dev.ini"]
#! /bin/bash
#
# Workflow Service development entrypoint.
# FOR USE WITH DOCKER DEVELOPMENT ENVIRONMENT ONLY.

# Fail fast: abort on the first failing command, including inside pipelines.
set -e -o pipefail

# Install Python dependencies, then the service itself in develop mode.
pip install -r requirements.txt
python setup.py develop

# Run the development server with auto-reload.
pserve --reload development.ini
###
# app configuration
# http://docs.pylonsproject.org/projects/pyramid/en/latest/narr/environment.html
###
[app:main]
use = egg:ssa-notification
session.cookie_expires = true
session.auto = true
pyramid.reload_templates = false
pyramid.debug_authorization = false
pyramid.debug_notfound = false
pyramid.debug_routematch = false
pyramid.default_locale_name = en
pyramid.includes = pyramid_tm
[server:main]
use = egg:waitress#main
listen = 0.0.0.0:3458
###
# logging configuration
# https://docs.pylonsproject.org/projects/pyramid/en/latest/narr/logging.html
###
[loggers]
keys = root, notification, sqlalchemy
[handlers]
keys = console
[formatters]
keys = generic
[logger_root]
level = WARN
handlers = console
[logger_notification]
level = WARN
handlers =
qualname = notification
[logger_sqlalchemy]
level = WARN
handlers =
qualname = sqlalchemy.engine
[handler_console]
class = StreamHandler
args = (sys.stderr,)
level = NOTSET
formatter = generic
[formatter_generic]
format = %(asctime)s %(levelname)-5.5s [%(name)s:%(lineno)s][%(threadName)s] %(message)s
...@@ -42,4 +42,4 @@ WORKDIR /packages/apps/cli/executables/productfetcher/tests ...@@ -42,4 +42,4 @@ WORKDIR /packages/apps/cli/executables/productfetcher/tests
RUN pytest RUN pytest
# RUN python setup.py develop # RUN python setup.py develop
CMD ["pserve", "--reload", "/code/development.ini"] CMD ["pserve", "--reload", "/code/dev.ini"]