Compare revisions (target project: ssa/workspaces)

Changes are shown as if the source revision was being merged into the target revision.

Showing 405 additions and 63 deletions
@@ -81,15 +81,17 @@ class WrestWorkflowMetadata:
"""
sql = f"""
SELECT science_product_locator as spl,
SELECT e.science_product_locator as spl,
e.project_code as projectCode,
p.title as title,
e.starttime as startTime,
(a.firstname || ' ' || a.lastname) as observer,
telescope as telescope
telescope as telescope,
c.science_product_locator as cal_locator
FROM execution_blocks e
JOIN projects p on e.project_code = p.project_code
JOIN authors a on p.project_code = a.project_code
JOIN calibrations c on e.execution_block_id = c.execution_block_id
WHERE ngas_fileset_id = %(sdmId)s AND a.is_pi = true
"""
make_json = {}
@@ -106,6 +108,7 @@ class WrestWorkflowMetadata:
"startTime": data[0][3],
"observer": data[0][4],
"telescope": data[0][5],
"cal_locator": data[0][6],
"created_at": str(
pendulum.now().in_timezone(TIME_ZONE).format(PENDULUM_FORMAT)
),
@@ -174,3 +177,45 @@ class WrestWorkflowMetadata:
finally:
self.conn.close()
return make_json
def wrest_aux_imaging_info(self) -> json:
"""
Given a parent workflow request id for an image ingestion, returns the required metadata to run
the image ingestion
:return:
"""
sql = """
SELECT project_code as projectCode,
band_code as bandCodes,
configuration
FROM execution_blocks e
"""
# Parameterize the filter rather than interpolating values into the SQL string
if self.spl is not None:
condition, params = "WHERE science_product_locator = %(spl)s", {"spl": self.spl}
else:
condition, params = "WHERE ngas_fileset_id = %(sdmId)s", {"sdmId": self.sdm_id}
make_json = {}
try:
cursor = self.conn.cursor()
cursor.execute(sql + condition, params)
data = cursor.fetchall()
if data:
make_json = json.dumps(
{
"projectCode": data[0][0],
"bands": data[0][1],
"configurations": data[0][2],
}
)
else:
self.logger.error(
f"ERROR: aat-wrest query returned no results!"
f" The database appears to be missing information for sdm id {self.sdm_id} or spl id {self.spl}!"
)
finally:
self.conn.close()
return make_json
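For reference, a successful call yields a JSON string carrying the three wrested fields; a sketch of the shape, with hypothetical values rather than real archive data:

# Hypothetical output of wrest_aux_imaging_info(); project code, band, and
# configuration are illustrative placeholders only.
example = '{"projectCode": "20A-123", "bands": "K", "configurations": "A"}'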
@@ -52,6 +52,14 @@ def parser() -> argparse.ArgumentParser:
required=False,
help="Find display metadata for observations by SDM id",
)
arg_parser.add_argument(
"-aux",
"--auxiliary-ingestion",
nargs=1,
action="store",
required=False,
help="Find auxiliary imaging metadata for ingestion",
)
return arg_parser
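A quick sketch of how the new flag is expected to parse; the locator value is hypothetical:

# -aux takes exactly one value, either a science product locator ("uid://...")
# or an SDM id; argparse stores it as a one-element list.
args = parser().parse_args(["-aux", "uid://evla/execblock/abc123"])
assert args.auxiliary_ingestion == ["uid://evla/execblock/abc123"]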
@@ -66,6 +74,15 @@ def determine_wrester(connection: MDDBConnector, args: argparse.Namespace):
data = WrestObservationMetadata(
connection, sdm_id=args.observation[0]
).wrest_observation_info()
elif args.auxiliary_ingestion:
# Science product locators begin with "uid"; anything else is treated as an SDM id
if str(args.auxiliary_ingestion[0]).startswith("uid"):
data = WrestWorkflowMetadata(
connection, spl=args.auxiliary_ingestion[0]
).wrest_aux_imaging_info()
else:
data = WrestWorkflowMetadata(
connection, sdm_id=args.auxiliary_ingestion[0]
).wrest_aux_imaging_info()
else:
data = None
......
@@ -22,6 +22,7 @@
"@ng-bootstrap/ng-bootstrap": "^4.2.1",
"bootstrap": "^4.3.1",
"core-js": "^2.5.4",
"dayjs": "^1.10.6",
"jquery": "^3.4.1",
"ng-angular": "0.0.1",
"ng-bootstrap": "^0.46.0",
@@ -37,7 +38,7 @@
"@angular/language-service": "~11.1.1",
"@types/jasmine": "~2.8.8",
"@types/jasminewd2": "~2.0.3",
"@types/node": "~8.9.4",
"@types/node": "^8.10.66",
"@typescript-eslint/eslint-plugin": "^4.21.0",
"@typescript-eslint/parser": "^4.21.0",
"codelyzer": "^5.0.1",
......
@@ -16,16 +16,18 @@
</h5>
</div>
<div id="created-and-updated-timestamps" class="row px-5">
<div class="col text-left p-2">
<h5 id="created-time" class="timestamp">
Created <strong>{{ capabilityRequest.created_at | date: "medium" }}</strong>
</h5>
<div id="created-and-updated-timestamps" class="row px-5 justify-content-between">
<div class="col-4 text-left p-2">
<ng-template class="timestamp" #createdAtToolTip>{{capabilityRequest.created_at | date: "medium"}}</ng-template>
<span id="created-time" class="timestamp" placement="top" [ngbTooltip]="createdAtToolTip">
Created <strong>{{ createdAt }}</strong>
</span>
</div>
<div class="col text-right p-2">
<h5 id="last-updated-time" class="timestamp">
Last Updated <strong>{{ capabilityRequest.updated_at | date: "medium" }}</strong>
</h5>
<div class="col-4 text-right p-2">
<ng-template class="timestamp" #updatedAtToolTip>{{capabilityRequest.updated_at | date: "medium"}}</ng-template>
<span id="last-updated-time" class="timestamp" placement="top" [ngbTooltip]="updatedAtToolTip">
{{ capabilityRequest.state.toUpperCase() === 'COMPLETE' ? 'Completed' : 'Last Updated' }} <strong>{{ updatedAt }}</strong>
</span>
</div>
</div>
</div>
import { Component, Input, OnInit } from "@angular/core";
import { CapabilityRequest } from "../../../../model/capability-request";
import * as dayjs from 'dayjs';
import * as relativeTime from 'dayjs/plugin/relativeTime';
import * as localizedFormat from 'dayjs/plugin/localizedFormat';

// Register the dayjs plugins once at module load instead of on every formatDate() call
dayjs.extend(relativeTime);
dayjs.extend(localizedFormat);
@Component({
selector: "app-request-header",
templateUrl: "./request-header.component.html",
@@ -8,8 +10,28 @@ import { CapabilityRequest } from "../../../../model/capability-request";
})
export class RequestHeaderComponent implements OnInit {
@Input() capabilityRequest: CapabilityRequest;
createdAt: string;
updatedAt: string;
constructor() {}
ngOnInit(): void {}
ngOnInit(): void {
this.createdAt = this.formatDate(this.capabilityRequest.created_at);
this.updatedAt = this.formatDate(this.capabilityRequest.updated_at);
}
private formatDate(date: string): string {
const dateFromNow = dayjs(date).fromNow();
// Older requests get an absolute timestamp appended, e.g. "2 weeks ago on Thu, Aug 5, 2021 2:00 PM"
const afterOneWeekDisplay = dateFromNow + " on " + dayjs(date).format('llll');
return this.isOlderThanOneWeek(date) ? afterOneWeekDisplay : dateFromNow;
}
private isOlderThanOneWeek(date: string): boolean {
// dayjs#diff truncates to whole weeks, so this is true once a full week has elapsed
return dayjs().diff(dayjs(date), "week") >= 1;
}
}
import { NgModule } from "@angular/core";
import { CommonModule } from "@angular/common";
import { NgbModule } from "@ng-bootstrap/ng-bootstrap";
import { WorkspacesRoutingModule } from "./workspaces-routing.module";
import { WorkspacesComponent } from "./workspaces.component";
import { FormsModule, ReactiveFormsModule } from "@angular/forms";
@@ -28,6 +29,6 @@ import { RequestOperationsComponent } from "./components/capability-request/comp
FilesComponent,
RequestOperationsComponent,
],
imports: [CommonModule, WorkspacesRoutingModule, ReactiveFormsModule, FormsModule],
imports: [CommonModule, NgbModule, WorkspacesRoutingModule, ReactiveFormsModule, FormsModule],
})
export class WorkspacesModule {}
@@ -43,6 +43,7 @@ services:
- ./delivery_root:/tmp/delivery_root
- ./lustre/aoc/cluster/pipeline/docker/workspaces:/lustre/aoc/cluster/pipeline/docker/workspaces
# To enable NGAS container, run: `docker compose -f docker-compose.local.yml --profile ngas up`
ngas:
build:
context: .
@@ -53,6 +54,8 @@ services:
- "7777:7777"
volumes:
- ./local_ngas_root:/home/ngas/NGAS
profiles:
- ngas
schema:
build:
@@ -65,17 +68,19 @@ services:
- ./docker.properties:/home/ssa/capo/docker.properties
# Used for doing a DB migration on dsoc-dev
# Uncomment this and run the command `docker compose -f docker-compose.local.yml up schema-dev` and the migration will run
# schema-dev:
# build:
# context: ./schema/
# dockerfile: Dockerfile.local
# command: ["./bin/run-migrations.sh", "dsoc-dev"]
# depends_on:
# - db
# volumes:
# - ./schema:/code/schema
# - ~/.capo:/home/casa/capo
# Run the command `docker compose -f docker-compose.local.yml --profile schema-dev up schema-dev` and the migration will run
schema-dev:
build:
context: ./schema/
dockerfile: Dockerfile.local
command: ["./bin/run-migrations.sh", "dsoc-dev"]
depends_on:
- db
volumes:
- ./schema:/code/schema
- ~/.capo:/home/casa/capo
profiles:
- schema-dev
workflow:
build:
......
"""add image ingestion templates
Revision ID: f0f6d7be45e3
Revises: dcbfdfafe16c
Create Date: 2021-07-28 14:24:40.213660
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = "f0f6d7be45e3"
down_revision = "dcbfdfafe16c"
branch_labels = None
depends_on = None
content = """{
"project_code": "{{projectCode}}",
"band_code": "{{bands}}",
"configurations": "{{configurations}}",
"starttime": null,
"endtime": null,
"exposure_time": null,
"rms_noise": null,
"image_tags": "",
"product_tags": "",
"collection_name": "",
"calibration_level": 2
}
"""
metadata_content = """{
"fileSetIds": "{{sdmId}}",
"workflowName": "std_cms_imaging",
"systemId": "{{request_id}}",
"creationTime": "{{created_at}}",
"productLocator": "{{product_locator}}",
"calProductLocator": "{{cal_locator}}",
"projectMetadata": {
"projectCode": "{{projectCode}}",
"title": "{{title}}",
"telescope": "{{telescope}}",
"startTime": "{{startTime}}",
"observer": "{{observer}}"
},
"destinationDirectory": "{{root_directory}}/{{relative_path}}",
"calibrationSourceDirectory":"{{cms_path}}",
"cmsName":"{{sdmId}}.ms"
}
"""
condor_content = """executable = ingest_image.sh
arguments = metadata.json
output = ingest.out
error = ingest.err
log = condor.log
SBIN_PATH = /lustre/aoc/cluster/pipeline/$ENV(CAPO_PROFILE)/workspaces/sbin
should_transfer_files = yes
transfer_input_files = $(SBIN_PATH)/pycapo, $(SBIN_PATH)/conveyor, $(SBIN_PATH)/ingest_envoy, $(SBIN_PATH)/ingest, $(SBIN_PATH)/image-product-collector.sh, ./metadata.json, ./aux_image_metadata.json
getenv = True
environment = "CAPO_PATH=/home/casa/capo"
queue
"""
old_metadata = """{
"fileSetIds": "{{sdmId}}",
"workflowName": "std_cms_imaging",
"systemId": "{{request_id}}",
"creationTime": "{{created_at}}",
"productLocator": "{{product_locator}}",
"projectMetadata": {
"projectCode": "{{projectCode}}",
"title": "{{title}}",
"telescope": "{{telescope}}",
"startTime": "{{startTime}}",
"observer": "{{observer}}"
},
"destinationDirectory": "{{root_directory}}/{{relative_path}}",
"calibrationSourceDirectory":"{{cms_path}}",
"cmsName":"{{sdmId}}.ms"
}
"""
old_condor = """executable = ingest_image.sh
arguments = metadata.json
output = ingest.out
error = ingest.err
log = condor.log
SBIN_PATH = /lustre/aoc/cluster/pipeline/$ENV(CAPO_PROFILE)/workspaces/sbin
should_transfer_files = yes
transfer_input_files = $(SBIN_PATH)/pycapo, $(SBIN_PATH)/conveyor, $(SBIN_PATH)/ingest_envoy, $(SBIN_PATH)/ingest, $(SBIN_PATH)/image-product-collector.sh, ./metadata.json
getenv = True
environment = "CAPO_PATH=/home/casa/capo"
queue
"""
def upgrade():
op.execute(
f"""
INSERT INTO workflow_templates (filename, content, workflow_name)
VALUES ('aux_image_metadata.json', E'{content}', 'ingest_image')
"""
)
op.execute(
f"""
UPDATE workflow_templates
SET content=E'{metadata_content}' WHERE filename='metadata.json' AND workflow_name = 'std_cms_imaging'
"""
)
op.execute(
f"""
UPDATE workflow_templates
SET content=E'{condor_content}' WHERE filename='ingest_image.condor'
"""
)
def downgrade():
op.execute(
"""
DELETE FROM workflow_templates WHERE filename='aux_image_metadata.json' AND workflow_name='ingest_image'
"""
)
op.execute(
f"""
UPDATE workflow_templates
SET content=E'{old_metadata}' WHERE filename='metadata.json' AND workflow_name = 'std_cms_imaging'
"""
)
op.execute(
f"""
UPDATE workflow_templates
SET content=E'{old_condor}' WHERE filename='ingest_image.condor'
"""
)
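A minimal sketch of exercising this revision locally, assuming a standard Alembic setup whose alembic.ini points at a scratch database:

# Hypothetical local run; the config path depends on the project layout.
from alembic import command
from alembic.config import Config

cfg = Config("alembic.ini")
command.upgrade(cfg, "f0f6d7be45e3")    # install/update the image-ingestion templates
command.downgrade(cfg, "dcbfdfafe16c")  # restore the previous template content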
@@ -2,6 +2,10 @@
ARG TAGNAME="tmp"
FROM cache:${TAGNAME}
# Build arg that sets environment; sets to "dev" if no build arg is given
ARG env=dev
ENV ENV=${env}
# Change working directory to /code
WORKDIR /code
@@ -24,4 +28,4 @@ RUN python setup.py develop --user
# Gets reset to proper environment's profile in the deploy stage
ENV CAPO_PROFILE docker
CMD ["pserve", "--reload", "development.ini"]
CMD ["pserve", "--reload", "${ENV}.ini"]
@@ -14,4 +14,4 @@ RUN python setup.py develop --user
ARG capo_env=docker
ENV CAPO_PROFILE $capo_env
CMD ["pserve", "--reload", "development.ini"]
CMD ["pserve", "--reload", "dev.ini"]
@@ -8,10 +8,9 @@ Then run:
Then you should be able to run this with:
env CAPO_PROFILE=local pserve --reload development.ini
env CAPO_PROFILE=local pserve --reload dev.ini
Once there, try the following URLs:
- http://0.0.0.0:3456/capabilities
- http://0.0.0.0:3456/capability/request?capability=null
#! /bin/bash
# Set failfast
set -e
set -o pipefail
# FOR USE WITH DOCKER DEVELOPMENT ENVIRONMENT ONLY.
# Capability Service
# Python library installation
pip install -r requirements.txt
python setup.py develop
# Start development server
pserve --reload development.ini
@@ -19,4 +19,41 @@ pyramid.includes = pyramid_tm
[server:main]
use = egg:waitress#main
listen = 0.0.0.0:6543
listen = 0.0.0.0:3457
###
# logging configuration
# https://docs.pylonsproject.org/projects/pyramid/en/latest/narr/logging.html
###
[loggers]
keys = root, capability, sqlalchemy
[handlers]
keys = console
[formatters]
keys = generic
[logger_root]
level = WARN
handlers = console
[logger_capability]
level = WARN
handlers =
qualname = capability
[logger_sqlalchemy]
level = WARN
handlers =
qualname = sqlalchemy.engine
[handler_console]
class = StreamHandler
args = (sys.stderr,)
level = NOTSET
formatter = generic
[formatter_generic]
format = %(asctime)s %(levelname)-5.5s [%(name)s:%(lineno)s][%(threadName)s] %(message)s
@@ -2,6 +2,10 @@
ARG TAGNAME="tmp"
FROM cache:${TAGNAME}
# Build arg that sets environment; sets to "dev" if no build arg is given
ARG env=dev
ENV ENV=${env}
# Change working directory to /code
WORKDIR /code
@@ -24,4 +28,4 @@ RUN python setup.py develop --user
# Gets reset to proper environment's profile in the deploy stage
ENV CAPO_PROFILE docker
CMD ["pserve", "--reload", "development.ini"]
CMD ["pserve", "--reload", "${ENV}.ini"]
@@ -14,4 +14,4 @@ RUN python setup.py develop --user
ARG capo_env=docker
ENV CAPO_PROFILE $capo_env
CMD ["pserve", "--reload", "development.ini"]
CMD ["pserve", "--reload", "dev.ini"]
#! /bin/bash
# Set failfast
set -e
set -o pipefail
# FOR USE WITH DOCKER DEVELOPMENT ENVIRONMENT ONLY.
# Workflow Service
# Python library installation
pip install -r requirements.txt
python setup.py develop
# Start development server
pserve --reload development.ini
###
# app configuration
# http://docs.pylonsproject.org/projects/pyramid/en/latest/narr/environment.html
###
[app:main]
use = egg:ssa-notification
session.cookie_expires = true
session.auto = true
pyramid.reload_templates = false
pyramid.debug_authorization = false
pyramid.debug_notfound = false
pyramid.debug_routematch = false
pyramid.default_locale_name = en
pyramid.includes = pyramid_tm
[server:main]
use = egg:waitress#main
listen = 0.0.0.0:3458
###
# logging configuration
# https://docs.pylonsproject.org/projects/pyramid/en/latest/narr/logging.html
###
[loggers]
keys = root, notification, sqlalchemy
[handlers]
keys = console
[formatters]
keys = generic
[logger_root]
level = WARN
handlers = console
[logger_notification]
level = WARN
handlers =
qualname = notification
[logger_sqlalchemy]
level = WARN
handlers =
qualname = sqlalchemy.engine
[handler_console]
class = StreamHandler
args = (sys.stderr,)
level = NOTSET
formatter = generic
[formatter_generic]
format = %(asctime)s %(levelname)-5.5s [%(name)s:%(lineno)s][%(threadName)s] %(message)s
@@ -42,4 +42,4 @@ WORKDIR /packages/apps/cli/executables/productfetcher/tests
RUN pytest
# RUN python setup.py develop
CMD ["pserve", "--reload", "/code/development.ini"]
CMD ["pserve", "--reload", "/code/dev.ini"]