Skip to content
Snippets Groups Projects
Commit 8d2ab3d7 authored by Charlotte Hausman's avatar Charlotte Hausman
Browse files

WS-645: Move images out of subdirectories

parent 7e5424da
No related branches found
No related tags found
1 merge request !498 — WS-645: Move images out of subdirectories
Pipeline #2862 passed
...@@ -40,13 +40,7 @@ class RedisConnect: ...@@ -40,13 +40,7 @@ class RedisConnect:
proxy = self.settings["reverse_proxy"] proxy = self.settings["reverse_proxy"]
carta_url = f"https://{proxy}/{front_end_id}/?socketUrl=wss://{proxy}/{back_end_id}/" carta_url = f"https://{proxy}/{front_end_id}/?socketUrl=wss://{proxy}/{back_end_id}/"
if self.settings["single_image"]: if self.settings["single_image"]:
carta_url = ( carta_url = carta_url + "&file=" + self.settings["image_name"]
carta_url
+ "&file="
+ self.settings["image_name"]
+ "/"
+ self.settings["image_name"]
)
self.logger.info(f"Carta URL: {carta_url}") self.logger.info(f"Carta URL: {carta_url}")
return carta_url return carta_url
...@@ -168,24 +162,24 @@ class RedisConnect: ...@@ -168,24 +162,24 @@ class RedisConnect:
f"traefik/http/routers/{carta_wrapper}/middlewares/0": "stripPrefixFE@file", f"traefik/http/routers/{carta_wrapper}/middlewares/0": "stripPrefixFE@file",
} }
unique_values = self.check_for_duplicate_values(values, front_end_port, back_end_port) unique_values = self.check_for_duplicate_values(values, front_end_port, back_end_port, wrapper_port)
self.redis_values = unique_values self.redis_values = unique_values
return unique_values return unique_values
def check_for_duplicate_values(self, redis_values: dict, front_port: int, back_port: int): def check_for_duplicate_values(self, redis_values: dict, front_port: int, back_port: int, wrapper_port: int):
self.logger.info("Checking for duplicate values on server...") self.logger.info("Checking for duplicate values on server...")
for key in redis_values: for key in redis_values:
if self.conn.get(key): if self.conn.get(key):
self.logger.warning( self.logger.warning("WARNING: Redis value collision found. Generating new random IDs.")
"WARNING: Redis value collision found. Generating new random IDs."
)
self.generated_ids = self.generate_ids() self.generated_ids = self.generate_ids()
new_values = self.get_redis_values( new_values = self.get_redis_values(
self.settings["reverse_proxy"], self.settings["reverse_proxy"],
self.generated_ids["front_end_id"], self.generated_ids["front_end_id"],
self.generated_ids["back_end_id"], self.generated_ids["back_end_id"],
self.generated_ids["wrapper_id"],
front_port, front_port,
back_port, back_port,
wrapper_port,
) )
return new_values return new_values
...@@ -224,7 +218,9 @@ class ArchiveConnect: ...@@ -224,7 +218,9 @@ class ArchiveConnect:
:param url: URL generated to allow user access to this running CARTA instance :param url: URL generated to allow user access to this running CARTA instance
""" """
send_archive_msg_url = f"{self.settings['workflow_url']}/workflows/carta/requests/{self.settings['wf_request_id']}/send-url-to-aat" send_archive_msg_url = (
f"{self.settings['workflow_url']}/workflows/carta/requests/{self.settings['wf_request_id']}/send-url-to-aat"
)
payload = {"carta_url": url} payload = {"carta_url": url}
self.logger.info("Sending REST call to workflow service for AAT messaging.") self.logger.info("Sending REST call to workflow service for AAT messaging.")
requests.post(send_archive_msg_url, json=payload) requests.post(send_archive_msg_url, json=payload)
......
...@@ -108,13 +108,13 @@ class TestRedisConnect: ...@@ -108,13 +108,13 @@ class TestRedisConnect:
:param redis: Custom fixture that provides a sandbox Redis server :param redis: Custom fixture that provides a sandbox Redis server
""" """
redis_connect.conn = redis redis_connect.conn = redis
redis_connect.generated_ids = {"front_end_id": "1234abcd", "back_end_id": "5678efgh"} redis_connect.generated_ids = {"front_end_id": "1234abcd", "back_end_id": "5678efgh", "wrapper_id": "9876jklm"}
redis.set("duplicate", "value") redis.set("duplicate", "value")
with patch("carta_envoy.connect.RedisConnect.get_redis_values") as values: with patch("carta_envoy.connect.RedisConnect.get_redis_values") as values:
redis_connect.check_for_duplicate_values({"duplicate": "value"}, 9897, 6543) redis_connect.check_for_duplicate_values({"duplicate": "value"}, 9897, 6543, 1234)
assert values.call_count == 1 assert values.call_count == 1
values.call_count = 0 values.call_count = 0
redis_connect.check_for_duplicate_values({"not_duplicate": "value"}, 9897, 6543) redis_connect.check_for_duplicate_values({"not_duplicate": "value"}, 9897, 6543, 1234)
assert values.call_count == 0 assert values.call_count == 0
......
"""move images out of subdirs
Revision ID: 42723a9dd85c
Revises: 01de4f42cf27
Create Date: 2021-09-13 13:52:28.908485
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = "42723a9dd85c"
down_revision = "01de4f42cf27"
branch_labels = None
depends_on = None
script_content = """#!/bin/sh
set -o errexit
export HOME=$TMPDIR
{{#need_data}}
./framework.sh -r .
chmod 770 .
cd rawdata/
../productfetcher{{product_locator}} {{^single_image}}&{{/single_image}}
image_subdirs=$(ls)
for i in $image_subdirs; do
if [[ -d "./$i" ]]; then
mv "./$i"/* .
rm -rf "./$i"
fi
done
.{{/need_data}}./carta_envoy {{#need_data}}--parallel{{/need_data}} {{#single_image}}--single -i {{imageName}}{{/single_image}} -d $1 -n $2 -wf $3 -r $4
{{#need_data}}wait{{/need_data}}
"""
old_content = """#!/bin/sh
set -o errexit
export HOME=$TMPDIR
{{#need_data}}
./framework.sh -r .
chmod 770 .
cd rawdata/
../productfetcher{{product_locator}} {{^single_image}}&{{/single_image}}
.{{/need_data}}./carta_envoy {{#need_data}}--parallel{{/need_data}} {{#single_image}}--single -i {{imageName}}{{/single_image}} -d $1 -n $2 -wf $3 -r $4
{{#need_data}}wait{{/need_data}}
"""
def upgrade():
    """Replace the stored carta.sh template with the subdirectory-flattening version."""
    update_sql = f"""
    UPDATE workflow_templates
    SET content=E'{script_content}' where filename='carta.sh'
    """
    op.execute(update_sql)
def downgrade():
    """Restore the previous carta.sh template (no subdirectory flattening)."""
    revert_sql = f"""
    UPDATE workflow_templates
    SET content=E'{old_content}' where filename='carta.sh'
    """
    op.execute(revert_sql)
...@@ -587,7 +587,7 @@ class WorkflowMessageHandler: ...@@ -587,7 +587,7 @@ class WorkflowMessageHandler:
wf_id = subject["workflow_request_id"] wf_id = subject["workflow_request_id"]
wf_request = self.info.lookup_workflow_request(wf_id) wf_request = self.info.lookup_workflow_request(wf_id)
if wf_request.workflow_name == ArchiveWorkflows.CARTA.value and wf_request.argument["need_data"] is True: if wf_request.workflow_name == ArchiveWorkflows.CARTA.value and wf_request.argument["notify_ready"] is False:
logger.info(f"SENDING FAILED CARTA MESSAGE to AAT Request Handler for request #{wf_id}!") logger.info(f"SENDING FAILED CARTA MESSAGE to AAT Request Handler for request #{wf_id}!")
routing_key = f"ws-workflow.carta-instance-ready.{wf_id}" routing_key = f"ws-workflow.carta-instance-ready.{wf_id}"
carta_url_msg = ArchiveMessageArchitect(routing_key=routing_key, request=wf_request).compose_message( carta_url_msg = ArchiveMessageArchitect(routing_key=routing_key, request=wf_request).compose_message(
......
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment