diff --git a/apps/cli/executables/pexable/carta_envoy/carta_envoy/connect.py b/apps/cli/executables/pexable/carta_envoy/carta_envoy/connect.py
index 69701ab2ba6da0d6d7563bcd3203c1e5f563adfe..21c3d15ab96a439f55b28657e5d90913d25f3448 100644
--- a/apps/cli/executables/pexable/carta_envoy/carta_envoy/connect.py
+++ b/apps/cli/executables/pexable/carta_envoy/carta_envoy/connect.py
@@ -40,13 +40,7 @@ class RedisConnect:
         proxy = self.settings["reverse_proxy"]
         carta_url = f"https://{proxy}/{front_end_id}/?socketUrl=wss://{proxy}/{back_end_id}/"
         if self.settings["single_image"]:
-            carta_url = (
-                carta_url
-                + "&file="
-                + self.settings["image_name"]
-                + "/"
-                + self.settings["image_name"]
-            )
+            carta_url = carta_url + "&file=" + self.settings["image_name"]
 
         self.logger.info(f"Carta URL: {carta_url}")
         return carta_url
@@ -168,24 +162,24 @@ class RedisConnect:
             f"traefik/http/routers/{carta_wrapper}/middlewares/0": "stripPrefixFE@file",
         }
 
-        unique_values = self.check_for_duplicate_values(values, front_end_port, back_end_port)
+        unique_values = self.check_for_duplicate_values(values, front_end_port, back_end_port, wrapper_port)
         self.redis_values = unique_values
         return unique_values
 
-    def check_for_duplicate_values(self, redis_values: dict, front_port: int, back_port: int):
+    def check_for_duplicate_values(self, redis_values: dict, front_port: int, back_port: int, wrapper_port: int):
         self.logger.info("Checking for duplicate values on server...")
         for key in redis_values:
             if self.conn.get(key):
-                self.logger.warning(
-                    "WARNING: Redis value collision found. Generating new random IDs."
-                )
+                self.logger.warning("WARNING: Redis value collision found. Generating new random IDs.")
                 self.generated_ids = self.generate_ids()
                 new_values = self.get_redis_values(
                     self.settings["reverse_proxy"],
                     self.generated_ids["front_end_id"],
                     self.generated_ids["back_end_id"],
+                    self.generated_ids["wrapper_id"],
                     front_port,
                     back_port,
+                    wrapper_port,
                 )
                 return new_values
 
@@ -224,7 +218,9 @@ class ArchiveConnect:
 
         :param url: URL generated to allow user access to this running CARTA instance
         """
-        send_archive_msg_url = f"{self.settings['workflow_url']}/workflows/carta/requests/{self.settings['wf_request_id']}/send-url-to-aat"
+        send_archive_msg_url = (
+            f"{self.settings['workflow_url']}/workflows/carta/requests/{self.settings['wf_request_id']}/send-url-to-aat"
+        )
         payload = {"carta_url": url}
         self.logger.info("Sending REST call to workflow service for AAT messaging.")
         requests.post(send_archive_msg_url, json=payload)
diff --git a/apps/cli/executables/pexable/carta_envoy/test/test_connect.py b/apps/cli/executables/pexable/carta_envoy/test/test_connect.py
index c34209beb8f4d831ac0aa80a6a9bf03391d974bc..8971b4aa5d1b6be7d308a0a2c470060b3736a726 100644
--- a/apps/cli/executables/pexable/carta_envoy/test/test_connect.py
+++ b/apps/cli/executables/pexable/carta_envoy/test/test_connect.py
@@ -108,13 +108,13 @@ class TestRedisConnect:
         :param redis: Custom fixture that provides a sandbox Redis server
         """
         redis_connect.conn = redis
-        redis_connect.generated_ids = {"front_end_id": "1234abcd", "back_end_id": "5678efgh"}
+        redis_connect.generated_ids = {"front_end_id": "1234abcd", "back_end_id": "5678efgh", "wrapper_id": "9876jklm"}
         redis.set("duplicate", "value")
         with patch("carta_envoy.connect.RedisConnect.get_redis_values") as values:
-            redis_connect.check_for_duplicate_values({"duplicate": "value"}, 9897, 6543)
+            redis_connect.check_for_duplicate_values({"duplicate": "value"}, 9897, 6543, 1234)
             assert values.call_count == 1
             values.call_count = 0
-            redis_connect.check_for_duplicate_values({"not_duplicate": "value"}, 9897, 6543)
+            redis_connect.check_for_duplicate_values({"not_duplicate": "value"}, 9897, 6543, 1234)
             assert values.call_count == 0
 
 
diff --git a/schema/versions/42723a9dd85c_move_images_out_of_subdirs.py b/schema/versions/42723a9dd85c_move_images_out_of_subdirs.py
new file mode 100644
index 0000000000000000000000000000000000000000..bf60903264f663be6b1a122514fe80390dd88887
--- /dev/null
+++ b/schema/versions/42723a9dd85c_move_images_out_of_subdirs.py
@@ -0,0 +1,75 @@
+"""move images out of subdirs
+
+Revision ID: 42723a9dd85c
+Revises: 01de4f42cf27
+Create Date: 2021-09-13 13:52:28.908485
+
+"""
+from alembic import op
+import sqlalchemy as sa
+
+
+# revision identifiers, used by Alembic.
+revision = "42723a9dd85c"
+down_revision = "01de4f42cf27"
+branch_labels = None
+depends_on = None
+
+script_content = """#!/bin/sh
+set -o errexit
+
+export HOME=$TMPDIR
+
+{{#need_data}}
+./framework.sh -r .
+chmod 770 .
+cd rawdata/
+../productfetcher{{product_locator}} {{^single_image}}&{{/single_image}}
+
+# Flatten: move each image out of its subdirectory into rawdata/
+
+for i in ./*/; do
+  if [ -d "$i" ]; then
+    mv "$i"* .
+    rm -rf "$i"
+  fi
+done
+
+.{{/need_data}}./carta_envoy {{#need_data}}--parallel{{/need_data}} {{#single_image}}--single -i {{imageName}}{{/single_image}} -d $1 -n $2 -wf $3 -r $4
+{{#need_data}}wait{{/need_data}}
+
+"""
+
+
+old_content = """#!/bin/sh
+set -o errexit
+
+export HOME=$TMPDIR
+
+{{#need_data}}
+./framework.sh -r .
+chmod 770 .
+cd rawdata/
+../productfetcher{{product_locator}} {{^single_image}}&{{/single_image}}
+.{{/need_data}}./carta_envoy {{#need_data}}--parallel{{/need_data}} {{#single_image}}--single -i {{imageName}}{{/single_image}} -d $1 -n $2 -wf $3 -r $4
+{{#need_data}}wait{{/need_data}}
+
+"""
+
+
+def upgrade():
+    op.execute(
+        f"""
+        UPDATE workflow_templates
+        SET content=E'{script_content}' where filename='carta.sh'
+        """
+    )
+
+
+def downgrade():
+    op.execute(
+        f"""
+        UPDATE workflow_templates
+        SET content=E'{old_content}' where filename='carta.sh'
+        """
+    )
diff --git a/shared/workspaces/workspaces/workflow/services/workflow_service.py b/shared/workspaces/workspaces/workflow/services/workflow_service.py
index 9f83327c0de7ec724af01ccfd6215effcb8400be..478184e6b49988699fe218d4d03ad87f5684b70b 100644
--- a/shared/workspaces/workspaces/workflow/services/workflow_service.py
+++ b/shared/workspaces/workspaces/workflow/services/workflow_service.py
@@ -587,7 +587,7 @@ class WorkflowMessageHandler:
         wf_id = subject["workflow_request_id"]
         wf_request = self.info.lookup_workflow_request(wf_id)
 
-        if wf_request.workflow_name == ArchiveWorkflows.CARTA.value and wf_request.argument["need_data"] is True:
+        if wf_request.workflow_name == ArchiveWorkflows.CARTA.value and wf_request.argument["notify_ready"] is False:
             logger.info(f"SENDING FAILED CARTA MESSAGE to AAT Request Handler for request #{wf_id}!")
             routing_key = f"ws-workflow.carta-instance-ready.{wf_id}"
             carta_url_msg = ArchiveMessageArchitect(routing_key=routing_key, request=wf_request).compose_message(