From 0e86b77a8442d39fcf5a8c43d9e8b81eaa0b44db Mon Sep 17 00:00:00 2001
From: Christian Schiffer <c.schiffer@fz-juelich.de>
Date: Thu, 8 Apr 2021 15:51:50 +0200
Subject: [PATCH] Implemented some functions to retrieve prediction annotations

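The atlas controller's /predictions/<work_dir> endpoint now accepts an
optional "annotations" query parameter: when it is set to "true", the
endpoint returns the contents of the annotation (coordinates) files found
in the prediction directory instead of metadata about the HDF5 prediction
files. The atlas server gains a new GET endpoint on the annotation
namespace (/<project_id>/tasks/<task_id>) that forwards such a query for a
given project and task.

A minimal client-side sketch of the controller query; the base URL, the
work_dir name and the printed field below are illustrative assumptions,
not part of this patch:

    import requests

    # Hypothetical deployment values, adjust as needed
    ATLAS_CONTROLLER_URL = "http://localhost:5000"
    work_dir = "atlas_ui_project1_task1"

    # annotations=true switches the response from HDF5 prediction metadata
    # to the contents of the annotation files
    response = requests.get(
        f"{ATLAS_CONTROLLER_URL}/predictions/{work_dir}",
        params={"annotations": "true"},
    )
    for entry in response.json():
        print(entry["path"])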
---
 atlas_controller/atlas_controller/app.py | 38 +++++++++++++++++-------
 atlas_server/src/app.py                  | 35 ++++++++++++++++++----
 2 files changed, 57 insertions(+), 16 deletions(-)

diff --git a/atlas_controller/atlas_controller/app.py b/atlas_controller/atlas_controller/app.py
index fcea2d9..1e90721 100644
--- a/atlas_controller/atlas_controller/app.py
+++ b/atlas_controller/atlas_controller/app.py
@@ -3,7 +3,7 @@ import os
 import base64
 from subprocess import CalledProcessError
 
-from flask import Flask
+from flask import Flask, request
 from flask_restx import Api, Resource, fields, abort
 
 from models import JobSchema
@@ -187,6 +187,7 @@ class Predictions(Resource):
         import os
         import glob
         import h5py
+        import json
 
         work_dir = "/".join([WORK_DIR_ROOT, work_dir])
         if not os.path.exists(work_dir):
@@ -198,17 +199,32 @@ class Predictions(Resource):
             abort(404, f"Could not find prediction directory {prediction_dir}")
             return
 
+        # If the "annotations" query parameter is true, return annotation contents instead of prediction file metadata
+        annotations = request.args.get("annotations", "").lower() in ("true", )
         results = []
-        prediction_files = glob.glob(os.path.join(prediction_dir, "*.hdf5"))
-        for prediction_file in prediction_files:
-            with h5py.File(prediction_file, "r") as f:
-                hw = f["pyramid/00"].shape[:2]
-            results.append(
-                {
-                    "path": prediction_file.replace(WORK_DIR_ROOT, ""),
-                    "shape": hw,
-                }
-            )
+
+        if annotations:
+            # Search for annotation files
+            prediction_files = glob.glob(os.path.join(prediction_dir, "*.coordinates.json"))
+            for prediction_file in prediction_files:
+                with open(prediction_file, "r") as f:
+                    results.append(
+                        {
+                            "path": prediction_file.replace(WORK_DIR_ROOT, ""),
+                            "annotations": json.load(f),
+                        })
+        else:
+            # Search for prediction files
+            prediction_files = glob.glob(os.path.join(prediction_dir, "*.hdf5"))
+            for prediction_file in prediction_files:
+                with h5py.File(prediction_file, "r") as f:
+                    hw = f["pyramid/00"].shape[:2]
+                results.append(
+                    {
+                        "path": prediction_file.replace(WORK_DIR_ROOT, ""),
+                        "shape": hw,
+                    }
+                )
 
         return results
 
diff --git a/atlas_server/src/app.py b/atlas_server/src/app.py
index 5d2de66..6278ac0 100644
--- a/atlas_server/src/app.py
+++ b/atlas_server/src/app.py
@@ -317,7 +317,7 @@ class TaskJob(Resource):
         except JobStatusNotFoundError:
             # If the job status could not be found, it could mean the job finished too long ago
             result["status"] = f"Could not determine job status for job with id {job_id}. " \
-                f"This could mean that the job does not exist or finished too long ago."
+                               f"This could mean that the job does not exist or finished too long ago."
 
         return result
 
@@ -368,15 +368,15 @@ class TaskTileServerConfig(Resource):
             with Database() as db:
                 project = db.get_project_by_id(project_id=project_id)
         except ProjectNotFoundError:
-                abort(404, f"No project found for id {project_id}")
-                return
+            abort(404, f"No project found for id {project_id}")
+            return
 
         try:
             with Database() as db:
                 task = db.get_task_by_id(project_id=project_id, task_id=task_id)
         except ProjectNotFoundError:
-                abort(404, f"No task found for id {task_id}")
-                return
+            abort(404, f"No task found for id {task_id}")
+            return
 
         # Determine the working directory used for this project and task
         work_dir = f"atlas_ui_project{project_id}_task{task_id}"
@@ -517,6 +517,31 @@ class AnnotationImportExport(Resource):
             "status": "ok",
         }
 
+
+@annotation_namespace.route("/<int:project_id>/tasks/<int:task_id>")
+class PredictionAnnotationExport(Resource):
+    @annotation_namespace.doc("Retrieve annotations based on predictions of a specific task")
+    def get(self, project_id, task_id):
+        import requests
+        from .config import ATLAS_CONTROLLER_URL
+
+        # Determine the working directory that holds this task's predictions
+        work_dir = f"atlas_ui_project{project_id}_task{task_id}"
+        # Query prediction annotations from the atlas controller
+        result = requests.get(url=f"{ATLAS_CONTROLLER_URL}/predictions/{work_dir}?annotations=true")
+        if result.status_code >= 400:
+            abort(404, f"Failed to find prediction annotations for {work_dir}")
+            return
+        result = result.json()
+
+        # The controller returns a list of {"path": ..., "annotations": ...} entries, one per annotation file
+        return {
+            "project_id": project_id,
+            "task_id": task_id,
+            "annotations": result,
+        }
+
+
 # -----------------------------------
 # Main entry point
 # -----------------------------------
-- 
GitLab