Posted to commits@liminal.apache.org by li...@apache.org on 2021/07/14 08:12:39 UTC

[incubator-liminal] branch master updated: [LIMINAL-78] (#56)

This is an automated email from the ASF dual-hosted git repository.

lior pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/incubator-liminal.git


The following commit(s) were added to refs/heads/master by this push:
     new 3dad9fd  [LIMINAL-78] (#56)
3dad9fd is described below

commit 3dad9fdc401979f010201af0fd27afb86ed8b5dc
Author: Simon Levin <si...@naturalint.com>
AuthorDate: Wed Jul 14 11:12:30 2021 +0300

    [LIMINAL-78] (#56)
    
    Looks good
    * Update serving.py
    
    * Update liminal.yml
    
    * Update serving.py
---
 examples/aws-ml-app-demo/liminal.yml |  3 +++
 examples/aws-ml-app-demo/serving.py  | 29 +++++++++++++++++++++++------
 2 files changed, 26 insertions(+), 6 deletions(-)

diff --git a/examples/aws-ml-app-demo/liminal.yml b/examples/aws-ml-app-demo/liminal.yml
index 7efa083..5972aeb 100644
--- a/examples/aws-ml-app-demo/liminal.yml
+++ b/examples/aws-ml-app-demo/liminal.yml
@@ -20,6 +20,9 @@ services:
       - endpoint: /healthcheck
         module: serving
         function: healthcheck
+      - endpoint: /version
+        module: serving
+        function: version
 pipelines:
   - pipeline: my_datascience_pipeline
     start_date: 1970-01-01
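
    (Not part of the commit: a minimal sketch of exercising the new /version
    endpoint alongside the existing /healthcheck once the service is deployed.
    The base URL is a hypothetical local address, and the requests library is
    an assumption; the routing of endpoint -> module.function comes from the
    services section of liminal.yml above.)

        import requests  # third-party HTTP client, assumed available

        # hypothetical address where the aws-ml-app-demo service is exposed
        base_url = 'http://localhost:5000'

        # /healthcheck -> serving.healthcheck, /version -> serving.version
        print(requests.get(f'{base_url}/healthcheck').text)  # expected: 'Server is up!'
        print(requests.get(f'{base_url}/version').text)      # latest model version, or the error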
diff --git a/examples/aws-ml-app-demo/serving.py b/examples/aws-ml-app-demo/serving.py
index 125a6f5..7aa8886 100644
--- a/examples/aws-ml-app-demo/serving.py
+++ b/examples/aws-ml-app-demo/serving.py
@@ -6,13 +6,30 @@ from model_store import ModelStore
 _MODEL_STORE = ModelStore(model_store.PRODUCTION)
 _PETAL_WIDTH = 'petal_width'
 
+
 def predict(input_json):
-    print(f'input_json={input_json}')
-    input_dict = json.loads(input_json)
-    model, version = _MODEL_STORE.load_latest_model()
-    result = str(model.predict_proba([[float(input_dict[_PETAL_WIDTH])]])[0][1])
-    print(f'result={result}')
-    return result
+    try:
+        input_dict = json.loads(input_json)
+        model, version = _MODEL_STORE.load_latest_model()
+        result = str(model.predict_proba([[float(input_dict[_PETAL_WIDTH])]])[0][1])
+        return json.dumps({"result": result, "version": version})
+
+    except IndexError:
+        return 'Failure: the model is not ready yet'
+
+    except Exception as e:
+        print(e)
+        return 'Failure'
+
 
 def healthcheck(self):
     return 'Server is up!'
+
+
+def version(self):
+    try:
+        model, version = _MODEL_STORE.load_latest_model()
+        print(f'version={version}')
+        return version
+    except Exception as e:
+        return e
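
    (Not part of the commit: a rough local check of the changed handlers,
    assuming the example's model_store module is importable and a model has
    already been trained and published by the pipeline; the petal_width value
    is arbitrary sample input.)

        import json
        import serving  # examples/aws-ml-app-demo/serving.py

        # predict now returns a JSON string with "result" and "version",
        # or one of the failure strings if no model is available yet
        response = serving.predict(json.dumps({'petal_width': 2.1}))
        print(response)

        # healthcheck and version take an unused positional argument,
        # so pass a placeholder when calling them directly
        print(serving.healthcheck(None))
        print(serving.version(None))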