You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@predictionio.apache.org by do...@apache.org on 2016/08/09 21:43:45 UTC

[37/52] [abbrv] incubator-predictionio git commit: Move pio_tests files to testing/

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/3648ba1b/testing/pio_tests/data/eventserver_test/signup_events_51.json
----------------------------------------------------------------------
diff --git a/testing/pio_tests/data/eventserver_test/signup_events_51.json b/testing/pio_tests/data/eventserver_test/signup_events_51.json
new file mode 100644
index 0000000..d8c31bd
--- /dev/null
+++ b/testing/pio_tests/data/eventserver_test/signup_events_51.json
@@ -0,0 +1,257 @@
+[
+  { 
+    "event" : "sign-up",
+    "entityType" : "user",
+    "entityId" : "1"
+  },
+  { 
+    "event" : "sign-up",
+    "entityType" : "user",
+    "entityId" : "2"
+  },
+  { 
+    "event" : "sign-up",
+    "entityType" : "user",
+    "entityId" : "3"
+  },
+  { 
+    "event" : "sign-up",
+    "entityType" : "user",
+    "entityId" : "4"
+  },
+  { 
+    "event" : "sign-up",
+    "entityType" : "user",
+    "entityId" : "5"
+  },
+  { 
+    "event" : "sign-up",
+    "entityType" : "user",
+    "entityId" : "6"
+  },
+  { 
+    "event" : "sign-up",
+    "entityType" : "user",
+    "entityId" : "7"
+  },
+  { 
+    "event" : "sign-up",
+    "entityType" : "user",
+    "entityId" : "8"
+  },
+  { 
+    "event" : "sign-up",
+    "entityType" : "user",
+    "entityId" : "9"
+  },
+  { 
+    "event" : "sign-up",
+    "entityType" : "user",
+    "entityId" : "10"
+  },
+  { 
+    "event" : "sign-up",
+    "entityType" : "user",
+    "entityId" : "11"
+  },
+  { 
+    "event" : "sign-up",
+    "entityType" : "user",
+    "entityId" : "12"
+  },
+  { 
+    "event" : "sign-up",
+    "entityType" : "user",
+    "entityId" : "13"
+  },
+  { 
+    "event" : "sign-up",
+    "entityType" : "user",
+    "entityId" : "14"
+  },
+  { 
+    "event" : "sign-up",
+    "entityType" : "user",
+    "entityId" : "15"
+  },
+  { 
+    "event" : "sign-up",
+    "entityType" : "user",
+    "entityId" : "16"
+  },
+  { 
+    "event" : "sign-up",
+    "entityType" : "user",
+    "entityId" : "17"
+  },
+  { 
+    "event" : "sign-up",
+    "entityType" : "user",
+    "entityId" : "18"
+  },
+  { 
+    "event" : "sign-up",
+    "entityType" : "user",
+    "entityId" : "19"
+  },
+  { 
+    "event" : "sign-up",
+    "entityType" : "user",
+    "entityId" : "20"
+  },
+  { 
+    "event" : "sign-up",
+    "entityType" : "user",
+    "entityId" : "21"
+  },
+  { 
+    "event" : "sign-up",
+    "entityType" : "user",
+    "entityId" : "22"
+  },
+  { 
+    "event" : "sign-up",
+    "entityType" : "user",
+    "entityId" : "23"
+  },
+  { 
+    "event" : "sign-up",
+    "entityType" : "user",
+    "entityId" : "24"
+  },
+  { 
+    "event" : "sign-up",
+    "entityType" : "user",
+    "entityId" : "25"
+  },
+  { 
+    "event" : "sign-up",
+    "entityType" : "user",
+    "entityId" : "26"
+  },
+  { 
+    "event" : "sign-up",
+    "entityType" : "user",
+    "entityId" : "27"
+  },
+  { 
+    "event" : "sign-up",
+    "entityType" : "user",
+    "entityId" : "28"
+  },
+  { 
+    "event" : "sign-up",
+    "entityType" : "user",
+    "entityId" : "29"
+  },
+  { 
+    "event" : "sign-up",
+    "entityType" : "user",
+    "entityId" : "30"
+  },
+  { 
+    "event" : "sign-up",
+    "entityType" : "user",
+    "entityId" : "31"
+  },
+  { 
+    "event" : "sign-up",
+    "entityType" : "user",
+    "entityId" : "32"
+  },
+  { 
+    "event" : "sign-up",
+    "entityType" : "user",
+    "entityId" : "33"
+  },
+  { 
+    "event" : "sign-up",
+    "entityType" : "user",
+    "entityId" : "34"
+  },
+  { 
+    "event" : "sign-up",
+    "entityType" : "user",
+    "entityId" : "35"
+  },
+  { 
+    "event" : "sign-up",
+    "entityType" : "user",
+    "entityId" : "36"
+  },
+  { 
+    "event" : "sign-up",
+    "entityType" : "user",
+    "entityId" : "37"
+  },
+  { 
+    "event" : "sign-up",
+    "entityType" : "user",
+    "entityId" : "38"
+  },
+  { 
+    "event" : "sign-up",
+    "entityType" : "user",
+    "entityId" : "39"
+  },
+  { 
+    "event" : "sign-up",
+    "entityType" : "user",
+    "entityId" : "40"
+  },
+  { 
+    "event" : "sign-up",
+    "entityType" : "user",
+    "entityId" : "41"
+  },
+  { 
+    "event" : "sign-up",
+    "entityType" : "user",
+    "entityId" : "42"
+  },
+  { 
+    "event" : "sign-up",
+    "entityType" : "user",
+    "entityId" : "43"
+  },
+  { 
+    "event" : "sign-up",
+    "entityType" : "user",
+    "entityId" : "44"
+  },
+  { 
+    "event" : "sign-up",
+    "entityType" : "user",
+    "entityId" : "45"
+  },
+  { 
+    "event" : "sign-up",
+    "entityType" : "user",
+    "entityId" : "46"
+  },
+  { 
+    "event" : "sign-up",
+    "entityType" : "user",
+    "entityId" : "47"
+  },
+  { 
+    "event" : "sign-up",
+    "entityType" : "user",
+    "entityId" : "48"
+  },
+  { 
+    "event" : "sign-up",
+    "entityType" : "user",
+    "entityId" : "49"
+  },
+  { 
+    "event" : "sign-up",
+    "entityType" : "user",
+    "entityId" : "50"
+  },
+  { 
+    "event" : "sign-up",
+    "entityType" : "user",
+    "entityId" : "51"
+  }
+]
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/3648ba1b/testing/pio_tests/data/quickstart_test/engine.json
----------------------------------------------------------------------
diff --git a/testing/pio_tests/data/quickstart_test/engine.json b/testing/pio_tests/data/quickstart_test/engine.json
new file mode 100644
index 0000000..c7b6b4b
--- /dev/null
+++ b/testing/pio_tests/data/quickstart_test/engine.json
@@ -0,0 +1,21 @@
+{
+  "id": "default",
+  "description": "Default settings",
+  "engineFactory": "org.template.recommendation.RecommendationEngine",
+  "datasource": {
+    "params" : {
+      "appName": "MyRecommender"
+    }
+  },
+  "algorithms": [
+    {
+      "name": "als",
+      "params": {
+        "rank": 10,
+        "numIterations": 10,
+        "lambda": 0.01,
+        "seed": 3
+      }
+    }
+  ]
+}

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/3648ba1b/testing/pio_tests/engines/recommendation-engine/README.md
----------------------------------------------------------------------
diff --git a/testing/pio_tests/engines/recommendation-engine/README.md b/testing/pio_tests/engines/recommendation-engine/README.md
new file mode 100644
index 0000000..6566db4
--- /dev/null
+++ b/testing/pio_tests/engines/recommendation-engine/README.md
@@ -0,0 +1,42 @@
+# Recommendation Template
+
+## Documentation
+
+Please refer to http://docs.prediction.io/templates/recommendation/quickstart/
+
+## Versions
+
+### develop
+
+### v0.3.2
+
+- Fix incorrect top items in batchPredict() (issue #5)
+
+### v0.3.1
+
+- Add Evaluation module and modify DataSource for it
+
+### v0.3.0
+
+- update for PredictionIO 0.9.2, including:
+
+  - use new PEventStore API
+  - use appName in DataSource parameter
+
+### v0.2.0
+
+- update build.sbt and template.json for PredictionIO 0.9.2
+
+### v0.1.2
+
+- update for PredictionIO 0.9.0
+
+### v0.1.1
+
+- Persist RDD to memory (.cache()) in DataSource for better performance and quick fix for new user/item ID BiMap error issue.
+
+### v0.1.0
+
+- initial version
+- known issue:
+  * If importing new events of new users/items during training, the new user/item id can't be found in the BiMap.

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/3648ba1b/testing/pio_tests/engines/recommendation-engine/build.sbt
----------------------------------------------------------------------
diff --git a/testing/pio_tests/engines/recommendation-engine/build.sbt b/testing/pio_tests/engines/recommendation-engine/build.sbt
new file mode 100644
index 0000000..c7413bb
--- /dev/null
+++ b/testing/pio_tests/engines/recommendation-engine/build.sbt
@@ -0,0 +1,12 @@
+import AssemblyKeys._
+
+assemblySettings
+
+name := "template-scala-parallel-recommendation"
+
+organization := "org.apache.predictionio"
+
+libraryDependencies ++= Seq(
+  "org.apache.predictionio"    %% "core"          % pioVersion.value % "provided",
+  "org.apache.spark" %% "spark-core"    % "1.3.0" % "provided",
+  "org.apache.spark" %% "spark-mllib"   % "1.3.0" % "provided")

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/3648ba1b/testing/pio_tests/engines/recommendation-engine/data/import_eventserver.py
----------------------------------------------------------------------
diff --git a/testing/pio_tests/engines/recommendation-engine/data/import_eventserver.py b/testing/pio_tests/engines/recommendation-engine/data/import_eventserver.py
new file mode 100644
index 0000000..0a1e109
--- /dev/null
+++ b/testing/pio_tests/engines/recommendation-engine/data/import_eventserver.py
@@ -0,0 +1,56 @@
+"""
+Import sample data for recommendation engine
+"""
+
+import predictionio
+import argparse
+import random
+
+RATE_ACTIONS_DELIMITER = "::"
+SEED = 3
+
+def import_events(client, file):
+  f = open(file, 'r')
+  random.seed(SEED)
+  count = 0
+  print "Importing data..."
+  for line in f:
+    data = line.rstrip('\r\n').split(RATE_ACTIONS_DELIMITER)
+    # For demonstration purpose, randomly mix in some buy events
+    if (random.randint(0, 1) == 1):
+      client.create_event(
+        event="rate",
+        entity_type="user",
+        entity_id=data[0],
+        target_entity_type="item",
+        target_entity_id=data[1],
+        properties= { "rating" : float(data[2]) }
+      )
+    else:
+      client.create_event(
+        event="buy",
+        entity_type="user",
+        entity_id=data[0],
+        target_entity_type="item",
+        target_entity_id=data[1]
+      )
+    count += 1
+  f.close()
+  print "%s events are imported." % count
+
+if __name__ == '__main__':
+  parser = argparse.ArgumentParser(
+    description="Import sample data for recommendation engine")
+  parser.add_argument('--access_key', default='invald_access_key')
+  parser.add_argument('--url', default="http://localhost:7070")
+  parser.add_argument('--file', default="./data/sample_movielens_data.txt")
+
+  args = parser.parse_args()
+  print args
+
+  client = predictionio.EventClient(
+    access_key=args.access_key,
+    url=args.url,
+    threads=5,
+    qsize=500)
+  import_events(client, args.file)

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/3648ba1b/testing/pio_tests/engines/recommendation-engine/data/sample_movielens_data.txt
----------------------------------------------------------------------
diff --git a/testing/pio_tests/engines/recommendation-engine/data/sample_movielens_data.txt b/testing/pio_tests/engines/recommendation-engine/data/sample_movielens_data.txt
new file mode 100644
index 0000000..f0eee19
--- /dev/null
+++ b/testing/pio_tests/engines/recommendation-engine/data/sample_movielens_data.txt
@@ -0,0 +1,1501 @@
+0::2::3
+0::3::1
+0::5::2
+0::9::4
+0::11::1
+0::12::2
+0::15::1
+0::17::1
+0::19::1
+0::21::1
+0::23::1
+0::26::3
+0::27::1
+0::28::1
+0::29::1
+0::30::1
+0::31::1
+0::34::1
+0::37::1
+0::41::2
+0::44::1
+0::45::2
+0::46::1
+0::47::1
+0::48::1
+0::50::1
+0::51::1
+0::54::1
+0::55::1
+0::59::2
+0::61::2
+0::64::1
+0::67::1
+0::68::1
+0::69::1
+0::71::1
+0::72::1
+0::77::2
+0::79::1
+0::83::1
+0::87::1
+0::89::2
+0::91::3
+0::92::4
+0::94::1
+0::95::2
+0::96::1
+0::98::1
+0::99::1
+1::2::2
+1::3::1
+1::4::2
+1::6::1
+1::9::3
+1::12::1
+1::13::1
+1::14::1
+1::16::1
+1::19::1
+1::21::3
+1::27::1
+1::28::3
+1::33::1
+1::36::2
+1::37::1
+1::40::1
+1::41::2
+1::43::1
+1::44::1
+1::47::1
+1::50::1
+1::54::1
+1::56::2
+1::57::1
+1::58::1
+1::60::1
+1::62::4
+1::63::1
+1::67::1
+1::68::4
+1::70::2
+1::72::1
+1::73::1
+1::74::2
+1::76::1
+1::77::3
+1::78::1
+1::81::1
+1::82::1
+1::85::3
+1::86::2
+1::88::2
+1::91::1
+1::92::2
+1::93::1
+1::94::2
+1::96::1
+1::97::1
+2::4::3
+2::6::1
+2::8::5
+2::9::1
+2::10::1
+2::12::3
+2::13::1
+2::15::2
+2::18::2
+2::19::4
+2::22::1
+2::26::1
+2::28::1
+2::34::4
+2::35::1
+2::37::5
+2::38::1
+2::39::5
+2::40::4
+2::47::1
+2::50::1
+2::52::2
+2::54::1
+2::55::1
+2::57::2
+2::58::2
+2::59::1
+2::61::1
+2::62::1
+2::64::1
+2::65::1
+2::66::3
+2::68::1
+2::71::3
+2::76::1
+2::77::1
+2::78::1
+2::80::1
+2::83::5
+2::85::1
+2::87::2
+2::88::1
+2::89::4
+2::90::1
+2::92::4
+2::93::5
+3::0::1
+3::1::1
+3::2::1
+3::7::3
+3::8::3
+3::9::1
+3::14::1
+3::15::1
+3::16::1
+3::18::4
+3::19::1
+3::24::3
+3::26::1
+3::29::3
+3::33::1
+3::34::3
+3::35::1
+3::36::3
+3::37::1
+3::38::2
+3::43::1
+3::44::1
+3::46::1
+3::47::1
+3::51::5
+3::52::3
+3::56::1
+3::58::1
+3::60::3
+3::62::1
+3::65::2
+3::66::1
+3::67::1
+3::68::2
+3::70::1
+3::72::2
+3::76::3
+3::79::3
+3::80::4
+3::81::1
+3::83::1
+3::84::1
+3::86::1
+3::87::2
+3::88::4
+3::89::1
+3::91::1
+3::94::3
+4::1::1
+4::6::1
+4::8::1
+4::9::1
+4::10::1
+4::11::1
+4::12::1
+4::13::1
+4::14::2
+4::15::1
+4::17::1
+4::20::1
+4::22::1
+4::23::1
+4::24::1
+4::29::4
+4::30::1
+4::31::1
+4::34::1
+4::35::1
+4::36::1
+4::39::2
+4::40::3
+4::41::4
+4::43::2
+4::44::1
+4::45::1
+4::46::1
+4::47::1
+4::49::2
+4::50::1
+4::51::1
+4::52::4
+4::54::1
+4::55::1
+4::60::3
+4::61::1
+4::62::4
+4::63::3
+4::65::1
+4::67::2
+4::69::1
+4::70::4
+4::71::1
+4::73::1
+4::78::1
+4::84::1
+4::85::1
+4::87::3
+4::88::3
+4::89::2
+4::96::1
+4::97::1
+4::98::1
+4::99::1
+5::0::1
+5::1::1
+5::4::1
+5::5::1
+5::8::1
+5::9::3
+5::10::2
+5::13::3
+5::15::1
+5::19::1
+5::20::3
+5::21::2
+5::23::3
+5::27::1
+5::28::1
+5::29::1
+5::31::1
+5::36::3
+5::38::2
+5::39::1
+5::42::1
+5::48::3
+5::49::4
+5::50::3
+5::51::1
+5::52::1
+5::54::1
+5::55::5
+5::56::3
+5::58::1
+5::60::1
+5::61::1
+5::64::3
+5::65::2
+5::68::4
+5::70::1
+5::71::1
+5::72::1
+5::74::1
+5::79::1
+5::81::2
+5::84::1
+5::85::1
+5::86::1
+5::88::1
+5::90::4
+5::91::2
+5::95::2
+5::99::1
+6::0::1
+6::1::1
+6::2::3
+6::5::1
+6::6::1
+6::9::1
+6::10::1
+6::15::2
+6::16::2
+6::17::1
+6::18::1
+6::20::1
+6::21::1
+6::22::1
+6::24::1
+6::25::5
+6::26::1
+6::28::1
+6::30::1
+6::33::1
+6::38::1
+6::39::1
+6::43::4
+6::44::1
+6::45::1
+6::48::1
+6::49::1
+6::50::1
+6::53::1
+6::54::1
+6::55::1
+6::56::1
+6::58::4
+6::59::1
+6::60::1
+6::61::3
+6::63::3
+6::66::1
+6::67::3
+6::68::1
+6::69::1
+6::71::2
+6::73::1
+6::75::1
+6::77::1
+6::79::1
+6::81::1
+6::84::1
+6::85::3
+6::86::1
+6::87::1
+6::88::1
+6::89::1
+6::91::2
+6::94::1
+6::95::2
+6::96::1
+7::1::1
+7::2::2
+7::3::1
+7::4::1
+7::7::1
+7::10::1
+7::11::2
+7::14::2
+7::15::1
+7::16::1
+7::18::1
+7::21::1
+7::22::1
+7::23::1
+7::25::5
+7::26::1
+7::29::4
+7::30::1
+7::31::3
+7::32::1
+7::33::1
+7::35::1
+7::37::2
+7::39::3
+7::40::2
+7::42::2
+7::44::1
+7::45::2
+7::47::4
+7::48::1
+7::49::1
+7::53::1
+7::54::1
+7::55::1
+7::56::1
+7::59::1
+7::61::2
+7::62::3
+7::63::2
+7::66::1
+7::67::3
+7::74::1
+7::75::1
+7::76::3
+7::77::1
+7::81::1
+7::82::1
+7::84::2
+7::85::4
+7::86::1
+7::92::2
+7::96::1
+7::97::1
+7::98::1
+8::0::1
+8::2::4
+8::3::2
+8::4::2
+8::5::1
+8::7::1
+8::9::1
+8::11::1
+8::15::1
+8::18::1
+8::19::1
+8::21::1
+8::29::5
+8::31::3
+8::33::1
+8::35::1
+8::36::1
+8::40::2
+8::44::1
+8::45::1
+8::50::1
+8::51::1
+8::52::5
+8::53::5
+8::54::1
+8::55::1
+8::56::1
+8::58::4
+8::60::3
+8::62::4
+8::64::1
+8::67::3
+8::69::1
+8::71::1
+8::72::3
+8::77::3
+8::78::1
+8::79::1
+8::83::1
+8::85::5
+8::86::1
+8::88::1
+8::90::1
+8::92::2
+8::95::4
+8::96::3
+8::97::1
+8::98::1
+8::99::1
+9::2::3
+9::3::1
+9::4::1
+9::5::1
+9::6::1
+9::7::5
+9::9::1
+9::12::1
+9::14::3
+9::15::1
+9::19::1
+9::21::1
+9::22::1
+9::24::1
+9::25::1
+9::26::1
+9::30::3
+9::32::4
+9::35::2
+9::36::2
+9::37::2
+9::38::1
+9::39::1
+9::43::3
+9::49::5
+9::50::3
+9::53::1
+9::54::1
+9::58::1
+9::59::1
+9::60::1
+9::61::1
+9::63::3
+9::64::3
+9::68::1
+9::69::1
+9::70::3
+9::71::1
+9::73::2
+9::75::1
+9::77::2
+9::81::2
+9::82::1
+9::83::1
+9::84::1
+9::86::1
+9::87::4
+9::88::1
+9::90::3
+9::94::2
+9::95::3
+9::97::2
+9::98::1
+10::0::3
+10::2::4
+10::4::3
+10::7::1
+10::8::1
+10::10::1
+10::13::2
+10::14::1
+10::16::2
+10::17::1
+10::18::1
+10::21::1
+10::22::1
+10::24::1
+10::25::3
+10::28::1
+10::35::1
+10::36::1
+10::37::1
+10::38::1
+10::39::1
+10::40::4
+10::41::2
+10::42::3
+10::43::1
+10::49::3
+10::50::1
+10::51::1
+10::52::1
+10::55::2
+10::56::1
+10::58::1
+10::63::1
+10::66::1
+10::67::2
+10::68::1
+10::75::1
+10::77::1
+10::79::1
+10::86::1
+10::89::3
+10::90::1
+10::97::1
+10::98::1
+11::0::1
+11::6::2
+11::9::1
+11::10::1
+11::11::1
+11::12::1
+11::13::4
+11::16::1
+11::18::5
+11::19::4
+11::20::1
+11::21::1
+11::22::1
+11::23::5
+11::25::1
+11::27::5
+11::30::5
+11::32::5
+11::35::3
+11::36::2
+11::37::2
+11::38::4
+11::39::1
+11::40::1
+11::41::1
+11::43::2
+11::45::1
+11::47::1
+11::48::5
+11::50::4
+11::51::3
+11::59::1
+11::61::1
+11::62::1
+11::64::1
+11::66::4
+11::67::1
+11::69::5
+11::70::1
+11::71::3
+11::72::3
+11::75::3
+11::76::1
+11::77::1
+11::78::1
+11::79::5
+11::80::3
+11::81::4
+11::82::1
+11::86::1
+11::88::1
+11::89::1
+11::90::4
+11::94::2
+11::97::3
+11::99::1
+12::2::1
+12::4::1
+12::6::1
+12::7::3
+12::8::1
+12::14::1
+12::15::2
+12::16::4
+12::17::5
+12::18::2
+12::21::1
+12::22::2
+12::23::3
+12::24::1
+12::25::1
+12::27::5
+12::30::2
+12::31::4
+12::35::5
+12::38::1
+12::41::1
+12::44::2
+12::45::1
+12::50::4
+12::51::1
+12::52::1
+12::53::1
+12::54::1
+12::56::2
+12::57::1
+12::60::1
+12::63::1
+12::64::5
+12::66::3
+12::67::1
+12::70::1
+12::72::1
+12::74::1
+12::75::1
+12::77::1
+12::78::1
+12::79::3
+12::82::2
+12::83::1
+12::84::1
+12::85::1
+12::86::1
+12::87::1
+12::88::1
+12::91::3
+12::92::1
+12::94::4
+12::95::2
+12::96::1
+12::98::2
+13::0::1
+13::3::1
+13::4::2
+13::5::1
+13::6::1
+13::12::1
+13::14::2
+13::15::1
+13::17::1
+13::18::3
+13::20::1
+13::21::1
+13::22::1
+13::26::1
+13::27::1
+13::29::3
+13::31::1
+13::33::1
+13::40::2
+13::43::2
+13::44::1
+13::45::1
+13::49::1
+13::51::1
+13::52::2
+13::53::3
+13::54::1
+13::62::1
+13::63::2
+13::64::1
+13::68::1
+13::71::1
+13::72::3
+13::73::1
+13::74::3
+13::77::2
+13::78::1
+13::79::2
+13::83::3
+13::85::1
+13::86::1
+13::87::2
+13::88::2
+13::90::1
+13::93::4
+13::94::1
+13::98::1
+13::99::1
+14::1::1
+14::3::3
+14::4::1
+14::5::1
+14::6::1
+14::7::1
+14::9::1
+14::10::1
+14::11::1
+14::12::1
+14::13::1
+14::14::3
+14::15::1
+14::16::1
+14::17::1
+14::20::1
+14::21::1
+14::24::1
+14::25::2
+14::27::1
+14::28::1
+14::29::5
+14::31::3
+14::34::1
+14::36::1
+14::37::2
+14::39::2
+14::40::1
+14::44::1
+14::45::1
+14::47::3
+14::48::1
+14::49::1
+14::51::1
+14::52::5
+14::53::3
+14::54::1
+14::55::1
+14::56::1
+14::62::4
+14::63::5
+14::67::3
+14::68::1
+14::69::3
+14::71::1
+14::72::4
+14::73::1
+14::76::5
+14::79::1
+14::82::1
+14::83::1
+14::88::1
+14::93::3
+14::94::1
+14::95::2
+14::96::4
+14::98::1
+15::0::1
+15::1::4
+15::2::1
+15::5::2
+15::6::1
+15::7::1
+15::13::1
+15::14::1
+15::15::1
+15::17::2
+15::19::2
+15::22::2
+15::23::2
+15::25::1
+15::26::3
+15::27::1
+15::28::2
+15::29::1
+15::32::1
+15::33::2
+15::34::1
+15::35::2
+15::36::1
+15::37::1
+15::39::1
+15::42::1
+15::46::5
+15::48::2
+15::50::2
+15::51::1
+15::52::1
+15::58::1
+15::62::1
+15::64::3
+15::65::2
+15::72::1
+15::73::1
+15::74::1
+15::79::1
+15::80::1
+15::81::1
+15::82::2
+15::85::1
+15::87::1
+15::91::2
+15::96::1
+15::97::1
+15::98::3
+16::2::1
+16::5::3
+16::6::2
+16::7::1
+16::9::1
+16::12::1
+16::14::1
+16::15::1
+16::19::1
+16::21::2
+16::29::4
+16::30::2
+16::32::1
+16::34::1
+16::36::1
+16::38::1
+16::46::1
+16::47::3
+16::48::1
+16::49::1
+16::50::1
+16::51::5
+16::54::5
+16::55::1
+16::56::2
+16::57::1
+16::60::1
+16::63::2
+16::65::1
+16::67::1
+16::72::1
+16::74::1
+16::80::1
+16::81::1
+16::82::1
+16::85::5
+16::86::1
+16::90::5
+16::91::1
+16::93::1
+16::94::3
+16::95::2
+16::96::3
+16::98::3
+16::99::1
+17::2::1
+17::3::1
+17::6::1
+17::10::4
+17::11::1
+17::13::2
+17::17::5
+17::19::1
+17::20::5
+17::22::4
+17::28::1
+17::29::1
+17::33::1
+17::34::1
+17::35::2
+17::37::1
+17::38::1
+17::45::1
+17::46::5
+17::47::1
+17::49::3
+17::51::1
+17::55::5
+17::56::3
+17::57::1
+17::58::1
+17::59::1
+17::60::1
+17::63::1
+17::66::1
+17::68::4
+17::69::1
+17::70::1
+17::72::1
+17::73::3
+17::78::1
+17::79::1
+17::82::2
+17::84::1
+17::90::5
+17::91::3
+17::92::1
+17::93::1
+17::94::4
+17::95::2
+17::97::1
+18::1::1
+18::4::3
+18::5::2
+18::6::1
+18::7::1
+18::10::1
+18::11::4
+18::12::2
+18::13::1
+18::15::1
+18::18::1
+18::20::1
+18::21::2
+18::22::1
+18::23::2
+18::25::1
+18::26::1
+18::27::1
+18::28::5
+18::29::1
+18::31::1
+18::32::1
+18::36::1
+18::38::5
+18::39::5
+18::40::1
+18::42::1
+18::43::1
+18::44::4
+18::46::1
+18::47::1
+18::48::1
+18::51::2
+18::55::1
+18::56::1
+18::57::1
+18::62::1
+18::63::1
+18::66::3
+18::67::1
+18::70::1
+18::75::1
+18::76::3
+18::77::1
+18::80::3
+18::81::3
+18::82::1
+18::83::5
+18::84::1
+18::97::1
+18::98::1
+18::99::2
+19::0::1
+19::1::1
+19::2::1
+19::4::1
+19::6::2
+19::11::1
+19::12::1
+19::14::1
+19::23::1
+19::26::1
+19::31::1
+19::32::4
+19::33::1
+19::34::1
+19::37::1
+19::38::1
+19::41::1
+19::43::1
+19::45::1
+19::48::1
+19::49::1
+19::50::2
+19::53::2
+19::54::3
+19::55::1
+19::56::2
+19::58::1
+19::61::1
+19::62::1
+19::63::1
+19::64::1
+19::65::1
+19::69::2
+19::72::1
+19::74::3
+19::76::1
+19::78::1
+19::79::1
+19::81::1
+19::82::1
+19::84::1
+19::86::1
+19::87::2
+19::90::4
+19::93::1
+19::94::4
+19::95::2
+19::96::1
+19::98::4
+20::0::1
+20::1::1
+20::2::2
+20::4::2
+20::6::1
+20::8::1
+20::12::1
+20::21::2
+20::22::5
+20::24::2
+20::25::1
+20::26::1
+20::29::2
+20::30::2
+20::32::2
+20::39::1
+20::40::1
+20::41::2
+20::45::2
+20::48::1
+20::50::1
+20::51::3
+20::53::3
+20::55::1
+20::57::2
+20::60::1
+20::61::1
+20::64::1
+20::66::1
+20::70::2
+20::72::1
+20::73::2
+20::75::4
+20::76::1
+20::77::4
+20::78::1
+20::79::1
+20::84::2
+20::85::2
+20::88::3
+20::89::1
+20::90::3
+20::91::1
+20::92::2
+20::93::1
+20::94::4
+20::97::1
+21::0::1
+21::2::4
+21::3::1
+21::7::2
+21::11::1
+21::12::1
+21::13::1
+21::14::3
+21::17::1
+21::19::1
+21::20::1
+21::21::1
+21::22::1
+21::23::1
+21::24::1
+21::27::1
+21::29::5
+21::30::2
+21::38::1
+21::40::2
+21::43::3
+21::44::1
+21::45::1
+21::46::1
+21::48::1
+21::51::1
+21::53::5
+21::54::1
+21::55::1
+21::56::1
+21::58::3
+21::59::3
+21::64::1
+21::66::1
+21::68::1
+21::71::1
+21::73::1
+21::74::4
+21::80::1
+21::81::1
+21::83::1
+21::84::1
+21::85::3
+21::87::4
+21::89::2
+21::92::2
+21::96::3
+21::99::1
+22::0::1
+22::3::2
+22::5::2
+22::6::2
+22::9::1
+22::10::1
+22::11::1
+22::13::1
+22::14::1
+22::16::1
+22::18::3
+22::19::1
+22::22::5
+22::25::1
+22::26::1
+22::29::3
+22::30::5
+22::32::4
+22::33::1
+22::35::1
+22::36::3
+22::37::1
+22::40::1
+22::41::3
+22::44::1
+22::45::2
+22::48::1
+22::51::5
+22::55::1
+22::56::2
+22::60::3
+22::61::1
+22::62::4
+22::63::1
+22::65::1
+22::66::1
+22::68::4
+22::69::4
+22::70::3
+22::71::1
+22::74::5
+22::75::5
+22::78::1
+22::80::3
+22::81::1
+22::82::1
+22::84::1
+22::86::1
+22::87::3
+22::88::5
+22::90::2
+22::92::3
+22::95::2
+22::96::2
+22::98::4
+22::99::1
+23::0::1
+23::2::1
+23::4::1
+23::6::2
+23::10::4
+23::12::1
+23::13::4
+23::14::1
+23::15::1
+23::18::4
+23::22::2
+23::23::4
+23::24::1
+23::25::1
+23::26::1
+23::27::5
+23::28::1
+23::29::1
+23::30::4
+23::32::5
+23::33::2
+23::36::3
+23::37::1
+23::38::1
+23::39::1
+23::43::1
+23::48::5
+23::49::5
+23::50::4
+23::53::1
+23::55::5
+23::57::1
+23::59::1
+23::60::1
+23::61::1
+23::64::4
+23::65::5
+23::66::2
+23::67::1
+23::68::3
+23::69::1
+23::72::1
+23::73::3
+23::77::1
+23::82::2
+23::83::1
+23::84::1
+23::85::1
+23::87::3
+23::88::1
+23::95::2
+23::97::1
+24::4::1
+24::6::3
+24::7::1
+24::10::2
+24::12::1
+24::15::1
+24::19::1
+24::24::1
+24::27::3
+24::30::5
+24::31::1
+24::32::3
+24::33::1
+24::37::1
+24::39::1
+24::40::1
+24::42::1
+24::43::3
+24::45::2
+24::46::1
+24::47::1
+24::48::1
+24::49::1
+24::50::1
+24::52::5
+24::57::1
+24::59::4
+24::63::4
+24::65::1
+24::66::1
+24::67::1
+24::68::3
+24::69::5
+24::71::1
+24::72::4
+24::77::4
+24::78::1
+24::80::1
+24::82::1
+24::84::1
+24::86::1
+24::87::1
+24::88::2
+24::89::1
+24::90::5
+24::91::1
+24::92::1
+24::94::2
+24::95::1
+24::96::5
+24::98::1
+24::99::1
+25::1::3
+25::2::1
+25::7::1
+25::9::1
+25::12::3
+25::16::3
+25::17::1
+25::18::1
+25::20::1
+25::22::1
+25::23::1
+25::26::2
+25::29::1
+25::30::1
+25::31::2
+25::33::4
+25::34::3
+25::35::2
+25::36::1
+25::37::1
+25::40::1
+25::41::1
+25::43::1
+25::47::4
+25::50::1
+25::51::1
+25::53::1
+25::56::1
+25::58::2
+25::64::2
+25::67::2
+25::68::1
+25::70::1
+25::71::4
+25::73::1
+25::74::1
+25::76::1
+25::79::1
+25::82::1
+25::84::2
+25::85::1
+25::91::3
+25::92::1
+25::94::1
+25::95::1
+25::97::2
+26::0::1
+26::1::1
+26::2::1
+26::3::1
+26::4::4
+26::5::2
+26::6::3
+26::7::5
+26::13::3
+26::14::1
+26::16::1
+26::18::3
+26::20::1
+26::21::3
+26::22::5
+26::23::5
+26::24::5
+26::27::1
+26::31::1
+26::35::1
+26::36::4
+26::40::1
+26::44::1
+26::45::2
+26::47::1
+26::48::1
+26::49::3
+26::50::2
+26::52::1
+26::54::4
+26::55::1
+26::57::3
+26::58::1
+26::61::1
+26::62::2
+26::66::1
+26::68::4
+26::71::1
+26::73::4
+26::76::1
+26::81::3
+26::85::1
+26::86::3
+26::88::5
+26::91::1
+26::94::5
+26::95::1
+26::96::1
+26::97::1
+27::0::1
+27::9::1
+27::10::1
+27::18::4
+27::19::3
+27::20::1
+27::22::2
+27::24::2
+27::25::1
+27::27::3
+27::28::1
+27::29::1
+27::31::1
+27::33::3
+27::40::1
+27::42::1
+27::43::1
+27::44::3
+27::45::1
+27::51::3
+27::52::1
+27::55::3
+27::57::1
+27::59::1
+27::60::1
+27::61::1
+27::64::1
+27::66::3
+27::68::1
+27::70::1
+27::71::2
+27::72::1
+27::75::3
+27::78::1
+27::80::3
+27::82::1
+27::83::3
+27::86::1
+27::87::2
+27::90::1
+27::91::1
+27::92::1
+27::93::1
+27::94::2
+27::95::1
+27::98::1
+28::0::3
+28::1::1
+28::2::4
+28::3::1
+28::6::1
+28::7::1
+28::12::5
+28::13::2
+28::14::1
+28::15::1
+28::17::1
+28::19::3
+28::20::1
+28::23::3
+28::24::3
+28::27::1
+28::29::1
+28::33::1
+28::34::1
+28::36::1
+28::38::2
+28::39::2
+28::44::1
+28::45::1
+28::49::4
+28::50::1
+28::52::1
+28::54::1
+28::56::1
+28::57::3
+28::58::1
+28::59::1
+28::60::1
+28::62::3
+28::63::1
+28::65::1
+28::75::1
+28::78::1
+28::81::5
+28::82::4
+28::83::1
+28::85::1
+28::88::2
+28::89::4
+28::90::1
+28::92::5
+28::94::1
+28::95::2
+28::98::1
+28::99::1
+29::3::1
+29::4::1
+29::5::1
+29::7::2
+29::9::1
+29::10::3
+29::11::1
+29::13::3
+29::14::1
+29::15::1
+29::17::3
+29::19::3
+29::22::3
+29::23::4
+29::25::1
+29::29::1
+29::31::1
+29::32::4
+29::33::2
+29::36::2
+29::38::3
+29::39::1
+29::42::1
+29::46::5
+29::49::3
+29::51::2
+29::59::1
+29::61::1
+29::62::1
+29::67::1
+29::68::3
+29::69::1
+29::70::1
+29::74::1
+29::75::1
+29::79::2
+29::80::1
+29::81::2
+29::83::1
+29::85::1
+29::86::1
+29::90::4
+29::93::1
+29::94::4
+29::97::1
+29::99::1

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/3648ba1b/testing/pio_tests/engines/recommendation-engine/data/send_query.py
----------------------------------------------------------------------
diff --git a/testing/pio_tests/engines/recommendation-engine/data/send_query.py b/testing/pio_tests/engines/recommendation-engine/data/send_query.py
new file mode 100644
index 0000000..7eaf53e
--- /dev/null
+++ b/testing/pio_tests/engines/recommendation-engine/data/send_query.py
@@ -0,0 +1,7 @@
+"""
+Send sample query to prediction engine
+"""
+
+import predictionio
+engine_client = predictionio.EngineClient(url="http://localhost:8000")
+print engine_client.send_query({"user": "1", "num": 4})

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/3648ba1b/testing/pio_tests/engines/recommendation-engine/engine.json
----------------------------------------------------------------------
diff --git a/testing/pio_tests/engines/recommendation-engine/engine.json b/testing/pio_tests/engines/recommendation-engine/engine.json
new file mode 100644
index 0000000..8d53d56
--- /dev/null
+++ b/testing/pio_tests/engines/recommendation-engine/engine.json
@@ -0,0 +1,21 @@
+{
+  "id": "default",
+  "description": "Default settings",
+  "engineFactory": "org.template.recommendation.RecommendationEngine",
+  "datasource": {
+    "params" : {
+      "appName": "MyApp1"
+    }
+  },
+  "algorithms": [
+    {
+      "name": "als",
+      "params": {
+        "rank": 10,
+        "numIterations": 10,
+        "lambda": 0.01,
+        "seed": 3
+      }
+    }
+  ]
+}

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/3648ba1b/testing/pio_tests/engines/recommendation-engine/manifest.json
----------------------------------------------------------------------
diff --git a/testing/pio_tests/engines/recommendation-engine/manifest.json b/testing/pio_tests/engines/recommendation-engine/manifest.json
new file mode 100644
index 0000000..9c545ce
--- /dev/null
+++ b/testing/pio_tests/engines/recommendation-engine/manifest.json
@@ -0,0 +1 @@
+{"id":"yDBpzjz39AjIxlOAh8W4t3QSc75uPCuJ","version":"ee98ff9c009ef0d9fb1bc6b78750b83a0ceb37b2","name":"recommendation-engine","description":"pio-autogen-manifest","files":[],"engineFactory":""}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/3648ba1b/testing/pio_tests/engines/recommendation-engine/project/assembly.sbt
----------------------------------------------------------------------
diff --git a/testing/pio_tests/engines/recommendation-engine/project/assembly.sbt b/testing/pio_tests/engines/recommendation-engine/project/assembly.sbt
new file mode 100644
index 0000000..54c3252
--- /dev/null
+++ b/testing/pio_tests/engines/recommendation-engine/project/assembly.sbt
@@ -0,0 +1 @@
+addSbtPlugin("com.eed3si9n" % "sbt-assembly" % "0.11.2")

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/3648ba1b/testing/pio_tests/engines/recommendation-engine/project/pio-build.sbt
----------------------------------------------------------------------
diff --git a/testing/pio_tests/engines/recommendation-engine/project/pio-build.sbt b/testing/pio_tests/engines/recommendation-engine/project/pio-build.sbt
new file mode 100644
index 0000000..8346a96
--- /dev/null
+++ b/testing/pio_tests/engines/recommendation-engine/project/pio-build.sbt
@@ -0,0 +1 @@
+addSbtPlugin("io.prediction" % "pio-build" % "0.9.0")

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/3648ba1b/testing/pio_tests/engines/recommendation-engine/src/main/scala/ALSAlgorithm.scala
----------------------------------------------------------------------
diff --git a/testing/pio_tests/engines/recommendation-engine/src/main/scala/ALSAlgorithm.scala b/testing/pio_tests/engines/recommendation-engine/src/main/scala/ALSAlgorithm.scala
new file mode 100644
index 0000000..17c2806
--- /dev/null
+++ b/testing/pio_tests/engines/recommendation-engine/src/main/scala/ALSAlgorithm.scala
@@ -0,0 +1,138 @@
+package org.template.recommendation
+
+import org.apache.predictionio.controller.PAlgorithm
+import org.apache.predictionio.controller.Params
+import org.apache.predictionio.data.storage.BiMap
+
+import org.apache.spark.SparkContext
+import org.apache.spark.SparkContext._
+import org.apache.spark.rdd.RDD
+import org.apache.spark.mllib.recommendation.ALS
+import org.apache.spark.mllib.recommendation.{Rating => MLlibRating}
+import org.apache.spark.mllib.recommendation.ALSModel
+
+import grizzled.slf4j.Logger
+
/** Hyper-parameters for [[ALSAlgorithm]].
  *
  * @param rank          number of latent features in the factorization
  * @param numIterations number of ALS iterations to run
  * @param lambda        regularization parameter
  * @param seed          optional RNG seed; when None, System.nanoTime is used at train time
  */
case class ALSAlgorithmParams(
  rank: Int,
  numIterations: Int,
  lambda: Double,
  seed: Option[Long]) extends Params
+
/** PredictionIO parallel algorithm wrapping Spark MLlib's ALS matrix
  * factorization: trains an [[ALSModel]] from explicit ratings and serves
  * the top-N items for a user.
  */
class ALSAlgorithm(val ap: ALSAlgorithmParams)
  extends PAlgorithm[PreparedData, ALSModel, Query, PredictedResult] {

  @transient lazy val logger = Logger[this.type]

  if (ap.numIterations > 30) {
    logger.warn(
      s"ALSAlgorithmParams.numIterations > 30, current: ${ap.numIterations}. " +
      s"There is a chance of running to StackOverflowException. Lower this number to remedy it")
  }

  /** Train an MLlib ALS model on the prepared ratings.
    *
    * String entity ids are translated to Int indices (MLlib requirement)
    * via BiMaps that are stored on the returned model for later reverse
    * translation at query time.
    */
  def train(sc: SparkContext, data: PreparedData): ALSModel = {
    // MLLib ALS cannot handle empty training data.
    // Fix: error message said "Preprator"; the component is the Preparator.
    require(!data.ratings.take(1).isEmpty,
      s"RDD[Rating] in PreparedData cannot be empty." +
      " Please check if DataSource generates TrainingData" +
      " and Preparator generates PreparedData correctly.")
    // Convert user and item String IDs to Int index for MLlib

    val userStringIntMap = BiMap.stringInt(data.ratings.map(_.user))
    val itemStringIntMap = BiMap.stringInt(data.ratings.map(_.item))
    val mllibRatings = data.ratings.map( r =>
      // MLlibRating requires integer index for user and item
      MLlibRating(userStringIntMap(r.user), itemStringIntMap(r.item), r.rating)
    )

    // seed for MLlib ALS
    val seed = ap.seed.getOrElse(System.nanoTime)

    // If you only have one type of implicit event (Eg. "view" event only),
    // replace ALS.train(...) with
    //val m = ALS.trainImplicit(
      //ratings = mllibRatings,
      //rank = ap.rank,
      //iterations = ap.numIterations,
      //lambda = ap.lambda,
      //blocks = -1,
      //alpha = 1.0,
      //seed = seed)

    val m = ALS.train(
      ratings = mllibRatings,
      rank = ap.rank,
      iterations = ap.numIterations,
      lambda = ap.lambda,
      blocks = -1,
      seed = seed)

    new ALSModel(
      rank = m.rank,
      userFeatures = m.userFeatures,
      productFeatures = m.productFeatures,
      userStringIntMap = userStringIntMap,
      itemStringIntMap = itemStringIntMap)
  }

  /** Serve the top `query.num` recommendations for a single user; users not
    * seen at training time get an empty result. */
  def predict(model: ALSModel, query: Query): PredictedResult = {
    // Convert String ID to Int index for Mllib
    model.userStringIntMap.get(query.user).map { userInt =>
      // create inverse view of itemStringIntMap
      val itemIntStringMap = model.itemStringIntMap.inverse
      // recommendProducts() returns Array[MLlibRating], which uses item Int
      // index. Convert it to String ID for returning PredictedResult
      val itemScores = model.recommendProducts(userInt, query.num)
        .map (r => ItemScore(itemIntStringMap(r.product), r.rating))
      new PredictedResult(itemScores)
    }.getOrElse{
      logger.info(s"No prediction for unknown user ${query.user}.")
      new PredictedResult(Array.empty)
    }
  }

  // This function is used by the evaluation module, where a batch of queries is sent to this engine
  // for evaluation purpose.
  override def batchPredict(model: ALSModel, queries: RDD[(Long, Query)]): RDD[(Long, PredictedResult)] = {
    val userIxQueries: RDD[(Int, (Long, Query))] = queries
    .map { case (ix, query) => {
      // If user not found, then the index is -1
      val userIx = model.userStringIntMap.get(query.user).getOrElse(-1)
      (userIx, (ix, query))
    }}

    // Cross product of all valid users from the queries and products in the model.
    val usersProducts: RDD[(Int, Int)] = userIxQueries
      .keys
      .filter(_ != -1)
      .cartesian(model.productFeatures.map(_._1))

    // Call mllib ALS's predict function.
    val ratings: RDD[MLlibRating] = model.predict(usersProducts)

    // The following code construct predicted results from mllib's ratings.
    // Not optimal implementation. Instead of groupBy, should use combineByKey with a PriorityQueue
    val userRatings: RDD[(Int, Iterable[MLlibRating])] = ratings.groupBy(_.user)

    userIxQueries.leftOuterJoin(userRatings)
    .map {
      // When there are ratings
      case (userIx, ((ix, query), Some(ratings))) => {
        val topItemScores: Array[ItemScore] = ratings
        .toArray
        .sortBy(_.rating)(Ordering.Double.reverse) // note: from large to small ordering
        .take(query.num)
        .map { rating => ItemScore(
          model.itemStringIntMap.inverse(rating.product),
          rating.rating) }

        (ix, PredictedResult(itemScores = topItemScores))
      }
      // When user doesn't exist in training data
      case (userIx, ((ix, query), None)) => {
        require(userIx == -1)
        (ix, PredictedResult(itemScores = Array.empty))
      }
    }
  }
}

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/3648ba1b/testing/pio_tests/engines/recommendation-engine/src/main/scala/ALSModel.scala
----------------------------------------------------------------------
diff --git a/testing/pio_tests/engines/recommendation-engine/src/main/scala/ALSModel.scala b/testing/pio_tests/engines/recommendation-engine/src/main/scala/ALSModel.scala
new file mode 100644
index 0000000..243c1d1
--- /dev/null
+++ b/testing/pio_tests/engines/recommendation-engine/src/main/scala/ALSModel.scala
@@ -0,0 +1,63 @@
+package org.apache.spark.mllib.recommendation
+// This must be the same package as Spark's MatrixFactorizationModel because
+// MatrixFactorizationModel's constructor is private and we are using
+// its constructor in order to save and load the model
+
+import org.template.recommendation.ALSAlgorithmParams
+
+import org.apache.predictionio.controller.IPersistentModel
+import org.apache.predictionio.controller.IPersistentModelLoader
+import org.apache.predictionio.data.storage.BiMap
+
+import org.apache.spark.SparkContext
+import org.apache.spark.SparkContext._
+import org.apache.spark.rdd.RDD
+
/** MLlib MatrixFactorizationModel extended with the String<->Int id BiMaps
  * needed to translate PredictionIO entity ids to MLlib indices and back.
  * Declared in Spark's package because the parent constructor is private
  * (see the file-level comment).
  */
class ALSModel(
    override val rank: Int,
    override val userFeatures: RDD[(Int, Array[Double])],
    override val productFeatures: RDD[(Int, Array[Double])],
    val userStringIntMap: BiMap[String, Int],
    val itemStringIntMap: BiMap[String, Int])
  extends MatrixFactorizationModel(rank, userFeatures, productFeatures)
  with IPersistentModel[ALSAlgorithmParams] {

  /** Persist every model component as Spark object files under /tmp/<id>/.
    * NOTE(review): /tmp is node-local — this presumably assumes a
    * single-node or shared-filesystem deployment; confirm before
    * running on a multi-node cluster. */
  def save(id: String, params: ALSAlgorithmParams,
    sc: SparkContext): Boolean = {

    sc.parallelize(Seq(rank)).saveAsObjectFile(s"/tmp/${id}/rank")
    userFeatures.saveAsObjectFile(s"/tmp/${id}/userFeatures")
    productFeatures.saveAsObjectFile(s"/tmp/${id}/productFeatures")
    sc.parallelize(Seq(userStringIntMap))
      .saveAsObjectFile(s"/tmp/${id}/userStringIntMap")
    sc.parallelize(Seq(itemStringIntMap))
      .saveAsObjectFile(s"/tmp/${id}/itemStringIntMap")
    true
  }

  // Count-based summary for logs; note count()/take() trigger Spark actions.
  override def toString = {
    s"userFeatures: [${userFeatures.count()}]" +
    s"(${userFeatures.take(2).toList}...)" +
    s" productFeatures: [${productFeatures.count()}]" +
    s"(${productFeatures.take(2).toList}...)" +
    s" userStringIntMap: [${userStringIntMap.size}]" +
    s"(${userStringIntMap.take(2)}...)" +
    s" itemStringIntMap: [${itemStringIntMap.size}]" +
    s"(${itemStringIntMap.take(2)}...)"
  }
}
+
/** Loader counterpart of [[ALSModel.save]]: reads each component back from
  * /tmp/<id>/. NOTE(review): `sc.get` assumes the framework always supplies
  * a SparkContext for this persistent model — verify against the
  * IPersistentModelLoader contract. */
object ALSModel
  extends IPersistentModelLoader[ALSAlgorithmParams, ALSModel] {
  def apply(id: String, params: ALSAlgorithmParams,
    sc: Option[SparkContext]) = {
    new ALSModel(
      rank = sc.get.objectFile[Int](s"/tmp/${id}/rank").first,
      userFeatures = sc.get.objectFile(s"/tmp/${id}/userFeatures"),
      productFeatures = sc.get.objectFile(s"/tmp/${id}/productFeatures"),
      userStringIntMap = sc.get
        .objectFile[BiMap[String, Int]](s"/tmp/${id}/userStringIntMap").first,
      itemStringIntMap = sc.get
        .objectFile[BiMap[String, Int]](s"/tmp/${id}/itemStringIntMap").first)
  }
}

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/3648ba1b/testing/pio_tests/engines/recommendation-engine/src/main/scala/DataSource.scala
----------------------------------------------------------------------
diff --git a/testing/pio_tests/engines/recommendation-engine/src/main/scala/DataSource.scala b/testing/pio_tests/engines/recommendation-engine/src/main/scala/DataSource.scala
new file mode 100644
index 0000000..eea3ae6
--- /dev/null
+++ b/testing/pio_tests/engines/recommendation-engine/src/main/scala/DataSource.scala
@@ -0,0 +1,103 @@
+package org.template.recommendation
+
+import org.apache.predictionio.controller.PDataSource
+import org.apache.predictionio.controller.EmptyEvaluationInfo
+import org.apache.predictionio.controller.EmptyActualResult
+import org.apache.predictionio.controller.Params
+import org.apache.predictionio.data.storage.Event
+import org.apache.predictionio.data.store.PEventStore
+
+import org.apache.spark.SparkContext
+import org.apache.spark.SparkContext._
+import org.apache.spark.rdd.RDD
+
+import grizzled.slf4j.Logger
+
/** Evaluation settings: number of folds and items requested per eval query. */
case class DataSourceEvalParams(kFold: Int, queryNum: Int)

/** @param appName    event-store application to read events from
  * @param evalParams evaluation settings; None when only training is run */
case class DataSourceParams(
  appName: String,
  evalParams: Option[DataSourceEvalParams]) extends Params
+
/** Reads "rate" and "buy" events from the event store and converts them to
  * [[Rating]]s, both for plain training and for k-fold evaluation. */
class DataSource(val dsp: DataSourceParams)
  extends PDataSource[TrainingData,
      EmptyEvaluationInfo, Query, ActualResult] {

  @transient lazy val logger = Logger[this.type]

  /** Load user->item "rate"/"buy" events; a "buy" counts as a rating of 4.0.
    * Events that cannot be converted are logged and the exception rethrown.
    * The result is cached because callers may traverse it more than once. */
  def getRatings(sc: SparkContext): RDD[Rating] = {

    val eventsRDD: RDD[Event] = PEventStore.find(
      appName = dsp.appName,
      entityType = Some("user"),
      eventNames = Some(List("rate", "buy")), // read "rate" and "buy" event
      // targetEntityType is optional field of an event.
      targetEntityType = Some(Some("item")))(sc)

    val ratingsRDD: RDD[Rating] = eventsRDD.map { event =>
      val rating = try {
        val ratingValue: Double = event.event match {
          case "rate" => event.properties.get[Double]("rating")
          case "buy" => 4.0 // map buy event to rating value of 4
          case _ => throw new Exception(s"Unexpected event ${event} is read.")
        }
        // entityId and targetEntityId is String
        Rating(event.entityId,
          event.targetEntityId.get,
          ratingValue)
      } catch {
        case e: Exception => {
          logger.error(s"Cannot convert ${event} to Rating. Exception: ${e}.")
          throw e
        }
      }
      rating
    }.cache()

    ratingsRDD
  }

  override
  def readTraining(sc: SparkContext): TrainingData = {
    new TrainingData(getRatings(sc))
  }

  /** Split ratings into kFold train/test folds using the unique id modulo
    * kFold; each test fold is grouped per user into (Query, ActualResult). */
  override
  def readEval(sc: SparkContext)
  : Seq[(TrainingData, EmptyEvaluationInfo, RDD[(Query, ActualResult)])] = {
    require(!dsp.evalParams.isEmpty, "Must specify evalParams")
    val evalParams = dsp.evalParams.get

    val kFold = evalParams.kFold
    val ratings: RDD[(Rating, Long)] = getRatings(sc).zipWithUniqueId
    ratings.cache

    (0 until kFold).map { idx => {
      val trainingRatings = ratings.filter(_._2 % kFold != idx).map(_._1)
      val testingRatings = ratings.filter(_._2 % kFold == idx).map(_._1)

      val testingUsers: RDD[(String, Iterable[Rating])] = testingRatings.groupBy(_.user)

      (new TrainingData(trainingRatings),
        new EmptyEvaluationInfo(),
        testingUsers.map {
          case (user, ratings) => (Query(user, evalParams.queryNum), ActualResult(ratings.toArray))
        }
      )
    }}
  }
}
+
/** One observed (user, item, rating) triple; DataSource maps "buy" events
  * to a rating of 4.0. */
case class Rating(
  user: String,
  item: String,
  rating: Double
)

/** Training data handed to the Preparator: the full ratings RDD. */
class TrainingData(
  val ratings: RDD[Rating]
) extends Serializable {
  // Count-based summary for logs; count()/take() trigger Spark actions.
  override def toString = {
    s"ratings: [${ratings.count()}] (${ratings.take(2).toList}...)"
  }
}

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/3648ba1b/testing/pio_tests/engines/recommendation-engine/src/main/scala/Engine.scala
----------------------------------------------------------------------
diff --git a/testing/pio_tests/engines/recommendation-engine/src/main/scala/Engine.scala b/testing/pio_tests/engines/recommendation-engine/src/main/scala/Engine.scala
new file mode 100644
index 0000000..79840dc
--- /dev/null
+++ b/testing/pio_tests/engines/recommendation-engine/src/main/scala/Engine.scala
@@ -0,0 +1,32 @@
+package org.template.recommendation
+
+import org.apache.predictionio.controller.IEngineFactory
+import org.apache.predictionio.controller.Engine
+
/** Query: recommend `num` items for `user`. */
case class Query(
  user: String,
  num: Int
) extends Serializable

/** Result: the recommended items with their predicted scores. */
case class PredictedResult(
  itemScores: Array[ItemScore]
) extends Serializable

/** Ground-truth ratings for a query's user; used by the evaluation metrics. */
case class ActualResult(
  ratings: Array[Rating]
) extends Serializable

/** One recommended item and its predicted score. */
case class ItemScore(
  item: String,
  score: Double
) extends Serializable

/** Wires DataSource, Preparator, the "als" algorithm and Serving together. */
object RecommendationEngine extends IEngineFactory {
  def apply() = {
    new Engine(
      classOf[DataSource],
      classOf[Preparator],
      Map("als" -> classOf[ALSAlgorithm]),
      classOf[Serving])
  }
}

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/3648ba1b/testing/pio_tests/engines/recommendation-engine/src/main/scala/Evaluation.scala
----------------------------------------------------------------------
diff --git a/testing/pio_tests/engines/recommendation-engine/src/main/scala/Evaluation.scala b/testing/pio_tests/engines/recommendation-engine/src/main/scala/Evaluation.scala
new file mode 100644
index 0000000..34e5689
--- /dev/null
+++ b/testing/pio_tests/engines/recommendation-engine/src/main/scala/Evaluation.scala
@@ -0,0 +1,89 @@
+package org.template.recommendation
+
+import org.apache.predictionio.controller.Evaluation
+import org.apache.predictionio.controller.OptionAverageMetric
+import org.apache.predictionio.controller.AverageMetric
+import org.apache.predictionio.controller.EmptyEvaluationInfo
+import org.apache.predictionio.controller.EngineParamsGenerator
+import org.apache.predictionio.controller.EngineParams
+import org.apache.predictionio.controller.MetricEvaluator
+
+// Usage:
+// $ pio eval org.template.recommendation.RecommendationEvaluation \
+//   org.template.recommendation.EngineParamsList
+
/** Precision@K counting items rated at least `ratingThreshold` as positive;
  * returns None (excluded from the average) when the user has no positives. */
case class PrecisionAtK(k: Int, ratingThreshold: Double = 2.0)
    extends OptionAverageMetric[EmptyEvaluationInfo, Query, PredictedResult, ActualResult] {
  require(k > 0, "k must be greater than 0")

  override def header = s"Precision@K (k=$k, threshold=$ratingThreshold)"

  def calculate(q: Query, p: PredictedResult, a: ActualResult): Option[Double] = {
    val positives: Set[String] = a.ratings.filter(_.rating >= ratingThreshold).map(_.item).toSet

    // If there is no positive results, Precision is undefined. We don't consider this case in the
    // metrics, hence we return None.
    if (positives.size == 0) {
      return None
    }

    val tpCount: Int = p.itemScores.take(k).filter(is => positives(is.item)).size

    // Denominator is capped by the number of positives so a perfect top-k
    // list can still score 1.0 when fewer than k positives exist.
    Some(tpCount.toDouble / math.min(k, positives.size))
  }
}
+
/** Average number of positively-rated (>= threshold) items per test query;
  * reported alongside Precision@K for context. */
case class PositiveCount(ratingThreshold: Double = 2.0)
    extends AverageMetric[EmptyEvaluationInfo, Query, PredictedResult, ActualResult] {
  override def header = s"PositiveCount (threshold=$ratingThreshold)"

  def calculate(q: Query, p: PredictedResult, a: ActualResult): Double = {
    a.ratings.filter(_.rating >= ratingThreshold).size
  }
}
+
/** Default evaluation: primary metric Precision@10 at threshold 4.0, with
  * lower thresholds reported as context metrics. */
object RecommendationEvaluation extends Evaluation {
  engineEvaluator = (
    RecommendationEngine(),
    MetricEvaluator(
      metric = PrecisionAtK(k = 10, ratingThreshold = 4.0),
      otherMetrics = Seq(
        PositiveCount(ratingThreshold = 4.0),
        PrecisionAtK(k = 10, ratingThreshold = 2.0),
        PositiveCount(ratingThreshold = 2.0),
        PrecisionAtK(k = 10, ratingThreshold = 1.0),
        PositiveCount(ratingThreshold = 1.0)
      )))
}


/** Sweeps Precision@K over the cross product of thresholds and k values. */
object ComprehensiveRecommendationEvaluation extends Evaluation {
  val ratingThresholds = Seq(0.0, 2.0, 4.0)
  val ks = Seq(1, 3, 10)

  engineEvaluator = (
    RecommendationEngine(),
    MetricEvaluator(
      metric = PrecisionAtK(k = 3, ratingThreshold = 2.0),
      otherMetrics = (
        (for (r <- ratingThresholds) yield PositiveCount(ratingThreshold = r)) ++
        (for (r <- ratingThresholds; k <- ks) yield PrecisionAtK(k = k, ratingThreshold = r))
      )))
}


/** Shared base engine params for parameter sweeps.
  * NOTE(review): appName is a placeholder — it must be overridden with a
  * real application name for an evaluation run. */
trait BaseEngineParamsList extends EngineParamsGenerator {
  protected val baseEP = EngineParams(
    dataSourceParams = DataSourceParams(
      appName = "INVALID_APP_NAME",
      evalParams = Some(DataSourceEvalParams(kFold = 5, queryNum = 10))))
}

/** Grid of ALS params: rank x numIterations, fixed lambda 0.01 and seed 3. */
object EngineParamsList extends BaseEngineParamsList {
  engineParamsList = for(
    rank <- Seq(5, 10, 20);
    numIterations <- Seq(1, 5, 10))
    yield baseEP.copy(
      algorithmParamsList = Seq(
        ("als", ALSAlgorithmParams(rank, numIterations, 0.01, Some(3)))))
}

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/3648ba1b/testing/pio_tests/engines/recommendation-engine/src/main/scala/Preparator.scala
----------------------------------------------------------------------
diff --git a/testing/pio_tests/engines/recommendation-engine/src/main/scala/Preparator.scala b/testing/pio_tests/engines/recommendation-engine/src/main/scala/Preparator.scala
new file mode 100644
index 0000000..8f2f7e4
--- /dev/null
+++ b/testing/pio_tests/engines/recommendation-engine/src/main/scala/Preparator.scala
@@ -0,0 +1,19 @@
+package org.template.recommendation
+
+import org.apache.predictionio.controller.PPreparator
+
+import org.apache.spark.SparkContext
+import org.apache.spark.SparkContext._
+import org.apache.spark.rdd.RDD
+
/** Pass-through preparator: the training ratings are already in the shape
  * the ALS algorithm consumes. */
class Preparator
  extends PPreparator[TrainingData, PreparedData] {

  def prepare(sc: SparkContext, trainingData: TrainingData): PreparedData = {
    new PreparedData(ratings = trainingData.ratings)
  }
}

/** Data handed to the algorithm: the (unmodified) ratings RDD. */
class PreparedData(
  val ratings: RDD[Rating]
) extends Serializable

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/3648ba1b/testing/pio_tests/engines/recommendation-engine/src/main/scala/Serving.scala
----------------------------------------------------------------------
diff --git a/testing/pio_tests/engines/recommendation-engine/src/main/scala/Serving.scala b/testing/pio_tests/engines/recommendation-engine/src/main/scala/Serving.scala
new file mode 100644
index 0000000..38ba8b9
--- /dev/null
+++ b/testing/pio_tests/engines/recommendation-engine/src/main/scala/Serving.scala
@@ -0,0 +1,13 @@
+package org.template.recommendation
+
+import org.apache.predictionio.controller.LServing
+
/** Trivial serving layer: with a single algorithm configured there is
  * exactly one predicted result, so return it unchanged. */
class Serving
  extends LServing[Query, PredictedResult] {

  override
  def serve(query: Query,
    predictedResults: Seq[PredictedResult]): PredictedResult = {
    predictedResults.head
  }
}

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/3648ba1b/testing/pio_tests/engines/recommendation-engine/template.json
----------------------------------------------------------------------
diff --git a/testing/pio_tests/engines/recommendation-engine/template.json b/testing/pio_tests/engines/recommendation-engine/template.json
new file mode 100644
index 0000000..fb4a50b
--- /dev/null
+++ b/testing/pio_tests/engines/recommendation-engine/template.json
@@ -0,0 +1 @@
+{"pio": {"version": { "min": "0.9.2" }}}

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/3648ba1b/testing/pio_tests/globals.py
----------------------------------------------------------------------
diff --git a/testing/pio_tests/globals.py b/testing/pio_tests/globals.py
new file mode 100644
index 0000000..1134501
--- /dev/null
+++ b/testing/pio_tests/globals.py
@@ -0,0 +1,17 @@
+import subprocess
+
# Flags toggled by the top-level runner to silence subprocess output,
# and the single logger name shared by all integration tests.
SUPPRESS_STDOUT = False
SUPPRESS_STDERR = False
LOGGER_NAME = 'INT_TESTS'

def std_out():
  """Stdout target for subprocesses: DEVNULL when suppressed, None (inherit) otherwise."""
  return subprocess.DEVNULL if SUPPRESS_STDOUT else None

def std_err():
  """Stderr target for subprocesses: DEVNULL when suppressed, None (inherit) otherwise."""
  return subprocess.DEVNULL if SUPPRESS_STDERR else None

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/3648ba1b/testing/pio_tests/integration.py
----------------------------------------------------------------------
diff --git a/testing/pio_tests/integration.py b/testing/pio_tests/integration.py
new file mode 100644
index 0000000..441365e
--- /dev/null
+++ b/testing/pio_tests/integration.py
@@ -0,0 +1,46 @@
+import unittest
+import logging
+import pio_tests.globals as globals
+
class TestContext:
  """Per-test settings: engine and data directories plus the eventserver address."""

  def __init__(self, engine_directory, data_directory, es_ip='0.0.0.0', es_port=7070):
    """
    Args:
      engine_directory (str): path to the directory where the engines are stored
      data_directory (str):   path to the directory where tests can keep their data
      es_ip (str):            ip of the eventserver
      es_port (int):          port of the eventserver
    """
    self.engine_directory, self.data_directory = engine_directory, data_directory
    self.es_ip, self.es_port = es_ip, es_port
+
class BaseTestCase(unittest.TestCase):
  """This is the base class for all integration tests

  This class sets up a `TestContext` object and a logger for every test case
  """
  def __init__(self, test_context, methodName='runTest'):
    # test_context (TestContext): shared settings injected by the runner.
    # methodName (str): unittest entry point; defaults to 'runTest'.
    super(BaseTestCase, self).__init__(methodName)
    self.test_context = test_context
    # All tests log through the one named logger (pio_tests.globals.LOGGER_NAME).
    self.log = logging.getLogger(globals.LOGGER_NAME)
+
class AppContext:
  """Description of a single engine instance used by a test."""

  def __init__(self, name, template, engine_json_path=None):
    """
    Args:
      name (str): application name
      template (str): either the name of an engine from the engines directory
          or a link to repository with the engine
      engine_json_path (str): path to json file describing an engine (a custom engine.json)
          to be used for the application. If `None`, engine.json from the engine's directory
          will be used
    """
    self.name, self.template = name, template
    self.engine_json_path = engine_json_path

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/3648ba1b/testing/pio_tests/scenarios/basic_app_usecases.py
----------------------------------------------------------------------
diff --git a/testing/pio_tests/scenarios/basic_app_usecases.py b/testing/pio_tests/scenarios/basic_app_usecases.py
new file mode 100644
index 0000000..d8b3a1e
--- /dev/null
+++ b/testing/pio_tests/scenarios/basic_app_usecases.py
@@ -0,0 +1,154 @@
+import os
+import unittest
+import random
+import logging
+import time
+from subprocess import CalledProcessError
+from pio_tests.integration import BaseTestCase, AppContext
+from utils import *
+
# Item ids are drawn from [0, ITEMS_COUNT] inclusive (randint is inclusive).
ITEMS_COUNT = 12

def get_buy_events(users, per_user=2):
  """Generate random 'buy' events: up to `per_user` distinct items per user."""
  events = []
  for user_id in range(users):
    chosen_items = {random.randint(0, ITEMS_COUNT) for _ in range(per_user)}
    for item_id in chosen_items:
      events.append({
        "event": "buy",
        "entityType": "user",
        "entityId": user_id,
        "targetEntityType": "item",
        "targetEntityId": item_id })
  return events

def get_rate_events(users, per_user=2):
  """Generate random 'rate' events (rating 1.0-5.0): up to `per_user` distinct items per user."""
  events = []
  for user_id in range(users):
    chosen_items = {random.randint(0, ITEMS_COUNT) for _ in range(per_user)}
    for item_id in chosen_items:
      events.append({
        "event": "rate",
        "entityType": "user",
        "entityId": user_id,
        "targetEntityType": "item",
        "targetEntityId": item_id,
        "properties": { "rating" : float(random.randint(1,5)) } })
  return events
+
+
class BasicAppUsecases(BaseTestCase):
  """End-to-end exercise of a single app: create, list, import data,
  build, train, deploy, query, and clean up.

  Fixes: `assertEquals` is a deprecated unittest alias (removed in
  Python 3.12) — replaced with `assertEqual`; bound methods are passed to
  assertRaises directly instead of wrapping them in lambdas.
  """

  def setUp(self):
    # Fixed seed keeps the generated events reproducible between runs.
    random.seed(3)
    self.log.info("Setting up the engine")

    template_path = pjoin(
        self.test_context.engine_directory, "recommendation-engine")
    engine_json_path = pjoin(
        self.test_context.data_directory, "quickstart_test/engine.json")

    app_context = AppContext(
        name="MyRecommender",
        template=template_path,
        engine_json_path=engine_json_path)

    self.app = AppEngine(self.test_context, app_context)

  def runTest(self):
    # Order matters: each step relies on state created by the previous one.
    self.app_creation()
    self.check_app_list()
    self.check_data()
    self.check_build()
    self.check_train_and_deploy()

  def app_creation(self):
    self.log.info("Adding a new application")
    description = "SomeDescription"
    self.app.new(description=description)
    self.assertEqual(description, self.app.description)

    self.log.info("Creating an app again - should fail")
    self.assertRaises(CalledProcessError, self.app.new)

  def check_app_list(self):
    self.log.info("Checking if app is on the list")
    apps = pio_app_list()
    self.assertEqual(1,
        len([a for a in apps if a['name'] == self.app.app_context.name]))

  def check_data(self):
    self.log.info("Importing events")
    buy_events = get_buy_events(20, 1)
    rate_events = get_rate_events(20, 1)

    for ev in buy_events + rate_events:
      self.assertEqual(201, self.app.send_event(ev).status_code)

    self.log.info("Checking imported events")
    r = self.app.get_events(params={'limit': -1})
    self.assertEqual(200, r.status_code)
    self.assertEqual(len(buy_events) + len(rate_events), len(r.json()))

    self.log.info("Deleting entire data")
    self.app.delete_data()
    self.log.info("Checking if there are no events at all")
    r = self.app.get_events(params={'limit': -1})
    self.assertEqual(404, r.status_code)

  def check_build(self):
    self.log.info("Clean build")
    self.app.build(clean=True)
    self.log.info("Second build")
    self.app.build()

  def check_train_and_deploy(self):
    self.log.info("import some data first")
    buy_events = get_buy_events(20, 5)
    rate_events = get_rate_events(20, 5)
    for ev in buy_events + rate_events:
      self.assertEqual(201, self.app.send_event(ev).status_code)

    self.log.info("Training")
    self.app.train()
    self.log.info("Deploying")
    self.app.deploy()
    # poll() is None while the deployed process is still running.
    self.assertFalse(self.app.deployed_process.poll())

    self.log.info("Importing more events")
    buy_events = get_buy_events(60, 5)
    rate_events = get_rate_events(60, 5)
    for ev in buy_events + rate_events:
      self.assertEqual(201, self.app.send_event(ev).status_code)

    self.log.info("Training again")
    self.app.train()

    # Give the deployed engine a moment to pick up the retrained model.
    time.sleep(7)

    self.log.info("Check serving")
    r = self.app.query({"user": 1, "num": 5})
    self.assertEqual(200, r.status_code)
    result = r.json()
    self.assertEqual(5, len(result['itemScores']))
    r = self.app.query({"user": 5, "num": 3})
    self.assertEqual(200, r.status_code)
    result = r.json()
    self.assertEqual(3, len(result['itemScores']))

    self.log.info("Remove data")
    self.app.delete_data()
    self.log.info("Retraining should fail")
    self.assertRaises(CalledProcessError, self.app.train)

  def tearDown(self):
    self.log.info("Stopping deployed engine")
    self.app.stop()
    self.log.info("Deleting all related data")
    self.app.delete_data()
    self.log.info("Removing an app")
    self.app.delete()
+

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/3648ba1b/testing/pio_tests/scenarios/eventserver_test.py
----------------------------------------------------------------------
diff --git a/testing/pio_tests/scenarios/eventserver_test.py b/testing/pio_tests/scenarios/eventserver_test.py
new file mode 100644
index 0000000..2cd3d87
--- /dev/null
+++ b/testing/pio_tests/scenarios/eventserver_test.py
@@ -0,0 +1,155 @@
+import unittest
+import requests
+import json
+import argparse
+from subprocess import Popen
+from utils import AppEngine, pjoin
+from pio_tests.integration import BaseTestCase, AppContext
+
class EventserverTest(BaseTestCase):
  """ Integration test for PredictionIO Eventserver API
  Refer to below for further information:
    http://docs.prediction.io/datacollection/eventmodel/
    http://docs.prediction.io/datacollection/eventapi/
  """
  # Helper methods
  def eventserver_url(self, path=None):
    # Build the eventserver base URL from the TestContext; `path` (no
    # leading slash) is appended when given.
    url = 'http://{}:{}'.format(
            self.test_context.es_ip, self.test_context.es_port)
    if path: url += '/{}'.format(path)
    return url

  def load_events(self, json_file):
    # Read a JSON fixture from <data_directory>/eventserver_test/.
    file_path = pjoin(self.test_context.data_directory,
        'eventserver_test/{}'.format(json_file))
    return json.loads(open(file_path).read())


  def setUp(self):
    # Describe (but do not yet create) the app; `new()` happens in runTest.
    template_path = pjoin(
        self.test_context.engine_directory, "recommendation-engine")
    app_context = AppContext(
        name="MyRecommender",
        template=template_path)
    self.app = AppEngine(self.test_context, app_context)

  def runTest(self):
    # The steps below are order-dependent: each request reads or mutates
    # eventserver state established by the previous ones.
    self.log.info("Eventserver is alive and running")
    r = requests.get(self.eventserver_url())
    self.assertDictEqual(r.json(), {"status": "alive"})

    self.log.info("Cannot view events with empty accessKey")
    r = requests.get(self.eventserver_url(path='events.json'))
    self.assertDictEqual(r.json(), {"message": "Missing accessKey."})

    self.log.info("Cannot view events with invalid accessKey")
    r = requests.get(self.eventserver_url(path='events.json'),
        params={'accessKey': ''})
    self.assertDictEqual(r.json(), {"message": "Invalid accessKey."})

    self.log.info("Adding new pio application")
    self.app.new()

    self.log.info("No events have been sent yet")
    r = self.app.get_events()
    self.assertDictEqual(r.json(), {"message": "Not Found"})

    # Testing POST
    self.log.info("Sending single event")
    event1 = {
      'event' : 'test',
      'entityType' : 'test',
      'entityId' : 't1'
    }
    r = self.app.send_event(event1)
    self.assertEqual(201, r.status_code)

    self.log.info("Sending batch of events")
    r = self.app.send_events_batch(
        self.load_events("rate_events_25.json"))
    self.assertEqual(200, r.status_code)

    self.log.info("Cannot send more than 50 events per batch")
    r = self.app.send_events_batch(
        self.load_events("signup_events_51.json"))
    self.assertEqual(400, r.status_code)

    self.log.info("Importing events from file does not have batch size limit")
    self.app.import_events_batch(
        self.load_events("signup_events_51.json"))

    self.log.info("Individual events may fail when sending events as batch")
    r = self.app.send_events_batch(
        self.load_events("partially_malformed_events.json"))
    self.assertEqual(200, r.status_code)
    self.assertEqual(201, r.json()[0]['status'])
    self.assertEqual(400, r.json()[1]['status'])

    # Testing GET for different parameters
    # (counts below match the fixture files imported above)
    params = {'event': 'rate'}
    r = self.app.get_events(params=params)
    self.assertEqual(20, len(r.json()))
    self.assertEqual('rate', r.json()[0]['event'])

    params = {
      'event': 'rate',
      'limit': -1 }
    r = self.app.get_events(params=params)
    self.assertEqual(25, len(r.json()))
    self.assertEqual('rate', r.json()[0]['event'])

    params = {
      'event': 'rate',
      'limit': 10 }
    r = self.app.get_events(params=params)
    self.assertEqual(10, len(r.json()))
    self.assertEqual('rate', r.json()[0]['event'])

    params = {
      'event': 'rate',
      'entityType': 'user',
      'entityId': '1' }
    r = self.app.get_events(params=params)
    self.assertEqual(5, len(r.json()))
    self.assertEqual('1', r.json()[0]['entityId'])

    params = {
      'event': 'rate',
      'targetEntityType': 'item',
      'targetEntityId': '1' }
    r = self.app.get_events(params=params)
    self.assertEqual(5, len(r.json()))
    self.assertEqual('1', r.json()[0]['targetEntityId'])

    params = {
      'event': 'rate',
      'entityType': 'user',
      'entityId': '1',
      'startTime': '2014-11-01T09:39:45.618-08:00',
      'untilTime': '2014-11-04T09:39:45.618-08:00' }
    r = self.app.get_events(params=params)
    self.assertEqual(3, len(r.json()))
    self.assertEqual('1', r.json()[0]['entityId'])

    params = {
      'event': 'rate',
      'entityType': 'user',
      'entityId': '1',
      'reversed': 'true' }
    r = self.app.get_events(params=params)
    self.assertEqual(5, len(r.json()))
    self.assertEqual('2014-11-05T09:39:45.618-08:00', r.json()[0]['eventTime'])

  def tearDown(self):
    self.log.info("Deleting all app data")
    self.app.delete_data()
    self.log.info("Deleting app")
    self.app.delete()
+
+
if __name__ == '__main__':
  import sys

  # Fixes: the suite referenced an undefined name (the class defined above
  # is EventserverTest, not BasicEventserverTest), `sys` was never imported
  # (so the failure branch itself raised NameError), and TestSuite requires
  # test *instances*, not classes.
  # NOTE(review): EventserverTest normally receives a TestContext from the
  # top-level runner; standalone runs must supply a real one here.
  suite = unittest.TestSuite([EventserverTest(None)])
  result = unittest.TextTestRunner(verbosity=2).run(suite)
  if not result.wasSuccessful():
    sys.exit(1)
+

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/3648ba1b/testing/pio_tests/scenarios/quickstart_test.py
----------------------------------------------------------------------
diff --git a/testing/pio_tests/scenarios/quickstart_test.py b/testing/pio_tests/scenarios/quickstart_test.py
new file mode 100644
index 0000000..a083c2b
--- /dev/null
+++ b/testing/pio_tests/scenarios/quickstart_test.py
@@ -0,0 +1,125 @@
+import os
+import unittest
+import random
+import logging
+from pio_tests.integration import BaseTestCase, AppContext
+from utils import AppEngine, srun, pjoin
+
+def read_events(file_path):
+  RATE_ACTIONS_DELIMITER = "::"
+  with open(file_path, 'r') as f:
+    events = []
+    for line in f:
+      data = line.rstrip('\r\n').split(RATE_ACTIONS_DELIMITER)
+      if random.randint(0, 1) == 1:
+        events.append( {
+          "event": "rate",
+          "entityType": "user",
+          "entityId": data[0],
+          "targetEntityType": "item",
+          "targetEntityId": data[1],
+          "properties": { "rating" : float(data[2]) } })
+      else:
+        events.append({
+          "event": "buy",
+          "entityType": "user",
+          "entityId": data[0],
+          "targetEntityType": "item",
+          "targetEntityId": data[1] })
+
+    return events
+
+
+class QuickStartTest(BaseTestCase):
+
+  def setUp(self):
+    self.log.info("Setting up the engine")
+
+    template_path = pjoin(
+        self.test_context.engine_directory, "recommendation-engine")
+    engine_json_path = pjoin(
+        self.test_context.data_directory, "quickstart_test/engine.json")
+
+    self.training_data_path = pjoin(
+        self.test_context.data_directory,
+        "quickstart_test/training_data.txt")
+
+    # downloading training data
+    srun('curl https://raw.githubusercontent.com/apache/spark/master/' \
+            'data/mllib/sample_movielens_data.txt --create-dirs -o {}'
+            .format(self.training_data_path))
+
+    app_context = AppContext(
+        name="MyRecommender",
+        template=template_path,
+        engine_json_path=engine_json_path)
+
+    self.app = AppEngine(self.test_context, app_context)
+
+  def runTest(self):
+    self.log.info("Adding a new application")
+    self.app.new()
+
+    event1 = {
+      "event" : "rate",
+      "entityType" : "user",
+      "entityId" : "u0",
+      "targetEntityType" : "item",
+      "targetEntityId" : "i0",
+      "properties" : {
+        "rating" : 5
+      },
+      "eventTime" : "2014-11-02T09:39:45.618-08:00" }
+
+    event2 = {
+      "event" : "buy",
+      "entityType" : "user",
+      "entityId" : "u1",
+      "targetEntityType" : "item",
+      "targetEntityId" : "i2",
+      "eventTime" : "2014-11-10T12:34:56.123-08:00" }
+
+    self.log.info("Sending two test events")
+    self.assertListEqual(
+        [201, 201],
+        [self.app.send_event(e).status_code for e in [event1, event2]])
+
+    self.log.info("Checking the number of events stored on the server")
+    r = self.app.get_events()
+    self.assertEqual(200, r.status_code)
+    stored_events = r.json()
+    self.assertEqual(2, len(stored_events))
+
+    self.log.info("Importing many events")
+    new_events = read_events(self.training_data_path)
+    for ev in new_events:
+      r = self.app.send_event(ev)
+      self.assertEqual(201, r.status_code)
+
+    self.log.info("Checking the number of events stored on the server after the update")
+    r = self.app.get_events(params={'limit': -1})
+    self.assertEqual(200, r.status_code)
+    stored_events = r.json()
+    self.assertEqual(len(new_events) + 2, len(stored_events))
+
+    self.log.info("Building an engine...")
+    self.app.build()
+    self.log.info("Training...")
+    self.app.train()
+    self.log.info("Deploying and waiting 15s for it to start...")
+    self.app.deploy(wait_time=15)
+
+    self.log.info("Sending a single query and checking results")
+    user_query = { "user": 1, "num": 4 }
+    r = self.app.query(user_query)
+    self.assertEqual(200, r.status_code)
+    result = r.json()
+    self.assertEqual(4, len(result['itemScores']))
+
+  def tearDown(self):
+    self.log.info("Stopping deployed engine")
+    self.app.stop()
+    self.log.info("Deleting all related data")
+    self.app.delete_data()
+    self.log.info("Removing an app")
+    self.app.delete()

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/3648ba1b/testing/pio_tests/tests.py
----------------------------------------------------------------------
diff --git a/testing/pio_tests/tests.py b/testing/pio_tests/tests.py
new file mode 100755
index 0000000..6364e08
--- /dev/null
+++ b/testing/pio_tests/tests.py
@@ -0,0 +1,77 @@
+import os
+import sys
+import unittest
+import argparse
+import logging
+import time
+from xmlrunner import XMLTestRunner
+import pio_tests.globals as globals
+from utils import srun_bg
+from pio_tests.integration import TestContext
+from pio_tests.scenarios.quickstart_test import QuickStartTest
+from pio_tests.scenarios.basic_app_usecases import BasicAppUsecases
+from pio_tests.scenarios.eventserver_test import EventserverTest
+
+parser = argparse.ArgumentParser(description='Integration tests for PredictionIO')
+parser.add_argument('--eventserver-ip', default='0.0.0.0')
+parser.add_argument('--eventserver-port', type=int, default=7070)
+parser.add_argument('--no-shell-stdout', action='store_true',
+    help='Suppress STDOUT output from shell executed commands')
+parser.add_argument('--no-shell-stderr', action='store_true',
+    help='Suppress STDERR output from shell executed commands')
+parser.add_argument('--logging', action='store', choices=['INFO', 'DEBUG', 'NO_LOGGING'],
+    default='INFO', help='Choose the logging level')
+parser.add_argument('--tests', nargs='*', type=str,
+    default=None, help='Names of the tests to execute. By default all tests will be checked')
+
+TESTS_DIRECTORY = os.path.abspath(os.path.dirname(__file__))
+ENGINE_DIRECTORY = os.path.join(TESTS_DIRECTORY, "engines")
+DATA_DIRECTORY = os.path.join(TESTS_DIRECTORY, "data")
+
+LOGGING_FORMAT = '[%(levelname)s] %(module)s %(asctime)-15s: %(message)s'
+logging.basicConfig(format=LOGGING_FORMAT)
+
+def get_tests(test_context):
+  # ========= ADD TESTS HERE!!! ================================
+  return {'QuickStart': QuickStartTest(test_context),
+          'BasicAppUsecases': BasicAppUsecases(test_context),
+          'EventserverTest': EventserverTest(test_context)}
+
+if __name__ == "__main__":
+  args = vars(parser.parse_args())
+
+  if args.get('no_shell_stdout'):
+    globals.SUPPRESS_STDOUT = True
+  if args.get('no_shell_stderr'):
+    globals.SUPPRESS_STDERR = True
+
+  # setting up logging
+  log_opt = args['logging']
+  logger = logging.getLogger(globals.LOGGER_NAME)
+  if log_opt == 'INFO':
+    logger.level = logging.INFO
+  elif log_opt == 'DEBUG':
+    logger.level = logging.DEBUG
+
+  test_context = TestContext(
+      ENGINE_DIRECTORY, DATA_DIRECTORY,
+      args['eventserver_ip'], int(args['eventserver_port']))
+
+  tests_dict = get_tests(test_context)
+  test_names = args['tests']
+  tests = []
+  if test_names is not None:
+    tests = [t for name, t in tests_dict.items() if name in test_names]
+  else:
+    tests = tests_dict.values()
+
+  # Actual tests execution
+  event_server_process = srun_bg('pio eventserver --ip {} --port {}'
+      .format(test_context.es_ip, test_context.es_port))
+  time.sleep(5)
+  result = XMLTestRunner(verbosity=2, output='test-reports').run(
+                unittest.TestSuite(tests))
+  event_server_process.kill()
+
+  if not result.wasSuccessful():
+    sys.exit(1)

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/3648ba1b/testing/pio_tests/utils.py
----------------------------------------------------------------------
diff --git a/testing/pio_tests/utils.py b/testing/pio_tests/utils.py
new file mode 100644
index 0000000..629729e
--- /dev/null
+++ b/testing/pio_tests/utils.py
@@ -0,0 +1,309 @@
+import re
+import time
+import os
+import requests
+import json
+from shutil import copyfile
+from subprocess import run, Popen, check_output
+from os.path import join as pjoin
+import pio_tests.globals as globals
+
+def srun(command):
+  """ Runs a shell command given as a `str`
+  Raises: `subprocess.CalledProcessError` when exit code != 0
+  """
+  return run(command, shell=True, stdout=globals.std_out(),
+      stderr=globals.std_err(), check=True)
+
+def srun_out(command):
+  """ Runs a shell command given as a `str`
+  Returns: string with command's output
+  Raises: `subprocess.CalledProcessError` when exit code != 0
+  """
+  return check_output(command, shell=True, universal_newlines=True,
+      stderr=globals.std_err())
+
+def srun_bg(command):
+  """ Runs a shell command given as a `str` in the background
+  Returns: (obj: `subprocess.Popen`) for executed process
+  """
+  return Popen(command, shell=True, stdout=globals.std_out(),
+      stderr=globals.std_err())
+
+def repository_dirname(template):
+  """ Utility function getting repository name from the link
+  Example: for "https://github.com/user/SomeRepo" should return "SomeRepo"
+  """
+  return template.split('/')[-1]
+
+def obtain_template(engine_dir, template):
+  """Given a directory with engines and a template downloads an engine
+  if necessary
+  Args:
+    engine_dir (str): directory where engines are stored
+    template (str): either the name of an engine from the engines directory
+        or a link to repository with the engine
+  Returns: str with the engine's path
+  """
+  if re.match('^https?:\/\/', template):
+    dest_dir = pjoin(engine_dir, repository_dirname(template))
+    if not os.path.exists(dest_dir):
+      srun('git clone --depth=1 {0} {1}'.format(template, dest_dir))
+    return dest_dir
+  else:
+    # check if exists
+    dest_dir = pjoin(engine_dir, template)
+    if not os.path.exists(dest_dir):
+      raise ValueError('Engine {0} does not exist in {1}'
+          .format(template, engine_dir))
+
+    return dest_dir
+
+def pio_app_list():
+  """Returns: a list of dicts for every application with the following keys:
+      `name`, `id`, `access_key`, `allowed_events`
+  """
+  output = srun_out('pio app list').rstrip()
+  return [ { 'name': line[2], 'id': int(line[4]),
+             'access_key': line[6], 'allowed_events': line[8] }
+          for line in [x.split() for x in output.split('\n')[1:-1]] ]
+
+def get_app_eventserver_url_json(test_context):
+  return 'http://{}:{}/events.json'.format(
+      test_context.es_ip, test_context.es_port)
+
+def get_engine_url_json(engine_ip, engine_port):
+  return 'http://{}:{}/queries.json'.format(
+      engine_ip, engine_port)
+
+def send_event(event, test_context, access_key, channel=None):
+  """ Sends an event to the eventserver
+  Args:
+    event: json-like dictionary describing an event
+    test_context (obj: `TestContext`):
+    access_key: application's access key
+    channel (str): custom channel for storing event
+  Returns: `requests.Response`
+  """
+  url = get_app_eventserver_url_json(test_context)
+  params = { 'accessKey': access_key }
+  if channel: params['channel'] = channel
+  return requests.post(
+      url,
+      params=params,
+      json=event)
+
+def send_events_batch(events, test_context, access_key, channel=None):
+  """ Send events in batch via REST to the eventserver
+  Args:
+    events: a list of json-like dictionaries for events
+    test_context (obj: `TestContext`):
+    access_key: application's access key
+    channel (str): custom channel for storing event
+  Returns: `requests.Response`
+  Requires: Events length must not exceed length of 50
+    http://docs.prediction.io/datacollection/eventmodel/#3.-batch-events-to-the-eventserver
+  """
+  url = 'http://{}:{}/batch/events.json'.format(
+      test_context.es_ip, test_context.es_port)
+  params = { 'accessKey': access_key }
+  if channel: params['channel'] = channel
+  return requests.post(
+      url,
+      params=params,
+      json=events)
+
+
+def import_events_batch(events, test_context, appid, channel=None):
+  """ Imports events in batch from file with `pio import`
+  Args:
+    events: a list of json-like dictionaries for events
+    test_context (obj: `TestContext`)
+    appid (int): application's id
+    channel (str): custom channel for storing event
+  """
+  # Writing events list to temporary file.
+  # `pio import` requires each line of input file to be a JSON string
+  # representing an event. Empty lines are not allowed.
+  contents = ''
+  for ev in events:
+      contents += '{}\n'.format(json.dumps(ev))
+  contents.rstrip('\n')
+
+  file_path = pjoin(test_context.data_directory, 'events.json.tmp')
+  try:
+      with open(file_path, 'w') as f:
+          f.write(contents)
+      srun('pio import --appid {} --input {} {}'.format(
+          appid,
+          file_path,
+          '--channel {}'.format(channel) if channel else ''))
+  finally:
+      os.remove(file_path)
+
+def get_events(test_context, access_key, params={}):
+  """ Gets events for some application
+  Args:
+    test_context (obj: `TestContext`)
+    access_key (str):
+    params (dict): special parameters for eventserver's GET, e.g:
+        'limit', 'reversed', 'event'. See the docs
+  Returns: `requests.Response`
+  """
+  url = get_app_eventserver_url_json(test_context)
+  return requests.get(url, params=dict({'accessKey': access_key}, **params))
+
+def query_engine(data, engine_ip='localhost', engine_port=8000):
+  """ Send a query to deployed engine
+  Args:
+    data (dict): json-like dictionary being an input to an engine
+    access_key (str):
+    engine_ip (str): ip of deployed engine
+    engine_port (int): port of deployed engine
+  Returns: `requests.Response`
+  """
+  url = get_engine_url_json(engine_ip, engine_port)
+  return requests.post(url, json=data)
+
+class AppEngine:
+  """ This is a utility class simplifying all app related interactions.
+  Basically it is just a wrapper on other utility functions and shell
+  scripts.
+  """
+
+  def __init__(self, test_context, app_context, already_created=False):
+    """ Args:
+        test_context (obj: `TestContext`)
+        app_context (obj: `AppContext`)
+        already_created (bool): True if the given app has been already added
+    """
+    self.test_context = test_context
+    self.app_context = app_context
+    self.engine_path = obtain_template(
+        self.test_context.engine_directory, app_context.template)
+    self.deployed_process = None
+    if already_created:
+      self.__init_info()
+    else:
+      self.id = None
+      self.access_key = None
+      self.description = None
+
+    if self.app_context.engine_json_path:
+      self.__copy_engine_json()
+
+  def __copy_engine_json(self):
+    to_path = pjoin(self.engine_path, 'engine.json')
+    copyfile(self.app_context.engine_json_path, to_path)
+
+  def __init_info(self):
+    info = self.show()
+    self.id = info['id']
+    self.access_key = info['access_key']
+    self.description = info['description']
+
+  def new(self, id=None, description=None, access_key=None):
+    """ Creates a new application with given parameters """
+    srun('pio app new {} {} {} {}'.format(
+        '--id {}'.format(id) if id else '',
+        '--description \"{}\"'.format(description) if description else '',
+        '--access-key {}'.format(access_key) if access_key else '',
+        self.app_context.name))
+
+    self.__init_info()
+
+
+  def show(self):
+    """ Returns: application info in dictionary with the keys:
+         `name`: str, `id`: int, `description`: str,
+         `access_key`: str, `allowed_events`: str
+    """
+    output = srun_out('pio app show {}'.format(self.app_context.name)).rstrip()
+    lines = [x.split() for x in output.split('\n')]
+    return { 'name': lines[0][3],
+             'id': int(lines[1][4]),
+             'description': lines[2][3] if len(lines[2]) >= 4 else '',
+             'access_key': lines[3][4],
+             'allowed_events': lines[3][5] }
+
+
+  # deletes this app from pio
+  def delete(self):
+    srun('pio app delete {0} --force'.format(self.app_context.name))
+
+  def build(self, sbt_extra=None, clean=False, no_asm=True):
+    srun('cd {0}; pio build {1} {2} {3}'.format(
+        self.engine_path,
+        '--sbt-extra {}'.format(sbt_extra) if sbt_extra else '',
+        '--clean' if clean else '',
+        '--no-asm' if no_asm else ''))
+
+  def train(self, batch=None, skip_sanity_check=False, stop_after_read=False,
+          stop_after_prepare=False, engine_factory=None,
+          engine_params_key=None, scratch_uri=None):
+
+    srun('cd {}; pio train {} {} {} {} {} {} {}'.format(
+        self.engine_path,
+        '--batch {}'.format(batch) if batch else '',
+        '--skip-sanity-check' if skip_sanity_check else '',
+        '--stop-after-read' if stop_after_read else '',
+        '--stop-after-prepare' if stop_after_prepare else '',
+        '--engine-factory {}'.format(engine_factory) if engine_factory else '',
+        '--engine-params-key {}'.format(engine_params_key) if engine_params_key else '',
+        '--scratch-uri {}'.format(scratch_uri) if scratch_uri else ''))
+
+  def deploy(self, wait_time=0, ip=None, port=None, engine_instance_id=None,
+          feedback=False, accesskey=None, event_server_ip=None, event_server_port=None,
+          batch=None, scratch_uri=None):
+
+    command = 'cd {}; pio deploy {} {} {} {} {} {} {} {} {}'.format(
+            self.engine_path,
+            '--ip {}'.format(ip) if ip else '',
+            '--port {}'.format(port) if port else '',
+            '--engine-instance-id {}'.format(engine_instance_id) if engine_instance_id else '',
+            '--feedback' if feedback else '',
+            '--accesskey {}'.format(accesskey) if accesskey else '',
+            '--event-server-ip {}'.format(event_server_ip) if event_server_ip else '',
+            '--event-server-port {}'.format(event_server_port) if event_server_port else '',
+            '--batch {}'.format(batch) if batch else '',
+            '--scratch-uri {}'.format(scratch_uri) if scratch_uri else '')
+
+    self.deployed_process = srun_bg(command)
+    time.sleep(wait_time)
+    if self.deployed_process.poll() is not None:
+      raise Exception('Application engine terminated')
+    self.ip = ip if ip else 'localhost'
+    self.port = port if port else 8000
+
+  def stop(self):
+    """ Kills deployed engine """
+    if self.deployed_process:
+      self.deployed_process.kill()
+
+  def new_channel(self, channel):
+    srun('pio app channel-new {0}'.format(channel))
+
+  def delete_channel(self, channel):
+    srun('pio app channel-delete {0} --force'.format(channel))
+
+  def send_event(self, event):
+    return send_event(event, self.test_context, self.access_key)
+
+  def send_events_batch(self, events):
+    return send_events_batch(events, self.test_context, self.access_key)
+
+  def import_events_batch(self, events):
+    return import_events_batch(events, self.test_context, self.id)
+
+  def get_events(self, params={}):
+    return get_events(self.test_context, self.access_key, params)
+
+  def delete_data(self, delete_all=True, channel=None):
+    srun('pio app data-delete {0} {1} {2} --force'
+        .format(
+            self.app_context.name,
+            '--all' if delete_all else '',
+            '--channel ' + channel if channel is not None else ''))
+
+  def query(self, data):
+    return query_engine(data, self.ip, self.port)

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/3648ba1b/tests.py
----------------------------------------------------------------------
diff --git a/tests.py b/tests.py
deleted file mode 100755
index 6364e08..0000000
--- a/tests.py
+++ /dev/null
@@ -1,77 +0,0 @@
-import os
-import sys
-import unittest
-import argparse
-import logging
-import time
-from xmlrunner import XMLTestRunner
-import pio_tests.globals as globals
-from utils import srun_bg
-from pio_tests.integration import TestContext
-from pio_tests.scenarios.quickstart_test import QuickStartTest
-from pio_tests.scenarios.basic_app_usecases import BasicAppUsecases
-from pio_tests.scenarios.eventserver_test import EventserverTest
-
-parser = argparse.ArgumentParser(description='Integration tests for PredictionIO')
-parser.add_argument('--eventserver-ip', default='0.0.0.0')
-parser.add_argument('--eventserver-port', type=int, default=7070)
-parser.add_argument('--no-shell-stdout', action='store_true',
-    help='Suppress STDOUT output from shell executed commands')
-parser.add_argument('--no-shell-stderr', action='store_true',
-    help='Suppress STDERR output from shell executed commands')
-parser.add_argument('--logging', action='store', choices=['INFO', 'DEBUG', 'NO_LOGGING'],
-    default='INFO', help='Choose the logging level')
-parser.add_argument('--tests', nargs='*', type=str,
-    default=None, help='Names of the tests to execute. By default all tests will be checked')
-
-TESTS_DIRECTORY = os.path.abspath(os.path.dirname(__file__))
-ENGINE_DIRECTORY = os.path.join(TESTS_DIRECTORY, "engines")
-DATA_DIRECTORY = os.path.join(TESTS_DIRECTORY, "data")
-
-LOGGING_FORMAT = '[%(levelname)s] %(module)s %(asctime)-15s: %(message)s'
-logging.basicConfig(format=LOGGING_FORMAT)
-
-def get_tests(test_context):
-  # ========= ADD TESTS HERE!!! ================================
-  return {'QuickStart': QuickStartTest(test_context),
-          'BasicAppUsecases': BasicAppUsecases(test_context),
-          'EventserverTest': EventserverTest(test_context)}
-
-if __name__ == "__main__":
-  args = vars(parser.parse_args())
-
-  if args.get('no_shell_stdout'):
-    globals.SUPPRESS_STDOUT = True
-  if args.get('no_shell_stderr'):
-    globals.SUPPRESS_STDERR = True
-
-  # setting up logging
-  log_opt = args['logging']
-  logger = logging.getLogger(globals.LOGGER_NAME)
-  if log_opt == 'INFO':
-    logger.level = logging.INFO
-  elif log_opt == 'DEBUG':
-    logger.level = logging.DEBUG
-
-  test_context = TestContext(
-      ENGINE_DIRECTORY, DATA_DIRECTORY,
-      args['eventserver_ip'], int(args['eventserver_port']))
-
-  tests_dict = get_tests(test_context)
-  test_names = args['tests']
-  tests = []
-  if test_names is not None:
-    tests = [t for name, t in tests_dict.items() if name in test_names]
-  else:
-    tests = tests_dict.values()
-
-  # Actual tests execution
-  event_server_process = srun_bg('pio eventserver --ip {} --port {}'
-      .format(test_context.es_ip, test_context.es_port))
-  time.sleep(5)
-  result = XMLTestRunner(verbosity=2, output='test-reports').run(
-                unittest.TestSuite(tests))
-  event_server_process.kill()
-
-  if not result.wasSuccessful():
-    sys.exit(1)