Posted to commits@predictionio.apache.org by do...@apache.org on 2016/07/18 20:17:32 UTC

[01/34] incubator-predictionio git commit: rename all except examples

Repository: incubator-predictionio
Updated Branches:
  refs/heads/develop 23c21d6ca -> 02a5655fc


http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/tools/src/main/twirl/io/prediction/tools/console/dashboard.scala.txt
----------------------------------------------------------------------
diff --git a/tools/src/main/twirl/io/prediction/tools/console/dashboard.scala.txt b/tools/src/main/twirl/io/prediction/tools/console/dashboard.scala.txt
deleted file mode 100644
index 85eacda..0000000
--- a/tools/src/main/twirl/io/prediction/tools/console/dashboard.scala.txt
+++ /dev/null
@@ -1,6 +0,0 @@
-Usage: pio dashboard [--ip <value>] [--port <value>]
-
-  --ip <value>
-      IP to bind to. Default: localhost
-  --port <value>
-      Port to bind to. Default: 9000

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/tools/src/main/twirl/io/prediction/tools/console/deploy.scala.txt
----------------------------------------------------------------------
diff --git a/tools/src/main/twirl/io/prediction/tools/console/deploy.scala.txt b/tools/src/main/twirl/io/prediction/tools/console/deploy.scala.txt
deleted file mode 100644
index 4ce7985..0000000
--- a/tools/src/main/twirl/io/prediction/tools/console/deploy.scala.txt
+++ /dev/null
@@ -1,29 +0,0 @@
-Usage: pio deploy [--ip <value>] [--port <value>]
-                  [--engine-instance-id <value>]
-                  [--feedback] [--accesskey <value>]
-                  [--event-server-ip <value>] [--event-server-port <value>]
-                  [--batch <value>] [--scratch-uri <value>]
-
-Deploy an engine instance as a prediction server. This command will pass all
-pass-through arguments to its underlying spark-submit command.
-
-  --ip <value>
-      IP to bind to. Default: 0.0.0.0
-  --port <value>
-      Port to bind to. Default: 8000
-  --engine-instance-id <value>
-      Engine instance ID.
-  --feedback
-      Enable feedback loop to event server.
-  --accesskey <value>
-      Access key of the App where feedback data will be stored.
-  --event-server-ip <value>
-      Event server IP. Default: 0.0.0.0
-  --event-server-port <value>
-      Event server port. Default: 7070
-  --batch <value>
-      Batch label of the deployment.
-  --scratch-uri <value>
-      URI of the working scratch space. Specify this when you want to have all
-      necessary files transferred to a remote location. You will usually want to
-      specify this when you use --deploy-mode cluster.

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/tools/src/main/twirl/io/prediction/tools/console/eval.scala.txt
----------------------------------------------------------------------
diff --git a/tools/src/main/twirl/io/prediction/tools/console/eval.scala.txt b/tools/src/main/twirl/io/prediction/tools/console/eval.scala.txt
deleted file mode 100644
index 499498c..0000000
--- a/tools/src/main/twirl/io/prediction/tools/console/eval.scala.txt
+++ /dev/null
@@ -1,10 +0,0 @@
-Usage: pio eval <evaluation-class> <engine-parameters-generator-class>
-                [--batch <value>]
-                [common options...]
-
-Kick off an evaluation using specified evaluation and engine parameters
-generator class. This command will pass all pass-through arguments to its
-underlying spark-submit command.
-
-  --batch <value>
-      Batch label of the run.

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/tools/src/main/twirl/io/prediction/tools/console/eventserver.scala.txt
----------------------------------------------------------------------
diff --git a/tools/src/main/twirl/io/prediction/tools/console/eventserver.scala.txt b/tools/src/main/twirl/io/prediction/tools/console/eventserver.scala.txt
deleted file mode 100644
index beda337..0000000
--- a/tools/src/main/twirl/io/prediction/tools/console/eventserver.scala.txt
+++ /dev/null
@@ -1,8 +0,0 @@
-Usage: pio eventserver [--ip <value>] [--port <value>] [--stats]
-
-  --ip <value>
-      IP to bind to. Default: 0.0.0.0
-  --port <value>
-      Port to bind to. Default: 7070
-  --stats
-      Enable Event Server internal statistics and its API endpoint.

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/tools/src/main/twirl/io/prediction/tools/console/export.scala.txt
----------------------------------------------------------------------
diff --git a/tools/src/main/twirl/io/prediction/tools/console/export.scala.txt b/tools/src/main/twirl/io/prediction/tools/console/export.scala.txt
deleted file mode 100644
index 28eb665..0000000
--- a/tools/src/main/twirl/io/prediction/tools/console/export.scala.txt
+++ /dev/null
@@ -1,14 +0,0 @@
-Usage: pio export --appid <value> --output <value> [--format <value>] [--channel <value>]
-
-Exports all events of an app to a file. If Hadoop configuration is present, the
-file will be exported to HDFS instead of the local filesystem.
-
-  --appid <value>
-      App ID of events to be exported.
-  --channel <value>
-      Channel name (default channel if not specified)
-  --output <value>
-      Output path of the exported file.
-  --format <value>
-      The format of the exported file. Valid values are "json" and "parquet".
-      The default format is "json".

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/tools/src/main/twirl/io/prediction/tools/console/imprt.scala.txt
----------------------------------------------------------------------
diff --git a/tools/src/main/twirl/io/prediction/tools/console/imprt.scala.txt b/tools/src/main/twirl/io/prediction/tools/console/imprt.scala.txt
deleted file mode 100644
index 245d749..0000000
--- a/tools/src/main/twirl/io/prediction/tools/console/imprt.scala.txt
+++ /dev/null
@@ -1,12 +0,0 @@
-Usage: pio import --appid <value> --input <value> [--channel <value>]
-
-Imports all events from a file to an app. Each line of the file should be a JSON
-object that represents a single event. If Hadoop configuration is present, the
-file will be imported from HDFS instead of the local filesystem.
-
-  --appid <value>
-      App ID of events to be imported.
-  --channel <value>
-      Channel name (default channel if not specified)
-  --input <value>
-      Input path of the import file.

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/tools/src/main/twirl/io/prediction/tools/console/main.scala.txt
----------------------------------------------------------------------
diff --git a/tools/src/main/twirl/io/prediction/tools/console/main.scala.txt b/tools/src/main/twirl/io/prediction/tools/console/main.scala.txt
deleted file mode 100644
index a97ecb3..0000000
--- a/tools/src/main/twirl/io/prediction/tools/console/main.scala.txt
+++ /dev/null
@@ -1,52 +0,0 @@
-Usage: pio <command> [options] <args>...
-
-Options common to all commands:
-  [--pio-home <value>] [--spark-home <value>] [--sbt <value>]
-  [-ei <value>] [-ev <value>] [-v <value>] [-m <value>]
-  [-sk | --spark-kryo] [--verbose]
-  [<args>] [-- [<args passed to Spark>] [-- [<args passed to runner>]]]
-
-  --sbt <value>
-      Full path of sbt. Default: sbt
-  -ei <value> | --engine-id <value>
-      Specify an engine ID. Usually used by distributed deployment.
-  -ev <value> | --engine-version <value>
-      Specify an engine version. Usually used by distributed deployment.
-  -v <value> | --variant <value>
-      Path to an engine variant JSON file. Default: engine.json
-  -m <value> | --manifest <value>
-      Path to an engine manifest JSON file. Default: manifest.json
-  -sk | --spark-kryo
-      Shorthand for setting the spark.serializer property to
-      org.apache.spark.serializer.KryoSerializer.
-  --verbose
-      Enable third-party informational messages.
-
-Note that it is possible to supply pass-through arguments at the end
-of the command by using a '--' separator, e.g.
-
-  pio train -v my-variant -- --master spark://mycluster:7077
-
-In the example above, the '--master' argument will be passed to the underlying
-spark-submit command. Please refer to the usage section for each command for
-more information.
-
-The most commonly used pio commands are:
-    status        Displays status information about PredictionIO
-    version       Displays the version of this command line console
-    template      Creates a new engine based on an engine template
-    build         Build an engine at the current directory
-    train         Kick off a training using an engine
-    deploy        Deploy an engine as an engine server
-    eventserver   Launch an Event Server
-    app           Manage apps that are used by the Event Server
-    accesskey     Manage app access keys
-    export        Export events from the Event Server
-
-The following are experimental development commands:
-    run           Launch a driver program
-    eval          Kick off an evaluation using an engine
-    dashboard     Launch an evaluation dashboard
-    adminserver   Launch an Admin Server
-
-See 'pio help <command>' to read about a specific subcommand.

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/tools/src/main/twirl/io/prediction/tools/console/run.scala.txt
----------------------------------------------------------------------
diff --git a/tools/src/main/twirl/io/prediction/tools/console/run.scala.txt b/tools/src/main/twirl/io/prediction/tools/console/run.scala.txt
deleted file mode 100644
index faef308..0000000
--- a/tools/src/main/twirl/io/prediction/tools/console/run.scala.txt
+++ /dev/null
@@ -1,17 +0,0 @@
-Usage: pio run [--sbt-extra <value>] [--clean] [--no-asm]
-               [common options...] <main class>
-
-Launch a driver program. This command will pass all pass-through arguments to
-its underlying spark-submit command. In addition, it also supports a second
-level of pass-through arguments to the driver program, e.g.
-
-  pio run -- --master spark://localhost:7077 -- --driver-arg foo
-
-  <main class>
-      Main class name of the driver program.
-  --sbt-extra <value>
-      Extra command to pass to SBT when it builds your engine.
-  --clean
-      Clean build.
-  --no-asm
-      Skip building external dependencies assembly.

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/tools/src/main/twirl/io/prediction/tools/console/status.scala.txt
----------------------------------------------------------------------
diff --git a/tools/src/main/twirl/io/prediction/tools/console/status.scala.txt b/tools/src/main/twirl/io/prediction/tools/console/status.scala.txt
deleted file mode 100644
index 3ca4af5..0000000
--- a/tools/src/main/twirl/io/prediction/tools/console/status.scala.txt
+++ /dev/null
@@ -1,3 +0,0 @@
-Usage: pio status
-
-Displays status information about the PredictionIO system.

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/tools/src/main/twirl/io/prediction/tools/console/template.scala.txt
----------------------------------------------------------------------
diff --git a/tools/src/main/twirl/io/prediction/tools/console/template.scala.txt b/tools/src/main/twirl/io/prediction/tools/console/template.scala.txt
deleted file mode 100644
index f97c8ce..0000000
--- a/tools/src/main/twirl/io/prediction/tools/console/template.scala.txt
+++ /dev/null
@@ -1,25 +0,0 @@
-Usage: pio template list
-
-Retrieves a list of available template IDs.
-
-Usage: pio template get <template ID> <new engine directory>
-                        [--version <version>]
-                        [--name <value>] [--package <value>] [--email <value>]
-
-Seeds a directory with an engine template.
-
-  <template ID>
-      Engine template ID.
-  <new engine directory>
-      Location of the new engine.
-  --version <value>
-      The template version to get. By default, the most recently tagged version
-      will be downloaded.
-  --name <value>
-      Name of the author of the new engine.
-  --package <value>
-      Scala package name of the new engine.
-  --email <value>
-      E-mail address of the author of the new engine. Specify this if you want
-      to receive updates (critical bug fixes, etc) about the engine template
-      that you are going to use.

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/tools/src/main/twirl/io/prediction/tools/console/train.scala.txt
----------------------------------------------------------------------
diff --git a/tools/src/main/twirl/io/prediction/tools/console/train.scala.txt b/tools/src/main/twirl/io/prediction/tools/console/train.scala.txt
deleted file mode 100644
index 582e54e..0000000
--- a/tools/src/main/twirl/io/prediction/tools/console/train.scala.txt
+++ /dev/null
@@ -1,28 +0,0 @@
-Usage: pio train [--batch <value>] [--skip-sanity-check]
-                 [--stop-after-read] [--stop-after-prepare]
-                 [--engine-factory <value>] [--engine-params-key <value>]
-                 [--scratch-uri <value>]
-                 [common options...]
-
-Kick off a training using an engine (variant) to produce an engine instance.
-This command will pass all pass-through arguments to its underlying spark-submit
-command.
-
-  --batch <value>
-      Batch label of the run.
-  --skip-sanity-check
-      Disable all data sanity checks. Useful for speeding up training in
-      production.
-  --stop-after-read
-      Stop the training process after DataSource.read(). Useful for debugging.
-  --stop-after-prepare
-      Stop the training process after Preparator.prepare(). Useful for
-      debugging.
-  --engine-factory <value>
-      Override engine factory class.
-  --engine-params-key <value>
-      Retrieve engine parameters programmatically from the engine factory class.
-  --scratch-uri <value>
-      URI of the working scratch space. Specify this when you want to have all
-      necessary files transferred to a remote location. You will usually want to
-      specify this when you use --deploy-mode cluster.

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/tools/src/main/twirl/io/prediction/tools/console/upgrade.scala.txt
----------------------------------------------------------------------
diff --git a/tools/src/main/twirl/io/prediction/tools/console/upgrade.scala.txt b/tools/src/main/twirl/io/prediction/tools/console/upgrade.scala.txt
deleted file mode 100644
index 48ea23e..0000000
--- a/tools/src/main/twirl/io/prediction/tools/console/upgrade.scala.txt
+++ /dev/null
@@ -1,15 +0,0 @@
-Usage: pio upgrade <from version> <to version> <old App ID> <new app ID>
-
-Migrate the event data from the old App ID to the new App ID so that it can be used with <to version> of PredictionIO.
-
-  <from version>
-    The version upgraded from.
-
-  <to version>
-    The version upgraded to.
-
-  <old App ID>
-    Old App ID.
-
-  <new app ID>
-    New App ID.

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/tools/src/main/twirl/io/prediction/tools/console/version.scala.txt
----------------------------------------------------------------------
diff --git a/tools/src/main/twirl/io/prediction/tools/console/version.scala.txt b/tools/src/main/twirl/io/prediction/tools/console/version.scala.txt
deleted file mode 100644
index f9b2ab9..0000000
--- a/tools/src/main/twirl/io/prediction/tools/console/version.scala.txt
+++ /dev/null
@@ -1,3 +0,0 @@
-Usage: pio version
-
-Displays the version of this command line console.

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/tools/src/main/twirl/io/prediction/tools/dashboard/index.scala.html
----------------------------------------------------------------------
diff --git a/tools/src/main/twirl/io/prediction/tools/dashboard/index.scala.html b/tools/src/main/twirl/io/prediction/tools/dashboard/index.scala.html
deleted file mode 100644
index a9cadde..0000000
--- a/tools/src/main/twirl/io/prediction/tools/dashboard/index.scala.html
+++ /dev/null
@@ -1,99 +0,0 @@
-@import io.prediction.data.storage.EvaluationInstance
-@import io.prediction.tools.dashboard.DashboardConfig
-@import org.joda.time.DateTime
-@import org.joda.time.format.DateTimeFormat
-@(dc: DashboardConfig,
-  dashboardStartTime: DateTime,
-  env: Map[String, String],
-  completedInstances: Seq[EvaluationInstance])
-<!DOCTYPE html>
-<html lang="en">
-  <head>
-    <title>PredictionIO Dashboard at @{dc.ip}:@{dc.port}</title>
-    <link href="/assets/favicon.png" rel="shortcut icon" />
-    <link href="/assets/bootstrap-3.2.0-dist/css/bootstrap.min.css" rel="stylesheet">
-    <style type="text/css">
-    .string { color: green; }
-    .number { color: darkorange; }
-    .boolean { color: blue; }
-    .null { color: magenta; }
-    .key { color: red; }
-    </style>
-    <script type="text/javascript">
-      function syntaxHighlight(json) {
-        if (typeof json != 'string') {
-          json = JSON.stringify(json, undefined, 2);
-        }
-        json = json.replace(/&/g, '&amp;').replace(/</g, '&lt;').replace(/>/g, '&gt;');
-        return json.replace(/("(\\u[a-zA-Z0-9]{4}|\\[^u]|[^\\"])*"(\s*:)?|\b(true|false|null)\b|-?\d+(?:\.\d*)?(?:[eE][+\-]?\d+)?)/g, function (match) {
-          var cls = 'number';
-          if (/^"/.test(match)) {
-            if (/:$/.test(match)) {
-              cls = 'key';
-            } else {
-              cls = 'string';
-            }
-          } else if (/true|false/.test(match)) {
-            cls = 'boolean';
-          } else if (/null/.test(match)) {
-            cls = 'null';
-          }
-          return '<span class="' + cls + '">' + match + '</span>';
-        });
-      }
-      function shorternClassName(className) {
-        return className.replace(/(\w)\w*\./g, "$1."); 
-      }
-    </script>
-  </head>
-  <body>
-    <div class="container-fluid">
-      <div class="page-header">
-        <h1>PredictionIO Dashboard at @{dc.ip}:@{dc.port}</h1>
-        <p class="lead">Started on: @{DateTimeFormat.forStyle("FF").print(dashboardStartTime)}</p>
-      </div>
-      <h2>Completed Evaluations</h2>
-      <table class="table table-bordered table-striped">
-        <tr>
-          <th></th>
-          <th>Evaluator Result (One-liner)</th>
-          <th>Evaluation Class</th>
-          <th>Engine Parameters Generator Class</th>
-          <th>Start Time</th>
-          <th>End Time</th>
-          <th>Batch</th>
-        </tr>
-        @for(i <- completedInstances) {
-        <tr>
-          <td>
-            <div>
-              <a href="/engine_instances/@{i.id}/evaluator_results.html" class="btn btn-primary">HTML</a>
-            </div>
-          </td>
-          <td>
-            <div>@{i.evaluatorResults}</div>
-          </td>
-          <td>
-            <span title="@{i.evaluationClass}"><script type="text/javascript">document.write(shorternClassName("@{i.evaluationClass}"));</script></span>
-          </td>
-          <td>
-            <span title="@{i.engineParamsGeneratorClass}"><script type="text/javascript">document.write(shorternClassName("@{i.engineParamsGeneratorClass}"));</script></span>
-          </td>
-          <td>@{DateTimeFormat.forStyle("MM").print(i.startTime)}</td>
-          <td>@{DateTimeFormat.forStyle("MM").print(i.endTime)}</td>
-          <td>@{i.batch}</td>
-        </tr>
-        }
-      </table>
-      <h2>Environment</h2>
-      <p>The following values are accurate up to the time when the dashboard was launched.</p>
-      <table class="table table-bordered table-striped">
-        @for(k <- env.keys.toSeq.sorted) {
-        <tr><th>@{k}</th><td>@{env(k)}</td></tr>
-        }
-      </table>
-    </div>
-    <script src="/assets/jquery-1.11.1.min.js"></script>
-    <script src="/assets/bootstrap-3.2.0-dist/js/bootstrap.min.js"></script>
-  </body>
-</html>

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/tools/src/main/twirl/io/prediction/tools/templates/itemrank/params/algorithmsJson.scala.txt
----------------------------------------------------------------------
diff --git a/tools/src/main/twirl/io/prediction/tools/templates/itemrank/params/algorithmsJson.scala.txt b/tools/src/main/twirl/io/prediction/tools/templates/itemrank/params/algorithmsJson.scala.txt
deleted file mode 100644
index 02abbe5..0000000
--- a/tools/src/main/twirl/io/prediction/tools/templates/itemrank/params/algorithmsJson.scala.txt
+++ /dev/null
@@ -1,16 +0,0 @@
-[
-  {
-    "name": "mahoutItemBased",
-    "params": {
-      "booleanData": true,
-      "itemSimilarity": "LogLikelihoodSimilarity",
-      "weighted": false,
-      "nearestN": 10,
-      "threshold": 4.9E-324,
-      "numSimilarItems": 50,
-      "numUserActions": 50,
-      "freshness" : 0,
-      "freshnessTimeUnit" : 86400
-    }
-  }
-]

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/tools/src/main/twirl/io/prediction/tools/templates/itemrank/params/datasourceJson.scala.txt
----------------------------------------------------------------------
diff --git a/tools/src/main/twirl/io/prediction/tools/templates/itemrank/params/datasourceJson.scala.txt b/tools/src/main/twirl/io/prediction/tools/templates/itemrank/params/datasourceJson.scala.txt
deleted file mode 100644
index 7d219d3..0000000
--- a/tools/src/main/twirl/io/prediction/tools/templates/itemrank/params/datasourceJson.scala.txt
+++ /dev/null
@@ -1,26 +0,0 @@
-{
-  "appId": 1,
-  "actions": [
-    "view",
-    "like",
-    "dislike",
-    "conversion",
-    "rate"
-  ],
-  "attributeNames": {
-    "user" : "pio_user",
-    "item" : "pio_item",
-    "u2iActions": [
-      "view",
-      "like",
-      "dislike",
-      "conversion",
-      "rate"
-    ],
-    "itypes" : "pio_itypes",
-    "starttime" : "pio_starttime",
-    "endtime" : "pio_endtime",
-    "inactive" : "pio_inactive",
-    "rating" : "pio_rating"
-  }
-}

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/tools/src/main/twirl/io/prediction/tools/templates/itemrank/params/preparatorJson.scala.txt
----------------------------------------------------------------------
diff --git a/tools/src/main/twirl/io/prediction/tools/templates/itemrank/params/preparatorJson.scala.txt b/tools/src/main/twirl/io/prediction/tools/templates/itemrank/params/preparatorJson.scala.txt
deleted file mode 100644
index 0f55d6e..0000000
--- a/tools/src/main/twirl/io/prediction/tools/templates/itemrank/params/preparatorJson.scala.txt
+++ /dev/null
@@ -1,10 +0,0 @@
-{
-  "actions": {
-    "view": 3,
-    "like": 5,
-    "dislike": 1,
-    "conversion": 4,
-    "rate": null
-  },
-  "conflict": "latest"
-}

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/tools/src/main/twirl/io/prediction/tools/templates/itemrank/params/servingJson.scala.txt
----------------------------------------------------------------------
diff --git a/tools/src/main/twirl/io/prediction/tools/templates/itemrank/params/servingJson.scala.txt b/tools/src/main/twirl/io/prediction/tools/templates/itemrank/params/servingJson.scala.txt
deleted file mode 100644
index 0967ef4..0000000
--- a/tools/src/main/twirl/io/prediction/tools/templates/itemrank/params/servingJson.scala.txt
+++ /dev/null
@@ -1 +0,0 @@
-{}

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/tools/src/main/twirl/io/prediction/tools/templates/itemrec/params/algorithmsJson.scala.txt
----------------------------------------------------------------------
diff --git a/tools/src/main/twirl/io/prediction/tools/templates/itemrec/params/algorithmsJson.scala.txt b/tools/src/main/twirl/io/prediction/tools/templates/itemrec/params/algorithmsJson.scala.txt
deleted file mode 100644
index 2c58916..0000000
--- a/tools/src/main/twirl/io/prediction/tools/templates/itemrec/params/algorithmsJson.scala.txt
+++ /dev/null
@@ -1,15 +0,0 @@
-[
-  {
-    "name": "ncMahoutItemBased",
-    "params": {
-      "booleanData": true,
-      "itemSimilarity": "LogLikelihoodSimilarity",
-      "weighted": false,
-      "threshold": 4.9E-324,
-      "nearestN": 10,
-      "unseenOnly": false,
-      "freshness" : 0,
-      "freshnessTimeUnit" : 86400
-    }
-  }
-]

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/tools/src/main/twirl/io/prediction/tools/templates/itemrec/params/datasourceJson.scala.txt
----------------------------------------------------------------------
diff --git a/tools/src/main/twirl/io/prediction/tools/templates/itemrec/params/datasourceJson.scala.txt b/tools/src/main/twirl/io/prediction/tools/templates/itemrec/params/datasourceJson.scala.txt
deleted file mode 100644
index 8d4d010..0000000
--- a/tools/src/main/twirl/io/prediction/tools/templates/itemrec/params/datasourceJson.scala.txt
+++ /dev/null
@@ -1,26 +0,0 @@
-{
-  "appId": 2,
-  "actions": [
-    "view",
-    "like",
-    "dislike",
-    "conversion",
-    "rate"
-  ],
-  "attributeNames": {
-    "user" : "pio_user",
-    "item" : "pio_item",
-    "u2iActions": [
-      "view",
-      "like",
-      "dislike",
-      "conversion",
-      "rate"
-    ],
-    "itypes" : "pio_itypes",
-    "starttime" : "pio_starttime",
-    "endtime" : "pio_endtime",
-    "inactive" : "pio_inactive",
-    "rating" : "pio_rating"
-  }
-}

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/tools/src/main/twirl/io/prediction/tools/templates/itemrec/params/preparatorJson.scala.txt
----------------------------------------------------------------------
diff --git a/tools/src/main/twirl/io/prediction/tools/templates/itemrec/params/preparatorJson.scala.txt b/tools/src/main/twirl/io/prediction/tools/templates/itemrec/params/preparatorJson.scala.txt
deleted file mode 100644
index 0f55d6e..0000000
--- a/tools/src/main/twirl/io/prediction/tools/templates/itemrec/params/preparatorJson.scala.txt
+++ /dev/null
@@ -1,10 +0,0 @@
-{
-  "actions": {
-    "view": 3,
-    "like": 5,
-    "dislike": 1,
-    "conversion": 4,
-    "rate": null
-  },
-  "conflict": "latest"
-}

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/tools/src/main/twirl/io/prediction/tools/templates/itemrec/params/servingJson.scala.txt
----------------------------------------------------------------------
diff --git a/tools/src/main/twirl/io/prediction/tools/templates/itemrec/params/servingJson.scala.txt b/tools/src/main/twirl/io/prediction/tools/templates/itemrec/params/servingJson.scala.txt
deleted file mode 100644
index 0967ef4..0000000
--- a/tools/src/main/twirl/io/prediction/tools/templates/itemrec/params/servingJson.scala.txt
+++ /dev/null
@@ -1 +0,0 @@
-{}

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/tools/src/main/twirl/io/prediction/tools/templates/itemsim/params/algorithmsJson.scala.txt
----------------------------------------------------------------------
diff --git a/tools/src/main/twirl/io/prediction/tools/templates/itemsim/params/algorithmsJson.scala.txt b/tools/src/main/twirl/io/prediction/tools/templates/itemsim/params/algorithmsJson.scala.txt
deleted file mode 100644
index fb8e9ac..0000000
--- a/tools/src/main/twirl/io/prediction/tools/templates/itemsim/params/algorithmsJson.scala.txt
+++ /dev/null
@@ -1,13 +0,0 @@
-[
-  {
-    "name": "ncMahoutItemBased",
-    "params": {
-      "booleanData": true,
-      "itemSimilarity": "LogLikelihoodSimilarity",
-      "weighted": false,
-      "threshold": 4.9E-324,
-      "freshness" : 0,
-      "freshnessTimeUnit" : 86400
-    }
-  }
-]

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/tools/src/main/twirl/io/prediction/tools/templates/itemsim/params/datasourceJson.scala.txt
----------------------------------------------------------------------
diff --git a/tools/src/main/twirl/io/prediction/tools/templates/itemsim/params/datasourceJson.scala.txt b/tools/src/main/twirl/io/prediction/tools/templates/itemsim/params/datasourceJson.scala.txt
deleted file mode 100644
index 8d4d010..0000000
--- a/tools/src/main/twirl/io/prediction/tools/templates/itemsim/params/datasourceJson.scala.txt
+++ /dev/null
@@ -1,26 +0,0 @@
-{
-  "appId": 2,
-  "actions": [
-    "view",
-    "like",
-    "dislike",
-    "conversion",
-    "rate"
-  ],
-  "attributeNames": {
-    "user" : "pio_user",
-    "item" : "pio_item",
-    "u2iActions": [
-      "view",
-      "like",
-      "dislike",
-      "conversion",
-      "rate"
-    ],
-    "itypes" : "pio_itypes",
-    "starttime" : "pio_starttime",
-    "endtime" : "pio_endtime",
-    "inactive" : "pio_inactive",
-    "rating" : "pio_rating"
-  }
-}

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/tools/src/main/twirl/io/prediction/tools/templates/itemsim/params/preparatorJson.scala.txt
----------------------------------------------------------------------
diff --git a/tools/src/main/twirl/io/prediction/tools/templates/itemsim/params/preparatorJson.scala.txt b/tools/src/main/twirl/io/prediction/tools/templates/itemsim/params/preparatorJson.scala.txt
deleted file mode 100644
index 0f55d6e..0000000
--- a/tools/src/main/twirl/io/prediction/tools/templates/itemsim/params/preparatorJson.scala.txt
+++ /dev/null
@@ -1,10 +0,0 @@
-{
-  "actions": {
-    "view": 3,
-    "like": 5,
-    "dislike": 1,
-    "conversion": 4,
-    "rate": null
-  },
-  "conflict": "latest"
-}

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/tools/src/main/twirl/io/prediction/tools/templates/itemsim/params/servingJson.scala.txt
----------------------------------------------------------------------
diff --git a/tools/src/main/twirl/io/prediction/tools/templates/itemsim/params/servingJson.scala.txt b/tools/src/main/twirl/io/prediction/tools/templates/itemsim/params/servingJson.scala.txt
deleted file mode 100644
index 0967ef4..0000000
--- a/tools/src/main/twirl/io/prediction/tools/templates/itemsim/params/servingJson.scala.txt
+++ /dev/null
@@ -1 +0,0 @@
-{}

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/tools/src/main/twirl/io/prediction/tools/templates/scala/buildSbt.scala.txt
----------------------------------------------------------------------
diff --git a/tools/src/main/twirl/io/prediction/tools/templates/scala/buildSbt.scala.txt b/tools/src/main/twirl/io/prediction/tools/templates/scala/buildSbt.scala.txt
deleted file mode 100644
index 9343010..0000000
--- a/tools/src/main/twirl/io/prediction/tools/templates/scala/buildSbt.scala.txt
+++ /dev/null
@@ -1,14 +0,0 @@
-@(name: String, pioVersion: String, sparkVersion: String)
-import AssemblyKeys._
-
-assemblySettings
-
-name := "@{name}"
-
-organization := "myorg"
-
-version := "0.0.1-SNAPSHOT"
-
-libraryDependencies ++= Seq(
-  "io.prediction"    %% "core"          % "@{pioVersion}" % "provided",
-  "org.apache.spark" %% "spark-core"    % "@{sparkVersion}" % "provided")

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/tools/src/main/twirl/io/prediction/tools/templates/scala/engineJson.scala.txt
----------------------------------------------------------------------
diff --git a/tools/src/main/twirl/io/prediction/tools/templates/scala/engineJson.scala.txt b/tools/src/main/twirl/io/prediction/tools/templates/scala/engineJson.scala.txt
deleted file mode 100644
index c95e753..0000000
--- a/tools/src/main/twirl/io/prediction/tools/templates/scala/engineJson.scala.txt
+++ /dev/null
@@ -1,9 +0,0 @@
-@(name: String, engineFactory: String)
-{
-  "id": "default",
-  "description": "@{name}",
-  "engineFactory": "@{engineFactory}",
-  "datasource": {
-    "multiplier": 2
-  }
-}

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/tools/src/main/twirl/io/prediction/tools/templates/scala/manifestJson.scala.txt
----------------------------------------------------------------------
diff --git a/tools/src/main/twirl/io/prediction/tools/templates/scala/manifestJson.scala.txt b/tools/src/main/twirl/io/prediction/tools/templates/scala/manifestJson.scala.txt
deleted file mode 100644
index 015d62b..0000000
--- a/tools/src/main/twirl/io/prediction/tools/templates/scala/manifestJson.scala.txt
+++ /dev/null
@@ -1,9 +0,0 @@
-@(id: String, version: String, name: String)
-{
-  "id": "@{id}",
-  "version": "@{version}",
-  "name": "@{name}",
-  "description": "@{name}"
-  "engineFactory": "",
-  "files": []
-}

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/tools/src/main/twirl/io/prediction/tools/templates/scala/project/assemblySbt.scala.txt
----------------------------------------------------------------------
diff --git a/tools/src/main/twirl/io/prediction/tools/templates/scala/project/assemblySbt.scala.txt b/tools/src/main/twirl/io/prediction/tools/templates/scala/project/assemblySbt.scala.txt
deleted file mode 100644
index 54c3252..0000000
--- a/tools/src/main/twirl/io/prediction/tools/templates/scala/project/assemblySbt.scala.txt
+++ /dev/null
@@ -1 +0,0 @@
-addSbtPlugin("com.eed3si9n" % "sbt-assembly" % "0.11.2")

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/tools/src/main/twirl/io/prediction/tools/templates/scala/src/main/scala/engine.scala.txt
----------------------------------------------------------------------
diff --git a/tools/src/main/twirl/io/prediction/tools/templates/scala/src/main/scala/engine.scala.txt b/tools/src/main/twirl/io/prediction/tools/templates/scala/src/main/scala/engine.scala.txt
deleted file mode 100644
index 2ee0f63..0000000
--- a/tools/src/main/twirl/io/prediction/tools/templates/scala/src/main/scala/engine.scala.txt
+++ /dev/null
@@ -1,76 +0,0 @@
-package myorg
-
-// Pulls in necessary PredictionIO controller components
-import io.prediction.controller.EmptyActualResult
-import io.prediction.controller.EmptyEvaluationInfo
-import io.prediction.controller.IEngineFactory
-import io.prediction.controller.LAlgorithm
-import io.prediction.controller.LDataSource
-import io.prediction.controller.Params
-import io.prediction.controller.SimpleEngine
-
-// All data classes must be an instance of Serializable
-class MyTrainingData(
-  val multiplier: Int
-) extends Serializable
-
-class MyQuery(
-  val multiplicand: Int
-) extends Serializable
-
-class MyModel(
-  val multiplier: Int
-) extends Serializable {
-  override def toString = s"MyModel's multiplier: ${multiplier.toString}"
-}
-
-class MyPredictedResult(
-  val product: Int
-) extends Serializable
-
-case class MyDataSourceParams(
-  val multiplier: Int
-) extends Params
-
-// Your controller components
-class MyDataSource(val dsp: MyDataSourceParams) extends LDataSource[
-    MyTrainingData,
-    EmptyEvaluationInfo,
-    MyQuery,
-    EmptyActualResult] {
-
-  /** Implement readTraining() when you are not concerned about evaluation.
-    *
-    */
-  override def readTraining(): MyTrainingData = {
-    new MyTrainingData(dsp.multiplier)
-  }
-}
-
-class MyAlgorithm extends LAlgorithm[
-    MyTrainingData,
-    MyModel,
-    MyQuery,
-    MyPredictedResult] {
-
-  override def train(pd: MyTrainingData): MyModel = {
-    // Our model is simply one integer...
-    new MyModel(pd.multiplier)
-  }
-
-  override def predict(model: MyModel, query: MyQuery): MyPredictedResult = {
-    new MyPredictedResult(query.multiplicand * model.multiplier)
-  }
-}
-
-/** Engine factory that pieces everything together. SimpleEngine only requires
-  * one DataSource and one Algorithm. Preparator is an identity function, and
-  * Serving simply outputs Algorithm's prediction without further processing.
-  */
-object MyEngineFactory extends IEngineFactory {
-  override def apply() = {
-    new SimpleEngine(
-      classOf[MyDataSource],
-      classOf[MyAlgorithm])
-  }
-}
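
The template above pairs with the engineJson template earlier in this patch,
whose "datasource" params set "multiplier" to 2. As an illustrative sketch (not
part of this patch), once such an engine is built, trained, and deployed, it
can be queried over HTTP on the default port:

  curl -H "Content-Type: application/json" \
       -d '{ "multiplicand": 4 }' http://localhost:8000/queries.json

With a multiplier of 2, MyAlgorithm.predict() would return a MyPredictedResult
of {"product": 8}.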

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/tools/src/main/twirl/org/apache/predictionio/tools/console/accesskey.scala.txt
----------------------------------------------------------------------
diff --git a/tools/src/main/twirl/org/apache/predictionio/tools/console/accesskey.scala.txt b/tools/src/main/twirl/org/apache/predictionio/tools/console/accesskey.scala.txt
new file mode 100644
index 0000000..651dbaf
--- /dev/null
+++ b/tools/src/main/twirl/org/apache/predictionio/tools/console/accesskey.scala.txt
@@ -0,0 +1,20 @@
+Usage: pio accesskey new [--key <value>] <app name> [<event1> <event2>...]
+
+Create a new access key with the specified allowed event(s).
+
+  --key <value>
+      Specify a custom key.
+  <app name>
+      App to be associated with the new access key.
+  <event1> <event2>...
+      Allowed event name(s) to be added to the access key.
+
+Usage: pio accesskey list [<app name>]
+
+  <app name>
+      App name.
+
+Usage: pio accesskey delete <access key>
+
+  <access key>
+      The access key to be deleted.
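
For illustration, a hypothetical session tying the three subcommands together
(MyApp, my-custom-key, and the event names are placeholder values):

  pio accesskey new --key my-custom-key MyApp purchase view
  pio accesskey list MyApp
  pio accesskey delete my-custom-key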

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/tools/src/main/twirl/org/apache/predictionio/tools/console/adminserver.scala.txt
----------------------------------------------------------------------
diff --git a/tools/src/main/twirl/org/apache/predictionio/tools/console/adminserver.scala.txt b/tools/src/main/twirl/org/apache/predictionio/tools/console/adminserver.scala.txt
new file mode 100644
index 0000000..4ec0237
--- /dev/null
+++ b/tools/src/main/twirl/org/apache/predictionio/tools/console/adminserver.scala.txt
@@ -0,0 +1,6 @@
+(Experimental Only!) Usage: pio adminserver [--ip <value>] [--port <value>]
+
+  --ip <value>
+      IP to bind to. Default: localhost
+  --port <value>
+      Port to bind to. Default: 7071

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/tools/src/main/twirl/org/apache/predictionio/tools/console/app.scala.txt
----------------------------------------------------------------------
diff --git a/tools/src/main/twirl/org/apache/predictionio/tools/console/app.scala.txt b/tools/src/main/twirl/org/apache/predictionio/tools/console/app.scala.txt
new file mode 100644
index 0000000..49f21b1
--- /dev/null
+++ b/tools/src/main/twirl/org/apache/predictionio/tools/console/app.scala.txt
@@ -0,0 +1,74 @@
+Usage: pio app new [--id <value>] [--description <value>] [--access-key <value>]
+                   <name>
+
+Create a new app name to app ID mapping.
+
+  --id <value>
+      Specify this if you already have data under an app ID.
+  --description <value>
+      Description of the new app.
+  --access-key <value>
+      Specify a custom default access key.
+  <name>
+      App name.
+
+
+Usage: pio app list
+
+List all apps.
+
+
+Usage: pio app show <name>
+
+Show details of an app.
+
+  <name>
+      App name.
+
+
+Usage: pio app delete <name> [--force]
+
+Delete an app.
+
+  <name>
+      App name.
+  --force, -f
+      Delete data without prompting for confirmation
+
+
+Usage: pio app data-delete <name> [--channel <name>] [--all] [--force]
+
+Delete data of an app.
+
+  <name>
+      App name.
+  --channel <name>
+      Delete data of the specified channel (default channel if not specified)
+  --all
+      Delete all data of this app (including both default and all channels)
+  --force, -f
+      Delete data without prompting for confirmation
+
+
+Usage: pio app channel-new <name> <channel>
+
+Create a new channel for the app.
+
+  <name>
+      App name.
+
+  <channel>
+      Channel name to be created.
+
+
+Usage: pio app channel-delete <name> <channel> [--force]
+
+Delete a channel for the app.
+
+  <name>
+      App name.
+
+  <channel>
+      Channel name to be deleted.
+  --force, -f
+      Delete data without prompting for confirmation
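
For illustration, a hypothetical workflow exercising these subcommands (MyApp
and test-channel are placeholder names):

  pio app new MyApp
  pio app channel-new MyApp test-channel
  pio app data-delete MyApp --channel test-channel --force
  pio app channel-delete MyApp test-channel --force
  pio app delete MyApp --force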

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/tools/src/main/twirl/org/apache/predictionio/tools/console/build.scala.txt
----------------------------------------------------------------------
diff --git a/tools/src/main/twirl/org/apache/predictionio/tools/console/build.scala.txt b/tools/src/main/twirl/org/apache/predictionio/tools/console/build.scala.txt
new file mode 100644
index 0000000..be80c50
--- /dev/null
+++ b/tools/src/main/twirl/org/apache/predictionio/tools/console/build.scala.txt
@@ -0,0 +1,11 @@
+Usage: pio build [--sbt-extra <value>] [--clean] [--no-asm]
+                 [common options...]
+                 
+Build an engine at the current directory.
+
+  --sbt-extra <value>
+      Extra command to pass to SBT when it builds your engine.
+  --clean
+      Clean build.
+  --no-asm
+      Skip building external dependencies assembly.
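
For example, a clean rebuild that also passes an extra flag down to sbt (the
sbt flag shown is illustrative):

  pio build --clean --sbt-extra "-mem 4096"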

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/tools/src/main/twirl/org/apache/predictionio/tools/console/dashboard.scala.txt
----------------------------------------------------------------------
diff --git a/tools/src/main/twirl/org/apache/predictionio/tools/console/dashboard.scala.txt b/tools/src/main/twirl/org/apache/predictionio/tools/console/dashboard.scala.txt
new file mode 100644
index 0000000..85eacda
--- /dev/null
+++ b/tools/src/main/twirl/org/apache/predictionio/tools/console/dashboard.scala.txt
@@ -0,0 +1,6 @@
+Usage: pio dashboard [--ip <value>] [--port <value>]
+
+  --ip <value>
+      IP to bind to. Default: localhost
+  --port <value>
+      Port to bind to. Default: 9000

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/tools/src/main/twirl/org/apache/predictionio/tools/console/deploy.scala.txt
----------------------------------------------------------------------
diff --git a/tools/src/main/twirl/org/apache/predictionio/tools/console/deploy.scala.txt b/tools/src/main/twirl/org/apache/predictionio/tools/console/deploy.scala.txt
new file mode 100644
index 0000000..4ce7985
--- /dev/null
+++ b/tools/src/main/twirl/org/apache/predictionio/tools/console/deploy.scala.txt
@@ -0,0 +1,29 @@
+Usage: pio deploy [--ip <value>] [--port <value>]
+                  [--engine-instance-id <value>]
+                  [--feedback] [--accesskey <value>]
+                  [--event-server-ip <value>] [--event-server-port <value>]
+                  [--batch <value>] [--scratch-uri <value>]
+
+Deploy an engine instance as a prediction server. This command will pass all
+pass-through arguments to its underlying spark-submit command.
+
+  --ip <value>
+      IP to bind to. Default: 0.0.0.0
+  --port <value>
+      Port to bind to. Default: 8000
+  --engine-instance-id <value>
+      Engine instance ID.
+  --feedback
+      Enable feedback loop to event server.
+  --accesskey <value>
+      Access key of the App where feedback data will be stored.
+  --event-server-ip <value>
+      Event server IP. Default: 0.0.0.0
+  --event-server-port <value>
+      Event server port. Default: 7070
+  --batch <value>
+      Batch label of the deployment.
+  --scratch-uri <value>
+      URI of the working scratch space. Specify this when you want to have all
+      necessary files transferred to a remote location. You will usually want to
+      specify this when you use --deploy-mode cluster.
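
For illustration, deploying on a custom port with the feedback loop enabled
(the access key is a placeholder):

  pio deploy --port 8200 --feedback --accesskey <access key> \
             --event-server-ip 127.0.0.1 --event-server-port 7070

As with pio train, arguments after a '--' separator go to spark-submit, e.g.
'pio deploy -- --master spark://mycluster:7077'.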

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/tools/src/main/twirl/org/apache/predictionio/tools/console/eval.scala.txt
----------------------------------------------------------------------
diff --git a/tools/src/main/twirl/org/apache/predictionio/tools/console/eval.scala.txt b/tools/src/main/twirl/org/apache/predictionio/tools/console/eval.scala.txt
new file mode 100644
index 0000000..499498c
--- /dev/null
+++ b/tools/src/main/twirl/org/apache/predictionio/tools/console/eval.scala.txt
@@ -0,0 +1,10 @@
+Usage: pio eval <evaluation-class> <engine-parameters-generator-class>
+                [--batch <value>]
+                [common options...]
+
+Kick off an evaluation using specified evaluation and engine parameters
+generator class. This command will pass all pass-through arguments to its
+underlying spark-submit command.
+
+  --batch <value>
+      Batch label of the run.
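
For illustration, with hypothetical class names (org.example.MyEvaluation and
org.example.MyEngineParamsList are placeholders):

  pio eval org.example.MyEvaluation org.example.MyEngineParamsList \
           --batch "tuning run" -- --driver-memory 4G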

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/tools/src/main/twirl/org/apache/predictionio/tools/console/eventserver.scala.txt
----------------------------------------------------------------------
diff --git a/tools/src/main/twirl/org/apache/predictionio/tools/console/eventserver.scala.txt b/tools/src/main/twirl/org/apache/predictionio/tools/console/eventserver.scala.txt
new file mode 100644
index 0000000..beda337
--- /dev/null
+++ b/tools/src/main/twirl/org/apache/predictionio/tools/console/eventserver.scala.txt
@@ -0,0 +1,8 @@
+Usage: pio eventserver [--ip <value>] [--port <value>] [--stats]
+
+  --ip <value>
+      IP to bind to. Default: 0.0.0.0
+  --port <value>
+      Port to bind to. Default: 7070
+  --stats
+      Enable Event Server internal statistics and its API endpoint.
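
For illustration, once the server is started with, say,

  pio eventserver --port 7070 --stats

events can be posted to its REST endpoint (the access key is a placeholder):

  curl -i -X POST "http://localhost:7070/events.json?accessKey=<access key>" \
       -H "Content-Type: application/json" \
       -d '{"event":"view","entityType":"user","entityId":"u0","targetEntityType":"item","targetEntityId":"i0"}'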

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/tools/src/main/twirl/org/apache/predictionio/tools/console/export.scala.txt
----------------------------------------------------------------------
diff --git a/tools/src/main/twirl/org/apache/predictionio/tools/console/export.scala.txt b/tools/src/main/twirl/org/apache/predictionio/tools/console/export.scala.txt
new file mode 100644
index 0000000..28eb665
--- /dev/null
+++ b/tools/src/main/twirl/org/apache/predictionio/tools/console/export.scala.txt
@@ -0,0 +1,14 @@
+Usage: pio export --appid <value> --output <value> [--format <value>] [--channel <value>]
+
+Exports all events of an app to a file. If Hadoop configuration is present, the
+file will be exported to HDFS instead of the local filesystem.
+
+  --appid <value>
+      App ID of events to be exported.
+  --channel <value>
+      Channel name (default channel if not specified)
+  --output <value>
+      Output path of the exported file.
+  --format <value>
+      The format of the exported file. Valid values are "json" and "parquet".
+      The default format is "json".
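
For example, exporting all events of app 1 to a Parquet file set (the output
path is illustrative):

  pio export --appid 1 --output /tmp/my-app-events --format parquet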

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/tools/src/main/twirl/org/apache/predictionio/tools/console/imprt.scala.txt
----------------------------------------------------------------------
diff --git a/tools/src/main/twirl/org/apache/predictionio/tools/console/imprt.scala.txt b/tools/src/main/twirl/org/apache/predictionio/tools/console/imprt.scala.txt
new file mode 100644
index 0000000..245d749
--- /dev/null
+++ b/tools/src/main/twirl/org/apache/predictionio/tools/console/imprt.scala.txt
@@ -0,0 +1,12 @@
+Usage: pio import --appid <value> --input <value> [--channel <value>]
+
+Imports all events from a file to an app. Each line of the file should be a JSON
+object that represents a single event. If Hadoop configuration is present, the
+file will be imported from HDFS instead of the local filesystem.
+
+  --appid <value>
+      App ID of events to be imported.
+  --channel <value>
+      Channel name (default channel if not specified)
+  --input <value>
+      Input path of the import file.
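
For example, assuming the standard event JSON schema, each line of the input
file would look something like:

  {"event":"rate","entityType":"user","entityId":"u0","targetEntityType":"item","targetEntityId":"i0","properties":{"rating":5},"eventTime":"2014-11-02T09:39:45.618-08:00"}

and could be imported with (paths are illustrative):

  pio import --appid 1 --input /tmp/my-app-events.json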

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/tools/src/main/twirl/org/apache/predictionio/tools/console/main.scala.txt
----------------------------------------------------------------------
diff --git a/tools/src/main/twirl/org/apache/predictionio/tools/console/main.scala.txt b/tools/src/main/twirl/org/apache/predictionio/tools/console/main.scala.txt
new file mode 100644
index 0000000..a97ecb3
--- /dev/null
+++ b/tools/src/main/twirl/org/apache/predictionio/tools/console/main.scala.txt
@@ -0,0 +1,52 @@
+Usage: pio <command> [options] <args>...
+
+Options common to all commands:
+  [--pio-home <value>] [--spark-home <value>] [--sbt <value>]
+  [-ei <value>] [-ev <value>] [-v <value>] [-m <value>]
+  [-sk | --spark-kryo] [--verbose]
+  [<args>] [-- [<args passed to Spark>] [-- [<args passed to runner>]]]
+
+  --sbt <value>
+      Full path of sbt. Default: sbt
+  -ei <value> | --engine-id <value>
+      Specify an engine ID. Usually used by distributed deployment.
+  -ev <value> | --engine-version <value>
+      Specify an engine version. Usually used by distributed deployment.
+  -v <value> | --variant <value>
+      Path to an engine variant JSON file. Default: engine.json
+  -m <value> | --manifest <value>
+      Path to an engine manifest JSON file. Default: manifest.json
+  -sk | --spark-kryo
+      Shorthand for setting the spark.serializer property to
+      org.apache.spark.serializer.KryoSerializer.
+  --verbose
+      Enable third-party informational messages.
+
+Note that it is possible to supply pass-through arguments at the end
+of the command by using a '--' separator, e.g.
+
+  pio train -v my-variant -- --master spark://mycluster:7077
+
+In the example above, the '--master' argument will be passed to the underlying
+spark-submit command. Please refer to the usage section for each command for
+more information.
+
+The most commonly used pio commands are:
+    status        Displays status information about PredictionIO
+    version       Displays the version of this command line console
+    template      Creates a new engine based on an engine template
+    build         Build an engine at the current directory
+    train         Kick off a training using an engine
+    deploy        Deploy an engine as an engine server
+    eventserver   Launch an Event Server
+    app           Manage apps that are used by the Event Server
+    accesskey     Manage app access keys
+    export        Export events from the Event Server
+
+The following are experimental development commands:
+    run           Launch a driver program
+    eval          Kick off an evaluation using an engine
+    dashboard     Launch an evaluation dashboard
+    adminserver   Launch an Admin Server
+
+See 'pio help <command>' to read about a specific subcommand.

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/tools/src/main/twirl/org/apache/predictionio/tools/console/run.scala.txt
----------------------------------------------------------------------
diff --git a/tools/src/main/twirl/org/apache/predictionio/tools/console/run.scala.txt b/tools/src/main/twirl/org/apache/predictionio/tools/console/run.scala.txt
new file mode 100644
index 0000000..faef308
--- /dev/null
+++ b/tools/src/main/twirl/org/apache/predictionio/tools/console/run.scala.txt
@@ -0,0 +1,17 @@
+Usage: pio run [--sbt-extra <value>] [--clean] [--no-asm]
+               [common options...] <main class>
+
+Launch a driver program. This command will pass all pass-through arguments to
+its underlying spark-submit command. In addition, it also supports a second
+level of pass-through arguments to the driver program, e.g.
+
+  pio run -- --master spark://localhost:7077 -- --driver-arg foo
+
+  <main class>
+      Main class name of the driver program.
+  --sbt-extra <value>
+      Extra command to pass to SBT when it builds your engine.
+  --clean
+      Clean build.
+  --no-asm
+      Skip building external dependencies assembly.

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/tools/src/main/twirl/org/apache/predictionio/tools/console/status.scala.txt
----------------------------------------------------------------------
diff --git a/tools/src/main/twirl/org/apache/predictionio/tools/console/status.scala.txt b/tools/src/main/twirl/org/apache/predictionio/tools/console/status.scala.txt
new file mode 100644
index 0000000..3ca4af5
--- /dev/null
+++ b/tools/src/main/twirl/org/apache/predictionio/tools/console/status.scala.txt
@@ -0,0 +1,3 @@
+Usage: pio status
+
+Displays status information about the PredictionIO system.

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/tools/src/main/twirl/org/apache/predictionio/tools/console/template.scala.txt
----------------------------------------------------------------------
diff --git a/tools/src/main/twirl/org/apache/predictionio/tools/console/template.scala.txt b/tools/src/main/twirl/org/apache/predictionio/tools/console/template.scala.txt
new file mode 100644
index 0000000..f97c8ce
--- /dev/null
+++ b/tools/src/main/twirl/org/apache/predictionio/tools/console/template.scala.txt
@@ -0,0 +1,25 @@
+Usage: pio template list
+
+Retrieves a list of available template IDs.
+
+Usage: pio template get <template ID> <new engine directory>
+                        [--version <version>]
+                        [--name <value>] [--package <value>] [--email <value>]
+
+Seeds a directory with an engine template.
+
+  <template ID>
+      Engine template ID.
+  <new engine directory>
+      Location of the new engine.
+  --version <value>
+      The template version to get. By default, the most recently tagged version
+      will be downloaded.
+  --name <value>
+      Name of the author of the new engine.
+  --package <value>
+      Scala package name of the new engine.
+  --email <value>
+      E-mail address of the author of the new engine. Specify this if you want
+      to receive updates (critical bug fixes, etc) about the engine template
+      that you are going to use.
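
For illustration (the template ID shown is a placeholder; use 'pio template
list' to find real ones):

  pio template get <some template ID> MyNewEngine \
                   --package org.example --name "Jane Doe" --email jane@example.com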

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/tools/src/main/twirl/org/apache/predictionio/tools/console/train.scala.txt
----------------------------------------------------------------------
diff --git a/tools/src/main/twirl/org/apache/predictionio/tools/console/train.scala.txt b/tools/src/main/twirl/org/apache/predictionio/tools/console/train.scala.txt
new file mode 100644
index 0000000..582e54e
--- /dev/null
+++ b/tools/src/main/twirl/org/apache/predictionio/tools/console/train.scala.txt
@@ -0,0 +1,28 @@
+Usage: pio train [--batch <value>] [--skip-sanity-check]
+                 [--stop-after-read] [--stop-after-prepare]
+                 [--engine-factory <value>] [--engine-params-key <value>]
+                 [--scratch-uri <value>]
+                 [common options...]
+
+Kick off a training using an engine (variant) to produce an engine instance.
+This command will pass all pass-through arguments to its underlying spark-submit
+command.
+
+  --batch <value>
+      Batch label of the run.
+  --skip-sanity-check
+      Disable all data sanity checks. Useful for speeding up training in
+      production.
+  --stop-after-read
+      Stop the training process after DataSource.read(). Useful for debugging.
+  --stop-after-prepare
+      Stop the training process after Preparator.prepare(). Useful for
+      debugging.
+  --engine-factory <value>
+      Override engine factory class.
+  --engine-params-key <value>
+      Retrieve engine parameters programmatically from the engine factory class.
+  --scratch-uri <value>
+      URI of the working scratch space. Specify this when you want to have all
+      necessary files transferred to a remote location. You will usually want to
+      specify this when you use --deploy-mode cluster.
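
For illustration, a training run submitted to a cluster, where --scratch-uri
stages the necessary files remotely (URIs are illustrative):

  pio train --scratch-uri hdfs://localhost:8020/pio_scratch \
            -- --master yarn --deploy-mode cluster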

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/tools/src/main/twirl/org/apache/predictionio/tools/console/upgrade.scala.txt
----------------------------------------------------------------------
diff --git a/tools/src/main/twirl/org/apache/predictionio/tools/console/upgrade.scala.txt b/tools/src/main/twirl/org/apache/predictionio/tools/console/upgrade.scala.txt
new file mode 100644
index 0000000..48ea23e
--- /dev/null
+++ b/tools/src/main/twirl/org/apache/predictionio/tools/console/upgrade.scala.txt
@@ -0,0 +1,15 @@
+Usage: pio upgrade <from version> <to version> <old App ID> <new app ID>
+
+Migrate the event data from the old App ID to the new App ID so that it can be used with <to version> of PredictionIO.
+
+  <from version>
+    The version upgraded from.
+
+  <to version>
+    The version upgraded to.
+
+  <old App ID>
+    Old App ID.
+
+  <new app ID>
+    New App ID.
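
For example (versions and app IDs are illustrative):

  pio upgrade 0.8.0 0.8.2 1 2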

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/tools/src/main/twirl/org/apache/predictionio/tools/console/version.scala.txt
----------------------------------------------------------------------
diff --git a/tools/src/main/twirl/org/apache/predictionio/tools/console/version.scala.txt b/tools/src/main/twirl/org/apache/predictionio/tools/console/version.scala.txt
new file mode 100644
index 0000000..f9b2ab9
--- /dev/null
+++ b/tools/src/main/twirl/org/apache/predictionio/tools/console/version.scala.txt
@@ -0,0 +1,3 @@
+Usage: pio version
+
+Displays the version of this command line console.

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/tools/src/main/twirl/org/apache/predictionio/tools/dashboard/index.scala.html
----------------------------------------------------------------------
diff --git a/tools/src/main/twirl/org/apache/predictionio/tools/dashboard/index.scala.html b/tools/src/main/twirl/org/apache/predictionio/tools/dashboard/index.scala.html
new file mode 100644
index 0000000..2e1719c
--- /dev/null
+++ b/tools/src/main/twirl/org/apache/predictionio/tools/dashboard/index.scala.html
@@ -0,0 +1,99 @@
+@import org.apache.predictionio.data.storage.EvaluationInstance
+@import org.apache.predictionio.tools.dashboard.DashboardConfig
+@import org.joda.time.DateTime
+@import org.joda.time.format.DateTimeFormat
+@(dc: DashboardConfig,
+  dashboardStartTime: DateTime,
+  env: Map[String, String],
+  completedInstances: Seq[EvaluationInstance])
+<!DOCTYPE html>
+<html lang="en">
+  <head>
+    <title>PredictionIO Dashboard at @{dc.ip}:@{dc.port}</title>
+    <link href="/assets/favicon.png" rel="shortcut icon" />
+    <link href="/assets/bootstrap-3.2.0-dist/css/bootstrap.min.css" rel="stylesheet">
+    <style type="text/css">
+    .string { color: green; }
+    .number { color: darkorange; }
+    .boolean { color: blue; }
+    .null { color: magenta; }
+    .key { color: red; }
+    </style>
+    <script type="text/javascript">
+      function syntaxHighlight(json) {
+        if (typeof json != 'string') {
+          json = JSON.stringify(json, undefined, 2);
+        }
+        json = json.replace(/&/g, '&amp;').replace(/</g, '&lt;').replace(/>/g, '&gt;');
+        return json.replace(/("(\\u[a-zA-Z0-9]{4}|\\[^u]|[^\\"])*"(\s*:)?|\b(true|false|null)\b|-?\d+(?:\.\d*)?(?:[eE][+\-]?\d+)?)/g, function (match) {
+          var cls = 'number';
+          if (/^"/.test(match)) {
+            if (/:$/.test(match)) {
+              cls = 'key';
+            } else {
+              cls = 'string';
+            }
+          } else if (/true|false/.test(match)) {
+            cls = 'boolean';
+          } else if (/null/.test(match)) {
+            cls = 'null';
+          }
+          return '<span class="' + cls + '">' + match + '</span>';
+        });
+      }
+      function shorternClassName(className) {
+        return className.replace(/(\w)\w*\./g, "$1."); 
+      }
+    </script>
+  </head>
+  <body>
+    <div class="container-fluid">
+      <div class="page-header">
+        <h1>PredictionIO Dashboard at @{dc.ip}:@{dc.port}</h1>
+        <p class="lead">Started on: @{DateTimeFormat.forStyle("FF").print(dashboardStartTime)}</p>
+      </div>
+      <h2>Completed Evaluations</h2>
+      <table class="table table-bordered table-striped">
+        <tr>
+          <th></th>
+          <th>Evaluator Result (One-liner)</th>
+          <th>Evaluation Class</th>
+          <th>Engine Parameters Generator Class</th>
+          <th>Start Time</th>
+          <th>End Time</th>
+          <th>Batch</th>
+        </tr>
+        @for(i <- completedInstances) {
+        <tr>
+          <td>
+            <div>
+              <a href="/engine_instances/@{i.id}/evaluator_results.html" class="btn btn-primary">HTML</a>
+            </div>
+          </td>
+          <td>
+            <div>@{i.evaluatorResults}</div>
+          </td>
+          <td>
+            <span title="@{i.evaluationClass}"><script type="text/javascript">document.write(shorternClassName("@{i.evaluationClass}"));</script></span>
+          </td>
+          <td>
+            <span title="@{i.engineParamsGeneratorClass}"><script type="text/javascript">document.write(shorternClassName("@{i.engineParamsGeneratorClass}"));</script></span>
+          </td>
+          <td>@{DateTimeFormat.forStyle("MM").print(i.startTime)}</td>
+          <td>@{DateTimeFormat.forStyle("MM").print(i.endTime)}</td>
+          <td>@{i.batch}</td>
+        </tr>
+        }
+      </table>
+      <h2>Environment</h2>
+      <p>The following values are accurate as of the time the dashboard was launched.</p>
+      <table class="table table-bordered table-striped">
+        @for(k <- env.keys.toSeq.sorted) {
+        <tr><th>@{k}</th><td>@{env(k)}</td></tr>
+        }
+      </table>
+    </div>
+    <script src="/assets/jquery-1.11.1.min.js"></script>
+    <script src="/assets/bootstrap-3.2.0-dist/js/bootstrap.min.js"></script>
+  </body>
+</html>

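For reference, shorternClassName abbreviates every dotted package segment to
its initial (the regex keeps only the first word character before each dot),
so "org.apache.predictionio.controller.Engine" renders as "o.a.p.c.Engine".
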
http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/tools/src/main/twirl/org/apache/predictionio/tools/templates/itemrank/params/algorithmsJson.scala.txt
----------------------------------------------------------------------
diff --git a/tools/src/main/twirl/org/apache/predictionio/tools/templates/itemrank/params/algorithmsJson.scala.txt b/tools/src/main/twirl/org/apache/predictionio/tools/templates/itemrank/params/algorithmsJson.scala.txt
new file mode 100644
index 0000000..02abbe5
--- /dev/null
+++ b/tools/src/main/twirl/org/apache/predictionio/tools/templates/itemrank/params/algorithmsJson.scala.txt
@@ -0,0 +1,16 @@
+[
+  {
+    "name": "mahoutItemBased",
+    "params": {
+      "booleanData": true,
+      "itemSimilarity": "LogLikelihoodSimilarity",
+      "weighted": false,
+      "nearestN": 10,
+      "threshold": 4.9E-324,
+      "numSimilarItems": 50,
+      "numUserActions": 50,
+      "freshness" : 0,
+      "freshnessTimeUnit" : 86400
+    }
+  }
+]

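Note that 4.9E-324 is the smallest positive IEEE 754 double
(java.lang.Double.MIN_VALUE), so the default threshold effectively accepts
every similarity score.
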
http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/tools/src/main/twirl/org/apache/predictionio/tools/templates/itemrank/params/datasourceJson.scala.txt
----------------------------------------------------------------------
diff --git a/tools/src/main/twirl/org/apache/predictionio/tools/templates/itemrank/params/datasourceJson.scala.txt b/tools/src/main/twirl/org/apache/predictionio/tools/templates/itemrank/params/datasourceJson.scala.txt
new file mode 100644
index 0000000..7d219d3
--- /dev/null
+++ b/tools/src/main/twirl/org/apache/predictionio/tools/templates/itemrank/params/datasourceJson.scala.txt
@@ -0,0 +1,26 @@
+{
+  "appId": 1,
+  "actions": [
+    "view",
+    "like",
+    "dislike",
+    "conversion",
+    "rate"
+  ],
+  "attributeNames": {
+    "user" : "pio_user",
+    "item" : "pio_item",
+    "u2iActions": [
+      "view",
+      "like",
+      "dislike",
+      "conversion",
+      "rate"
+    ],
+    "itypes" : "pio_itypes",
+    "starttime" : "pio_starttime",
+    "endtime" : "pio_endtime",
+    "inactive" : "pio_inactive",
+    "rating" : "pio_rating"
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/tools/src/main/twirl/org/apache/predictionio/tools/templates/itemrank/params/preparatorJson.scala.txt
----------------------------------------------------------------------
diff --git a/tools/src/main/twirl/org/apache/predictionio/tools/templates/itemrank/params/preparatorJson.scala.txt b/tools/src/main/twirl/org/apache/predictionio/tools/templates/itemrank/params/preparatorJson.scala.txt
new file mode 100644
index 0000000..0f55d6e
--- /dev/null
+++ b/tools/src/main/twirl/org/apache/predictionio/tools/templates/itemrank/params/preparatorJson.scala.txt
@@ -0,0 +1,10 @@
+{
+  "actions": {
+    "view": 3,
+    "like": 5,
+    "dislike": 1,
+    "conversion": 4,
+    "rate": null
+  },
+  "conflict": "latest"
+}

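The mapping above assigns an implicit rating to each action type ("view"
counts as 3, "like" as 5, and so on); "rate": null presumably means that
explicit rate events carry their own rating value, while "conflict": "latest"
resolves repeated actions by the same user on the same item in favor of the
most recent one.
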
http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/tools/src/main/twirl/org/apache/predictionio/tools/templates/itemrank/params/servingJson.scala.txt
----------------------------------------------------------------------
diff --git a/tools/src/main/twirl/org/apache/predictionio/tools/templates/itemrank/params/servingJson.scala.txt b/tools/src/main/twirl/org/apache/predictionio/tools/templates/itemrank/params/servingJson.scala.txt
new file mode 100644
index 0000000..0967ef4
--- /dev/null
+++ b/tools/src/main/twirl/org/apache/predictionio/tools/templates/itemrank/params/servingJson.scala.txt
@@ -0,0 +1 @@
+{}

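The empty JSON object is intentional: the serving component of these templates
takes no parameters.
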
http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/tools/src/main/twirl/org/apache/predictionio/tools/templates/itemrec/params/algorithmsJson.scala.txt
----------------------------------------------------------------------
diff --git a/tools/src/main/twirl/org/apache/predictionio/tools/templates/itemrec/params/algorithmsJson.scala.txt b/tools/src/main/twirl/org/apache/predictionio/tools/templates/itemrec/params/algorithmsJson.scala.txt
new file mode 100644
index 0000000..2c58916
--- /dev/null
+++ b/tools/src/main/twirl/org/apache/predictionio/tools/templates/itemrec/params/algorithmsJson.scala.txt
@@ -0,0 +1,15 @@
+[
+  {
+    "name": "ncMahoutItemBased",
+    "params": {
+      "booleanData": true,
+      "itemSimilarity": "LogLikelihoodSimilarity",
+      "weighted": false,
+      "threshold": 4.9E-324,
+      "nearestN": 10,
+      "unseenOnly": false,
+      "freshness" : 0,
+      "freshnessTimeUnit" : 86400
+    }
+  }
+]

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/tools/src/main/twirl/org/apache/predictionio/tools/templates/itemrec/params/datasourceJson.scala.txt
----------------------------------------------------------------------
diff --git a/tools/src/main/twirl/org/apache/predictionio/tools/templates/itemrec/params/datasourceJson.scala.txt b/tools/src/main/twirl/org/apache/predictionio/tools/templates/itemrec/params/datasourceJson.scala.txt
new file mode 100644
index 0000000..8d4d010
--- /dev/null
+++ b/tools/src/main/twirl/org/apache/predictionio/tools/templates/itemrec/params/datasourceJson.scala.txt
@@ -0,0 +1,26 @@
+{
+  "appId": 2,
+  "actions": [
+    "view",
+    "like",
+    "dislike",
+    "conversion",
+    "rate"
+  ],
+  "attributeNames": {
+    "user" : "pio_user",
+    "item" : "pio_item",
+    "u2iActions": [
+      "view",
+      "like",
+      "dislike",
+      "conversion",
+      "rate"
+    ],
+    "itypes" : "pio_itypes",
+    "starttime" : "pio_starttime",
+    "endtime" : "pio_endtime",
+    "inactive" : "pio_inactive",
+    "rating" : "pio_rating"
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/tools/src/main/twirl/org/apache/predictionio/tools/templates/itemrec/params/preparatorJson.scala.txt
----------------------------------------------------------------------
diff --git a/tools/src/main/twirl/org/apache/predictionio/tools/templates/itemrec/params/preparatorJson.scala.txt b/tools/src/main/twirl/org/apache/predictionio/tools/templates/itemrec/params/preparatorJson.scala.txt
new file mode 100644
index 0000000..0f55d6e
--- /dev/null
+++ b/tools/src/main/twirl/org/apache/predictionio/tools/templates/itemrec/params/preparatorJson.scala.txt
@@ -0,0 +1,10 @@
+{
+  "actions": {
+    "view": 3,
+    "like": 5,
+    "dislike": 1,
+    "conversion": 4,
+    "rate": null
+  },
+  "conflict": "latest"
+}

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/tools/src/main/twirl/org/apache/predictionio/tools/templates/itemrec/params/servingJson.scala.txt
----------------------------------------------------------------------
diff --git a/tools/src/main/twirl/org/apache/predictionio/tools/templates/itemrec/params/servingJson.scala.txt b/tools/src/main/twirl/org/apache/predictionio/tools/templates/itemrec/params/servingJson.scala.txt
new file mode 100644
index 0000000..0967ef4
--- /dev/null
+++ b/tools/src/main/twirl/org/apache/predictionio/tools/templates/itemrec/params/servingJson.scala.txt
@@ -0,0 +1 @@
+{}

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/tools/src/main/twirl/org/apache/predictionio/tools/templates/itemsim/params/algorithmsJson.scala.txt
----------------------------------------------------------------------
diff --git a/tools/src/main/twirl/org/apache/predictionio/tools/templates/itemsim/params/algorithmsJson.scala.txt b/tools/src/main/twirl/org/apache/predictionio/tools/templates/itemsim/params/algorithmsJson.scala.txt
new file mode 100644
index 0000000..fb8e9ac
--- /dev/null
+++ b/tools/src/main/twirl/org/apache/predictionio/tools/templates/itemsim/params/algorithmsJson.scala.txt
@@ -0,0 +1,13 @@
+[
+  {
+    "name": "ncMahoutItemBased",
+    "params": {
+      "booleanData": true,
+      "itemSimilarity": "LogLikelihoodSimilarity",
+      "weighted": false,
+      "threshold": 4.9E-324,
+      "freshness" : 0,
+      "freshnessTimeUnit" : 86400
+    }
+  }
+]

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/tools/src/main/twirl/org/apache/predictionio/tools/templates/itemsim/params/datasourceJson.scala.txt
----------------------------------------------------------------------
diff --git a/tools/src/main/twirl/org/apache/predictionio/tools/templates/itemsim/params/datasourceJson.scala.txt b/tools/src/main/twirl/org/apache/predictionio/tools/templates/itemsim/params/datasourceJson.scala.txt
new file mode 100644
index 0000000..8d4d010
--- /dev/null
+++ b/tools/src/main/twirl/org/apache/predictionio/tools/templates/itemsim/params/datasourceJson.scala.txt
@@ -0,0 +1,26 @@
+{
+  "appId": 2,
+  "actions": [
+    "view",
+    "like",
+    "dislike",
+    "conversion",
+    "rate"
+  ],
+  "attributeNames": {
+    "user" : "pio_user",
+    "item" : "pio_item",
+    "u2iActions": [
+      "view",
+      "like",
+      "dislike",
+      "conversion",
+      "rate"
+    ],
+    "itypes" : "pio_itypes",
+    "starttime" : "pio_starttime",
+    "endtime" : "pio_endtime",
+    "inactive" : "pio_inactive",
+    "rating" : "pio_rating"
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/tools/src/main/twirl/org/apache/predictionio/tools/templates/itemsim/params/preparatorJson.scala.txt
----------------------------------------------------------------------
diff --git a/tools/src/main/twirl/org/apache/predictionio/tools/templates/itemsim/params/preparatorJson.scala.txt b/tools/src/main/twirl/org/apache/predictionio/tools/templates/itemsim/params/preparatorJson.scala.txt
new file mode 100644
index 0000000..0f55d6e
--- /dev/null
+++ b/tools/src/main/twirl/org/apache/predictionio/tools/templates/itemsim/params/preparatorJson.scala.txt
@@ -0,0 +1,10 @@
+{
+  "actions": {
+    "view": 3,
+    "like": 5,
+    "dislike": 1,
+    "conversion": 4,
+    "rate": null
+  },
+  "conflict": "latest"
+}

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/tools/src/main/twirl/org/apache/predictionio/tools/templates/itemsim/params/servingJson.scala.txt
----------------------------------------------------------------------
diff --git a/tools/src/main/twirl/org/apache/predictionio/tools/templates/itemsim/params/servingJson.scala.txt b/tools/src/main/twirl/org/apache/predictionio/tools/templates/itemsim/params/servingJson.scala.txt
new file mode 100644
index 0000000..0967ef4
--- /dev/null
+++ b/tools/src/main/twirl/org/apache/predictionio/tools/templates/itemsim/params/servingJson.scala.txt
@@ -0,0 +1 @@
+{}

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/tools/src/main/twirl/org/apache/predictionio/tools/templates/scala/buildSbt.scala.txt
----------------------------------------------------------------------
diff --git a/tools/src/main/twirl/org/apache/predictionio/tools/templates/scala/buildSbt.scala.txt b/tools/src/main/twirl/org/apache/predictionio/tools/templates/scala/buildSbt.scala.txt
new file mode 100644
index 0000000..9343010
--- /dev/null
+++ b/tools/src/main/twirl/org/apache/predictionio/tools/templates/scala/buildSbt.scala.txt
@@ -0,0 +1,14 @@
+@(name: String, pioVersion: String, sparkVersion: String)
+import AssemblyKeys._
+
+assemblySettings
+
+name := "@{name}"
+
+organization := "myorg"
+
+version := "0.0.1-SNAPSHOT"
+
+libraryDependencies ++= Seq(
+  "io.prediction"    %% "core"          % "@{pioVersion}" % "provided",
+  "org.apache.spark" %% "spark-core"    % "@{sparkVersion}" % "provided")

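As a sketch, rendering this template with the hypothetical arguments
name = "myengine", pioVersion = "0.9.0" and sparkVersion = "1.3.0" would
produce, among other lines:

    name := "myengine"

    libraryDependencies ++= Seq(
      "io.prediction"    %% "core"          % "0.9.0" % "provided",
      "org.apache.spark" %% "spark-core"    % "1.3.0" % "provided")
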
http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/tools/src/main/twirl/org/apache/predictionio/tools/templates/scala/engineJson.scala.txt
----------------------------------------------------------------------
diff --git a/tools/src/main/twirl/org/apache/predictionio/tools/templates/scala/engineJson.scala.txt b/tools/src/main/twirl/org/apache/predictionio/tools/templates/scala/engineJson.scala.txt
new file mode 100644
index 0000000..c95e753
--- /dev/null
+++ b/tools/src/main/twirl/org/apache/predictionio/tools/templates/scala/engineJson.scala.txt
@@ -0,0 +1,9 @@
+@(name: String, engineFactory: String)
+{
+  "id": "default",
+  "description": "@{name}",
+  "engineFactory": "@{engineFactory}",
+  "datasource": {
+    "multiplier": 2
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/tools/src/main/twirl/org/apache/predictionio/tools/templates/scala/manifestJson.scala.txt
----------------------------------------------------------------------
diff --git a/tools/src/main/twirl/org/apache/predictionio/tools/templates/scala/manifestJson.scala.txt b/tools/src/main/twirl/org/apache/predictionio/tools/templates/scala/manifestJson.scala.txt
new file mode 100644
index 0000000..015d62b
--- /dev/null
+++ b/tools/src/main/twirl/org/apache/predictionio/tools/templates/scala/manifestJson.scala.txt
@@ -0,0 +1,9 @@
+@(id: String, version: String, name: String)
+{
+  "id": "@{id}",
+  "version": "@{version}",
+  "name": "@{name}",
+  "description": "@{name}"
+  "engineFactory": "",
+  "files": []
+}

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/tools/src/main/twirl/org/apache/predictionio/tools/templates/scala/project/assemblySbt.scala.txt
----------------------------------------------------------------------
diff --git a/tools/src/main/twirl/org/apache/predictionio/tools/templates/scala/project/assemblySbt.scala.txt b/tools/src/main/twirl/org/apache/predictionio/tools/templates/scala/project/assemblySbt.scala.txt
new file mode 100644
index 0000000..54c3252
--- /dev/null
+++ b/tools/src/main/twirl/org/apache/predictionio/tools/templates/scala/project/assemblySbt.scala.txt
@@ -0,0 +1 @@
+addSbtPlugin("com.eed3si9n" % "sbt-assembly" % "0.11.2")

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/tools/src/main/twirl/org/apache/predictionio/tools/templates/scala/src/main/scala/engine.scala.txt
----------------------------------------------------------------------
diff --git a/tools/src/main/twirl/org/apache/predictionio/tools/templates/scala/src/main/scala/engine.scala.txt b/tools/src/main/twirl/org/apache/predictionio/tools/templates/scala/src/main/scala/engine.scala.txt
new file mode 100644
index 0000000..37dcf3e
--- /dev/null
+++ b/tools/src/main/twirl/org/apache/predictionio/tools/templates/scala/src/main/scala/engine.scala.txt
@@ -0,0 +1,76 @@
+package myorg
+
+// Pulls in necessary PredictionIO controller components
+import org.apache.predictionio.controller.EmptyActualResult
+import org.apache.predictionio.controller.EmptyEvaluationInfo
+import org.apache.predictionio.controller.IEngineFactory
+import org.apache.predictionio.controller.LAlgorithm
+import org.apache.predictionio.controller.LDataSource
+import org.apache.predictionio.controller.Params
+import org.apache.predictionio.controller.SimpleEngine
+
+// All data classes must be an instance of Serializable
+class MyTrainingData(
+  val multiplier: Int
+) extends Serializable
+
+class MyQuery(
+  val multiplicand: Int
+) extends Serializable
+
+class MyModel(
+  val multiplier: Int
+) extends Serializable {
+  override def toString = s"MyModel's multiplier: ${multiplier.toString}"
+}
+
+class MyPredictedResult(
+  val product: Int
+) extends Serializable
+
+case class MyDataSourceParams(
+  val multiplier: Int
+) extends Params
+
+// Your controller components
+class MyDataSource(val dsp: MyDataSourceParams) extends LDataSource[
+    MyTrainingData,
+    EmptyEvaluationInfo,
+    MyQuery,
+    EmptyActualResult] {
+
+  /** Implement readTraining() when you are not concerned about evaluation.
+    *
+    */
+  override def readTraining(): MyTrainingData = {
+    new MyTrainingData(dsp.multiplier)
+  }
+}
+
+class MyAlgorithm extends LAlgorithm[
+    MyTrainingData,
+    MyModel,
+    MyQuery,
+    MyPredictedResult] {
+
+  override def train(pd: MyTrainingData): MyModel = {
+    // Our model is simply one integer...
+    new MyModel(pd.multiplier)
+  }
+
+  override def predict(model: MyModel, query: MyQuery): MyPredictedResult = {
+    new MyPredictedResult(query.multiplicand * model.multiplier)
+  }
+}
+
+/** Engine factory that pieces everything together. SimpleEngine only requires
+  * one DataSource and one Algorithm. Preparator is an identity function, and
+  * Serving simply outputs Algorithm's prediction without further processing.
+  */
+object MyEngineFactory extends IEngineFactory {
+  override def apply() = {
+    new SimpleEngine(
+      classOf[MyDataSource],
+      classOf[MyAlgorithm])
+  }
+}

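To trace the generated engine's arithmetic end to end, here is a minimal
sketch (not part of the template) that drives the classes above directly,
using the multiplier of 2 that the engineJson template supplies to the data
source:

    // Hypothetical driver code, assuming the template classes above.
    object MyEngineSmokeTest {
      def main(args: Array[String]): Unit = {
        val dataSource = new MyDataSource(MyDataSourceParams(multiplier = 2))
        val algorithm = new MyAlgorithm
        // train() wraps the training data's multiplier into a model
        val model = algorithm.train(dataSource.readTraining())
        // predict() multiplies the query's multiplicand by the model's multiplier
        val result = algorithm.predict(model, new MyQuery(multiplicand = 4))
        println(result.product) // prints 8
      }
    }
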
http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/tools/src/test/scala/io/prediction/tools/admin/AdminAPISpec.scala
----------------------------------------------------------------------
diff --git a/tools/src/test/scala/io/prediction/tools/admin/AdminAPISpec.scala b/tools/src/test/scala/io/prediction/tools/admin/AdminAPISpec.scala
deleted file mode 100644
index 93af23f..0000000
--- a/tools/src/test/scala/io/prediction/tools/admin/AdminAPISpec.scala
+++ /dev/null
@@ -1,66 +0,0 @@
-package io.prediction.tools.admin
-
-import akka.actor.{ActorSystem, Props}
-import akka.testkit.TestProbe
-import io.prediction.data.storage.Storage
-import org.specs2.mutable.Specification
-import spray.http._
-import spray.httpx.RequestBuilding._
-import spray.util._
-
-
-class AdminAPISpec extends Specification{
-
-  val system = ActorSystem(Utils.actorSystemNameFrom(getClass))
-  val config = AdminServerConfig(
-    ip = "localhost",
-    port = 7071)
-
-  val commandClient = new CommandClient(
-    appClient = Storage.getMetaDataApps,
-    accessKeyClient = Storage.getMetaDataAccessKeys,
-    eventClient = Storage.getLEvents()
-  )
-
-  val adminActor= system.actorOf(Props(classOf[AdminServiceActor], commandClient))
-
-  "GET / request" should {
-    "properly produce OK HttpResponses" in {
-      val probe = TestProbe()(system)
-      probe.send(adminActor, Get("/"))
-
-      probe.expectMsg(
-        HttpResponse(
-          200,
-          HttpEntity(
-            contentType = ContentTypes.`application/json`,
-            string = """{"status":"alive"}"""
-          )
-        )
-      )
-      success
-    }
-  }
-
-  "GET /cmd/app request" should {
-    "properly produce OK HttpResponses" in {
-      /*
-      val probe = TestProbe()(system)
-      probe.send(adminActor,Get("/cmd/app"))
-
-      //TODO: Need to convert the response string to the corresponding case object to assert some properties on the object
-      probe.expectMsg(
-        HttpResponse(
-          200,
-          HttpEntity(
-            contentType = ContentTypes.`application/json`,
-            string = """{"status":1}"""
-          )
-        )
-      )*/
-      pending
-    }
-  }
-
-  step(system.shutdown())
-}

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/tools/src/test/scala/org/apache/predictionio/tools/admin/AdminAPISpec.scala
----------------------------------------------------------------------
diff --git a/tools/src/test/scala/org/apache/predictionio/tools/admin/AdminAPISpec.scala b/tools/src/test/scala/org/apache/predictionio/tools/admin/AdminAPISpec.scala
new file mode 100644
index 0000000..604e593
--- /dev/null
+++ b/tools/src/test/scala/org/apache/predictionio/tools/admin/AdminAPISpec.scala
@@ -0,0 +1,66 @@
+package org.apache.predictionio.tools.admin
+
+import akka.actor.{ActorSystem, Props}
+import akka.testkit.TestProbe
+import org.apache.predictionio.data.storage.Storage
+import org.specs2.mutable.Specification
+import spray.http._
+import spray.httpx.RequestBuilding._
+import spray.util._
+
+
+class AdminAPISpec extends Specification {
+
+  val system = ActorSystem(Utils.actorSystemNameFrom(getClass))
+  val config = AdminServerConfig(
+    ip = "localhost",
+    port = 7071)
+
+  val commandClient = new CommandClient(
+    appClient = Storage.getMetaDataApps,
+    accessKeyClient = Storage.getMetaDataAccessKeys,
+    eventClient = Storage.getLEvents()
+  )
+
+  val adminActor = system.actorOf(Props(classOf[AdminServiceActor], commandClient))
+
+  "GET / request" should {
+    "properly produce OK HttpResponses" in {
+      val probe = TestProbe()(system)
+      probe.send(adminActor, Get("/"))
+
+      probe.expectMsg(
+        HttpResponse(
+          200,
+          HttpEntity(
+            contentType = ContentTypes.`application/json`,
+            string = """{"status":"alive"}"""
+          )
+        )
+      )
+      success
+    }
+  }
+
+  "GET /cmd/app request" should {
+    "properly produce OK HttpResponses" in {
+      /*
+      val probe = TestProbe()(system)
+      probe.send(adminActor,Get("/cmd/app"))
+
+      //TODO: Need to convert the response string to the corresponding case object to assert some properties on the object
+      probe.expectMsg(
+        HttpResponse(
+          200,
+          HttpEntity(
+            contentType = ContentTypes.`application/json`,
+            string = """{"status":1}"""
+          )
+        )
+      )*/
+      pending
+    }
+  }
+
+  step(system.shutdown())
+}


[02/34] incubator-predictionio git commit: rename all except examples

Posted by do...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/tools/src/main/scala/org/apache/predictionio/tools/console/Console.scala
----------------------------------------------------------------------
diff --git a/tools/src/main/scala/org/apache/predictionio/tools/console/Console.scala b/tools/src/main/scala/org/apache/predictionio/tools/console/Console.scala
new file mode 100644
index 0000000..87aac07
--- /dev/null
+++ b/tools/src/main/scala/org/apache/predictionio/tools/console/Console.scala
@@ -0,0 +1,1277 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package org.apache.predictionio.tools.console
+
+import java.io.File
+import java.net.URI
+
+import grizzled.slf4j.Logging
+import org.apache.predictionio.controller.Utils
+import org.apache.predictionio.core.BuildInfo
+import org.apache.predictionio.data.api.EventServer
+import org.apache.predictionio.data.api.EventServerConfig
+import org.apache.predictionio.data.storage
+import org.apache.predictionio.data.storage.EngineManifest
+import org.apache.predictionio.data.storage.EngineManifestSerializer
+import org.apache.predictionio.data.storage.hbase.upgrade.Upgrade_0_8_3
+import org.apache.predictionio.tools.RegisterEngine
+import org.apache.predictionio.tools.RunServer
+import org.apache.predictionio.tools.RunWorkflow
+import org.apache.predictionio.tools.admin.AdminServer
+import org.apache.predictionio.tools.admin.AdminServerConfig
+import org.apache.predictionio.tools.dashboard.Dashboard
+import org.apache.predictionio.tools.dashboard.DashboardConfig
+import org.apache.predictionio.workflow.JsonExtractorOption
+import org.apache.predictionio.workflow.JsonExtractorOption.JsonExtractorOption
+import org.apache.predictionio.workflow.WorkflowUtils
+import org.apache.commons.io.FileUtils
+import org.json4s._
+import org.json4s.native.JsonMethods._
+import org.json4s.native.Serialization.read
+import org.json4s.native.Serialization.write
+import semverfi._
+
+import scala.collection.JavaConversions._
+import scala.io.Source
+import scala.sys.process._
+import scala.util.Random
+import scalaj.http.Http
+
+case class ConsoleArgs(
+  common: CommonArgs = CommonArgs(),
+  build: BuildArgs = BuildArgs(),
+  app: AppArgs = AppArgs(),
+  accessKey: AccessKeyArgs = AccessKeyArgs(),
+  deploy: DeployArgs = DeployArgs(),
+  eventServer: EventServerArgs = EventServerArgs(),
+  adminServer: AdminServerArgs = AdminServerArgs(),
+  dashboard: DashboardArgs = DashboardArgs(),
+  upgrade: UpgradeArgs = UpgradeArgs(),
+  template: TemplateArgs = TemplateArgs(),
+  export: ExportArgs = ExportArgs(),
+  imprt: ImportArgs = ImportArgs(),
+  commands: Seq[String] = Seq(),
+  metricsClass: Option[String] = None,
+  metricsParamsJsonPath: Option[String] = None,
+  paramsPath: String = "params",
+  engineInstanceId: Option[String] = None,
+  mainClass: Option[String] = None)
+
+case class CommonArgs(
+  batch: String = "",
+  sparkPassThrough: Seq[String] = Seq(),
+  driverPassThrough: Seq[String] = Seq(),
+  pioHome: Option[String] = None,
+  sparkHome: Option[String] = None,
+  engineId: Option[String] = None,
+  engineVersion: Option[String] = None,
+  engineFactory: Option[String] = None,
+  engineParamsKey: Option[String] = None,
+  evaluation: Option[String] = None,
+  engineParamsGenerator: Option[String] = None,
+  variantJson: File = new File("engine.json"),
+  manifestJson: File = new File("manifest.json"),
+  stopAfterRead: Boolean = false,
+  stopAfterPrepare: Boolean = false,
+  skipSanityCheck: Boolean = false,
+  verbose: Boolean = false,
+  verbosity: Int = 0,
+  sparkKryo: Boolean = false,
+  scratchUri: Option[URI] = None,
+  jsonExtractor: JsonExtractorOption = JsonExtractorOption.Both)
+
+case class BuildArgs(
+  sbt: Option[File] = None,
+  sbtExtra: Option[String] = None,
+  sbtAssemblyPackageDependency: Boolean = true,
+  sbtClean: Boolean = false,
+  uberJar: Boolean = false,
+  forceGeneratePIOSbt: Boolean = false)
+
+case class DeployArgs(
+  ip: String = "0.0.0.0",
+  port: Int = 8000,
+  logUrl: Option[String] = None,
+  logPrefix: Option[String] = None)
+
+case class EventServerArgs(
+  enabled: Boolean = false,
+  ip: String = "0.0.0.0",
+  port: Int = 7070,
+  stats: Boolean = false)
+
+case class AdminServerArgs(
+  ip: String = "127.0.0.1",
+  port: Int = 7071)
+
+case class DashboardArgs(
+  ip: String = "127.0.0.1",
+  port: Int = 9000)
+
+case class UpgradeArgs(
+  from: String = "0.0.0",
+  to: String = "0.0.0",
+  oldAppId: Int = 0,
+  newAppId: Int = 0
+)
+
+object Console extends Logging {
+  def main(args: Array[String]): Unit = {
+    val parser = new scopt.OptionParser[ConsoleArgs]("pio") {
+      override def showUsageOnError: Boolean = false
+      head("PredictionIO Command Line Interface Console", BuildInfo.version)
+      help("")
+      note("Note that it is possible to supply pass-through arguments at\n" +
+        "the end of the command by using a '--' separator, e.g.\n\n" +
+        "pio train --params-path params -- --master spark://mycluster:7077\n" +
+        "\nIn the example above, the '--master' argument will be passed to\n" +
+        "underlying spark-submit command. Please refer to the usage section\n" +
+        "for each command for more information.\n\n" +
+        "The following options are common to all commands:\n")
+      opt[String]("pio-home") action { (x, c) =>
+        c.copy(common = c.common.copy(pioHome = Some(x)))
+      } text("Root directory of a PredictionIO installation.\n" +
+        "        Specify this if automatic discovery fail.")
+      opt[String]("spark-home") action { (x, c) =>
+        c.copy(common = c.common.copy(sparkHome = Some(x)))
+      } text("Root directory of an Apache Spark installation.\n" +
+        "        If not specified, will try to use the SPARK_HOME\n" +
+        "        environmental variable. If this fails as well, default to\n" +
+        "        current directory.")
+      opt[String]("engine-id") abbr("ei") action { (x, c) =>
+        c.copy(common = c.common.copy(engineId = Some(x)))
+      } text("Specify an engine ID. Usually used by distributed deployment.")
+      opt[String]("engine-version") abbr("ev") action { (x, c) =>
+        c.copy(common = c.common.copy(engineVersion = Some(x)))
+      } text("Specify an engine version. Usually used by distributed " +
+        "deployment.")
+      opt[File]("variant") abbr("v") action { (x, c) =>
+        c.copy(common = c.common.copy(variantJson = x))
+      }
+      opt[File]("manifest") abbr("m") action { (x, c) =>
+        c.copy(common = c.common.copy(manifestJson = x))
+      }
+      opt[File]("sbt") action { (x, c) =>
+        c.copy(build = c.build.copy(sbt = Some(x)))
+      } validate { x =>
+        if (x.exists) {
+          success
+        } else {
+          failure(s"${x.getCanonicalPath} does not exist.")
+        }
+      } text("Path to sbt. Default: sbt")
+      opt[Unit]("verbose") action { (x, c) =>
+        c.copy(common = c.common.copy(verbose = true))
+      }
+      opt[Unit]("spark-kryo") abbr("sk") action { (x, c) =>
+        c.copy(common = c.common.copy(sparkKryo = true))
+      }
+      opt[String]("scratch-uri") action { (x, c) =>
+        c.copy(common = c.common.copy(scratchUri = Some(new URI(x))))
+      }
+      note("")
+      cmd("version").
+        text("Displays the version of this command line console.").
+        action { (_, c) =>
+          c.copy(commands = c.commands :+ "version")
+        }
+      note("")
+      cmd("help").action { (_, c) =>
+        c.copy(commands = c.commands :+ "help")
+      } children(
+        arg[String]("<command>") optional()
+          action { (x, c) =>
+            c.copy(commands = c.commands :+ x)
+          }
+        )
+      note("")
+      cmd("build").
+        text("Build an engine at the current directory.").
+        action { (_, c) =>
+          c.copy(commands = c.commands :+ "build")
+        } children(
+          opt[String]("sbt-extra") action { (x, c) =>
+            c.copy(build = c.build.copy(sbtExtra = Some(x)))
+          } text("Extra command to pass to SBT when it builds your engine."),
+          opt[Unit]("clean") action { (x, c) =>
+            c.copy(build = c.build.copy(sbtClean = true))
+          } text("Clean build."),
+          opt[Unit]("no-asm") action { (x, c) =>
+            c.copy(build = c.build.copy(sbtAssemblyPackageDependency = false))
+          } text("Skip building external dependencies assembly."),
+          opt[Unit]("uber-jar") action { (x, c) =>
+            c.copy(build = c.build.copy(uberJar = true))
+          },
+          opt[Unit]("generate-pio-sbt") action { (x, c) =>
+            c.copy(build = c.build.copy(forceGeneratePIOSbt = true))
+          }
+        )
+      note("")
+      cmd("unregister").
+        text("Unregister an engine at the current directory.").
+        action { (_, c) =>
+          c.copy(commands = c.commands :+ "unregister")
+        }
+      note("")
+      cmd("train").
+        text("Kick off a training using an engine. This will produce an\n" +
+          "engine instance. This command will pass all pass-through\n" +
+          "arguments to its underlying spark-submit command.").
+        action { (_, c) =>
+          c.copy(commands = c.commands :+ "train")
+        } children(
+          opt[String]("batch") action { (x, c) =>
+            c.copy(common = c.common.copy(batch = x))
+          } text("Batch label of the run."),
+          opt[String]("params-path") action { (x, c) =>
+            c.copy(paramsPath = x)
+          } text("Directory to lookup parameters JSON files. Default: params"),
+          opt[String]("metrics-params") abbr("mp") action { (x, c) =>
+            c.copy(metricsParamsJsonPath = Some(x))
+          } text("Metrics parameters JSON file. Will try to use\n" +
+            "        metrics.json in the base path."),
+          opt[Unit]("skip-sanity-check") abbr("ssc") action { (x, c) =>
+            c.copy(common = c.common.copy(skipSanityCheck = true))
+          },
+          opt[Unit]("stop-after-read") abbr("sar") action { (x, c) =>
+            c.copy(common = c.common.copy(stopAfterRead = true))
+          },
+          opt[Unit]("stop-after-prepare") abbr("sap") action { (x, c) =>
+            c.copy(common = c.common.copy(stopAfterPrepare = true))
+          },
+          opt[Unit]("uber-jar") action { (x, c) =>
+            c.copy(build = c.build.copy(uberJar = true))
+          },
+          opt[Int]("verbosity") action { (x, c) =>
+            c.copy(common = c.common.copy(verbosity = x))
+          },
+          opt[String]("engine-factory") action { (x, c) =>
+            c.copy(common = c.common.copy(engineFactory = Some(x)))
+          },
+          opt[String]("engine-params-key") action { (x, c) =>
+            c.copy(common = c.common.copy(engineParamsKey = Some(x)))
+          },
+          opt[String]("json-extractor") action { (x, c) =>
+            c.copy(common = c.common.copy(jsonExtractor = JsonExtractorOption.withName(x)))
+          } validate { x =>
+              if (JsonExtractorOption.values.map(_.toString).contains(x)) {
+                success
+              } else {
+                val validOptions = JsonExtractorOption.values.mkString("|")
+                failure(s"$x is not a valid json-extractor option [$validOptions]")
+              }
+          }
+        )
+      note("")
+      cmd("eval").
+        text("Kick off an evaluation using an engine. This will produce an\n" +
+          "engine instance. This command will pass all pass-through\n" +
+          "arguments to its underlying spark-submit command.").
+        action { (_, c) =>
+          c.copy(commands = c.commands :+ "eval")
+        } children(
+          arg[String]("<evaluation-class>") action { (x, c) =>
+            c.copy(common = c.common.copy(evaluation = Some(x)))
+          },
+          arg[String]("[<engine-parameters-generator-class>]") optional() action { (x, c) =>
+            c.copy(common = c.common.copy(engineParamsGenerator = Some(x)))
+          } text("Optional engine parameters generator class, overriding the first argument"),
+          opt[String]("batch") action { (x, c) =>
+            c.copy(common = c.common.copy(batch = x))
+          } text("Batch label of the run."),
+          opt[String]("json-extractor") action { (x, c) =>
+            c.copy(common = c.common.copy(jsonExtractor = JsonExtractorOption.withName(x)))
+          } validate { x =>
+            if (JsonExtractorOption.values.map(_.toString).contains(x)) {
+              success
+            } else {
+              val validOptions = JsonExtractorOption.values.mkString("|")
+              failure(s"$x is not a valid json-extractor option [$validOptions]")
+            }
+          }
+        )
+      note("")
+      cmd("deploy").
+        text("Deploy an engine instance as a prediction server. This\n" +
+          "command will pass all pass-through arguments to its underlying\n" +
+          "spark-submit command.").
+        action { (_, c) =>
+          c.copy(commands = c.commands :+ "deploy")
+        } children(
+          opt[String]("batch") action { (x, c) =>
+            c.copy(common = c.common.copy(batch = x))
+          } text("Batch label of the deployment."),
+          opt[String]("engine-instance-id") action { (x, c) =>
+            c.copy(engineInstanceId = Some(x))
+          } text("Engine instance ID."),
+          opt[String]("ip") action { (x, c) =>
+            c.copy(deploy = c.deploy.copy(ip = x))
+          },
+          opt[Int]("port") action { (x, c) =>
+            c.copy(deploy = c.deploy.copy(port = x))
+          } text("Port to bind to. Default: 8000"),
+          opt[Unit]("feedback") action { (_, c) =>
+            c.copy(eventServer = c.eventServer.copy(enabled = true))
+          } text("Enable feedback loop to event server."),
+          opt[String]("event-server-ip") action { (x, c) =>
+            c.copy(eventServer = c.eventServer.copy(ip = x))
+          },
+          opt[Int]("event-server-port") action { (x, c) =>
+            c.copy(eventServer = c.eventServer.copy(port = x))
+          } text("Event server port. Default: 7070"),
+          opt[Int]("admin-server-port") action { (x, c) =>
+            c.copy(adminServer = c.adminServer.copy(port = x))
+          } text("Admin server port. Default: 7071"),
+          opt[String]("admin-server-port") action { (x, c) =>
+          c.copy(adminServer = c.adminServer.copy(ip = x))
+          } text("Admin server IP. Default: localhost"),
+          opt[String]("accesskey") action { (x, c) =>
+            c.copy(accessKey = c.accessKey.copy(accessKey = x))
+          } text("Access key of the App where feedback data will be stored."),
+          opt[Unit]("uber-jar") action { (x, c) =>
+            c.copy(build = c.build.copy(uberJar = true))
+          },
+          opt[String]("log-url") action { (x, c) =>
+            c.copy(deploy = c.deploy.copy(logUrl = Some(x)))
+          },
+          opt[String]("log-prefix") action { (x, c) =>
+            c.copy(deploy = c.deploy.copy(logPrefix = Some(x)))
+          },
+          opt[String]("json-extractor") action { (x, c) =>
+            c.copy(common = c.common.copy(jsonExtractor = JsonExtractorOption.withName(x)))
+          } validate { x =>
+            if (JsonExtractorOption.values.map(_.toString).contains(x)) {
+              success
+            } else {
+              val validOptions = JsonExtractorOption.values.mkString("|")
+              failure(s"$x is not a valid json-extractor option [$validOptions]")
+            }
+          }
+        )
+      note("")
+      cmd("undeploy").
+        text("Undeploy an engine instance as a prediction server.").
+        action { (_, c) =>
+          c.copy(commands = c.commands :+ "undeploy")
+        } children(
+          opt[String]("ip") action { (x, c) =>
+            c.copy(deploy = c.deploy.copy(ip = x))
+          },
+          opt[Int]("port") action { (x, c) =>
+            c.copy(deploy = c.deploy.copy(port = x))
+          } text("Port to unbind from. Default: 8000")
+        )
+      note("")
+      cmd("dashboard").
+        text("Launch a dashboard at the specific IP and port.").
+        action { (_, c) =>
+          c.copy(commands = c.commands :+ "dashboard")
+        } children(
+          opt[String]("ip") action { (x, c) =>
+            c.copy(dashboard = c.dashboard.copy(ip = x))
+          },
+          opt[Int]("port") action { (x, c) =>
+            c.copy(dashboard = c.dashboard.copy(port = x))
+          } text("Port to bind to. Default: 9000")
+        )
+      note("")
+      cmd("eventserver").
+        text("Launch an Event Server at the specific IP and port.").
+        action { (_, c) =>
+          c.copy(commands = c.commands :+ "eventserver")
+        } children(
+          opt[String]("ip") action { (x, c) =>
+            c.copy(eventServer = c.eventServer.copy(ip = x))
+          },
+          opt[Int]("port") action { (x, c) =>
+            c.copy(eventServer = c.eventServer.copy(port = x))
+          } text("Port to bind to. Default: 7070"),
+          opt[Unit]("stats") action { (x, c) =>
+            c.copy(eventServer = c.eventServer.copy(stats = true))
+          }
+        )
+      cmd("adminserver").
+        text("Launch an Admin Server at the specific IP and port.").
+        action { (_, c) =>
+        c.copy(commands = c.commands :+ "adminserver")
+      } children(
+        opt[String]("ip") action { (x, c) =>
+          c.copy(adminServer = c.adminServer.copy(ip = x))
+        } text("IP to bind to. Default: localhost"),
+        opt[Int]("port") action { (x, c) =>
+          c.copy(adminServer = c.adminServer.copy(port = x))
+        } text("Port to bind to. Default: 7071")
+        )
+      note("")
+      cmd("run").
+        text("Launch a driver program. This command will pass all\n" +
+          "pass-through arguments to its underlying spark-submit command.\n" +
+          "In addition, it also supports a second level of pass-through\n" +
+          "arguments to the driver program, e.g.\n" +
+          "pio run -- --master spark://localhost:7077 -- --driver-arg foo").
+        action { (_, c) =>
+          c.copy(commands = c.commands :+ "run")
+        } children(
+          arg[String]("<main class>") action { (x, c) =>
+            c.copy(mainClass = Some(x))
+          } text("Main class name of the driver program."),
+          opt[String]("sbt-extra") action { (x, c) =>
+            c.copy(build = c.build.copy(sbtExtra = Some(x)))
+          } text("Extra command to pass to SBT when it builds your engine."),
+          opt[Unit]("clean") action { (x, c) =>
+            c.copy(build = c.build.copy(sbtClean = true))
+          } text("Clean build."),
+          opt[Unit]("no-asm") action { (x, c) =>
+            c.copy(build = c.build.copy(sbtAssemblyPackageDependency = false))
+          } text("Skip building external dependencies assembly.")
+        )
+      note("")
+      cmd("status").
+        text("Displays status information about the PredictionIO system.").
+        action { (_, c) =>
+          c.copy(commands = c.commands :+ "status")
+        }
+      note("")
+      cmd("upgrade").
+        text("Upgrade tool").
+        action { (_, c) =>
+          c.copy(commands = c.commands :+ "upgrade")
+        } children(
+          arg[String]("<from version>") action { (x, c) =>
+            c.copy(upgrade = c.upgrade.copy(from = x))
+          } text("The version upgraded from."),
+          arg[String]("<to version>") action { (x, c) =>
+            c.copy(upgrade = c.upgrade.copy(to = x))
+          } text("The version upgraded to."),
+          arg[Int]("<old App ID>") action { (x, c) =>
+            c.copy(upgrade = c.upgrade.copy(oldAppId = x))
+          } text("Old App ID."),
+          arg[Int]("<new App ID>") action { (x, c) =>
+            c.copy(upgrade = c.upgrade.copy(newAppId = x))
+          } text("New App ID.")
+        )
+      note("")
+      cmd("app").
+        text("Manage apps.\n").
+        action { (_, c) =>
+          c.copy(commands = c.commands :+ "app")
+        } children(
+          cmd("new").
+            text("Create a new app key to app ID mapping.").
+            action { (_, c) =>
+              c.copy(commands = c.commands :+ "new")
+            } children(
+              opt[Int]("id") action { (x, c) =>
+                c.copy(app = c.app.copy(id = Some(x)))
+              },
+              opt[String]("description") action { (x, c) =>
+                c.copy(app = c.app.copy(description = Some(x)))
+              },
+              opt[String]("access-key") action { (x, c) =>
+                c.copy(accessKey = c.accessKey.copy(accessKey = x))
+              },
+              arg[String]("<name>") action { (x, c) =>
+                c.copy(app = c.app.copy(name = x))
+              }
+            ),
+          note(""),
+          cmd("list").
+            text("List all apps.").
+            action { (_, c) =>
+              c.copy(commands = c.commands :+ "list")
+            },
+          note(""),
+          cmd("show").
+            text("Show details of an app.").
+            action { (_, c) =>
+              c.copy(commands = c.commands :+ "show")
+            } children (
+              arg[String]("<name>") action { (x, c) =>
+                c.copy(app = c.app.copy(name = x))
+              } text("Name of the app to be shown.")
+            ),
+          note(""),
+          cmd("delete").
+            text("Delete an app.").
+            action { (_, c) =>
+              c.copy(commands = c.commands :+ "delete")
+            } children(
+              arg[String]("<name>") action { (x, c) =>
+                c.copy(app = c.app.copy(name = x))
+              } text("Name of the app to be deleted."),
+              opt[Unit]("force") abbr("f") action { (x, c) =>
+                c.copy(app = c.app.copy(force = true))
+              } text("Delete an app without prompting for confirmation")
+            ),
+          note(""),
+          cmd("data-delete").
+            text("Delete data of an app").
+            action { (_, c) =>
+              c.copy(commands = c.commands :+ "data-delete")
+            } children(
+              arg[String]("<name>") action { (x, c) =>
+                c.copy(app = c.app.copy(name = x))
+              } text("Name of the app whose data to be deleted."),
+              opt[String]("channel") action { (x, c) =>
+                c.copy(app = c.app.copy(dataDeleteChannel = Some(x)))
+              } text("Name of channel whose data to be deleted."),
+              opt[Unit]("all") action { (x, c) =>
+                c.copy(app = c.app.copy(all = true))
+              } text("Delete data of all channels including default"),
+              opt[Unit]("force") abbr("f") action { (x, c) =>
+                c.copy(app = c.app.copy(force = true))
+              } text("Delete data of an app without prompting for confirmation")
+            ),
+          note(""),
+          cmd("channel-new").
+            text("Create a new channel for the app.").
+            action { (_, c) =>
+              c.copy(commands = c.commands :+ "channel-new")
+            } children (
+              arg[String]("<name>") action { (x, c) =>
+                c.copy(app = c.app.copy(name = x))
+              } text("App name."),
+              arg[String]("<channel>") action { (x, c) =>
+                c.copy(app = c.app.copy(channel = x))
+              } text ("Channel name to be created.")
+            ),
+          note(""),
+          cmd("channel-delete").
+            text("Delete a channel of the app.").
+            action { (_, c) =>
+              c.copy(commands = c.commands :+ "channel-delete")
+            } children (
+              arg[String]("<name>") action { (x, c) =>
+                c.copy(app = c.app.copy(name = x))
+              } text("App name."),
+              arg[String]("<channel>") action { (x, c) =>
+                c.copy(app = c.app.copy(channel = x))
+              } text ("Channel name to be deleted."),
+              opt[Unit]("force") abbr("f") action { (x, c) =>
+                c.copy(app = c.app.copy(force = true))
+              } text("Delete a channel of the app without prompting for confirmation")
+            )
+        )
+      note("")
+      cmd("accesskey").
+        text("Manage app access keys.\n").
+        action { (_, c) =>
+          c.copy(commands = c.commands :+ "accesskey")
+        } children(
+          cmd("new").
+            text("Add allowed event(s) to an access key.").
+            action { (_, c) =>
+              c.copy(commands = c.commands :+ "new")
+            } children(
+              opt[String]("key") action { (x, c) =>
+                c.copy(accessKey = c.accessKey.copy(accessKey = x))
+              },
+              arg[String]("<app name>") action { (x, c) =>
+                c.copy(app = c.app.copy(name = x))
+              },
+              arg[String]("[<event1> <event2> ...]") unbounded() optional()
+                action { (x, c) =>
+                  c.copy(accessKey = c.accessKey.copy(
+                    events = c.accessKey.events :+ x))
+                }
+            ),
+          cmd("list").
+            text("List all access keys of an app.").
+            action { (_, c) =>
+              c.copy(commands = c.commands :+ "list")
+            } children(
+              arg[String]("<app name>") optional() action { (x, c) =>
+                c.copy(app = c.app.copy(name = x))
+              } text("App name.")
+            ),
+          note(""),
+          cmd("delete").
+            text("Delete an access key.").
+            action { (_, c) =>
+              c.copy(commands = c.commands :+ "delete")
+            } children(
+              arg[String]("<access key>") action { (x, c) =>
+                c.copy(accessKey = c.accessKey.copy(accessKey = x))
+              } text("The access key to be deleted.")
+            )
+        )
+      cmd("template").
+        action { (_, c) =>
+          c.copy(commands = c.commands :+ "template")
+        } children(
+          cmd("get").
+            action { (_, c) =>
+              c.copy(commands = c.commands :+ "get")
+            } children(
+              arg[String]("<template ID>") required() action { (x, c) =>
+                c.copy(template = c.template.copy(repository = x))
+              },
+              arg[String]("<new engine directory>") action { (x, c) =>
+                c.copy(template = c.template.copy(directory = x))
+              },
+              opt[String]("version") action { (x, c) =>
+                c.copy(template = c.template.copy(version = Some(x)))
+              },
+              opt[String]("name") action { (x, c) =>
+                c.copy(template = c.template.copy(name = Some(x)))
+              },
+              opt[String]("package") action { (x, c) =>
+                c.copy(template = c.template.copy(packageName = Some(x)))
+              },
+              opt[String]("email") action { (x, c) =>
+                c.copy(template = c.template.copy(email = Some(x)))
+              }
+            ),
+          cmd("list").
+            action { (_, c) =>
+              c.copy(commands = c.commands :+ "list")
+            }
+        )
+      cmd("export").
+        action { (_, c) =>
+          c.copy(commands = c.commands :+ "export")
+        } children(
+          opt[Int]("appid") required() action { (x, c) =>
+            c.copy(export = c.export.copy(appId = x))
+          },
+          opt[String]("output") required() action { (x, c) =>
+            c.copy(export = c.export.copy(outputPath = x))
+          },
+          opt[String]("format") action { (x, c) =>
+            c.copy(export = c.export.copy(format = x))
+          },
+          opt[String]("channel") action { (x, c) =>
+            c.copy(export = c.export.copy(channel = Some(x)))
+          }
+        )
+      cmd("import").
+        action { (_, c) =>
+          c.copy(commands = c.commands :+ "import")
+        } children(
+          opt[Int]("appid") required() action { (x, c) =>
+            c.copy(imprt = c.imprt.copy(appId = x))
+          },
+          opt[String]("input") required() action { (x, c) =>
+            c.copy(imprt = c.imprt.copy(inputPath = x))
+          },
+          opt[String]("channel") action { (x, c) =>
+            c.copy(imprt = c.imprt.copy(channel = Some(x)))
+          }
+        )
+    }
+
+    val separatorIndex = args.indexWhere(_ == "--")
+    val (consoleArgs, theRest) =
+      if (separatorIndex == -1) {
+        (args, Array[String]())
+      } else {
+        args.splitAt(separatorIndex)
+      }
+    val allPassThroughArgs = theRest.drop(1)
+    val secondSepIdx = allPassThroughArgs.indexWhere(_ == "--")
+    val (sparkPassThroughArgs, driverPassThroughArgs) =
+      if (secondSepIdx == -1) {
+        (allPassThroughArgs, Array[String]())
+      } else {
+        val t = allPassThroughArgs.splitAt(secondSepIdx)
+        (t._1, t._2.drop(1))
+      }
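+    // For example (illustrative values), "pio train -- --master local[2] -- --foo bar"
+    // yields sparkPassThroughArgs = Array("--master", "local[2]") and
+    // driverPassThroughArgs = Array("--foo", "bar").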
+
+    parser.parse(consoleArgs, ConsoleArgs()) map { pca =>
+      val ca = pca.copy(common = pca.common.copy(
+        sparkPassThrough = sparkPassThroughArgs,
+        driverPassThrough = driverPassThroughArgs))
+      WorkflowUtils.modifyLogging(ca.common.verbose)
+      val rv: Int = ca.commands match {
+        case Seq("") =>
+          System.err.println(help())
+          1
+        case Seq("version") =>
+          version(ca)
+          0
+        case Seq("build") =>
+          regenerateManifestJson(ca.common.manifestJson)
+          build(ca)
+        case Seq("unregister") =>
+          unregister(ca)
+          0
+        case Seq("train") =>
+          regenerateManifestJson(ca.common.manifestJson)
+          train(ca)
+        case Seq("eval") =>
+          regenerateManifestJson(ca.common.manifestJson)
+          train(ca)
+        case Seq("deploy") =>
+          deploy(ca)
+        case Seq("undeploy") =>
+          undeploy(ca)
+        case Seq("dashboard") =>
+          dashboard(ca)
+          0
+        case Seq("eventserver") =>
+          eventserver(ca)
+          0
+        case Seq("adminserver") =>
+          adminserver(ca)
+          0
+        case Seq("run") =>
+          generateManifestJson(ca.common.manifestJson)
+          run(ca)
+        case Seq("status") =>
+          status(ca)
+        case Seq("upgrade") =>
+          upgrade(ca)
+          0
+        case Seq("app", "new") =>
+          App.create(ca)
+        case Seq("app", "list") =>
+          App.list(ca)
+        case Seq("app", "show") =>
+          App.show(ca)
+        case Seq("app", "delete") =>
+          App.delete(ca)
+        case Seq("app", "data-delete") =>
+          App.dataDelete(ca)
+        case Seq("app", "channel-new") =>
+          App.channelNew(ca)
+        case Seq("app", "channel-delete") =>
+          App.channelDelete(ca)
+        case Seq("accesskey", "new") =>
+          AccessKey.create(ca)
+        case Seq("accesskey", "list") =>
+          AccessKey.list(ca)
+        case Seq("accesskey", "delete") =>
+          AccessKey.delete(ca)
+        case Seq("template", "get") =>
+          Template.get(ca)
+        case Seq("template", "list") =>
+          Template.list(ca)
+        case Seq("export") =>
+          Export.eventsToFile(ca)
+        case Seq("import") =>
+          Import.fileToEvents(ca)
+        case _ =>
+          System.err.println(help(ca.commands))
+          1
+      }
+      sys.exit(rv)
+    } getOrElse {
+      val command = args.toSeq.filterNot(_.startsWith("--")).
+        headOption.getOrElse("")
+      System.err.println(help(Seq(command)))
+      sys.exit(1)
+    }
+  }
+
+  def help(commands: Seq[String] = Seq()): String = {
+    if (commands.isEmpty) {
+      mainHelp
+    } else {
+      val stripped =
+        (if (commands.head == "help") commands.drop(1) else commands).
+          mkString("-")
+      helpText.getOrElse(stripped, s"Help is unavailable for ${stripped}.")
+    }
+  }
+
+  val mainHelp = txt.main().toString
+
+  val helpText = Map(
+    "" -> mainHelp,
+    "status" -> txt.status().toString,
+    "upgrade" -> txt.upgrade().toString,
+    "version" -> txt.version().toString,
+    "template" -> txt.template().toString,
+    "build" -> txt.build().toString,
+    "train" -> txt.train().toString,
+    "deploy" -> txt.deploy().toString,
+    "eventserver" -> txt.eventserver().toString,
+    "adminserver" -> txt.adminserver().toString,
+    "app" -> txt.app().toString,
+    "accesskey" -> txt.accesskey().toString,
+    "import" -> txt.imprt().toString,
+    "export" -> txt.export().toString,
+    "run" -> txt.run().toString,
+    "eval" -> txt.eval().toString,
+    "dashboard" -> txt.dashboard().toString)
+
+  def version(ca: ConsoleArgs): Unit = println(BuildInfo.version)
+
+  def build(ca: ConsoleArgs): Int = {
+    Template.verifyTemplateMinVersion(new File("template.json"))
+    compile(ca)
+    info("Looking for an engine...")
+    val jarFiles = jarFilesForScala
+    if (jarFiles.isEmpty) {
+      error("No engine found. Your build might have failed. Aborting.")
+      return 1
+    }
+    jarFiles foreach { f => info(s"Found ${f.getName}")}
+    RegisterEngine.registerEngine(
+      ca.common.manifestJson,
+      jarFiles,
+      false)
+    info("Your engine is ready for training.")
+    0
+  }
+
+  def unregister(ca: ConsoleArgs): Unit = {
+    RegisterEngine.unregisterEngine(ca.common.manifestJson)
+  }
+
+  def train(ca: ConsoleArgs): Int = {
+    Template.verifyTemplateMinVersion(new File("template.json"))
+    withRegisteredManifest(
+      ca.common.manifestJson,
+      ca.common.engineId,
+      ca.common.engineVersion) { em =>
+      RunWorkflow.newRunWorkflow(ca, em)
+    }
+  }
+
+  def deploy(ca: ConsoleArgs): Int = {
+    Template.verifyTemplateMinVersion(new File("template.json"))
+    withRegisteredManifest(
+      ca.common.manifestJson,
+      ca.common.engineId,
+      ca.common.engineVersion) { em =>
+      val variantJson = parse(Source.fromFile(ca.common.variantJson).mkString)
+      val variantId = variantJson \ "id" match {
+        case JString(s) => s
+        case _ =>
+          error("Unable to read engine variant ID from " +
+            s"${ca.common.variantJson.getCanonicalPath}. Aborting.")
+          return 1
+      }
+      val engineInstances = storage.Storage.getMetaDataEngineInstances
+      val engineInstance = ca.engineInstanceId map { eid =>
+        engineInstances.get(eid)
+      } getOrElse {
+        engineInstances.getLatestCompleted(em.id, em.version, variantId)
+      }
+      engineInstance map { r =>
+        RunServer.newRunServer(ca, em, r.id)
+      } getOrElse {
+        ca.engineInstanceId map { eid =>
+          error(
+            s"Invalid engine instance ID ${ca.engineInstanceId}. Aborting.")
+        } getOrElse {
+          error(
+            s"No valid engine instance found for engine ${em.id} " +
+              s"${em.version}.\nTry running 'train' before 'deploy'. Aborting.")
+        }
+        1
+      }
+    }
+  }
+
+  def dashboard(ca: ConsoleArgs): Unit = {
+    info(s"Creating dashboard at ${ca.dashboard.ip}:${ca.dashboard.port}")
+    Dashboard.createDashboard(DashboardConfig(
+      ip = ca.dashboard.ip,
+      port = ca.dashboard.port))
+  }
+
+  def eventserver(ca: ConsoleArgs): Unit = {
+    info(
+      s"Creating Event Server at ${ca.eventServer.ip}:${ca.eventServer.port}")
+    EventServer.createEventServer(EventServerConfig(
+      ip = ca.eventServer.ip,
+      port = ca.eventServer.port,
+      stats = ca.eventServer.stats))
+  }
+
+  def adminserver(ca: ConsoleArgs): Unit = {
+    info(
+      s"Creating Admin Server at ${ca.adminServer.ip}:${ca.adminServer.port}")
+    AdminServer.createAdminServer(AdminServerConfig(
+      ip = ca.adminServer.ip,
+      port = ca.adminServer.port
+    ))
+  }
+
+  def undeploy(ca: ConsoleArgs): Int = {
+    val serverUrl = s"http://${ca.deploy.ip}:${ca.deploy.port}"
+    info(
+      s"Undeploying any existing engine instance at ${serverUrl}")
+    try {
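+      // a deployed engine instance answers /stop; a 404 here means some
+      // other HTTP service is bound to this address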
+      val code = Http(s"${serverUrl}/stop").asString.code
+      code match {
+        case 200 => 0
+        case 404 =>
+          error(s"Another process is using ${serverUrl}. Unable to undeploy.")
+          1
+        case _ =>
+          error(s"Another process is using ${serverUrl}, or an existing " +
+            s"engine server is not responding properly (HTTP ${code}). " +
+            "Unable to undeploy.")
+          1
+      }
+    } catch {
+      case e: java.net.ConnectException =>
+        warn(s"Nothing at ${serverUrl}")
+        0
+      case _: Throwable =>
+        error("Another process might be occupying " +
+          s"${ca.deploy.ip}:${ca.deploy.port}. Unable to undeploy.")
+        1
+    }
+  }
+
+  def compile(ca: ConsoleArgs): Unit = {
+    // only add pioVersion to sbt if project/pio-build.sbt exists
+    if (new File("project", "pio-build.sbt").exists || ca.build.forceGeneratePIOSbt) {
+      FileUtils.writeLines(
+        new File("pio.sbt"),
+        Seq(
+          "// Generated automatically by pio build.",
+          "// Changes in this file will be overridden.",
+          "",
+          "pioVersion := \"" + BuildInfo.version + "\""))
+    }
+    implicit val formats = Utils.json4sDefaultFormats
+    try {
+      val engineFactory =
+        (parse(Source.fromFile("engine.json").mkString) \ "engineFactory").
+          extract[String]
+      WorkflowUtils.checkUpgrade("build", engineFactory)
+    } catch {
+      case e: Throwable => WorkflowUtils.checkUpgrade("build")
+    }
+    val sbt = detectSbt(ca)
+    info(s"Using command '${sbt}' at the current working directory to build.")
+    info("If the path above is incorrect, this process will fail.")
+    val asm =
+      if (ca.build.sbtAssemblyPackageDependency) {
+        " assemblyPackageDependency"
+      } else {
+        ""
+      }
+    val clean = if (ca.build.sbtClean) " clean" else ""
+    val buildCmd = s"${sbt} ${ca.build.sbtExtra.getOrElse("")}${clean} " +
+      (if (ca.build.uberJar) "assembly" else s"package${asm}")
+    val core = new File(s"pio-assembly-${BuildInfo.version}.jar")
+    if (ca.build.uberJar) {
+      info(s"Uber JAR enabled. Putting ${core.getName} in lib.")
+      val dst = new File("lib")
+      dst.mkdir()
+      FileUtils.copyFileToDirectory(
+        coreAssembly(ca.common.pioHome.get),
+        dst,
+        true)
+    } else {
+      if (new File("engine.json").exists()) {
+        info(s"Uber JAR disabled. Making sure lib/${core.getName} is absent.")
+        new File("lib", core.getName).delete()
+      } else {
+        info("Uber JAR disabled, but current working directory does not look " +
+          s"like an engine project directory. Please delete lib/${core.getName} manually.")
+      }
+    }
+    info(s"Going to run: ${buildCmd}")
+    try {
+      val r =
+        if (ca.common.verbose) {
+          buildCmd.!(ProcessLogger(line => info(line), line => error(line)))
+        } else {
+          buildCmd.!(ProcessLogger(
+            line => outputSbtError(line),
+            line => outputSbtError(line)))
+        }
+      if (r != 0) {
+        error(s"Return code of previous step is ${r}. Aborting.")
+        sys.exit(1)
+      }
+      info("Build finished successfully.")
+    } catch {
+      case e: java.io.IOException =>
+        error(s"${e.getMessage}")
+        sys.exit(1)
+    }
+  }
+
+  private def outputSbtError(line: String): Unit = {
+    """\[.*error.*\]""".r findFirstIn line foreach { _ => error(line) }
+  }
+
+  def run(ca: ConsoleArgs): Int = {
+    compile(ca)
+
+    val extraFiles = WorkflowUtils.thirdPartyConfFiles
+
+    val jarFiles = jarFilesForScala
+    jarFiles foreach { f => info(s"Found JAR: ${f.getName}") }
+    val allJarFiles = jarFiles.map(_.getCanonicalPath)
+    val cmd = s"${getSparkHome(ca.common.sparkHome)}/bin/spark-submit --jars " +
+      s"${allJarFiles.mkString(",")} " +
+      (if (extraFiles.size > 0) {
+        s"--files ${extraFiles.mkString(",")} "
+      } else {
+        ""
+      }) +
+      "--class " +
+      s"${ca.mainClass.get} ${ca.common.sparkPassThrough.mkString(" ")} " +
+      coreAssembly(ca.common.pioHome.get) + " " +
+      ca.common.driverPassThrough.mkString(" ")
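+    // forward all PIO_* environment variables so YARN executors see the
+    // same storage configuration as the driver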
+    val proc = Process(
+      cmd,
+      None,
+      "SPARK_YARN_USER_ENV" -> sys.env.filter(kv => kv._1.startsWith("PIO_")).
+        map(kv => s"${kv._1}=${kv._2}").mkString(","))
+    info(s"Submission command: ${cmd}")
+    val r = proc.!
+    if (r != 0) {
+      error(s"Return code of previous step is ${r}. Aborting.")
+      return 1
+    }
+    r
+  }
+
+  def status(ca: ConsoleArgs): Int = {
+    info("Inspecting PredictionIO...")
+    ca.common.pioHome map { pioHome =>
+      info(s"PredictionIO ${BuildInfo.version} is installed at $pioHome")
+    } getOrElse {
+      error("Unable to locate PredictionIO installation. Aborting.")
+      return 1
+    }
+    info("Inspecting Apache Spark...")
+    val sparkHome = getSparkHome(ca.common.sparkHome)
+    if (new File(s"$sparkHome/bin/spark-submit").exists) {
+      info(s"Apache Spark is installed at $sparkHome")
+      val sparkMinVersion = "1.3.0"
+      val sparkReleaseFile = new File(s"$sparkHome/RELEASE")
+      if (sparkReleaseFile.exists) {
+        val sparkReleaseStrings =
+          Source.fromFile(sparkReleaseFile).mkString.split(' ')
+        if (sparkReleaseStrings.length < 2) {
+          warn(stripMarginAndNewlines(
+            s"""|Apache Spark version information cannot be found (RELEASE file
+                |is empty). This is a known issue for certain vendors (e.g.
+                |Cloudera). Please make sure you are using a version of at least
+                |$sparkMinVersion."""))
+        } else {
+          val sparkReleaseVersion = sparkReleaseStrings(1)
+          val parsedMinVersion = Version.apply(sparkMinVersion)
+          val parsedCurrentVersion = Version.apply(sparkReleaseVersion)
+          if (parsedCurrentVersion >= parsedMinVersion) {
+            info(stripMarginAndNewlines(
+              s"""|Apache Spark $sparkReleaseVersion detected (meets minimum
+                  |requirement of $sparkMinVersion)"""))
+          } else {
+            error(stripMarginAndNewlines(
+              s"""|Apache Spark $sparkReleaseVersion detected (does not meet
+                  |minimum requirement of $sparkMinVersion). Aborting."""))
+          }
+        }
+      } else {
+        warn(stripMarginAndNewlines(
+          s"""|Apache Spark version information cannot be found. If you are
+              |using a developmental tree, please make sure you are using a
+              |version of at least $sparkMinVersion."""))
+      }
+    } else {
+      error("Unable to locate a proper Apache Spark installation. Aborting.")
+      return 1
+    }
+    info("Inspecting storage backend connections...")
+    try {
+      storage.Storage.verifyAllDataObjects()
+    } catch {
+      case e: Throwable =>
+        error("Unable to connect to all storage backends successfully. The " +
+          "following shows the error message from the storage backend.")
+        error(s"${e.getMessage} (${e.getClass.getName})", e)
+        error("Dumping configuration of initialized storage backend sources. " +
+          "Please make sure they are correct.")
+        storage.Storage.config.get("sources") map { src =>
+          src foreach { case (s, p) =>
+            error(s"Source Name: $s; Type: ${p.getOrElse("type", "(error)")}; " +
+              s"Configuration: ${p.getOrElse("config", "(error)")}")
+          }
+        } getOrElse {
+          error("No properly configured storage backend sources.")
+        }
+        return 1
+    }
+    info("(sleeping 5 seconds for all messages to show up...)")
+    Thread.sleep(5000)
+    info("Your system is all ready to go.")
+    0
+  }
+
+  def upgrade(ca: ConsoleArgs): Unit = {
+    (ca.upgrade.from, ca.upgrade.to) match {
+      case ("0.8.2", "0.8.3") => {
+        Upgrade_0_8_3.runMain(ca.upgrade.oldAppId, ca.upgrade.newAppId)
+      }
+      case _ =>
+        println(s"Upgrade from version ${ca.upgrade.from} to ${ca.upgrade.to}"
+          + s" is not supported.")
+    }
+  }
+
+  def coreAssembly(pioHome: String): File = {
+    val core = s"pio-assembly-${BuildInfo.version}.jar"
+    val coreDir =
+      if (new File(pioHome + File.separator + "RELEASE").exists) {
+        new File(pioHome + File.separator + "lib")
+      } else {
+        new File(pioHome + File.separator + "assembly")
+      }
+    val coreFile = new File(coreDir, core)
+    if (coreFile.exists) {
+      coreFile
+    } else {
+      error(s"PredictionIO Core Assembly (${coreFile.getCanonicalPath}) does " +
+        "not exist. Aborting.")
+      sys.exit(1)
+    }
+  }
+
+  val manifestAutogenTag = "pio-autogen-manifest"
+
+  def regenerateManifestJson(json: File): Unit = {
+    val cwd = sys.props("user.dir")
+    val ha = java.security.MessageDigest.getInstance("SHA-1").
+      digest(cwd.getBytes).map("%02x".format(_)).mkString
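+    // the digest of the project path doubles as the manifest version; a
+    // mismatch below means the project directory has been copied or moved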
+    if (json.exists) {
+      val em = readManifestJson(json)
+      if (em.description == Some(manifestAutogenTag) && ha != em.version) {
+        warn("This engine project directory contains an auto-generated " +
+          "manifest that has been copied/moved from another location. ")
+        warn("Regenerating the manifest to reflect the updated location. " +
+          "This will dissociate with all previous engine instances.")
+        generateManifestJson(json)
+      } else {
+        info(s"Using existing engine manifest JSON at ${json.getCanonicalPath}")
+      }
+    } else {
+      generateManifestJson(json)
+    }
+  }
+
+  def generateManifestJson(json: File): Unit = {
+    val cwd = sys.props("user.dir")
+    implicit val formats = Utils.json4sDefaultFormats +
+      new EngineManifestSerializer
+    val rand = Random.alphanumeric.take(32).mkString
+    val ha = java.security.MessageDigest.getInstance("SHA-1").
+      digest(cwd.getBytes).map("%02x".format(_)).mkString
+    val em = EngineManifest(
+      id = rand,
+      version = ha,
+      name = new File(cwd).getName,
+      description = Some(manifestAutogenTag),
+      files = Seq(),
+      engineFactory = "")
+    try {
+      FileUtils.writeStringToFile(json, write(em), "ISO-8859-1")
+    } catch {
+      case e: java.io.IOException =>
+        error(s"Cannot generate ${json} automatically (${e.getMessage}). " +
+          "Aborting.")
+        sys.exit(1)
+    }
+  }
+
+  def readManifestJson(json: File): EngineManifest = {
+    implicit val formats = Utils.json4sDefaultFormats +
+      new EngineManifestSerializer
+    try {
+      read[EngineManifest](Source.fromFile(json).mkString)
+    } catch {
+      case e: java.io.FileNotFoundException =>
+        error(s"${json.getCanonicalPath} does not exist. Aborting.")
+        sys.exit(1)
+      case e: MappingException =>
+        error(s"${json.getCanonicalPath} has invalid content: " +
+          e.getMessage)
+        sys.exit(1)
+    }
+  }
+
+  def withRegisteredManifest(
+      json: File,
+      engineId: Option[String],
+      engineVersion: Option[String])(
+      op: EngineManifest => Int): Int = {
+    val ej = readManifestJson(json)
+    val id = engineId getOrElse ej.id
+    val version = engineVersion getOrElse ej.version
+    storage.Storage.getMetaDataEngineManifests.get(id, version) map {
+      op
+    } getOrElse {
+      error(s"Engine ${id} ${version} cannot be found in the system.")
+      error("Possible reasons:")
+      error("- the engine is not yet built by the 'build' command;")
+      error("- the meta data store is offline.")
+      1
+    }
+  }
+
+  def jarFilesAt(path: File): Array[File] = recursiveListFiles(path) filter {
+    _.getName.toLowerCase.endsWith(".jar")
+  }
+
+  def jarFilesForScala: Array[File] = {
+    val libFiles = jarFilesForScalaFilter(jarFilesAt(new File("lib")))
+    val targetFiles = jarFilesForScalaFilter(jarFilesAt(new File("target" +
+      File.separator + s"scala-${scalaVersionNoPatch}")))
+    // Use libFiles if target is empty.
+    if (targetFiles.size > 0) targetFiles else libFiles
+  }
+
+  def jarFilesForScalaFilter(jars: Array[File]): Array[File] =
+    jars.filterNot { f =>
+      f.getName.toLowerCase.endsWith("-javadoc.jar") ||
+      f.getName.toLowerCase.endsWith("-sources.jar")
+    }
+
+  def recursiveListFiles(f: File): Array[File] = {
+    Option(f.listFiles) map { these =>
+      these ++ these.filter(_.isDirectory).flatMap(recursiveListFiles)
+    } getOrElse Array[File]()
+  }
+
+  def getSparkHome(sparkHome: Option[String]): String = {
+    sparkHome getOrElse {
+      sys.env.getOrElse("SPARK_HOME", ".")
+    }
+  }
+
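+  // e.g. versionNoPatch("2.11.8") returns "2.11"; scalaVersionNoPatch below
+  // relies on this to locate the target/scala-<version> build output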
+  def versionNoPatch(fullVersion: String): String = {
+    val v = """^(\d+\.\d+)""".r
+    val versionNoPatch = for {
+      v(np) <- v findFirstIn fullVersion
+    } yield np
+    versionNoPatch.getOrElse(fullVersion)
+  }
+
+  def scalaVersionNoPatch: String = versionNoPatch(BuildInfo.scalaVersion)
+
+  def detectSbt(ca: ConsoleArgs): String = {
+    ca.build.sbt map {
+      _.getCanonicalPath
+    } getOrElse {
+      val f = new File(Seq(ca.common.pioHome.get, "sbt", "sbt").mkString(
+        File.separator))
+      if (f.exists) f.getCanonicalPath else "sbt"
+    }
+  }
+
+  def stripMarginAndNewlines(string: String): String =
+    string.stripMargin.replaceAll("\n", " ")
+}

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/tools/src/main/scala/org/apache/predictionio/tools/console/Export.scala
----------------------------------------------------------------------
diff --git a/tools/src/main/scala/org/apache/predictionio/tools/console/Export.scala b/tools/src/main/scala/org/apache/predictionio/tools/console/Export.scala
new file mode 100644
index 0000000..7c0dfa4
--- /dev/null
+++ b/tools/src/main/scala/org/apache/predictionio/tools/console/Export.scala
@@ -0,0 +1,42 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package org.apache.predictionio.tools.console
+
+import org.apache.predictionio.tools.Runner
+
+case class ExportArgs(
+  appId: Int = 0,
+  channel: Option[String] = None,
+  outputPath: String = "",
+  format: String = "json")
+
+object Export {
+  def eventsToFile(ca: ConsoleArgs): Int = {
+    val channelArg = ca.export.channel
+      .map(ch => Seq("--channel", ch)).getOrElse(Nil)
+    Runner.runOnSpark(
+      "org.apache.predictionio.tools.export.EventsToFile",
+      Seq(
+        "--appid",
+        ca.export.appId.toString,
+        "--output",
+        ca.export.outputPath,
+        "--format",
+        ca.export.format) ++ channelArg,
+      ca,
+      Nil)
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/tools/src/main/scala/org/apache/predictionio/tools/console/Import.scala
----------------------------------------------------------------------
diff --git a/tools/src/main/scala/org/apache/predictionio/tools/console/Import.scala b/tools/src/main/scala/org/apache/predictionio/tools/console/Import.scala
new file mode 100644
index 0000000..185aefb
--- /dev/null
+++ b/tools/src/main/scala/org/apache/predictionio/tools/console/Import.scala
@@ -0,0 +1,39 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package org.apache.predictionio.tools.console
+
+import org.apache.predictionio.tools.Runner
+
+case class ImportArgs(
+  appId: Int = 0,
+  channel: Option[String] = None,
+  inputPath: String = "")
+
+object Import {
+  def fileToEvents(ca: ConsoleArgs): Int = {
+    val channelArg = ca.imprt.channel
+      .map(ch => Seq("--channel", ch)).getOrElse(Nil)
+    Runner.runOnSpark(
+      "org.apache.predictionio.tools.imprt.FileToEvents",
+      Seq(
+        "--appid",
+        ca.imprt.appId.toString,
+        "--input",
+        ca.imprt.inputPath) ++ channelArg,
+      ca,
+      Nil)
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/tools/src/main/scala/org/apache/predictionio/tools/console/Template.scala
----------------------------------------------------------------------
diff --git a/tools/src/main/scala/org/apache/predictionio/tools/console/Template.scala b/tools/src/main/scala/org/apache/predictionio/tools/console/Template.scala
new file mode 100644
index 0000000..f47cacf
--- /dev/null
+++ b/tools/src/main/scala/org/apache/predictionio/tools/console/Template.scala
@@ -0,0 +1,429 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package org.apache.predictionio.tools.console
+
+import java.io.BufferedInputStream
+import java.io.BufferedOutputStream
+import java.io.File
+import java.io.FileInputStream
+import java.io.FileOutputStream
+import java.net.ConnectException
+import java.net.URI
+import java.util.zip.ZipInputStream
+
+import grizzled.slf4j.Logging
+import org.apache.predictionio.controller.Utils
+import org.apache.predictionio.core.BuildInfo
+import org.apache.commons.io.FileUtils
+import org.json4s._
+import org.json4s.native.JsonMethods._
+import org.json4s.native.Serialization.read
+import org.json4s.native.Serialization.write
+import semverfi._
+
+import scala.io.Source
+import scala.sys.process._
+import scalaj.http._
+
+case class TemplateArgs(
+  directory: String = "",
+  repository: String = "",
+  version: Option[String] = None,
+  name: Option[String] = None,
+  packageName: Option[String] = None,
+  email: Option[String] = None)
+
+case class GitHubTag(
+  name: String,
+  zipball_url: String,
+  tarball_url: String,
+  commit: GitHubCommit)
+
+case class GitHubCommit(
+  sha: String,
+  url: String)
+
+case class GitHubCache(
+  headers: Map[String, String],
+  body: String)
+
+case class TemplateEntry(
+  repo: String)
+
+case class TemplateMetaData(
+  pioVersionMin: Option[String] = None)
+
+object Template extends Logging {
+  implicit val formats = Utils.json4sDefaultFormats
+
+  def templateMetaData(templateJson: File): TemplateMetaData = {
+    if (!templateJson.exists) {
+      warn(s"$templateJson does not exist. Template metadata will not be available. " +
+        "(This is safe to ignore if you are not working on a template.)")
+      TemplateMetaData()
+    } else {
+      val jsonString = Source.fromFile(templateJson)(scala.io.Codec.ISO8859).mkString
+      val json = try {
+        parse(jsonString)
+      } catch {
+        case e: org.json4s.ParserUtil.ParseException =>
+          warn(s"$templateJson cannot be parsed. Template metadata will not be available.")
+          return TemplateMetaData()
+      }
+      val pioVersionMin = json \ "pio" \ "version" \ "min"
+      pioVersionMin match {
+        case JString(s) => TemplateMetaData(pioVersionMin = Some(s))
+        case _ => TemplateMetaData()
+      }
+    }
+  }
+
+  /** Creates a wrapper that provides the functionality of scalaj.http.Http()
+    * with automatic proxy settings handling. Proxy settings are taken first
+    * from the git configuration ("http.proxy"), then from the system
+    * properties "http.proxyHost" and "http.proxyPort".
+    *
+    * @param url URL to connect to
+    * @return an HttpRequest with proxy settings applied when available
+    */
+  def httpOptionalProxy(url: String): HttpRequest = {
+    val gitProxy = try {
+      Some(Process("git config --global http.proxy").lines.toList(0))
+    } catch {
+      case e: Throwable => None
+    }
+
+    val (host, port) = gitProxy map { p =>
+      val proxyUri = new URI(p)
+      (Option(proxyUri.getHost),
+        if (proxyUri.getPort == -1) None else Some(proxyUri.getPort))
+    } getOrElse {
+      (sys.props.get("http.proxyHost"),
+        sys.props.get("http.proxyPort").map { p =>
+          try {
+            Some(p.toInt)
+          } catch {
+            case e: NumberFormatException => None
+          }
+        } getOrElse None)
+    }
+
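+    // e.g. a git proxy of "http://proxy.example.com:3128" (placeholder
+    // value) yields host = Some("proxy.example.com") and port = Some(3128);
+    // anything short of a full host/port pair means a direct connection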
+    (host, port) match {
+      case (Some(h), Some(p)) => Http(url).proxy(h, p)
+      case _ => Http(url)
+    }
+  }
+
+  def getGitHubRepos(
+      repos: Seq[String],
+      apiType: String,
+      repoFilename: String): Map[String, GitHubCache] = {
+    val reposCache = try {
+      val cache =
+        Source.fromFile(repoFilename)(scala.io.Codec.ISO8859).mkString
+      read[Map[String, GitHubCache]](cache)
+    } catch {
+      case e: Throwable => Map[String, GitHubCache]()
+    }
+    val newReposCache = reposCache ++ (try {
+      repos.map { repo =>
+        val url = s"https://api.github.com/repos/$repo/$apiType"
+        val http = httpOptionalProxy(url)
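+        // replay the cached ETag so GitHub can answer 304 Not Modified and
+        // the cached body can be reused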
+        val response = reposCache.get(repo).map { cache =>
+          cache.headers.get("ETag").map { etag =>
+            http.header("If-None-Match", etag).asString
+          } getOrElse {
+            http.asString
+          }
+        } getOrElse {
+          http.asString
+        }
+
+        val body = if (response.code == 304) {
+          reposCache(repo).body
+        } else {
+          response.body
+        }
+
+        repo -> GitHubCache(headers = response.headers, body = body)
+      }.toMap
+    } catch {
+      case e: ConnectException =>
+        githubConnectErrorMessage(e)
+        Map()
+    })
+    FileUtils.writeStringToFile(
+      new File(repoFilename),
+      write(newReposCache),
+      "ISO-8859-1")
+    newReposCache
+  }
+
+  def sub(repo: String, name: String, email: String, org: String): Unit = {
+    val data = Map(
+      "repo" -> repo,
+      "name" -> name,
+      "email" -> email,
+      "org" -> org)
+    try {
+      httpOptionalProxy("https://update.prediction.io/templates.subscribe").
+        postData("json=" + write(data)).asString
+    } catch {
+      case e: Throwable => error("Unable to subscribe.")
+    }
+  }
+
+  def meta(repo: String, name: String, org: String): Unit = {
+    try {
+      httpOptionalProxy(
+        s"https://meta.prediction.io/templates/$repo/$org/$name").asString
+    } catch {
+      case e: Throwable => debug("Template metadata unavailable.")
+    }
+  }
+
+  def list(ca: ConsoleArgs): Int = {
+    val templatesUrl = "https://templates.prediction.io/index.json"
+    try {
+      val templatesJson = Source.fromURL(templatesUrl).mkString("")
+      val templates = read[List[TemplateEntry]](templatesJson)
+      println("The following is a list of template IDs registered on " +
+        "PredictionIO Template Gallery:")
+      println()
+      templates.sortBy(_.repo.toLowerCase).foreach { template =>
+        println(template.repo)
+      }
+      println()
+      println("Notice that it is possible use any GitHub repository as your " +
+        "engine template ID (e.g. YourOrg/YourTemplate).")
+      0
+    } catch {
+      case e: Throwable =>
+        error(s"Unable to list templates from $templatesUrl " +
+          s"(${e.getMessage}). Aborting.")
+        1
+    }
+  }
+
+  def githubConnectErrorMessage(e: ConnectException): Unit = {
+    error(s"Unable to connect to GitHub (Reason: ${e.getMessage}). " +
+      "Please check your network configuration and proxy settings.")
+  }
+
+  def get(ca: ConsoleArgs): Int = {
+    val repos =
+      getGitHubRepos(Seq(ca.template.repository), "tags", ".templates-cache")
+
+    repos.get(ca.template.repository).map { repo =>
+      try {
+        read[List[GitHubTag]](repo.body)
+      } catch {
+        case e: MappingException =>
+          error(s"Either ${ca.template.repository} is not a valid GitHub " +
+            "repository, or it does not have any tag. Aborting.")
+          return 1
+      }
+    } getOrElse {
+      error(s"Failed to retrieve ${ca.template.repository}. Aborting.")
+      return 1
+    }
+
+    val name = ca.template.name getOrElse {
+      try {
+        Process("git config --global user.name").lines.toList(0)
+      } catch {
+        case e: Throwable =>
+          readLine("Please enter author's name: ")
+      }
+    }
+
+    val organization = ca.template.packageName getOrElse {
+      readLine(
+        "Please enter the template's Scala package name (e.g. com.mycompany): ")
+    }
+
+    val email = ca.template.email getOrElse {
+      try {
+        Process("git config --global user.email").lines.toList(0)
+      } catch {
+        case e: Throwable =>
+          readLine("Please enter author's e-mail address: ")
+      }
+    }
+
+    println(s"Author's name:         $name")
+    println(s"Author's e-mail:       $email")
+    println(s"Author's organization: $organization")
+
+    var subscribe = readLine("Would you like to be informed about new bug " +
+      "fixes and security updates of this template? (Y/n) ")
+    var valid = false
+
+    do {
+      subscribe match {
+        case "" | "Y" | "y" =>
+          sub(ca.template.repository, name, email, organization)
+          valid = true
+        case "n" | "N" =>
+          meta(ca.template.repository, name, organization)
+          valid = true
+        case _ =>
+          println("Please answer 'y' or 'n'")
+          subscribe = readLine("(Y/n)? ")
+      }
+    } while (!valid)
+
+    val repo = repos(ca.template.repository)
+
+    println(s"Retrieving ${ca.template.repository}")
+    val tags = read[List[GitHubTag]](repo.body)
+    println(s"There are ${tags.size} tags")
+
+    if (tags.isEmpty) {
+      println(s"${ca.template.repository} does not have any tags. Aborting.")
+      return 1
+    }
+
+    val tag = ca.template.version.map { v =>
+      tags.find(_.name == v).getOrElse {
+        println(s"${ca.template.repository} does not have tag $v. Aborting.")
+        return 1
+      }
+    } getOrElse tags.head
+
+    println(s"Using tag ${tag.name}")
+    val url =
+      s"https://github.com/${ca.template.repository}/archive/${tag.name}.zip"
+    println(s"Going to download $url")
+    val trial = try {
+      httpOptionalProxy(url).asBytes
+    } catch {
+      case e: ConnectException =>
+        githubConnectErrorMessage(e)
+        return 1
+    }
+    val finalTrial = try {
+      trial.location.map { loc =>
+        println(s"Redirecting to $loc")
+        httpOptionalProxy(loc).asBytes
+      } getOrElse trial
+    } catch {
+      case e: ConnectException =>
+        githubConnectErrorMessage(e)
+        return 1
+    }
+    val zipFilename =
+      s"${ca.template.repository.replace('/', '-')}-${tag.name}.zip"
+    FileUtils.writeByteArrayToFile(
+      new File(zipFilename),
+      finalTrial.body)
+    val zis = new ZipInputStream(
+      new BufferedInputStream(new FileInputStream(zipFilename)))
+    val bufferSize = 4096
+    val filesToModify = collection.mutable.ListBuffer[String]()
+    var ze = zis.getNextEntry
+    while (ze != null) {
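+      // GitHub archives nest everything under a single "<repo>-<tag>/"
+      // directory; dropping the first path segment re-roots each entry
+      // inside the target directory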
+      val filenameSegments = ze.getName.split(File.separatorChar)
+      val destFilename = (ca.template.directory +: filenameSegments.tail).
+        mkString(File.separator)
+      if (ze.isDirectory) {
+        new File(destFilename).mkdirs
+      } else {
+        val os = new BufferedOutputStream(
+          new FileOutputStream(destFilename),
+          bufferSize)
+        val data = Array.ofDim[Byte](bufferSize)
+        var count = zis.read(data, 0, bufferSize)
+        while (count != -1) {
+          os.write(data, 0, count)
+          count = zis.read(data, 0, bufferSize)
+        }
+        os.flush()
+        os.close()
+
+        val nameOnly = new File(destFilename).getName
+
+        if (organization != "" &&
+          (nameOnly.endsWith(".scala") ||
+            nameOnly == "build.sbt" ||
+            nameOnly == "engine.json")) {
+          filesToModify += destFilename
+        }
+      }
+      ze = zis.getNextEntry
+    }
+    zis.close()
+    new File(zipFilename).delete
+
+    val engineJsonFile =
+      new File(ca.template.directory, "engine.json")
+
+    val engineJson = try {
+      Some(parse(Source.fromFile(engineJsonFile).mkString))
+    } catch {
+      case e: java.io.IOException =>
+        error("Unable to read engine.json. Skipping automatic package " +
+          "name replacement.")
+        None
+      case e: MappingException =>
+        error("Unable to parse engine.json. Skipping automatic package " +
+          "name replacement.")
+        None
+    }
+
+    val engineFactory = engineJson.map { ej =>
+      (ej \ "engineFactory").extractOpt[String]
+    } getOrElse None
+
+    engineFactory.map { ef =>
+      val pkgName = ef.split('.').dropRight(1).mkString(".")
+      println(s"Replacing $pkgName with $organization...")
+
+      filesToModify.foreach { ftm =>
+        println(s"Processing $ftm...")
+        val fileContent = Source.fromFile(ftm).getLines()
+        val processedLines =
+          fileContent.map(_.replaceAllLiterally(pkgName, organization))
+        FileUtils.writeStringToFile(
+          new File(ftm),
+          processedLines.mkString("\n"))
+      }
+    } getOrElse {
+      error("engineFactory is not found in engine.json. Skipping automatic " +
+        "package name replacement.")
+    }
+
+    verifyTemplateMinVersion(new File(ca.template.directory, "template.json"))
+
+    println(s"Engine template ${ca.template.repository} is now ready at " +
+      ca.template.directory)
+
+    0
+  }
+
+  def verifyTemplateMinVersion(templateJsonFile: File): Unit = {
+    val metadata = templateMetaData(templateJsonFile)
+
+    metadata.pioVersionMin.foreach { pvm =>
+      if (Version(BuildInfo.version) < Version(pvm)) {
+        error(s"This engine template requires at least PredictionIO $pvm. " +
+          s"The template may not work with PredictionIO ${BuildInfo.version}.")
+        sys.exit(1)
+      }
+    }
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/tools/src/main/scala/org/apache/predictionio/tools/dashboard/CorsSupport.scala
----------------------------------------------------------------------
diff --git a/tools/src/main/scala/org/apache/predictionio/tools/dashboard/CorsSupport.scala b/tools/src/main/scala/org/apache/predictionio/tools/dashboard/CorsSupport.scala
new file mode 100644
index 0000000..aaafd8a
--- /dev/null
+++ b/tools/src/main/scala/org/apache/predictionio/tools/dashboard/CorsSupport.scala
@@ -0,0 +1,75 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package org.apache.predictionio.tools.dashboard
+
+// Adapted from: https://gist.github.com/waymost/4b5598523c2c7361abea
+
+import spray.http.{HttpMethods, HttpMethod, HttpResponse, AllOrigins}
+import spray.http.HttpHeaders._
+import spray.http.HttpMethods._
+import spray.http.HttpEntity
+import spray.routing._
+import spray.http.StatusCodes
+import spray.http.ContentTypes
+
+// see also https://developer.mozilla.org/en-US/docs/Web/HTTP/Access_control_CORS
+trait CORSSupport {
+  this: HttpService =>
+
+  private val allowOriginHeader = `Access-Control-Allow-Origin`(AllOrigins)
+  private val optionsCorsHeaders = List(
+    `Access-Control-Allow-Headers`("""Origin,
+                                      |X-Requested-With,
+                                      |Content-Type,
+                                      |Accept,
+                                      |Accept-Encoding,
+                                      |Accept-Language,
+                                      |Host,
+                                      |Referer,
+                                      |User-Agent""".stripMargin.replace("\n", " ")),
+    `Access-Control-Max-Age`(1728000)
+  )
+
+  def cors[T]: Directive0 = mapRequestContext { ctx =>
+    ctx.withRouteResponseHandling {
+      // OPTIONS request for a resource that responds to other methods
+      case Rejected(x) if (ctx.request.method.equals(HttpMethods.OPTIONS) &&
+          x.exists(_.isInstanceOf[MethodRejection])) => {
+        val allowedMethods: List[HttpMethod] = x.collect {
+          case rejection: MethodRejection => rejection.supported
+        }
+        ctx.complete {
+          HttpResponse().withHeaders(
+            `Access-Control-Allow-Methods`(HttpMethods.OPTIONS, allowedMethods :_*) ::
+            allowOriginHeader ::
+            optionsCorsHeaders
+          )
+        }
+      }
+    }.withHttpResponseHeadersMapped { headers =>
+      allowOriginHeader :: headers
+    }
+  }
+
+  override def timeoutRoute: StandardRoute = complete {
+    HttpResponse(
+      StatusCodes.InternalServerError,
+      HttpEntity(ContentTypes.`text/plain(UTF-8)`,
+          "The server was not able to produce a timely response to your request."),
+      List(allowOriginHeader)
+    )
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/tools/src/main/scala/org/apache/predictionio/tools/dashboard/Dashboard.scala
----------------------------------------------------------------------
diff --git a/tools/src/main/scala/org/apache/predictionio/tools/dashboard/Dashboard.scala b/tools/src/main/scala/org/apache/predictionio/tools/dashboard/Dashboard.scala
new file mode 100644
index 0000000..bfd7c64
--- /dev/null
+++ b/tools/src/main/scala/org/apache/predictionio/tools/dashboard/Dashboard.scala
@@ -0,0 +1,156 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package org.apache.predictionio.tools.dashboard
+
+import com.typesafe.config.ConfigFactory
+import org.apache.predictionio.authentication.KeyAuthentication
+import org.apache.predictionio.configuration.SSLConfiguration
+import org.apache.predictionio.data.storage.Storage
+import spray.can.server.ServerSettings
+import spray.routing.directives.AuthMagnet
+import scala.concurrent.{Future, ExecutionContext}
+import akka.actor.{ActorContext, Actor, ActorSystem, Props}
+import akka.io.IO
+import akka.pattern.ask
+import akka.util.Timeout
+import com.github.nscala_time.time.Imports.DateTime
+import grizzled.slf4j.Logging
+import spray.can.Http
+import spray.http._
+import spray.http.MediaTypes._
+import spray.routing._
+import spray.routing.authentication.{Authentication, UserPass, BasicAuth}
+
+import scala.concurrent.duration._
+
+case class DashboardConfig(
+  ip: String = "localhost",
+  port: Int = 9000)
+
+object Dashboard extends Logging with SSLConfiguration {
+  def main(args: Array[String]): Unit = {
+    val parser = new scopt.OptionParser[DashboardConfig]("Dashboard") {
+      opt[String]("ip") action { (x, c) =>
+        c.copy(ip = x)
+      } text("IP to bind to (default: localhost).")
+      opt[Int]("port") action { (x, c) =>
+        c.copy(port = x)
+      } text("Port to bind to (default: 9000).")
+    }
+
+    parser.parse(args, DashboardConfig()) map { dc =>
+      createDashboard(dc)
+    }
+  }
+
+  def createDashboard(dc: DashboardConfig): Unit = {
+    implicit val system = ActorSystem("pio-dashboard")
+    val service =
+      system.actorOf(Props(classOf[DashboardActor], dc), "dashboard")
+    implicit val timeout = Timeout(5.seconds)
+    val settings = ServerSettings(system)
+    IO(Http) ? Http.Bind(
+      service,
+      interface = dc.ip,
+      port = dc.port,
+      settings = Some(settings.copy(sslEncryption = true)))
+    system.awaitTermination
+  }
+}
+
+class DashboardActor(
+    val dc: DashboardConfig)
+  extends Actor with DashboardService {
+  def actorRefFactory: ActorContext = context
+  def receive: Actor.Receive = runRoute(dashboardRoute)
+}
+
+trait DashboardService extends HttpService with KeyAuthentication with CORSSupport {
+
+  implicit def executionContext: ExecutionContext = actorRefFactory.dispatcher
+  val dc: DashboardConfig
+  val evaluationInstances = Storage.getMetaDataEvaluationInstances
+  val pioEnvVars = sys.env.filter(kv => kv._1.startsWith("PIO_"))
+  val serverStartTime = DateTime.now
+  val dashboardRoute =
+    path("") {
+      authenticate(withAccessKeyFromFile) { request =>
+        get {
+          respondWithMediaType(`text/html`) {
+            complete {
+              val completedInstances = evaluationInstances.getCompleted
+              html.index(
+                dc,
+                serverStartTime,
+                pioEnvVars,
+                completedInstances).toString
+            }
+          }
+        }
+      }
+    } ~
+    pathPrefix("engine_instances" / Segment) { instanceId =>
+      path("evaluator_results.txt") {
+        get {
+          respondWithMediaType(`text/plain`) {
+            evaluationInstances.get(instanceId).map { i =>
+              complete(i.evaluatorResults)
+            } getOrElse {
+              complete(StatusCodes.NotFound)
+            }
+          }
+        }
+      } ~
+      path("evaluator_results.html") {
+        get {
+          respondWithMediaType(`text/html`) {
+            evaluationInstances.get(instanceId).map { i =>
+              complete(i.evaluatorResultsHTML)
+            } getOrElse {
+              complete(StatusCodes.NotFound)
+            }
+          }
+        }
+      } ~
+      path("evaluator_results.json") {
+        get {
+          respondWithMediaType(`application/json`) {
+            evaluationInstances.get(instanceId).map { i =>
+              complete(i.evaluatorResultsJSON)
+            } getOrElse {
+              complete(StatusCodes.NotFound)
+            }
+          }
+        }
+      } ~
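+      // the same payload once more, but with CORS headers, so that pages
+      // served from another origin can fetch it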
+      cors {
+        path("local_evaluator_results.json") {
+          get {
+            respondWithMediaType(`application/json`) {
+              evaluationInstances.get(instanceId).map { i =>
+                complete(i.evaluatorResultsJSON)
+              } getOrElse {
+                complete(StatusCodes.NotFound)
+              }
+            }
+          }
+        }
+      }
+    } ~
+    pathPrefix("assets") {
+      getFromResourceDirectory("assets")
+    }
+}

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/tools/src/main/scala/org/apache/predictionio/tools/export/EventsToFile.scala
----------------------------------------------------------------------
diff --git a/tools/src/main/scala/org/apache/predictionio/tools/export/EventsToFile.scala b/tools/src/main/scala/org/apache/predictionio/tools/export/EventsToFile.scala
new file mode 100644
index 0000000..feabce4
--- /dev/null
+++ b/tools/src/main/scala/org/apache/predictionio/tools/export/EventsToFile.scala
@@ -0,0 +1,104 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package org.apache.predictionio.tools.export
+
+import org.apache.predictionio.controller.Utils
+import org.apache.predictionio.data.storage.EventJson4sSupport
+import org.apache.predictionio.data.storage.Storage
+import org.apache.predictionio.tools.Runner
+import org.apache.predictionio.workflow.WorkflowContext
+import org.apache.predictionio.workflow.WorkflowUtils
+
+import grizzled.slf4j.Logging
+import org.apache.spark.sql.SQLContext
+import org.json4s.native.Serialization._
+
+case class EventsToFileArgs(
+  env: String = "",
+  logFile: String = "",
+  appId: Int = 0,
+  channel: Option[String] = None,
+  outputPath: String = "",
+  format: String = "parquet",
+  verbose: Boolean = false,
+  debug: Boolean = false)
+
+object EventsToFile extends Logging {
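+  // Spark driver behind "pio export"; Export.eventsToFile launches it with,
+  // for example: --appid 1 --output /tmp/events --format json
+  // (the app ID and paths above are illustrative values only)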
+  def main(args: Array[String]): Unit = {
+    val parser = new scopt.OptionParser[EventsToFileArgs]("EventsToFile") {
+      opt[String]("env") action { (x, c) =>
+        c.copy(env = x)
+      }
+      opt[String]("log-file") action { (x, c) =>
+        c.copy(logFile = x)
+      }
+      opt[Int]("appid") action { (x, c) =>
+        c.copy(appId = x)
+      }
+      opt[String]("channel") action { (x, c) =>
+        c.copy(channel = Some(x))
+      }
+      opt[String]("format") action { (x, c) =>
+        c.copy(format = x)
+      }
+      opt[String]("output") action { (x, c) =>
+        c.copy(outputPath = x)
+      }
+      opt[Unit]("verbose") action { (x, c) =>
+        c.copy(verbose = true)
+      }
+      opt[Unit]("debug") action { (x, c) =>
+        c.copy(debug = true)
+      }
+    }
+    parser.parse(args, EventsToFileArgs()) map { args =>
+      // get channelId
+      val channels = Storage.getMetaDataChannels
+      val channelMap = channels.getByAppid(args.appId).map(c => (c.name, c.id)).toMap
+
+      val channelId: Option[Int] = args.channel.map { ch =>
+        if (!channelMap.contains(ch)) {
+          error(s"Channel ${ch} doesn't exist in this app.")
+          sys.exit(1)
+        }
+
+        channelMap(ch)
+      }
+
+      val channelStr = args.channel.map(n => " Channel " + n).getOrElse("")
+
+      WorkflowUtils.modifyLogging(verbose = args.verbose)
+      @transient lazy implicit val formats = Utils.json4sDefaultFormats +
+        new EventJson4sSupport.APISerializer
+      val sc = WorkflowContext(
+        mode = "Export",
+        batch = "App ID " + args.appId + channelStr,
+        executorEnv = Runner.envStringToMap(args.env))
+      val sqlContext = new SQLContext(sc)
+      val events = Storage.getPEvents()
+      val eventsRdd = events.find(appId = args.appId, channelId = channelId)(sc)
+      val jsonStringRdd = eventsRdd.map(write(_))
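+      // "json" writes one serialized event per line; any other format goes
+      // through a JSON-inferred schema and is saved as Parquet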
+      if (args.format == "json") {
+        jsonStringRdd.saveAsTextFile(args.outputPath)
+      } else {
+        val jsonRdd = sqlContext.jsonRDD(jsonStringRdd)
+        jsonRdd.saveAsParquetFile(args.outputPath)
+      }
+      info(s"Events are exported to ${args.outputPath}/.")
+      info("Done.")
+    }
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/tools/src/main/scala/org/apache/predictionio/tools/imprt/FileToEvents.scala
----------------------------------------------------------------------
diff --git a/tools/src/main/scala/org/apache/predictionio/tools/imprt/FileToEvents.scala b/tools/src/main/scala/org/apache/predictionio/tools/imprt/FileToEvents.scala
new file mode 100644
index 0000000..98a3344
--- /dev/null
+++ b/tools/src/main/scala/org/apache/predictionio/tools/imprt/FileToEvents.scala
@@ -0,0 +1,103 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package org.apache.predictionio.tools.imprt
+
+import org.apache.predictionio.controller.Utils
+import org.apache.predictionio.data.storage.Event
+import org.apache.predictionio.data.storage.EventJson4sSupport
+import org.apache.predictionio.data.storage.Storage
+import org.apache.predictionio.tools.Runner
+import org.apache.predictionio.workflow.WorkflowContext
+import org.apache.predictionio.workflow.WorkflowUtils
+
+import grizzled.slf4j.Logging
+import org.json4s.native.Serialization._
+
+import scala.util.{Failure, Try}
+
+case class FileToEventsArgs(
+  env: String = "",
+  logFile: String = "",
+  appId: Int = 0,
+  channel: Option[String] = None,
+  inputPath: String = "",
+  verbose: Boolean = false,
+  debug: Boolean = false)
+
+object FileToEvents extends Logging {
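+  // Spark driver behind "pio import"; Import.fileToEvents launches it with,
+  // for example: --appid 1 --input /tmp/events
+  // (the app ID and path above are illustrative values only)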
+  def main(args: Array[String]): Unit = {
+    val parser = new scopt.OptionParser[FileToEventsArgs]("FileToEvents") {
+      opt[String]("env") action { (x, c) =>
+        c.copy(env = x)
+      }
+      opt[String]("log-file") action { (x, c) =>
+        c.copy(logFile = x)
+      }
+      opt[Int]("appid") action { (x, c) =>
+        c.copy(appId = x)
+      }
+      opt[String]("channel") action { (x, c) =>
+        c.copy(channel = Some(x))
+      }
+      opt[String]("input") action { (x, c) =>
+        c.copy(inputPath = x)
+      }
+      opt[Unit]("verbose") action { (x, c) =>
+        c.copy(verbose = true)
+      }
+      opt[Unit]("debug") action { (x, c) =>
+        c.copy(debug = true)
+      }
+    }
+    parser.parse(args, FileToEventsArgs()) map { args =>
+      // get channelId
+      val channels = Storage.getMetaDataChannels
+      val channelMap = channels.getByAppid(args.appId).map(c => (c.name, c.id)).toMap
+
+      val channelId: Option[Int] = args.channel.map { ch =>
+        if (!channelMap.contains(ch)) {
+          error(s"Channel ${ch} doesn't exist in this app.")
+          sys.exit(1)
+        }
+
+        channelMap(ch)
+      }
+
+      val channelStr = args.channel.map(n => " Channel " + n).getOrElse("")
+
+      WorkflowUtils.modifyLogging(verbose = args.verbose)
+      @transient lazy implicit val formats = Utils.json4sDefaultFormats +
+        new EventJson4sSupport.APISerializer
+      val sc = WorkflowContext(
+        mode = "Import",
+        batch = "App ID " + args.appId + channelStr,
+        executorEnv = Runner.envStringToMap(args.env))
+      val rdd = sc.textFile(args.inputPath).filter(_.trim.nonEmpty).map { json =>
+        Try(read[Event](json)).recoverWith {
+          case e: Throwable =>
+            error(s"\nmalformed json => $json")
+            Failure(e)
+        }.get
+      }
+      val events = Storage.getPEvents()
+      events.write(events = rdd,
+        appId = args.appId,
+        channelId = channelId)(sc)
+      info("Events are imported.")
+      info("Done.")
+    }
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/tools/src/main/twirl/io/prediction/tools/console/accesskey.scala.txt
----------------------------------------------------------------------
diff --git a/tools/src/main/twirl/io/prediction/tools/console/accesskey.scala.txt b/tools/src/main/twirl/io/prediction/tools/console/accesskey.scala.txt
deleted file mode 100644
index 651dbaf..0000000
--- a/tools/src/main/twirl/io/prediction/tools/console/accesskey.scala.txt
+++ /dev/null
@@ -1,20 +0,0 @@
-Usage: pio accesskey new [--key] <app name> [<event1> <event2>...]
-
-Add allowed event(s) to an access key.
-
-  --key <value>
-      Specify a custom key.
-  <app name>
-      App to be associated with the new access key.
-  <event1> <event2>...
-      Allowed event name(s) to be added to the access key.
-
-Usage: pio accesskey list [<app name>]
-
-  <app name>
-      App name.
-
-Usage: pio accesskey delete <access key>
-
-  <access key>
-      The access key to be deleted.

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/tools/src/main/twirl/io/prediction/tools/console/adminserver.scala.txt
----------------------------------------------------------------------
diff --git a/tools/src/main/twirl/io/prediction/tools/console/adminserver.scala.txt b/tools/src/main/twirl/io/prediction/tools/console/adminserver.scala.txt
deleted file mode 100644
index 4ec0237..0000000
--- a/tools/src/main/twirl/io/prediction/tools/console/adminserver.scala.txt
+++ /dev/null
@@ -1,6 +0,0 @@
-(Experimental Only!) Usage: pio adminserver [--ip <value>] [--port <value>]
-
-  --ip <value>
-      IP to bind to. Default: localhost
-  --port <value>
-      Port to bind to. Default: 7071

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/tools/src/main/twirl/io/prediction/tools/console/app.scala.txt
----------------------------------------------------------------------
diff --git a/tools/src/main/twirl/io/prediction/tools/console/app.scala.txt b/tools/src/main/twirl/io/prediction/tools/console/app.scala.txt
deleted file mode 100644
index 49f21b1..0000000
--- a/tools/src/main/twirl/io/prediction/tools/console/app.scala.txt
+++ /dev/null
@@ -1,74 +0,0 @@
-Usage: pio app new [--id <value>] [--description <value>] [--access-key <value>]
-                   <name>
-
-Create a new app key to app ID mapping.
-
-  --id <value>
-      Specify this if you already have data under an app ID.
-  --description <value>
-      Description of the new app.
-  --access-key <value>
-      Specify a custom default access key.
-  <name>
-      App name.
-
-
-Usage: pio app list
-
-List all apps.
-
-
-Usage: pio app show <name>
-
-Show details of an app.
-
-  <name>
-      App name.
-
-
-Usage: pio app delete <name> [--force]
-
-Name of the app to be deleted.
-
-  <name>
-      App name.
-  --force, -f
-      Delete data without prompting for confirmation
-
-
-Usage: pio app data-delete <name> [--channel <name>] [--all] [--force]
-
-Delete data of an app.
-
-  <name>
-      App name.
-  --channel <name>
-      Delete data of the specified channel (default channel if not specified)
-  --all
-      Delete all data of this app (including both default and all channels)
-  --force, -f
-      Delete data without prompting for confirmation
-
-
-Usage: pio app channel-new <name> <channel>
-
-Create a new channel for the app.
-
-  <name>
-      App name.
-
-  <channel>
-      Channel name to be created.
-
-
-Usage: pio app channel-delete <name> <channel> [--force]
-
-Delete a channel for the app.
-
-  <name>
-      App name.
-
-  <channel>
-      Channel name to be deleted.
-  --force, -f
-      Delete data without prompting for confirmation

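A plausible end-to-end session with these subcommands, assuming an app named MyApp and a channel named mobile:

    $ pio app new MyApp
    $ pio app channel-new MyApp mobile
    $ pio app data-delete MyApp --channel mobile
    $ pio app channel-delete MyApp mobile
    $ pio app delete MyApp --force
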
http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/tools/src/main/twirl/io/prediction/tools/console/build.scala.txt
----------------------------------------------------------------------
diff --git a/tools/src/main/twirl/io/prediction/tools/console/build.scala.txt b/tools/src/main/twirl/io/prediction/tools/console/build.scala.txt
deleted file mode 100644
index be80c50..0000000
--- a/tools/src/main/twirl/io/prediction/tools/console/build.scala.txt
+++ /dev/null
@@ -1,11 +0,0 @@
-Usage: pio build [--sbt-extra <value>] [--clean] [--no-asm]
-                 [common options...]
-
-Build an engine at the current directory.
-
-  --sbt-extra <value>
-      Extra command to pass to SBT when it builds your engine.
-  --clean
-      Clean build.
-  --no-asm
-      Skip building external dependencies assembly.



[03/34] incubator-predictionio git commit: rename all except examples

Posted by do...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/tools/src/main/scala/io/prediction/tools/dashboard/CorsSupport.scala
----------------------------------------------------------------------
diff --git a/tools/src/main/scala/io/prediction/tools/dashboard/CorsSupport.scala b/tools/src/main/scala/io/prediction/tools/dashboard/CorsSupport.scala
deleted file mode 100644
index 3d2c888..0000000
--- a/tools/src/main/scala/io/prediction/tools/dashboard/CorsSupport.scala
+++ /dev/null
@@ -1,75 +0,0 @@
-/** Copyright 2015 TappingStone, Inc.
-  *
-  * Licensed under the Apache License, Version 2.0 (the "License");
-  * you may not use this file except in compliance with the License.
-  * You may obtain a copy of the License at
-  *
-  *     http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-package io.prediction.tools.dashboard
-
-// Reference from: https://gist.github.com/waymost/4b5598523c2c7361abea
-
-import spray.http.{HttpMethods, HttpMethod, HttpResponse, AllOrigins}
-import spray.http.HttpHeaders._
-import spray.http.HttpMethods._
-import spray.http.HttpEntity
-import spray.routing._
-import spray.http.StatusCodes
-import spray.http.ContentTypes
-
-// see also https://developer.mozilla.org/en-US/docs/Web/HTTP/Access_control_CORS
-trait CORSSupport {
-  this: HttpService =>
-
-  private val allowOriginHeader = `Access-Control-Allow-Origin`(AllOrigins)
-  private val optionsCorsHeaders = List(
-    `Access-Control-Allow-Headers`("""Origin,
-                                      |X-Requested-With,
-                                      |Content-Type,
-                                      |Accept,
-                                      |Accept-Encoding,
-                                      |Accept-Language,
-                                      |Host,
-                                      |Referer,
-                                      |User-Agent""".stripMargin.replace("\n", " ")),
-    `Access-Control-Max-Age`(1728000)
-  )
-
-  def cors[T]: Directive0 = mapRequestContext { ctx =>
-    ctx.withRouteResponseHandling {
-      // OPTIONS request for a resource that responds to other methods
-      case Rejected(x) if (ctx.request.method.equals(HttpMethods.OPTIONS) &&
-          x.exists(_.isInstanceOf[MethodRejection])) => {
-        val allowedMethods: List[HttpMethod] = x.collect {
-          case rejection: MethodRejection => rejection.supported
-        }
-        ctx.complete {
-          HttpResponse().withHeaders(
-            `Access-Control-Allow-Methods`(HttpMethods.OPTIONS, allowedMethods :_*) ::
-            allowOriginHeader ::
-            optionsCorsHeaders
-          )
-        }
-      }
-    }.withHttpResponseHeadersMapped { headers =>
-      allowOriginHeader :: headers
-    }
-  }
-
-  override def timeoutRoute: StandardRoute = complete {
-    HttpResponse(
-      StatusCodes.InternalServerError,
-      HttpEntity(ContentTypes.`text/plain(UTF-8)`,
-          "The server was not able to produce a timely response to your request."),
-      List(allowOriginHeader)
-    )
-  }
-}

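The CORSSupport trait above is meant to be mixed into a spray HttpService; wrapping a route in the cors directive adds the Access-Control-Allow-Origin header to its responses and answers OPTIONS preflight requests on its behalf. A minimal sketch of the intended use (service and path names are illustrative):

    import spray.routing._

    trait MyService extends HttpService with CORSSupport {
      // Only the wrapped route gets the CORS treatment
      val myRoute: Route =
        cors {
          path("stats.json") {
            get {
              complete("""{"status": "ok"}""")
            }
          }
        }
    }
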
http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/tools/src/main/scala/io/prediction/tools/dashboard/Dashboard.scala
----------------------------------------------------------------------
diff --git a/tools/src/main/scala/io/prediction/tools/dashboard/Dashboard.scala b/tools/src/main/scala/io/prediction/tools/dashboard/Dashboard.scala
deleted file mode 100644
index 154ba4e..0000000
--- a/tools/src/main/scala/io/prediction/tools/dashboard/Dashboard.scala
+++ /dev/null
@@ -1,156 +0,0 @@
-/** Copyright 2015 TappingStone, Inc.
-  *
-  * Licensed under the Apache License, Version 2.0 (the "License");
-  * you may not use this file except in compliance with the License.
-  * You may obtain a copy of the License at
-  *
-  *     http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-package io.prediction.tools.dashboard
-
-import com.typesafe.config.ConfigFactory
-import io.prediction.authentication.KeyAuthentication
-import io.prediction.configuration.SSLConfiguration
-import io.prediction.data.storage.Storage
-import spray.can.server.ServerSettings
-import spray.routing.directives.AuthMagnet
-import scala.concurrent.{Future, ExecutionContext}
-import akka.actor.{ActorContext, Actor, ActorSystem, Props}
-import akka.io.IO
-import akka.pattern.ask
-import akka.util.Timeout
-import com.github.nscala_time.time.Imports.DateTime
-import grizzled.slf4j.Logging
-import spray.can.Http
-import spray.http._
-import spray.http.MediaTypes._
-import spray.routing._
-import spray.routing.authentication.{Authentication, UserPass, BasicAuth}
-
-import scala.concurrent.duration._
-
-case class DashboardConfig(
-  ip: String = "localhost",
-  port: Int = 9000)
-
-object Dashboard extends Logging with SSLConfiguration{
-  def main(args: Array[String]): Unit = {
-    val parser = new scopt.OptionParser[DashboardConfig]("Dashboard") {
-      opt[String]("ip") action { (x, c) =>
-        c.copy(ip = x)
-      } text("IP to bind to (default: localhost).")
-      opt[Int]("port") action { (x, c) =>
-        c.copy(port = x)
-      } text("Port to bind to (default: 9000).")
-    }
-
-    parser.parse(args, DashboardConfig()) map { dc =>
-      createDashboard(dc)
-    }
-  }
-
-  def createDashboard(dc: DashboardConfig): Unit = {
-    implicit val system = ActorSystem("pio-dashboard")
-    val service =
-      system.actorOf(Props(classOf[DashboardActor], dc), "dashboard")
-    implicit val timeout = Timeout(5.seconds)
-    val settings = ServerSettings(system)
-    IO(Http) ? Http.Bind(
-      service,
-      interface = dc.ip,
-      port = dc.port,
-      settings = Some(settings.copy(sslEncryption = true)))
-    system.awaitTermination
-  }
-}
-
-class DashboardActor(
-    val dc: DashboardConfig)
-  extends Actor with DashboardService {
-  def actorRefFactory: ActorContext = context
-  def receive: Actor.Receive = runRoute(dashboardRoute)
-}
-
-trait DashboardService extends HttpService with KeyAuthentication with CORSSupport {
-
-  implicit def executionContext: ExecutionContext = actorRefFactory.dispatcher
-  val dc: DashboardConfig
-  val evaluationInstances = Storage.getMetaDataEvaluationInstances
-  val pioEnvVars = sys.env.filter(kv => kv._1.startsWith("PIO_"))
-  val serverStartTime = DateTime.now
-  val dashboardRoute =
-    path("") {
-      authenticate(withAccessKeyFromFile) { request =>
-        get {
-          respondWithMediaType(`text/html`) {
-            complete {
-              val completedInstances = evaluationInstances.getCompleted
-              html.index(
-                dc,
-                serverStartTime,
-                pioEnvVars,
-                completedInstances).toString
-            }
-          }
-        }
-      }
-    } ~
-    pathPrefix("engine_instances" / Segment) { instanceId =>
-      path("evaluator_results.txt") {
-        get {
-          respondWithMediaType(`text/plain`) {
-            evaluationInstances.get(instanceId).map { i =>
-              complete(i.evaluatorResults)
-            } getOrElse {
-              complete(StatusCodes.NotFound)
-            }
-          }
-        }
-      } ~
-      path("evaluator_results.html") {
-        get {
-          respondWithMediaType(`text/html`) {
-            evaluationInstances.get(instanceId).map { i =>
-              complete(i.evaluatorResultsHTML)
-            } getOrElse {
-              complete(StatusCodes.NotFound)
-            }
-          }
-        }
-      } ~
-      path("evaluator_results.json") {
-        get {
-          respondWithMediaType(`application/json`) {
-            evaluationInstances.get(instanceId).map { i =>
-              complete(i.evaluatorResultsJSON)
-            } getOrElse {
-              complete(StatusCodes.NotFound)
-            }
-          }
-        }
-      } ~
-      cors {
-        path("local_evaluator_results.json") {
-          get {
-            respondWithMediaType(`application/json`) {
-              evaluationInstances.get(instanceId).map { i =>
-                complete(i.evaluatorResultsJSON)
-              } getOrElse {
-                complete(StatusCodes.NotFound)
-              }
-            }
-          }
-        }
-      }
-    } ~
-    pathPrefix("assets") {
-      getFromResourceDirectory("assets")
-    }
-}

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/tools/src/main/scala/io/prediction/tools/export/EventsToFile.scala
----------------------------------------------------------------------
diff --git a/tools/src/main/scala/io/prediction/tools/export/EventsToFile.scala b/tools/src/main/scala/io/prediction/tools/export/EventsToFile.scala
deleted file mode 100644
index 743d57a..0000000
--- a/tools/src/main/scala/io/prediction/tools/export/EventsToFile.scala
+++ /dev/null
@@ -1,104 +0,0 @@
-/** Copyright 2015 TappingStone, Inc.
-  *
-  * Licensed under the Apache License, Version 2.0 (the "License");
-  * you may not use this file except in compliance with the License.
-  * You may obtain a copy of the License at
-  *
-  *     http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-package io.prediction.tools.export
-
-import io.prediction.controller.Utils
-import io.prediction.data.storage.EventJson4sSupport
-import io.prediction.data.storage.Storage
-import io.prediction.tools.Runner
-import io.prediction.workflow.WorkflowContext
-import io.prediction.workflow.WorkflowUtils
-
-import grizzled.slf4j.Logging
-import org.apache.spark.sql.SQLContext
-import org.json4s.native.Serialization._
-
-case class EventsToFileArgs(
-  env: String = "",
-  logFile: String = "",
-  appId: Int = 0,
-  channel: Option[String] = None,
-  outputPath: String = "",
-  format: String = "parquet",
-  verbose: Boolean = false,
-  debug: Boolean = false)
-
-object EventsToFile extends Logging {
-  def main(args: Array[String]): Unit = {
-    val parser = new scopt.OptionParser[EventsToFileArgs]("EventsToFile") {
-      opt[String]("env") action { (x, c) =>
-        c.copy(env = x)
-      }
-      opt[String]("log-file") action { (x, c) =>
-        c.copy(logFile = x)
-      }
-      opt[Int]("appid") action { (x, c) =>
-        c.copy(appId = x)
-      }
-      opt[String]("channel") action { (x, c) =>
-        c.copy(channel = Some(x))
-      }
-      opt[String]("format") action { (x, c) =>
-        c.copy(format = x)
-      }
-      opt[String]("output") action { (x, c) =>
-        c.copy(outputPath = x)
-      }
-      opt[Unit]("verbose") action { (x, c) =>
-        c.copy(verbose = true)
-      }
-      opt[Unit]("debug") action { (x, c) =>
-        c.copy(debug = true)
-      }
-    }
-    parser.parse(args, EventsToFileArgs()) map { args =>
-      // get channelId
-      val channels = Storage.getMetaDataChannels
-      val channelMap = channels.getByAppid(args.appId).map(c => (c.name, c.id)).toMap
-
-      val channelId: Option[Int] = args.channel.map { ch =>
-        if (!channelMap.contains(ch)) {
-          error(s"Channel ${ch} doesn't exist in this app.")
-          sys.exit(1)
-        }
-
-        channelMap(ch)
-      }
-
-      val channelStr = args.channel.map(n => " Channel " + n).getOrElse("")
-
-      WorkflowUtils.modifyLogging(verbose = args.verbose)
-      @transient lazy implicit val formats = Utils.json4sDefaultFormats +
-        new EventJson4sSupport.APISerializer
-      val sc = WorkflowContext(
-        mode = "Export",
-        batch = "App ID " + args.appId + channelStr,
-        executorEnv = Runner.envStringToMap(args.env))
-      val sqlContext = new SQLContext(sc)
-      val events = Storage.getPEvents()
-      val eventsRdd = events.find(appId = args.appId, channelId = channelId)(sc)
-      val jsonStringRdd = eventsRdd.map(write(_))
-      if (args.format == "json") {
-        jsonStringRdd.saveAsTextFile(args.outputPath)
-      } else {
-        val jsonRdd = sqlContext.jsonRDD(jsonStringRdd)
-        jsonRdd.saveAsParquetFile(args.outputPath)
-      }
-      info(s"Events are exported to ${args.outputPath}/.")
-      info("Done.")
-    }
-  }
-}

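EventsToFile is normally launched through `pio export`, but it can also be submitted directly; a rough sketch, with the app ID and paths made up (after this commit the class lives under org.apache.predictionio):

    $ spark-submit \
        --class org.apache.predictionio.tools.export.EventsToFile \
        pio-assembly.jar \
        --appid 12 --format json --output /tmp/pio-events-export
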
http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/tools/src/main/scala/io/prediction/tools/imprt/FileToEvents.scala
----------------------------------------------------------------------
diff --git a/tools/src/main/scala/io/prediction/tools/imprt/FileToEvents.scala b/tools/src/main/scala/io/prediction/tools/imprt/FileToEvents.scala
deleted file mode 100644
index 9a19a33..0000000
--- a/tools/src/main/scala/io/prediction/tools/imprt/FileToEvents.scala
+++ /dev/null
@@ -1,103 +0,0 @@
-/** Copyright 2015 TappingStone, Inc.
-  *
-  * Licensed under the Apache License, Version 2.0 (the "License");
-  * you may not use this file except in compliance with the License.
-  * You may obtain a copy of the License at
-  *
-  *     http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-package io.prediction.tools.imprt
-
-import io.prediction.controller.Utils
-import io.prediction.data.storage.Event
-import io.prediction.data.storage.EventJson4sSupport
-import io.prediction.data.storage.Storage
-import io.prediction.tools.Runner
-import io.prediction.workflow.WorkflowContext
-import io.prediction.workflow.WorkflowUtils
-
-import grizzled.slf4j.Logging
-import org.json4s.native.Serialization._
-
-import scala.util.{Failure, Try}
-
-case class FileToEventsArgs(
-  env: String = "",
-  logFile: String = "",
-  appId: Int = 0,
-  channel: Option[String] = None,
-  inputPath: String = "",
-  verbose: Boolean = false,
-  debug: Boolean = false)
-
-object FileToEvents extends Logging {
-  def main(args: Array[String]): Unit = {
-    val parser = new scopt.OptionParser[FileToEventsArgs]("FileToEvents") {
-      opt[String]("env") action { (x, c) =>
-        c.copy(env = x)
-      }
-      opt[String]("log-file") action { (x, c) =>
-        c.copy(logFile = x)
-      }
-      opt[Int]("appid") action { (x, c) =>
-        c.copy(appId = x)
-      }
-      opt[String]("channel") action { (x, c) =>
-        c.copy(channel = Some(x))
-      }
-      opt[String]("input") action { (x, c) =>
-        c.copy(inputPath = x)
-      }
-      opt[Unit]("verbose") action { (x, c) =>
-        c.copy(verbose = true)
-      }
-      opt[Unit]("debug") action { (x, c) =>
-        c.copy(debug = true)
-      }
-    }
-    parser.parse(args, FileToEventsArgs()) map { args =>
-      // get channelId
-      val channels = Storage.getMetaDataChannels
-      val channelMap = channels.getByAppid(args.appId).map(c => (c.name, c.id)).toMap
-
-      val channelId: Option[Int] = args.channel.map { ch =>
-        if (!channelMap.contains(ch)) {
-          error(s"Channel ${ch} doesn't exist in this app.")
-          sys.exit(1)
-        }
-
-        channelMap(ch)
-      }
-
-      val channelStr = args.channel.map(n => " Channel " + n).getOrElse("")
-
-      WorkflowUtils.modifyLogging(verbose = args.verbose)
-      @transient lazy implicit val formats = Utils.json4sDefaultFormats +
-        new EventJson4sSupport.APISerializer
-      val sc = WorkflowContext(
-        mode = "Import",
-        batch = "App ID " + args.appId + channelStr,
-        executorEnv = Runner.envStringToMap(args.env))
-      val rdd = sc.textFile(args.inputPath).filter(_.trim.nonEmpty).map { json =>
-        Try(read[Event](json)).recoverWith {
-          case e: Throwable =>
-            error(s"\nmalformed json => $json")
-            Failure(e)
-        }.get
-      }
-      val events = Storage.getPEvents()
-      events.write(events = rdd,
-        appId = args.appId,
-        channelId = channelId)(sc)
-      info("Events are imported.")
-      info("Done.")
-    }
-  }
-}

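FileToEvents (normally reached through `pio import`) expects one JSON-serialized event per line in its input; any line that fails to deserialize hits the "malformed json" branch above. A representative line, with made-up values:

    {"event": "rate", "entityType": "user", "entityId": "u0", "targetEntityType": "item", "targetEntityId": "i0", "properties": {"rating": 5}, "eventTime": "2015-06-08T16:45:10.217Z"}
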
http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/tools/src/main/scala/org/apache/predictionio/tools/RegisterEngine.scala
----------------------------------------------------------------------
diff --git a/tools/src/main/scala/org/apache/predictionio/tools/RegisterEngine.scala b/tools/src/main/scala/org/apache/predictionio/tools/RegisterEngine.scala
new file mode 100644
index 0000000..1640d55
--- /dev/null
+++ b/tools/src/main/scala/org/apache/predictionio/tools/RegisterEngine.scala
@@ -0,0 +1,84 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package org.apache.predictionio.tools
+
+import java.io.File
+
+import grizzled.slf4j.Logging
+import org.apache.predictionio.data.storage.EngineManifest
+import org.apache.predictionio.data.storage.EngineManifestSerializer
+import org.apache.predictionio.data.storage.Storage
+import org.apache.hadoop.conf.Configuration
+import org.apache.hadoop.fs.FileSystem
+import org.apache.hadoop.fs.Path
+import org.json4s._
+import org.json4s.native.Serialization.read
+
+import scala.io.Source
+
+object RegisterEngine extends Logging {
+  val engineManifests = Storage.getMetaDataEngineManifests
+  implicit val formats = DefaultFormats + new EngineManifestSerializer
+
+  def registerEngine(
+      jsonManifest: File,
+      engineFiles: Seq[File],
+      copyLocal: Boolean = false): Unit = {
+    val jsonString = try {
+      Source.fromFile(jsonManifest).mkString
+    } catch {
+      case e: java.io.FileNotFoundException =>
+        error(s"Engine manifest file not found: ${e.getMessage}. Aborting.")
+        sys.exit(1)
+    }
+    val engineManifest = read[EngineManifest](jsonString)
+
+    info(s"Registering engine ${engineManifest.id} ${engineManifest.version}")
+    engineManifests.update(
+      engineManifest.copy(files = engineFiles.map(_.toURI.toString)), true)
+  }
+
+  def unregisterEngine(jsonManifest: File): Unit = {
+    val jsonString = try {
+      Source.fromFile(jsonManifest).mkString
+    } catch {
+      case e: java.io.FileNotFoundException =>
+        error(s"Engine manifest file not found: ${e.getMessage}. Aborting.")
+        sys.exit(1)
+    }
+    val fileEngineManifest = read[EngineManifest](jsonString)
+    val engineManifest = engineManifests.get(
+      fileEngineManifest.id,
+      fileEngineManifest.version)
+
+    engineManifest map { em =>
+      val conf = new Configuration
+      val fs = FileSystem.get(conf)
+
+      em.files foreach { f =>
+        val path = new Path(f)
+        info(s"Removing ${f}")
+        fs.delete(path, false)
+      }
+
+      engineManifests.delete(em.id, em.version)
+      info(s"Unregistered engine ${em.id} ${em.version}")
+    } getOrElse {
+      error(s"${fileEngineManifest.id} ${fileEngineManifest.version} is not " +
+        "registered.")
+    }
+  }
+}

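The manifest consumed by registerEngine and unregisterEngine is plain JSON; the fields the code above reads are the engine id, version, and file list. A hypothetical fragment (a real manifest may carry additional fields):

    {
      "id": "org.example.myengine",
      "version": "0f1d2c3",
      "files": [
        "file:/home/pio/MyEngine/target/scala-2.10/myengine-assembly-0.1.jar"
      ]
    }
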
http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/tools/src/main/scala/org/apache/predictionio/tools/RunServer.scala
----------------------------------------------------------------------
diff --git a/tools/src/main/scala/org/apache/predictionio/tools/RunServer.scala b/tools/src/main/scala/org/apache/predictionio/tools/RunServer.scala
new file mode 100644
index 0000000..5dae46b
--- /dev/null
+++ b/tools/src/main/scala/org/apache/predictionio/tools/RunServer.scala
@@ -0,0 +1,178 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package org.apache.predictionio.tools
+
+import java.io.File
+import java.net.URI
+
+import grizzled.slf4j.Logging
+import org.apache.predictionio.data.storage.EngineManifest
+import org.apache.predictionio.tools.console.ConsoleArgs
+import org.apache.predictionio.workflow.WorkflowUtils
+
+import scala.sys.process._
+
+object RunServer extends Logging {
+  def runServer(
+      ca: ConsoleArgs,
+      core: File,
+      em: EngineManifest,
+      engineInstanceId: String): Int = {
+    val pioEnvVars = sys.env.filter(kv => kv._1.startsWith("PIO_")).map(kv =>
+      s"${kv._1}=${kv._2}"
+    ).mkString(",")
+
+    val sparkHome = ca.common.sparkHome.getOrElse(
+      sys.env.getOrElse("SPARK_HOME", "."))
+
+    val extraFiles = WorkflowUtils.thirdPartyConfFiles
+
+    val driverClassPathIndex =
+      ca.common.sparkPassThrough.indexOf("--driver-class-path")
+    val driverClassPathPrefix =
+      if (driverClassPathIndex != -1) {
+        Seq(ca.common.sparkPassThrough(driverClassPathIndex + 1))
+      } else {
+        Seq()
+      }
+    val extraClasspaths =
+      driverClassPathPrefix ++ WorkflowUtils.thirdPartyClasspaths
+
+    val deployModeIndex =
+      ca.common.sparkPassThrough.indexOf("--deploy-mode")
+    val deployMode = if (deployModeIndex != -1) {
+      ca.common.sparkPassThrough(deployModeIndex + 1)
+    } else {
+      "client"
+    }
+
+    val mainJar =
+      if (ca.build.uberJar) {
+        if (deployMode == "cluster") {
+          em.files.filter(_.startsWith("hdfs")).head
+        } else {
+          em.files.filterNot(_.startsWith("hdfs")).head
+        }
+      } else {
+        if (deployMode == "cluster") {
+          em.files.filter(_.contains("pio-assembly")).head
+        } else {
+          core.getCanonicalPath
+        }
+      }
+
+    val jarFiles = (em.files ++ Option(new File(ca.common.pioHome.get, "plugins")
+      .listFiles()).getOrElse(Array.empty[File]).map(_.getAbsolutePath)).mkString(",")
+
+    val sparkSubmit =
+      Seq(Seq(sparkHome, "bin", "spark-submit").mkString(File.separator)) ++
+      ca.common.sparkPassThrough ++
+      Seq(
+        "--class",
+        "org.apache.predictionio.workflow.CreateServer",
+        "--name",
+        s"PredictionIO Engine Instance: ${engineInstanceId}") ++
+      (if (!ca.build.uberJar) {
+        Seq("--jars", jarFiles)
+      } else Seq()) ++
+      (if (extraFiles.size > 0) {
+        Seq("--files", extraFiles.mkString(","))
+      } else {
+        Seq()
+      }) ++
+      (if (extraClasspaths.size > 0) {
+        Seq("--driver-class-path", extraClasspaths.mkString(":"))
+      } else {
+        Seq()
+      }) ++
+      (if (ca.common.sparkKryo) {
+        Seq(
+          "--conf",
+          "spark.serializer=org.apache.spark.serializer.KryoSerializer")
+      } else {
+        Seq()
+      }) ++
+      Seq(
+        mainJar,
+        "--engineInstanceId",
+        engineInstanceId,
+        "--ip",
+        ca.deploy.ip,
+        "--port",
+        ca.deploy.port.toString,
+        "--event-server-ip",
+        ca.eventServer.ip,
+        "--event-server-port",
+        ca.eventServer.port.toString) ++
+      (if (ca.accessKey.accessKey != "") {
+        Seq("--accesskey", ca.accessKey.accessKey)
+      } else {
+        Seq()
+      }) ++
+      (if (ca.eventServer.enabled) Seq("--feedback") else Seq()) ++
+      (if (ca.common.batch != "") Seq("--batch", ca.common.batch) else Seq()) ++
+      (if (ca.common.verbose) Seq("--verbose") else Seq()) ++
+      ca.deploy.logUrl.map(x => Seq("--log-url", x)).getOrElse(Seq()) ++
+      ca.deploy.logPrefix.map(x => Seq("--log-prefix", x)).getOrElse(Seq()) ++
+      Seq("--json-extractor", ca.common.jsonExtractor.toString)
+
+    info(s"Submission command: ${sparkSubmit.mkString(" ")}")
+
+    val proc =
+      Process(sparkSubmit, None, "CLASSPATH" -> "", "SPARK_YARN_USER_ENV" -> pioEnvVars).run()
+    Runtime.getRuntime.addShutdownHook(new Thread(new Runnable {
+      def run(): Unit = {
+        proc.destroy()
+      }
+    }))
+    proc.exitValue()
+  }
+
+  def newRunServer(
+    ca: ConsoleArgs,
+    em: EngineManifest,
+    engineInstanceId: String): Int = {
+    val jarFiles = em.files.map(new URI(_)) ++
+      Option(new File(ca.common.pioHome.get, "plugins").listFiles())
+        .getOrElse(Array.empty[File]).map(_.toURI)
+    val args = Seq(
+      "--engineInstanceId",
+      engineInstanceId,
+      "--engine-variant",
+      ca.common.variantJson.toURI.toString,
+      "--ip",
+      ca.deploy.ip,
+      "--port",
+      ca.deploy.port.toString,
+      "--event-server-ip",
+      ca.eventServer.ip,
+      "--event-server-port",
+      ca.eventServer.port.toString) ++
+      (if (ca.accessKey.accessKey != "") {
+        Seq("--accesskey", ca.accessKey.accessKey)
+      } else {
+        Nil
+      }) ++
+      (if (ca.eventServer.enabled) Seq("--feedback") else Nil) ++
+      (if (ca.common.batch != "") Seq("--batch", ca.common.batch) else Nil) ++
+      (if (ca.common.verbose) Seq("--verbose") else Nil) ++
+      ca.deploy.logUrl.map(x => Seq("--log-url", x)).getOrElse(Nil) ++
+      ca.deploy.logPrefix.map(x => Seq("--log-prefix", x)).getOrElse(Nil) ++
+      Seq("--json-extractor", ca.common.jsonExtractor.toString)
+
+    Runner.runOnSpark("org.apache.predictionio.workflow.CreateServer", args, ca, jarFiles)
+  }
+}

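In the default client deploy mode, the command assembled by runServer comes out roughly as follows (the instance ID, JAR names, and plugin list are illustrative):

    $SPARK_HOME/bin/spark-submit \
      --class org.apache.predictionio.workflow.CreateServer \
      --name "PredictionIO Engine Instance: SOME_INSTANCE_ID" \
      --jars <engine and plugin JARs> \
      pio-assembly.jar \
      --engineInstanceId SOME_INSTANCE_ID \
      --ip 0.0.0.0 --port 8000 \
      --event-server-ip 0.0.0.0 --event-server-port 7070 \
      --json-extractor Both
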
http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/tools/src/main/scala/org/apache/predictionio/tools/RunWorkflow.scala
----------------------------------------------------------------------
diff --git a/tools/src/main/scala/org/apache/predictionio/tools/RunWorkflow.scala b/tools/src/main/scala/org/apache/predictionio/tools/RunWorkflow.scala
new file mode 100644
index 0000000..4b42f40
--- /dev/null
+++ b/tools/src/main/scala/org/apache/predictionio/tools/RunWorkflow.scala
@@ -0,0 +1,212 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package org.apache.predictionio.tools
+
+import java.io.File
+import java.net.URI
+
+import grizzled.slf4j.Logging
+import org.apache.predictionio.data.storage.EngineManifest
+import org.apache.predictionio.tools.console.ConsoleArgs
+import org.apache.predictionio.workflow.WorkflowUtils
+import org.apache.hadoop.conf.Configuration
+import org.apache.hadoop.fs.FileSystem
+import org.apache.hadoop.fs.Path
+
+import scala.sys.process._
+
+object RunWorkflow extends Logging {
+  def runWorkflow(
+      ca: ConsoleArgs,
+      core: File,
+      em: EngineManifest,
+      variantJson: File): Int = {
+    // Collect and serialize PIO_* environmental variables
+    val pioEnvVars = sys.env.filter(kv => kv._1.startsWith("PIO_")).map(kv =>
+      s"${kv._1}=${kv._2}"
+    ).mkString(",")
+
+    val sparkHome = ca.common.sparkHome.getOrElse(
+      sys.env.getOrElse("SPARK_HOME", "."))
+
+    val hadoopConf = new Configuration
+    val hdfs = FileSystem.get(hadoopConf)
+
+    val driverClassPathIndex =
+      ca.common.sparkPassThrough.indexOf("--driver-class-path")
+    val driverClassPathPrefix =
+      if (driverClassPathIndex != -1) {
+        Seq(ca.common.sparkPassThrough(driverClassPathIndex + 1))
+      } else {
+        Seq()
+      }
+    val extraClasspaths =
+      driverClassPathPrefix ++ WorkflowUtils.thirdPartyClasspaths
+
+    val deployModeIndex =
+      ca.common.sparkPassThrough.indexOf("--deploy-mode")
+    val deployMode = if (deployModeIndex != -1) {
+      ca.common.sparkPassThrough(deployModeIndex + 1)
+    } else {
+      "client"
+    }
+
+    val extraFiles = WorkflowUtils.thirdPartyConfFiles
+
+    val mainJar =
+      if (ca.build.uberJar) {
+        if (deployMode == "cluster") {
+          em.files.filter(_.startsWith("hdfs")).head
+        } else {
+          em.files.filterNot(_.startsWith("hdfs")).head
+        }
+      } else {
+        if (deployMode == "cluster") {
+          em.files.filter(_.contains("pio-assembly")).head
+        } else {
+          core.getCanonicalPath
+        }
+      }
+
+    val workMode =
+      ca.common.evaluation.map(_ => "Evaluation").getOrElse("Training")
+
+    val engineLocation = Seq(
+      sys.env("PIO_FS_ENGINESDIR"),
+      em.id,
+      em.version)
+
+    if (deployMode == "cluster") {
+      val dstPath = new Path(engineLocation.mkString(Path.SEPARATOR))
+      info("Cluster deploy mode detected. Trying to copy " +
+        s"${variantJson.getCanonicalPath} to " +
+        s"${hdfs.makeQualified(dstPath).toString}.")
+      hdfs.copyFromLocalFile(new Path(variantJson.toURI), dstPath)
+    }
+
+    val sparkSubmit =
+      Seq(Seq(sparkHome, "bin", "spark-submit").mkString(File.separator)) ++
+      ca.common.sparkPassThrough ++
+      Seq(
+        "--class",
+        "org.apache.predictionio.workflow.CreateWorkflow",
+        "--name",
+        s"PredictionIO $workMode: ${em.id} ${em.version} (${ca.common.batch})") ++
+      (if (!ca.build.uberJar) {
+        Seq("--jars", em.files.mkString(","))
+      } else Seq()) ++
+      (if (extraFiles.size > 0) {
+        Seq("--files", extraFiles.mkString(","))
+      } else {
+        Seq()
+      }) ++
+      (if (extraClasspaths.size > 0) {
+        Seq("--driver-class-path", extraClasspaths.mkString(":"))
+      } else {
+        Seq()
+      }) ++
+      (if (ca.common.sparkKryo) {
+        Seq(
+          "--conf",
+          "spark.serializer=org.apache.spark.serializer.KryoSerializer")
+      } else {
+        Seq()
+      }) ++
+      Seq(
+        mainJar,
+        "--env",
+        pioEnvVars,
+        "--engine-id",
+        em.id,
+        "--engine-version",
+        em.version,
+        "--engine-variant",
+        if (deployMode == "cluster") {
+          hdfs.makeQualified(new Path(
+            (engineLocation :+ variantJson.getName).mkString(Path.SEPARATOR))).
+            toString
+        } else {
+          variantJson.getCanonicalPath
+        },
+        "--verbosity",
+        ca.common.verbosity.toString) ++
+      ca.common.engineFactory.map(
+        x => Seq("--engine-factory", x)).getOrElse(Seq()) ++
+      ca.common.engineParamsKey.map(
+        x => Seq("--engine-params-key", x)).getOrElse(Seq()) ++
+      (if (deployMode == "cluster") Seq("--deploy-mode", "cluster") else Seq()) ++
+      (if (ca.common.batch != "") Seq("--batch", ca.common.batch) else Seq()) ++
+      (if (ca.common.verbose) Seq("--verbose") else Seq()) ++
+      (if (ca.common.skipSanityCheck) Seq("--skip-sanity-check") else Seq()) ++
+      (if (ca.common.stopAfterRead) Seq("--stop-after-read") else Seq()) ++
+      (if (ca.common.stopAfterPrepare) {
+        Seq("--stop-after-prepare")
+      } else {
+        Seq()
+      }) ++
+      ca.common.evaluation.map(x => Seq("--evaluation-class", x)).
+        getOrElse(Seq()) ++
+      // If engineParamsGenerator is specified, it overrides the evaluation.
+      ca.common.engineParamsGenerator.orElse(ca.common.evaluation)
+        .map(x => Seq("--engine-params-generator-class", x))
+        .getOrElse(Seq()) ++
+      Seq("--json-extractor", ca.common.jsonExtractor.toString)
+
+    info(s"Submission command: ${sparkSubmit.mkString(" ")}")
+    Process(sparkSubmit, None, "CLASSPATH" -> "", "SPARK_YARN_USER_ENV" -> pioEnvVars).!
+  }
+
+  def newRunWorkflow(ca: ConsoleArgs, em: EngineManifest): Int = {
+    val jarFiles = em.files.map(new URI(_))
+    val args = Seq(
+      "--engine-id",
+      em.id,
+      "--engine-version",
+      em.version,
+      "--engine-variant",
+      ca.common.variantJson.toURI.toString,
+      "--verbosity",
+      ca.common.verbosity.toString) ++
+      ca.common.engineFactory.map(
+        x => Seq("--engine-factory", x)).getOrElse(Seq()) ++
+      ca.common.engineParamsKey.map(
+        x => Seq("--engine-params-key", x)).getOrElse(Seq()) ++
+      (if (ca.common.batch != "") Seq("--batch", ca.common.batch) else Seq()) ++
+      (if (ca.common.verbose) Seq("--verbose") else Seq()) ++
+      (if (ca.common.skipSanityCheck) Seq("--skip-sanity-check") else Seq()) ++
+      (if (ca.common.stopAfterRead) Seq("--stop-after-read") else Seq()) ++
+      (if (ca.common.stopAfterPrepare) {
+        Seq("--stop-after-prepare")
+      } else {
+        Seq()
+      }) ++
+      ca.common.evaluation.map(x => Seq("--evaluation-class", x)).
+        getOrElse(Seq()) ++
+      // If engineParamsGenerator is specified, it overrides the evaluation.
+      ca.common.engineParamsGenerator.orElse(ca.common.evaluation)
+        .map(x => Seq("--engine-params-generator-class", x))
+        .getOrElse(Seq()) ++
+      Seq("--json-extractor", ca.common.jsonExtractor.toString)
+
+    Runner.runOnSpark(
+      "org.apache.predictionio.workflow.CreateWorkflow",
+      args,
+      ca,
+      jarFiles)
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/tools/src/main/scala/org/apache/predictionio/tools/Runner.scala
----------------------------------------------------------------------
diff --git a/tools/src/main/scala/org/apache/predictionio/tools/Runner.scala b/tools/src/main/scala/org/apache/predictionio/tools/Runner.scala
new file mode 100644
index 0000000..3a8fed5
--- /dev/null
+++ b/tools/src/main/scala/org/apache/predictionio/tools/Runner.scala
@@ -0,0 +1,211 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package org.apache.predictionio.tools
+
+import java.io.File
+import java.net.URI
+
+import grizzled.slf4j.Logging
+import org.apache.predictionio.tools.console.ConsoleArgs
+import org.apache.predictionio.workflow.WorkflowUtils
+import org.apache.hadoop.conf.Configuration
+import org.apache.hadoop.fs.FileSystem
+import org.apache.hadoop.fs.Path
+
+import scala.sys.process._
+
+object Runner extends Logging {
+  def envStringToMap(env: String): Map[String, String] =
+    env.split(',').flatMap(p =>
+      p.split('=') match {
+        case Array(k, v) => List(k -> v)
+        case _ => Nil
+      }
+    ).toMap
+
+  def argumentValue(arguments: Seq[String], argumentName: String): Option[String] = {
+    val argumentIndex = arguments.indexOf(argumentName)
+    try {
+      arguments(argumentIndex) // just to make it error out if index is -1
+      Some(arguments(argumentIndex + 1))
+    } catch {
+      case e: IndexOutOfBoundsException => None
+    }
+  }
+
+  def handleScratchFile(
+      fileSystem: Option[FileSystem],
+      uri: Option[URI],
+      localFile: File): String = {
+    val localFilePath = localFile.getCanonicalPath
+    (fileSystem, uri) match {
+      case (Some(fs), Some(u)) =>
+        val dest = fs.makeQualified(Path.mergePaths(
+          new Path(u),
+          new Path(localFilePath)))
+        info(s"Copying $localFile to ${dest.toString}")
+        fs.copyFromLocalFile(new Path(localFilePath), dest)
+        dest.toUri.toString
+      case _ => localFile.toURI.toString
+    }
+  }
+
+  def cleanup(fs: Option[FileSystem], uri: Option[URI]): Unit = {
+    (fs, uri) match {
+      case (Some(f), Some(u)) =>
+        f.close()
+      case _ => ()
+    }
+  }
+
+  def detectFilePaths(
+      fileSystem: Option[FileSystem],
+      uri: Option[URI],
+      args: Seq[String]): Seq[String] = {
+    args map { arg =>
+      val f = try {
+        new File(new URI(arg))
+      } catch {
+        case e: Throwable => new File(arg)
+      }
+      if (f.exists()) {
+        handleScratchFile(fileSystem, uri, f)
+      } else {
+        arg
+      }
+    }
+  }
+
+  def runOnSpark(
+      className: String,
+      classArgs: Seq[String],
+      ca: ConsoleArgs,
+      extraJars: Seq[URI]): Int = {
+    // Return error for unsupported cases
+    val deployMode =
+      argumentValue(ca.common.sparkPassThrough, "--deploy-mode").getOrElse("client")
+    val master =
+      argumentValue(ca.common.sparkPassThrough, "--master").getOrElse("local")
+
+    (ca.common.scratchUri, deployMode, master) match {
+      case (Some(u), "client", m) if m != "yarn-cluster" =>
+        error("--scratch-uri cannot be set when deploy mode is client")
+        return 1
+      case (_, "cluster", m) if m.startsWith("spark://") =>
+        error("Using cluster deploy mode with Spark standalone cluster is not supported")
+        return 1
+      case _ => ()
+    }
+
+    // Initialize HDFS API for scratch URI
+    val fs = ca.common.scratchUri map { uri =>
+      FileSystem.get(uri, new Configuration())
+    }
+
+    // Collect and serialize PIO_* environmental variables
+    val pioEnvVars = sys.env.filter(kv => kv._1.startsWith("PIO_")).map(kv =>
+      s"${kv._1}=${kv._2}"
+    ).mkString(",")
+
+    // Location of Spark
+    val sparkHome = ca.common.sparkHome.getOrElse(
+      sys.env.getOrElse("SPARK_HOME", "."))
+
+    // Local path to PredictionIO assembly JAR
+    val mainJar = handleScratchFile(
+      fs,
+      ca.common.scratchUri,
+      console.Console.coreAssembly(ca.common.pioHome.get))
+
+    // Extra JARs that are needed by the driver
+    val driverClassPathPrefix =
+      argumentValue(ca.common.sparkPassThrough, "--driver-class-path") map { v =>
+        Seq(v)
+      } getOrElse {
+        Nil
+      }
+
+    val extraClasspaths =
+      driverClassPathPrefix ++ WorkflowUtils.thirdPartyClasspaths
+
+    // Extra files that are needed to be passed to --files
+    val extraFiles = WorkflowUtils.thirdPartyConfFiles map { f =>
+      handleScratchFile(fs, ca.common.scratchUri, new File(f))
+    }
+
+    val deployedJars = extraJars map { j =>
+      handleScratchFile(fs, ca.common.scratchUri, new File(j))
+    }
+
+    val sparkSubmitCommand =
+      Seq(Seq(sparkHome, "bin", "spark-submit").mkString(File.separator))
+
+    val sparkSubmitJars = if (extraJars.nonEmpty) {
+      Seq("--jars", deployedJars.map(_.toString).mkString(","))
+    } else {
+      Nil
+    }
+
+    val sparkSubmitFiles = if (extraFiles.nonEmpty) {
+      Seq("--files", extraFiles.mkString(","))
+    } else {
+      Nil
+    }
+
+    val sparkSubmitExtraClasspaths = if (extraClasspaths.nonEmpty) {
+      Seq("--driver-class-path", extraClasspaths.mkString(":"))
+    } else {
+      Nil
+    }
+
+    val sparkSubmitKryo = if (ca.common.sparkKryo) {
+      Seq(
+        "--conf",
+        "spark.serializer=org.apache.spark.serializer.KryoSerializer")
+    } else {
+      Nil
+    }
+
+    val verbose = if (ca.common.verbose) Seq("--verbose") else Nil
+
+    val sparkSubmit = Seq(
+      sparkSubmitCommand,
+      ca.common.sparkPassThrough,
+      Seq("--class", className),
+      sparkSubmitJars,
+      sparkSubmitFiles,
+      sparkSubmitExtraClasspaths,
+      sparkSubmitKryo,
+      Seq(mainJar),
+      detectFilePaths(fs, ca.common.scratchUri, classArgs),
+      Seq("--env", pioEnvVars),
+      verbose).flatten.filter(_ != "")
+    info(s"Submission command: ${sparkSubmit.mkString(" ")}")
+    val proc = Process(
+      sparkSubmit,
+      None,
+      "CLASSPATH" -> "",
+      "SPARK_YARN_USER_ENV" -> pioEnvVars).run()
+    Runtime.getRuntime.addShutdownHook(new Thread(new Runnable {
+      def run(): Unit = {
+        cleanup(fs, ca.common.scratchUri)
+        proc.destroy()
+      }
+    }))
+    cleanup(fs, ca.common.scratchUri)
+    proc.exitValue()
+  }
+}

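The envStringToMap helper above is the parser for the comma-separated K=V encoding that runOnSpark builds from PIO_* variables; a quick sketch of its behavior (values are made up):

    import org.apache.predictionio.tools.Runner

    val parsed = Runner.envStringToMap(
      "PIO_STORAGE_SOURCES_PGSQL_TYPE=jdbc,PIO_FS_BASEDIR=/home/pio/.pio_store")
    // parsed == Map(
    //   "PIO_STORAGE_SOURCES_PGSQL_TYPE" -> "jdbc",
    //   "PIO_FS_BASEDIR" -> "/home/pio/.pio_store")
    // Pairs that do not split into exactly key=value are silently dropped.
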
http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/tools/src/main/scala/org/apache/predictionio/tools/admin/AdminAPI.scala
----------------------------------------------------------------------
diff --git a/tools/src/main/scala/org/apache/predictionio/tools/admin/AdminAPI.scala b/tools/src/main/scala/org/apache/predictionio/tools/admin/AdminAPI.scala
new file mode 100644
index 0000000..b70cb7e
--- /dev/null
+++ b/tools/src/main/scala/org/apache/predictionio/tools/admin/AdminAPI.scala
@@ -0,0 +1,156 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package org.apache.predictionio.tools.admin
+
+import akka.actor.{Actor, ActorSystem, Props}
+import akka.event.Logging
+import akka.io.IO
+import akka.util.Timeout
+import org.apache.predictionio.data.api.StartServer
+import org.apache.predictionio.data.storage.Storage
+import org.json4s.{Formats, DefaultFormats}
+
+import java.util.concurrent.TimeUnit
+
+import spray.can.Http
+import spray.http.{MediaTypes, StatusCodes}
+import spray.httpx.Json4sSupport
+import spray.routing._
+
+import scala.concurrent.ExecutionContext
+
+class AdminServiceActor(val commandClient: CommandClient)
+  extends HttpServiceActor {
+
+  object Json4sProtocol extends Json4sSupport {
+    implicit def json4sFormats: Formats = DefaultFormats
+  }
+
+  import Json4sProtocol._
+
+  val log = Logging(context.system, this)
+
+  // we use the enclosing ActorContext's or ActorSystem's dispatcher for our
+  // Futures
+  implicit def executionContext: ExecutionContext = actorRefFactory.dispatcher
+  implicit val timeout: Timeout = Timeout(5, TimeUnit.SECONDS)
+
+  // customized rejection handler for clearer error responses
+  val rejectionHandler = RejectionHandler {
+    case MalformedRequestContentRejection(msg, _) :: _ =>
+      complete(StatusCodes.BadRequest, Map("message" -> msg))
+    case MissingQueryParamRejection(msg) :: _ =>
+      complete(StatusCodes.NotFound,
+        Map("message" -> s"missing required query parameter ${msg}."))
+    case AuthenticationFailedRejection(cause, challengeHeaders) :: _ =>
+      complete(StatusCodes.Unauthorized, challengeHeaders,
+        Map("message" -> s"Invalid accessKey."))
+  }
+
+  val jsonPath = """(.+)\.json$""".r
+
+  val route: Route =
+    pathSingleSlash {
+      get {
+        respondWithMediaType(MediaTypes.`application/json`) {
+          complete(Map("status" -> "alive"))
+        }
+      }
+    } ~
+      path("cmd" / "app" / Segment / "data") {
+        appName => {
+          delete {
+            respondWithMediaType(MediaTypes.`application/json`) {
+              complete(commandClient.futureAppDataDelete(appName))
+            }
+          }
+        }
+      } ~
+      path("cmd" / "app" / Segment) {
+        appName => {
+          delete {
+            respondWithMediaType(MediaTypes.`application/json`) {
+              complete(commandClient.futureAppDelete(appName))
+            }
+          }
+        }
+      } ~
+      path("cmd" / "app") {
+        get {
+          respondWithMediaType(MediaTypes.`application/json`) {
+            complete(commandClient.futureAppList())
+          }
+        } ~
+          post {
+            entity(as[AppRequest]) {
+              appArgs => respondWithMediaType(MediaTypes.`application/json`) {
+                complete(commandClient.futureAppNew(appArgs))
+              }
+            }
+          }
+      }
+  def receive: Actor.Receive = runRoute(route)
+}
+
+class AdminServerActor(val commandClient: CommandClient) extends Actor {
+  val log = Logging(context.system, this)
+  val child = context.actorOf(
+    Props(classOf[AdminServiceActor], commandClient),
+    "AdminServiceActor")
+
+  implicit val system = context.system
+
+  def receive: PartialFunction[Any, Unit] = {
+    case StartServer(host, portNum) => {
+      IO(Http) ! Http.Bind(child, interface = host, port = portNum)
+
+    }
+    case m: Http.Bound => log.info("Bound received. AdminServer is ready.")
+    case m: Http.CommandFailed => log.error("Command failed.")
+    case _ => log.error("Unknown message.")
+  }
+}
+
+case class AdminServerConfig(
+  ip: String = "localhost",
+  port: Int = 7071
+)
+
+object AdminServer {
+  def createAdminServer(config: AdminServerConfig): Unit = {
+    implicit val system = ActorSystem("AdminServerSystem")
+
+    val commandClient = new CommandClient(
+      appClient = Storage.getMetaDataApps,
+      accessKeyClient = Storage.getMetaDataAccessKeys,
+      eventClient = Storage.getLEvents()
+    )
+
+    val serverActor = system.actorOf(
+      Props(classOf[AdminServerActor], commandClient),
+      "AdminServerActor")
+    serverActor ! StartServer(config.ip, config.port)
+    system.awaitTermination
+  }
+}
+
+object AdminRun {
+  def main(args: Array[String]): Unit = {
+    AdminServer.createAdminServer(AdminServerConfig(
+      ip = "localhost",
+      port = 7071))
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/tools/src/main/scala/org/apache/predictionio/tools/admin/CommandClient.scala
----------------------------------------------------------------------
diff --git a/tools/src/main/scala/org/apache/predictionio/tools/admin/CommandClient.scala b/tools/src/main/scala/org/apache/predictionio/tools/admin/CommandClient.scala
new file mode 100644
index 0000000..143023e
--- /dev/null
+++ b/tools/src/main/scala/org/apache/predictionio/tools/admin/CommandClient.scala
@@ -0,0 +1,160 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package org.apache.predictionio.tools.admin
+
+import org.apache.predictionio.data.storage._
+
+import scala.concurrent.{ExecutionContext, Future}
+
+abstract class BaseResponse()
+
+case class GeneralResponse(
+  status: Int = 0,
+  message: String = ""
+) extends BaseResponse()
+
+case class AppRequest(
+  id: Int = 0,
+  name: String = "",
+  description: String = ""
+)
+
+case class TrainRequest(
+  enginePath: String = ""
+)
+case class AppResponse(
+  id: Int = 0,
+  name: String = "",
+  keys: Seq[AccessKey]
+) extends BaseResponse()
+
+case class AppNewResponse(
+  status: Int = 0,
+  message: String = "",
+  id: Int = 0,
+  name: String = "",
+  key: String
+) extends BaseResponse()
+
+case class AppListResponse(
+  status: Int = 0,
+  message: String = "",
+  apps: Seq[AppResponse]
+) extends BaseResponse()
+
+class CommandClient(
+  val appClient: Apps,
+  val accessKeyClient: AccessKeys,
+  val eventClient: LEvents
+) {
+
+  def futureAppNew(req: AppRequest)(implicit ec: ExecutionContext): Future[BaseResponse] = Future {
+    val response = appClient.getByName(req.name) map { app =>
+      GeneralResponse(0, s"App ${req.name} already exists. Aborting.")
+    } getOrElse {
+      appClient.get(req.id) map {
+        app2 =>
+          GeneralResponse(0,
+              s"App ID ${app2.id} already exists and maps to the app '${app2.name}'. " +
+              "Aborting.")
+      } getOrElse {
+        val appid = appClient.insert(App(
+          id = Option(req.id).getOrElse(0),
+          name = req.name,
+          description = Option(req.description)))
+        appid map { id =>
+          val dbInit = eventClient.init(id)
+          val r = if (dbInit) {
+            val accessKey = accessKeyClient.insert(AccessKey(
+              key = "",
+              appid = id,
+              events = Seq()))
+            accessKey map { k =>
+              new AppNewResponse(1, "App created successfully.", id, req.name, k)
+            } getOrElse {
+              GeneralResponse(0, s"Unable to create new access key.")
+            }
+          } else {
+            GeneralResponse(0, s"Unable to initialize Event Store for this app ID: ${id}.")
+          }
+          r
+        } getOrElse {
+          GeneralResponse(0, s"Unable to create new app.")
+        }
+      }
+    }
+    response
+  }
+
+  def futureAppList()(implicit ec: ExecutionContext): Future[AppListResponse] = Future {
+    val apps = appClient.getAll().sortBy(_.name)
+    val appsRes = apps.map {
+      app => {
+        new AppResponse(app.id, app.name, accessKeyClient.getByAppid(app.id))
+      }
+    }
+    new AppListResponse(1, "Successfully retrieved app list.", appsRes)
+  }
+
+  def futureAppDataDelete(appName: String)
+      (implicit ec: ExecutionContext): Future[GeneralResponse] = Future {
+    val response = appClient.getByName(appName) map { app =>
+      val data = if (eventClient.remove(app.id)) {
+        GeneralResponse(1, s"Removed Event Store for this app ID: ${app.id}.")
+      } else {
+        GeneralResponse(0, s"Error removing Event Store for this app.")
+      }
+
+      val dbInit = eventClient.init(app.id)
+      val data2 = if (dbInit) {
+        GeneralResponse(1, s"Initialized Event Store for this app ID: ${app.id}.")
+      } else {
+        GeneralResponse(0, s"Unable to initialize Event Store for this app ID:" +
+          s" ${app.id}.")
+      }
+      GeneralResponse(data.status * data2.status, data.message + " " + data2.message)
+    } getOrElse {
+      GeneralResponse(0, s"App ${appName} does not exist.")
+    }
+    response
+  }
+
+  def futureAppDelete(appName: String)
+      (implicit ec: ExecutionContext): Future[GeneralResponse] = Future {
+
+    val response = appClient.getByName(appName) map { app =>
+      val data = if (eventClient.remove(app.id)) {
+        Storage.getMetaDataApps.delete(app.id)
+        GeneralResponse(1, s"App successfully deleted")
+      } else {
+        GeneralResponse(0, s"Error removing Event Store for app ${app.name}.");
+      }
+      data
+    } getOrElse {
+      GeneralResponse(0, s"App ${appName} does not exist.")
+    }
+    response
+  }
+
+  def futureTrain(req: TrainRequest)
+      (implicit ec: ExecutionContext): Future[GeneralResponse] = Future {
+    null
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/tools/src/main/scala/org/apache/predictionio/tools/admin/README.md
----------------------------------------------------------------------
diff --git a/tools/src/main/scala/org/apache/predictionio/tools/admin/README.md b/tools/src/main/scala/org/apache/predictionio/tools/admin/README.md
new file mode 100644
index 0000000..475a3de
--- /dev/null
+++ b/tools/src/main/scala/org/apache/predictionio/tools/admin/README.md
@@ -0,0 +1,161 @@
+## Admin API (under development)
+
+### Start Admin HTTP Server without bin/pio (for development)
+
+NOTE: Elasticsearch and HBase should be running first.
+
+```
+$ sbt/sbt "tools/compile"
+$ set -a
+$ source conf/pio-env.sh
+$ set +a
+$ sbt/sbt "tools/run-main io.prediction.tools.admin.AdminRun"
+```
+
+### Unit test (Very minimal)
+
+```
+$ set -a
+$ source conf/pio-env.sh
+$ set +a
+$ sbt/sbt "tools/test-only io.prediction.tools.admin.AdminAPISpec"
+```
+
+### Start with pio command adminserver
+
+```
+$ pio adminserver
+```
+
+The admin server URL defaults to `http://localhost:7071`.
+
+The host and port can be specified with the `--ip` and `--port` options:
+
+```
+$ pio adminserver --ip 127.0.0.1 --port 7080
+```
+
+### Current Supported Commands
+
+#### Check status
+
+```
+$ curl -i http://localhost:7071/
+
+{"status":"alive"}
+```
+
+#### Get list of apps
+
+```
+$ curl -i -X GET http://localhost:7071/cmd/app
+
+{"status":1,"message":"Successful retrieved app list.","apps":[{"id":12,"name":"scratch","keys":[{"key":"gtPgVMIr3uthus1QJWFBcIjNf6d1SNuhaOWQAgdLbOBP1eRWMNIJWl6SkHgI1OoN","appid":12,"events":[]}]},{"id":17,"name":"test-ecommercerec","keys":[{"key":"zPkr6sBwQoBwBjVHK2hsF9u26L38ARSe19QzkdYentuomCtYSuH0vXP5fq7advo4","appid":17,"events":[]}]}]}
+```
+
+#### Create a new app
+
+```
+$ curl -i -X POST http://localhost:7071/cmd/app \
+-H "Content-Type: application/json" \
+-d '{ "name" : "my_new_app" }'
+
+{"status":1,"message":"App created successfully.","id":19,"name":"my_new_app","keys":[{"key":"","appid":19,"events":[]}]}
+```
+
+#### Delete data of app
+
+```
+$ curl -i -X DELETE http://localhost:7071/cmd/app/my_new_app/data
+```
+
+#### Delete app
+
+```
+$ curl -i -X DELETE http://localhost:7071/cmd/app/my_new_app
+
+{"status":1,"message":"App successfully deleted"}
+```
+
+
+## API Doc (To be updated)
+
+### app list
+GET http://localhost:7071/cmd/app
+
+OK Response:
+{
+  "status": <STATUS>,
+  "message": <MESSAGE>,
+  "apps" : [
+    { "name": "<APP_NAME>",
+      "id": <APP_ID>,
+      "accessKey" : "<ACCESS_KEY>" },
+    { "name": "<APP_NAME>",
+      "id": <APP_ID>,
+      "accessKey" : "<ACCESS_KEY>" }, ... ]
+}
+
+Error Response:
+{ "status": <STATUS>, "message" : "<MESSAGE>" }
+
+### app new
+POST http://localhost:7071/cmd/app
+Request Body:
+{ "name": "<APP_NAME>", // required
+  "id": <APP_ID>, // optional
+  "description": "<DESCRIPTION>" } // optional
+
+OK Response:
+{ "status": <STATUS>,
+  "message": <MESSAGE>,
+  "app" : {
+    "name": "<APP_NAME>",
+    "id": <APP_ID>,
+    "accessKey" : "<ACCESS_KEY>" }
+}
+
+Error Response:
+{ "status": <STATUS>, "message" : "<MESSAGE>" }
+
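+For illustration, a hypothetical request that also sets the optional fields
+(the id and description values below are made up):
+
+```
+$ curl -i -X POST http://localhost:7071/cmd/app \
+-H "Content-Type: application/json" \
+-d '{ "name" : "my_new_app", "id" : 25, "description" : "demo app" }'
+```
+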
+### app delete
+DELETE http://localhost:7071/cmd/app/{appName}
+
+OK Response:
+{ "status": <STATUS>, "message" : \u201c<MESSAGE>\u201d}
+
+Error Response:
+{ \u201cstatus\u201d: <STATUS>, \u201cmessage\u201d : \u201c<MESSAGE>\u201d}
+
+### app data-delete
+DELETE http://localhost:7071/cmd/app/{appName}/data
+
+OK Response:
+{ "status": <STATUS>, "message" : \u201c<MESSAGE>\u201d}
+
+Error Response:
+{ \u201cstatus\u201d: <STATUS>, \u201cmessage\u201d : \u201c<MESSAGE>\u201d }
+
+
+### train TBD
+
+#### Training request:
+POST http://localhost:7071/cmd/train
+Request body: TBD
+
+OK Response: TBD
+
+Error Response: TBD
+
+#### Get training status:
+GET http://localhost:7071/cmd/train/{engineInstanceId}
+
+OK Response: TBD. The returned status will be one of:
+INIT
+TRAINING
+DONE
+ERROR
+
+Error Response: TBD
+
+### deploy TBD

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/tools/src/main/scala/org/apache/predictionio/tools/console/AccessKey.scala
----------------------------------------------------------------------
diff --git a/tools/src/main/scala/org/apache/predictionio/tools/console/AccessKey.scala b/tools/src/main/scala/org/apache/predictionio/tools/console/AccessKey.scala
new file mode 100644
index 0000000..a6ab83c
--- /dev/null
+++ b/tools/src/main/scala/org/apache/predictionio/tools/console/AccessKey.scala
@@ -0,0 +1,83 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package org.apache.predictionio.tools.console
+
+import org.apache.predictionio.data.storage
+
+import grizzled.slf4j.Logging
+
+case class AccessKeyArgs(
+  accessKey: String = "",
+  events: Seq[String] = Seq())
+
+object AccessKey extends Logging {
+  def create(ca: ConsoleArgs): Int = {
+    val apps = storage.Storage.getMetaDataApps
+    apps.getByName(ca.app.name) map { app =>
+      val accessKeys = storage.Storage.getMetaDataAccessKeys
+      val accessKey = accessKeys.insert(storage.AccessKey(
+        key = ca.accessKey.accessKey,
+        appid = app.id,
+        events = ca.accessKey.events))
+      accessKey map { k =>
+        info(s"Created new access key: ${k}")
+        0
+      } getOrElse {
+        error(s"Unable to create new access key.")
+        1
+      }
+    } getOrElse {
+      error(s"App ${ca.app.name} does not exist. Aborting.")
+      1
+    }
+  }
+
+  def list(ca: ConsoleArgs): Int = {
+    val keys =
+      if (ca.app.name == "") {
+        storage.Storage.getMetaDataAccessKeys.getAll
+      } else {
+        val apps = storage.Storage.getMetaDataApps
+        apps.getByName(ca.app.name) map { app =>
+          storage.Storage.getMetaDataAccessKeys.getByAppid(app.id)
+        } getOrElse {
+          error(s"App ${ca.app.name} does not exist. Aborting.")
+          return 1
+        }
+      }
+    val title = "Access Key(s)"
+    info(f"$title%64s | App ID | Allowed Event(s)")
+    keys.sortBy(k => k.appid) foreach { k =>
+      val events =
+        if (k.events.size > 0) k.events.sorted.mkString(",") else "(all)"
+      info(f"${k.key}%64s | ${k.appid}%6d | $events%s")
+    }
+    info(s"Finished listing ${keys.size} access key(s).")
+    0
+  }
+
+  def delete(ca: ConsoleArgs): Int = {
+    try {
+      storage.Storage.getMetaDataAccessKeys.delete(ca.accessKey.accessKey)
+      info(s"Deleted access key ${ca.accessKey.accessKey}.")
+      0
+    } catch {
+      case e: Exception =>
+        error(s"Error deleting access key ${ca.accessKey.accessKey}.", e)
+        1
+    }
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/tools/src/main/scala/org/apache/predictionio/tools/console/App.scala
----------------------------------------------------------------------
diff --git a/tools/src/main/scala/org/apache/predictionio/tools/console/App.scala b/tools/src/main/scala/org/apache/predictionio/tools/console/App.scala
new file mode 100644
index 0000000..cc2f36d
--- /dev/null
+++ b/tools/src/main/scala/org/apache/predictionio/tools/console/App.scala
@@ -0,0 +1,537 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package org.apache.predictionio.tools.console
+
+import org.apache.predictionio.data.storage
+
+import grizzled.slf4j.Logging
+
+case class AppArgs(
+  id: Option[Int] = None,
+  name: String = "",
+  channel: String = "",
+  dataDeleteChannel: Option[String] = None,
+  all: Boolean = false,
+  force: Boolean = false,
+  description: Option[String] = None)
+
+object App extends Logging {
+  def create(ca: ConsoleArgs): Int = {
+    val apps = storage.Storage.getMetaDataApps()
+    // get the client up front so we fail fast if storage is not accessible
+    val events = storage.Storage.getLEvents()
+    apps.getByName(ca.app.name) map { app =>
+      error(s"App ${ca.app.name} already exists. Aborting.")
+      1
+    } getOrElse {
+      ca.app.id.map { id =>
+        apps.get(id) map { app =>
+          error(
+            s"App ID ${id} already exists and maps to the app '${app.name}'. " +
+            "Aborting.")
+          return 1
+        }
+      }
+      val appid = apps.insert(storage.App(
+        id = ca.app.id.getOrElse(0),
+        name = ca.app.name,
+        description = ca.app.description))
+      appid map { id =>
+        val dbInit = events.init(id)
+        val r = if (dbInit) {
+          info(s"Initialized Event Store for this app ID: ${id}.")
+          val accessKeys = storage.Storage.getMetaDataAccessKeys
+          val accessKey = accessKeys.insert(storage.AccessKey(
+            key = ca.accessKey.accessKey,
+            appid = id,
+            events = Seq()))
+          accessKey map { k =>
+            info("Created new app:")
+            info(s"      Name: ${ca.app.name}")
+            info(s"        ID: ${id}")
+            info(s"Access Key: ${k}")
+            0
+          } getOrElse {
+            error(s"Unable to create new access key.")
+            1
+          }
+        } else {
+          error(s"Unable to initialize Event Store for this app ID: ${id}.")
+          // revert the meta-data change
+          try {
+            apps.delete(id)
+            0
+          } catch {
+            case e: Exception =>
+              error(s"Failed to revert back the App meta-data change.", e)
+              error(s"The app ${ca.app.name} CANNOT be used!")
+              error(s"Please run 'pio app delete ${ca.app.name}' " +
+                "to delete this app!")
+              1
+          }
+        }
+        events.close()
+        r
+      } getOrElse {
+        error(s"Unable to create new app.")
+        1
+      }
+    }
+  }
+
+  def list(ca: ConsoleArgs): Int = {
+    val apps = storage.Storage.getMetaDataApps.getAll().sortBy(_.name)
+    val accessKeys = storage.Storage.getMetaDataAccessKeys
+    val title = "Name"
+    val ak = "Access Key"
+    info(f"$title%20s |   ID | $ak%64s | Allowed Event(s)")
+    apps foreach { app =>
+      val keys = accessKeys.getByAppid(app.id)
+      keys foreach { k =>
+        val events =
+          if (k.events.size > 0) k.events.sorted.mkString(",") else "(all)"
+        info(f"${app.name}%20s | ${app.id}%4d | ${k.key}%64s | $events%s")
+      }
+    }
+    info(s"Finished listing ${apps.size} app(s).")
+    0
+  }
+
+  def show(ca: ConsoleArgs): Int = {
+    val apps = storage.Storage.getMetaDataApps
+    val accessKeys = storage.Storage.getMetaDataAccessKeys
+    val channels = storage.Storage.getMetaDataChannels
+    apps.getByName(ca.app.name) map { app =>
+      info(s"    App Name: ${app.name}")
+      info(s"      App ID: ${app.id}")
+      info(s" Description: ${app.description.getOrElse("")}")
+      val keys = accessKeys.getByAppid(app.id)
+
+      var firstKey = true
+      keys foreach { k =>
+        val events =
+          if (k.events.size > 0) k.events.sorted.mkString(",") else "(all)"
+        if (firstKey) {
+          info(f"  Access Key: ${k.key}%s | ${events}%s")
+          firstKey = false
+        } else {
+          info(f"              ${k.key}%s | ${events}%s")
+        }
+      }
+
+      val chans = channels.getByAppid(app.id)
+      var firstChan = true
+      val titleName = "Channel Name"
+      val titleID = "Channel ID"
+      chans.foreach { ch =>
+        if (firstChan) {
+          info(f"    Channels: ${titleName}%16s | ${titleID}%10s ")
+          firstChan = false
+        }
+        info(f"              ${ch.name}%16s | ${ch.id}%10s")
+      }
+      0
+    } getOrElse {
+      error(s"App ${ca.app.name} does not exist. Aborting.")
+      1
+    }
+  }
+
+  def delete(ca: ConsoleArgs): Int = {
+    val apps = storage.Storage.getMetaDataApps
+    val accesskeys = storage.Storage.getMetaDataAccessKeys
+    val channels = storage.Storage.getMetaDataChannels
+    val events = storage.Storage.getLEvents()
+    val status = apps.getByName(ca.app.name) map { app =>
+      info(s"The following app (including all channels) will be deleted. Are you sure?")
+      info(s"    App Name: ${app.name}")
+      info(s"      App ID: ${app.id}")
+      info(s" Description: ${app.description.getOrElse("")}")
+      val chans = channels.getByAppid(app.id)
+      var firstChan = true
+      val titleName = "Channel Name"
+      val titleID = "Channel ID"
+      chans.foreach { ch =>
+        if (firstChan) {
+          info(f"    Channels: ${titleName}%16s | ${titleID}%10s ")
+          firstChan = false
+        }
+        info(f"              ${ch.name}%16s | ${ch.id}%10s")
+      }
+
+      val choice = if (ca.app.force) "YES" else readLine("Enter 'YES' to proceed: ")
+      choice match {
+        case "YES" => {
+          // delete channels
+          val delChannelStatus: Seq[Int] = chans.map { ch =>
+            if (events.remove(app.id, Some(ch.id))) {
+              info(s"Removed Event Store of the channel ID: ${ch.id}")
+              try {
+                channels.delete(ch.id)
+                info(s"Deleted channel ${ch.name}")
+                0
+              } catch {
+                case e: Exception =>
+                  error(s"Error deleting channel ${ch.name}.", e)
+                  1
+              }
+            } else {
+              error(s"Error removing Event Store of the channel ID: ${ch.id}.")
+              return 1
+            }
+          }
+
+          if (delChannelStatus.exists(_ != 0)) {
+            error("Error occurred while deleting channels. Aborting.")
+            return 1
+          }
+
+          try {
+            events.remove(app.id)
+            info(s"Removed Event Store for this app ID: ${app.id}")
+          } catch {
+            case e: Exception =>
+              error(s"Error removing Event Store for this app. Aborting.", e)
+              return 1
+          }
+
+          accesskeys.getByAppid(app.id) foreach { key =>
+            try {
+              accesskeys.delete(key.key)
+              info(s"Removed access key ${key.key}")
+            } catch {
+              case e: Exception =>
+                error(s"Error removing access key ${key.key}. Aborting.", e)
+                return 1
+            }
+          }
+
+          try {
+            apps.delete(app.id)
+            info(s"Deleted app ${app.name}.")
+          } catch {
+            case e: Exception =>
+              error(s"Error deleting app ${app.name}. Aborting.", e)
+              return 1
+          }
+
+          info("Done.")
+          0
+        }
+        case _ =>
+          info("Aborted.")
+          0
+      }
+    } getOrElse {
+      error(s"App ${ca.app.name} does not exist. Aborting.")
+      1
+    }
+    events.close()
+    status
+  }
+
+  def dataDelete(ca: ConsoleArgs): Int = {
+    if (ca.app.all) {
+      dataDeleteAll(ca)
+    } else {
+      dataDeleteOne(ca)
+    }
+  }
+
+  def dataDeleteOne(ca: ConsoleArgs): Int = {
+    val apps = storage.Storage.getMetaDataApps
+    val channels = storage.Storage.getMetaDataChannels
+    apps.getByName(ca.app.name) map { app =>
+
+      val channelId = ca.app.dataDeleteChannel.map { ch =>
+        val channelMap = channels.getByAppid(app.id).map(c => (c.name, c.id)).toMap
+        if (!channelMap.contains(ch)) {
+          error(s"Unable to delete data for channel.")
+          error(s"Channel ${ch} doesn't exist.")
+          return 1
+        }
+
+        channelMap(ch)
+      }
+
+      if (channelId.isDefined) {
+        info(s"Data of the following channel will be deleted. Are you sure?")
+        info(s"Channel Name: ${ca.app.dataDeleteChannel.get}")
+        info(s"  Channel ID: ${channelId.get}")
+        info(s"    App Name: ${app.name}")
+        info(s"      App ID: ${app.id}")
+        info(s" Description: ${app.description}")
+      } else {
+        info(s"Data of the following app (default channel only) will be deleted. Are you sure?")
+        info(s"    App Name: ${app.name}")
+        info(s"      App ID: ${app.id}")
+        info(s" Description: ${app.description}")
+      }
+
+      val choice = if (ca.app.force) "YES" else readLine("Enter 'YES' to proceed: ")
+
+      choice match {
+        case "YES" => {
+          val events = storage.Storage.getLEvents()
+          // remove table
+          val r1 = if (events.remove(app.id, channelId)) {
+            if (channelId.isDefined) {
+              info(s"Removed Event Store for this channel ID: ${channelId.get}")
+            } else {
+              info(s"Removed Event Store for this app ID: ${app.id}")
+            }
+            0
+          } else {
+            if (channelId.isDefined) {
+              error(s"Error removing Event Store for this channel.")
+            } else {
+              error(s"Error removing Event Store for this app.")
+            }
+            1
+          }
+          // re-create table
+          val dbInit = events.init(app.id, channelId)
+          val r2 = if (dbInit) {
+            if (channelId.isDefined) {
+              info(s"Initialized Event Store for this channel ID: ${channelId.get}.")
+            } else {
+              info(s"Initialized Event Store for this app ID: ${app.id}.")
+            }
+            0
+          } else {
+            if (channelId.isDefined) {
+              error(s"Unable to initialize Event Store for this channel ID:" +
+                s" ${channelId.get}.")
+            } else {
+              error(s"Unable to initialize Event Store for this appId:" +
+                s" ${app.id}.")
+            }
+            1
+          }
+          events.close()
+          info("Done.")
+          r1 + r2
+        }
+        case _ =>
+          info("Aborted.")
+          0
+      }
+    } getOrElse {
+      error(s"App ${ca.app.name} does not exist. Aborting.")
+      1
+    }
+  }
+
+  def dataDeleteAll(ca: ConsoleArgs): Int = {
+    val apps = storage.Storage.getMetaDataApps
+    val channels = storage.Storage.getMetaDataChannels
+    val events = storage.Storage.getLEvents()
+    val status = apps.getByName(ca.app.name) map { app =>
+      info(s"All data of the app (including default and all channels) will be deleted." +
+        " Are you sure?")
+      info(s"    App Name: ${app.name}")
+      info(s"      App ID: ${app.id}")
+      info(s" Description: ${app.description}")
+      val chans = channels.getByAppid(app.id)
+      var firstChan = true
+      val titleName = "Channel Name"
+      val titleID = "Channel ID"
+      chans.foreach { ch =>
+        if (firstChan) {
+          info(f"    Channels: ${titleName}%16s | ${titleID}%10s ")
+          firstChan = false
+        }
+        info(f"              ${ch.name}%16s | ${ch.id}%10s")
+      }
+
+      val choice = if (ca.app.force) "YES" else readLine("Enter 'YES' to proceed: ")
+      choice match {
+        case "YES" => {
+          // delete channels
+          val delChannelStatus: Seq[Int] = chans.map { ch =>
+            val r1 = if (events.remove(app.id, Some(ch.id))) {
+              info(s"Removed Event Store of the channel ID: ${ch.id}")
+              0
+            } else {
+              error(s"Error removing Event Store of the channel ID: ${ch.id}.")
+              1
+            }
+            // re-create table
+            val dbInit = events.init(app.id, Some(ch.id))
+            val r2 = if (dbInit) {
+              info(s"Initialized Event Store of the channel ID: ${ch.id}")
+              0
+            } else {
+              error(s"Unable to initialize Event Store of the channel ID: ${ch.id}.")
+              1
+            }
+            r1 + r2
+          }
+
+          if (!delChannelStatus.exists(_ != 0)) {
+            val r1 = if (events.remove(app.id)) {
+              info(s"Removed Event Store for this app ID: ${app.id}")
+              0
+            } else {
+              error(s"Error removing Event Store for this app.")
+              1
+            }
+
+            val dbInit = events.init(app.id)
+            val r2 = if (dbInit) {
+              info(s"Initialized Event Store for this app ID: ${app.id}.")
+              0
+            } else {
+              error(s"Unable to initialize Event Store for this appId: ${app.id}.")
+              1
+            }
+            info("Done.")
+            r1 + r2
+          } else 1
+        }
+        case _ =>
+          info("Aborted.")
+          0
+      }
+    } getOrElse {
+      error(s"App ${ca.app.name} does not exist. Aborting.")
+      1
+    }
+    events.close()
+    status
+  }
+
+  def channelNew(ca: ConsoleArgs): Int = {
+    val apps = storage.Storage.getMetaDataApps
+    val channels = storage.Storage.getMetaDataChannels
+    val events = storage.Storage.getLEvents()
+    val newChannel = ca.app.channel
+    val status = apps.getByName(ca.app.name) map { app =>
+      val channelMap = channels.getByAppid(app.id).map(c => (c.name, c.id)).toMap
+      if (channelMap.contains(newChannel)) {
+        error(s"Unable to create new channel.")
+        error(s"Channel ${newChannel} already exists.")
+        1
+      } else if (!storage.Channel.isValidName(newChannel)) {
+        error(s"Unable to create new channel.")
+        error(s"The channel name ${newChannel} is invalid.")
+        error(s"${storage.Channel.nameConstraint}")
+        1
+      } else {
+
+        val channelId = channels.insert(storage.Channel(
+          id = 0, // new id will be assigned
+          appid = app.id,
+          name = newChannel
+        ))
+        channelId.map { chanId =>
+          info(s"Updated Channel meta-data.")
+          // initialize storage
+          val dbInit = events.init(app.id, Some(chanId))
+          if (dbInit) {
+            info(s"Initialized Event Store for the channel: ${newChannel}.")
+            info(s"Created new channel:")
+            info(s"    Channel Name: ${newChannel}")
+            info(s"      Channel ID: ${chanId}")
+            info(s"          App ID: ${app.id}")
+            0
+          } else {
+            error(s"Unable to create new channel.")
+            error(s"Failed to initalize Event Store.")
+            // reverted back the meta data
+            try {
+              channels.delete(chanId)
+              0
+            } catch {
+              case e: Exception =>
+                error(s"Failed to revert back the Channel meta-data change.", e)
+                error(s"The channel ${newChannel} CANNOT be used!")
+                error(s"Please run 'pio app channel-delete ${app.name} ${newChannel}' " +
+                  "to delete this channel!")
+                1
+            }
+          }
+        }.getOrElse {
+          error(s"Unable to create new channel.")
+          error(s"Failed to update Channel meta-data.")
+          1
+        }
+      }
+    } getOrElse {
+      error(s"App ${ca.app.name} does not exist. Aborting.")
+      1
+    }
+    events.close()
+    status
+  }
+
+  def channelDelete(ca: ConsoleArgs): Int = {
+    val apps = storage.Storage.getMetaDataApps
+    val channels = storage.Storage.getMetaDataChannels
+    val events = storage.Storage.getLEvents()
+    val deleteChannel = ca.app.channel
+    val status = apps.getByName(ca.app.name) map { app =>
+      val channelMap = channels.getByAppid(app.id).map(c => (c.name, c.id)).toMap
+      if (!channelMap.contains(deleteChannel)) {
+        error(s"Unable to delete channel.")
+        error(s"Channel ${deleteChannel} doesn't exist.")
+        1
+      } else {
+        info(s"The following channel will be deleted. Are you sure?")
+        info(s"    Channel Name: ${deleteChannel}")
+        info(s"      Channel ID: ${channelMap(deleteChannel)}")
+        info(s"        App Name: ${app.name}")
+        info(s"          App ID: ${app.id}")
+        val choice = if (ca.app.force) "YES" else readLine("Enter 'YES' to proceed: ")
+        choice match {
+          case "YES" => {
+            // NOTE: remove storage first before remove meta data (in case remove storage failed)
+            val dbRemoved = events.remove(app.id, Some(channelMap(deleteChannel)))
+            if (dbRemoved) {
+              info(s"Removed Event Store for this channel: ${deleteChannel}")
+              try {
+                channels.delete(channelMap(deleteChannel))
+                info(s"Deleted channel: ${deleteChannel}.")
+                0
+              } catch {
+                case e: Exception =>
+                  error(s"Unable to delete channel.", e)
+                  error(s"Failed to update Channel meta-data.")
+                  error(s"The channel ${deleteChannel} CANNOT be used!")
+                  error(s"Please run 'pio app channel-delete ${app.name} ${deleteChannel}' " +
+                    "to delete this channel again!")
+                  1
+              }
+            } else {
+              error(s"Unable to delete channel.")
+              error(s"Error removing Event Store for this channel.")
+              1
+            }
+          }
+          case _ =>
+            info("Aborted.")
+            0
+        }
+      }
+    } getOrElse {
+      error(s"App ${ca.app.name} does not exist. Aborting.")
+      1
+    }
+    events.close()
+    status
+  }
+
+}


[10/34] incubator-predictionio git commit: rename all except examples

Posted by do...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/data/src/main/scala/org/apache/predictionio/data/storage/elasticsearch/ESChannels.scala
----------------------------------------------------------------------
diff --git a/data/src/main/scala/org/apache/predictionio/data/storage/elasticsearch/ESChannels.scala b/data/src/main/scala/org/apache/predictionio/data/storage/elasticsearch/ESChannels.scala
new file mode 100644
index 0000000..cad72e2
--- /dev/null
+++ b/data/src/main/scala/org/apache/predictionio/data/storage/elasticsearch/ESChannels.scala
@@ -0,0 +1,114 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package org.apache.predictionio.data.storage.elasticsearch
+
+import grizzled.slf4j.Logging
+import org.apache.predictionio.data.storage.Channel
+import org.apache.predictionio.data.storage.Channels
+import org.apache.predictionio.data.storage.StorageClientConfig
+import org.elasticsearch.ElasticsearchException
+import org.elasticsearch.client.Client
+import org.elasticsearch.index.query.FilterBuilders.termFilter
+import org.json4s.DefaultFormats
+import org.json4s.JsonDSL._
+import org.json4s.native.JsonMethods._
+import org.json4s.native.Serialization.read
+import org.json4s.native.Serialization.write
+
+class ESChannels(client: Client, config: StorageClientConfig, index: String)
+    extends Channels with Logging {
+
+  implicit val formats = DefaultFormats.lossless
+  private val estype = "channels"
+  private val seq = new ESSequences(client, config, index)
+  private val seqName = "channels"
+
+  val indices = client.admin.indices
+  val indexExistResponse = indices.prepareExists(index).get
+  if (!indexExistResponse.isExists) {
+    indices.prepareCreate(index).get
+  }
+  val typeExistResponse = indices.prepareTypesExists(index).setTypes(estype).get
+  if (!typeExistResponse.isExists) {
+    val json =
+      (estype ->
+        ("properties" ->
+          ("name" -> ("type" -> "string") ~ ("index" -> "not_analyzed"))))
+    indices.preparePutMapping(index).setType(estype).
+      setSource(compact(render(json))).get
+  }
+
+  def insert(channel: Channel): Option[Int] = {
+    val id =
+      if (channel.id == 0) {
+        var roll = seq.genNext(seqName)
+        while (get(roll).isDefined) roll = seq.genNext(seqName)
+        roll
+      } else channel.id
+
+    val realChannel = channel.copy(id = id)
+    if (update(realChannel)) Some(id) else None
+  }
+
+  def get(id: Int): Option[Channel] = {
+    try {
+      val response = client.prepareGet(
+        index,
+        estype,
+        id.toString).get()
+      Some(read[Channel](response.getSourceAsString))
+    } catch {
+      case e: ElasticsearchException =>
+        error(e.getMessage)
+        None
+      case e: NullPointerException => None
+    }
+  }
+
+  def getByAppid(appid: Int): Seq[Channel] = {
+    try {
+      val builder = client.prepareSearch(index).setTypes(estype).
+        setPostFilter(termFilter("appid", appid))
+      ESUtils.getAll[Channel](client, builder)
+    } catch {
+      case e: ElasticsearchException =>
+        error(e.getMessage)
+        Seq[Channel]()
+    }
+  }
+
+  def update(channel: Channel): Boolean = {
+    try {
+      client.prepareIndex(index, estype, channel.id.toString).
+        setSource(write(channel)).get()
+      true
+    } catch {
+      case e: ElasticsearchException =>
+        error(e.getMessage)
+        false
+    }
+  }
+
+  def delete(id: Int): Unit = {
+    try {
+      client.prepareDelete(index, estype, id.toString).get
+    } catch {
+      case e: ElasticsearchException =>
+        error(e.getMessage)
+    }
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/data/src/main/scala/org/apache/predictionio/data/storage/elasticsearch/ESEngineInstances.scala
----------------------------------------------------------------------
diff --git a/data/src/main/scala/org/apache/predictionio/data/storage/elasticsearch/ESEngineInstances.scala b/data/src/main/scala/org/apache/predictionio/data/storage/elasticsearch/ESEngineInstances.scala
new file mode 100644
index 0000000..367e66f
--- /dev/null
+++ b/data/src/main/scala/org/apache/predictionio/data/storage/elasticsearch/ESEngineInstances.scala
@@ -0,0 +1,155 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package org.apache.predictionio.data.storage.elasticsearch
+
+import grizzled.slf4j.Logging
+import org.apache.predictionio.data.storage.EngineInstance
+import org.apache.predictionio.data.storage.EngineInstanceSerializer
+import org.apache.predictionio.data.storage.EngineInstances
+import org.apache.predictionio.data.storage.StorageClientConfig
+import org.elasticsearch.ElasticsearchException
+import org.elasticsearch.client.Client
+import org.elasticsearch.index.query.FilterBuilders._
+import org.elasticsearch.search.sort.SortOrder
+import org.json4s.JsonDSL._
+import org.json4s._
+import org.json4s.native.JsonMethods._
+import org.json4s.native.Serialization.read
+import org.json4s.native.Serialization.write
+
+class ESEngineInstances(client: Client, config: StorageClientConfig, index: String)
+  extends EngineInstances with Logging {
+  implicit val formats = DefaultFormats + new EngineInstanceSerializer
+  private val estype = "engine_instances"
+
+  val indices = client.admin.indices
+  val indexExistResponse = indices.prepareExists(index).get
+  if (!indexExistResponse.isExists) {
+    indices.prepareCreate(index).get
+  }
+  val typeExistResponse = indices.prepareTypesExists(index).setTypes(estype).get
+  if (!typeExistResponse.isExists) {
+    val json =
+      (estype ->
+        ("properties" ->
+          ("status" -> ("type" -> "string") ~ ("index" -> "not_analyzed")) ~
+          ("startTime" -> ("type" -> "date")) ~
+          ("endTime" -> ("type" -> "date")) ~
+          ("engineId" -> ("type" -> "string") ~ ("index" -> "not_analyzed")) ~
+          ("engineVersion" ->
+            ("type" -> "string") ~ ("index" -> "not_analyzed")) ~
+          ("engineVariant" ->
+            ("type" -> "string") ~ ("index" -> "not_analyzed")) ~
+          ("engineFactory" ->
+            ("type" -> "string") ~ ("index" -> "not_analyzed")) ~
+          ("batch" ->
+            ("type" -> "string") ~ ("index" -> "not_analyzed")) ~
+          ("dataSourceParams" ->
+            ("type" -> "string") ~ ("index" -> "not_analyzed")) ~
+          ("preparatorParams" ->
+            ("type" -> "string") ~ ("index" -> "not_analyzed")) ~
+          ("algorithmsParams" ->
+            ("type" -> "string") ~ ("index" -> "not_analyzed")) ~
+          ("servingParams" ->
+            ("type" -> "string") ~ ("index" -> "not_analyzed")) ~
+          ("status" -> ("type" -> "string") ~ ("index" -> "not_analyzed"))))
+    indices.preparePutMapping(index).setType(estype).
+      setSource(compact(render(json))).get
+  }
+
+  def insert(i: EngineInstance): String = {
+    try {
+      val response = client.prepareIndex(index, estype).
+        setSource(write(i)).get
+      response.getId
+    } catch {
+      case e: ElasticsearchException =>
+        error(e.getMessage)
+        ""
+    }
+  }
+
+  def get(id: String): Option[EngineInstance] = {
+    try {
+      val response = client.prepareGet(index, estype, id).get
+      if (response.isExists) {
+        Some(read[EngineInstance](response.getSourceAsString))
+      } else {
+        None
+      }
+    } catch {
+      case e: ElasticsearchException =>
+        error(e.getMessage)
+        None
+    }
+  }
+
+  def getAll(): Seq[EngineInstance] = {
+    try {
+      val builder = client.prepareSearch(index).setTypes(estype)
+      ESUtils.getAll[EngineInstance](client, builder)
+    } catch {
+      case e: ElasticsearchException =>
+        error(e.getMessage)
+        Seq()
+    }
+  }
+
+  def getCompleted(
+      engineId: String,
+      engineVersion: String,
+      engineVariant: String): Seq[EngineInstance] = {
+    try {
+      val builder = client.prepareSearch(index).setTypes(estype).setPostFilter(
+        andFilter(
+          termFilter("status", "COMPLETED"),
+          termFilter("engineId", engineId),
+          termFilter("engineVersion", engineVersion),
+          termFilter("engineVariant", engineVariant))).
+        addSort("startTime", SortOrder.DESC)
+      ESUtils.getAll[EngineInstance](client, builder)
+    } catch {
+      case e: ElasticsearchException =>
+        error(e.getMessage)
+        Seq()
+    }
+  }
+
+  def getLatestCompleted(
+      engineId: String,
+      engineVersion: String,
+      engineVariant: String): Option[EngineInstance] =
+    getCompleted(
+      engineId,
+      engineVersion,
+      engineVariant).headOption
+
+  def update(i: EngineInstance): Unit = {
+    try {
+      client.prepareUpdate(index, estype, i.id).setDoc(write(i)).get
+    } catch {
+      case e: ElasticsearchException => error(e.getMessage)
+    }
+  }
+
+  def delete(id: String): Unit = {
+    try {
+      client.prepareDelete(index, estype, id).get
+    } catch {
+      case e: ElasticsearchException => error(e.getMessage)
+    }
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/data/src/main/scala/org/apache/predictionio/data/storage/elasticsearch/ESEngineManifests.scala
----------------------------------------------------------------------
diff --git a/data/src/main/scala/org/apache/predictionio/data/storage/elasticsearch/ESEngineManifests.scala b/data/src/main/scala/org/apache/predictionio/data/storage/elasticsearch/ESEngineManifests.scala
new file mode 100644
index 0000000..f357d44
--- /dev/null
+++ b/data/src/main/scala/org/apache/predictionio/data/storage/elasticsearch/ESEngineManifests.scala
@@ -0,0 +1,81 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package org.apache.predictionio.data.storage.elasticsearch
+
+import grizzled.slf4j.Logging
+import org.apache.predictionio.data.storage.EngineManifestSerializer
+import org.apache.predictionio.data.storage.StorageClientConfig
+import org.apache.predictionio.data.storage.EngineManifest
+import org.apache.predictionio.data.storage.EngineManifests
+import org.elasticsearch.ElasticsearchException
+import org.elasticsearch.client.Client
+import org.json4s._
+import org.json4s.native.Serialization.read
+import org.json4s.native.Serialization.write
+
+class ESEngineManifests(client: Client, config: StorageClientConfig, index: String)
+  extends EngineManifests with Logging {
+  implicit val formats = DefaultFormats + new EngineManifestSerializer
+  private val estype = "engine_manifests"
+  private def esid(id: String, version: String) = s"$id $version"
+
+  def insert(engineManifest: EngineManifest): Unit = {
+    val json = write(engineManifest)
+    val response = client.prepareIndex(
+      index,
+      estype,
+      esid(engineManifest.id, engineManifest.version)).
+      setSource(json).execute().actionGet()
+  }
+
+  def get(id: String, version: String): Option[EngineManifest] = {
+    try {
+      val response = client.prepareGet(index, estype, esid(id, version)).
+        execute().actionGet()
+      if (response.isExists) {
+        Some(read[EngineManifest](response.getSourceAsString))
+      } else {
+        None
+      }
+    } catch {
+      case e: ElasticsearchException =>
+        error(e.getMessage)
+        None
+    }
+  }
+
+  def getAll(): Seq[EngineManifest] = {
+    try {
+      val builder = client.prepareSearch(index).setTypes(estype)
+      ESUtils.getAll[EngineManifest](client, builder)
+    } catch {
+      case e: ElasticsearchException =>
+        error(e.getMessage)
+        Seq()
+    }
+  }
+
+  def update(engineManifest: EngineManifest, upsert: Boolean = false): Unit =
+    insert(engineManifest)
+
+  def delete(id: String, version: String): Unit = {
+    try {
+      client.prepareDelete(index, estype, esid(id, version)).execute().actionGet()
+    } catch {
+      case e: ElasticsearchException => error(e.getMessage)
+    }
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/data/src/main/scala/org/apache/predictionio/data/storage/elasticsearch/ESEvaluationInstances.scala
----------------------------------------------------------------------
diff --git a/data/src/main/scala/org/apache/predictionio/data/storage/elasticsearch/ESEvaluationInstances.scala b/data/src/main/scala/org/apache/predictionio/data/storage/elasticsearch/ESEvaluationInstances.scala
new file mode 100644
index 0000000..c78378f
--- /dev/null
+++ b/data/src/main/scala/org/apache/predictionio/data/storage/elasticsearch/ESEvaluationInstances.scala
@@ -0,0 +1,133 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package org.apache.predictionio.data.storage.elasticsearch
+
+import grizzled.slf4j.Logging
+import org.apache.predictionio.data.storage.EvaluationInstance
+import org.apache.predictionio.data.storage.EvaluationInstanceSerializer
+import org.apache.predictionio.data.storage.EvaluationInstances
+import org.apache.predictionio.data.storage.StorageClientConfig
+import org.elasticsearch.ElasticsearchException
+import org.elasticsearch.client.Client
+import org.elasticsearch.index.query.FilterBuilders._
+import org.elasticsearch.search.sort.SortOrder
+import org.json4s.JsonDSL._
+import org.json4s._
+import org.json4s.native.JsonMethods._
+import org.json4s.native.Serialization.read
+import org.json4s.native.Serialization.write
+
+class ESEvaluationInstances(client: Client, config: StorageClientConfig, index: String)
+  extends EvaluationInstances with Logging {
+  implicit val formats = DefaultFormats + new EvaluationInstanceSerializer
+  private val estype = "evaluation_instances"
+
+  val indices = client.admin.indices
+  val indexExistResponse = indices.prepareExists(index).get
+  if (!indexExistResponse.isExists) {
+    indices.prepareCreate(index).get
+  }
+  val typeExistResponse = indices.prepareTypesExists(index).setTypes(estype).get
+  if (!typeExistResponse.isExists) {
+    val json =
+      (estype ->
+        ("properties" ->
+          ("status" -> ("type" -> "string") ~ ("index" -> "not_analyzed")) ~
+          ("startTime" -> ("type" -> "date")) ~
+          ("endTime" -> ("type" -> "date")) ~
+          ("evaluationClass" ->
+            ("type" -> "string") ~ ("index" -> "not_analyzed")) ~
+          ("engineParamsGeneratorClass" ->
+            ("type" -> "string") ~ ("index" -> "not_analyzed")) ~
+          ("batch" ->
+            ("type" -> "string") ~ ("index" -> "not_analyzed")) ~
+          ("evaluatorResults" ->
+            ("type" -> "string") ~ ("index" -> "no")) ~
+          ("evaluatorResultsHTML" ->
+            ("type" -> "string") ~ ("index" -> "no")) ~
+          ("evaluatorResultsJSON" ->
+            ("type" -> "string") ~ ("index" -> "no"))))
+    indices.preparePutMapping(index).setType(estype).
+      setSource(compact(render(json))).get
+  }
+
+  def insert(i: EvaluationInstance): String = {
+    try {
+      val response = client.prepareIndex(index, estype).
+        setSource(write(i)).get
+      response.getId
+    } catch {
+      case e: ElasticsearchException =>
+        error(e.getMessage)
+        ""
+    }
+  }
+
+  def get(id: String): Option[EvaluationInstance] = {
+    try {
+      val response = client.prepareGet(index, estype, id).get
+      if (response.isExists) {
+        Some(read[EvaluationInstance](response.getSourceAsString))
+      } else {
+        None
+      }
+    } catch {
+      case e: ElasticsearchException =>
+        error(e.getMessage)
+        None
+    }
+  }
+
+  def getAll(): Seq[EvaluationInstance] = {
+    try {
+      val builder = client.prepareSearch(index).setTypes(estype)
+      ESUtils.getAll[EvaluationInstance](client, builder)
+    } catch {
+      case e: ElasticsearchException =>
+        error(e.getMessage)
+        Seq()
+    }
+  }
+
+  def getCompleted(): Seq[EvaluationInstance] = {
+    try {
+      val builder = client.prepareSearch(index).setTypes(estype).setPostFilter(
+        termFilter("status", "EVALCOMPLETED")).
+        addSort("startTime", SortOrder.DESC)
+      ESUtils.getAll[EvaluationInstance](client, builder)
+    } catch {
+      case e: ElasticsearchException =>
+        error(e.getMessage)
+        Seq()
+    }
+  }
+
+  def update(i: EvaluationInstance): Unit = {
+    try {
+      client.prepareUpdate(index, estype, i.id).setDoc(write(i)).get
+    } catch {
+      case e: ElasticsearchException => error(e.getMessage)
+    }
+  }
+
+  def delete(id: String): Unit = {
+    try {
+      client.prepareDelete(index, estype, id).get
+    } catch {
+      case e: ElasticsearchException => error(e.getMessage)
+    }
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/data/src/main/scala/org/apache/predictionio/data/storage/elasticsearch/ESSequences.scala
----------------------------------------------------------------------
diff --git a/data/src/main/scala/org/apache/predictionio/data/storage/elasticsearch/ESSequences.scala b/data/src/main/scala/org/apache/predictionio/data/storage/elasticsearch/ESSequences.scala
new file mode 100644
index 0000000..78d43ac
--- /dev/null
+++ b/data/src/main/scala/org/apache/predictionio/data/storage/elasticsearch/ESSequences.scala
@@ -0,0 +1,61 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package org.apache.predictionio.data.storage.elasticsearch
+
+import grizzled.slf4j.Logging
+import org.apache.predictionio.data.storage.StorageClientConfig
+import org.elasticsearch.ElasticsearchException
+import org.elasticsearch.client.Client
+import org.json4s.JsonDSL._
+import org.json4s._
+import org.json4s.native.JsonMethods._
+
+class ESSequences(client: Client, config: StorageClientConfig, index: String) extends Logging {
+  implicit val formats = DefaultFormats
+  private val estype = "sequences"
+
+  val indices = client.admin.indices
+  val indexExistResponse = indices.prepareExists(index).get
+  if (!indexExistResponse.isExists) {
+    // val settingsJson =
+    //   ("number_of_shards" -> 1) ~
+    //   ("auto_expand_replicas" -> "0-all")
+    indices.prepareCreate(index).get
+  }
+  val typeExistResponse = indices.prepareTypesExists(index).setTypes(estype).get
+  if (!typeExistResponse.isExists) {
+    val mappingJson =
+      (estype ->
+        ("_source" -> ("enabled" -> 0)) ~
+        ("_all" -> ("enabled" -> 0)) ~
+        ("_type" -> ("index" -> "no")) ~
+        ("enabled" -> 0))
+    indices.preparePutMapping(index).setType(estype).
+      setSource(compact(render(mappingJson))).get
+  }
+
+  def genNext(name: String): Int = {
+    try {
+      val response = client.prepareIndex(index, estype, name).
+        setSource(compact(render("n" -> name))).get
+      response.getVersion().toInt
+    } catch {
+      case e: ElasticsearchException =>
+        error(e.getMessage)
+        0
+    }
+  }
+}

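A note on the trick above: ESSequences piggybacks on Elasticsearch document
versioning. Re-indexing the same document id bumps its version, so each
genNext(name) call yields the next integer for that sequence name. A minimal
sketch of the allocation loop the meta-data classes build on top of it
(mirroring ESChannels.insert earlier in this commit; `lookup` stands in for
that class's get method):

```scala
import org.apache.predictionio.data.storage.Channel
import org.apache.predictionio.data.storage.elasticsearch.ESSequences

// Allocate the next unused integer id for a channel: take the next counter
// value and skip over any ids already taken by explicitly assigned inserts.
def nextFreeId(seq: ESSequences, seqName: String)
              (lookup: Int => Option[Channel]): Int = {
  var roll = seq.genNext(seqName)
  while (lookup(roll).isDefined) roll = seq.genNext(seqName)
  roll
}
```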
http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/data/src/main/scala/org/apache/predictionio/data/storage/elasticsearch/ESUtils.scala
----------------------------------------------------------------------
diff --git a/data/src/main/scala/org/apache/predictionio/data/storage/elasticsearch/ESUtils.scala b/data/src/main/scala/org/apache/predictionio/data/storage/elasticsearch/ESUtils.scala
new file mode 100644
index 0000000..8410458
--- /dev/null
+++ b/data/src/main/scala/org/apache/predictionio/data/storage/elasticsearch/ESUtils.scala
@@ -0,0 +1,45 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package org.apache.predictionio.data.storage.elasticsearch
+
+import org.elasticsearch.action.search.SearchRequestBuilder
+import org.elasticsearch.client.Client
+import org.elasticsearch.common.unit.TimeValue
+import org.json4s.Formats
+import org.json4s.native.Serialization.read
+
+import scala.collection.mutable.ArrayBuffer
+
+object ESUtils {
+  val scrollLife = new TimeValue(60000)
+
+  def getAll[T : Manifest](
+      client: Client,
+      builder: SearchRequestBuilder)(
+      implicit formats: Formats): Seq[T] = {
+    val results = ArrayBuffer[T]()
+    var response = builder.setScroll(scrollLife).get
+    var hits = response.getHits().hits()
+    results ++= hits.map(h => read[T](h.getSourceAsString))
+    while (hits.size > 0) {
+      response = client.prepareSearchScroll(response.getScrollId).
+        setScroll(scrollLife).get
+      hits = response.getHits().hits()
+      results ++= hits.map(h => read[T](h.getSourceAsString))
+    }
+    results
+  }
+}

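The helper above keeps issuing scroll requests until a batch comes back
empty, accumulating every hit. A minimal usage sketch, matching how the
storage classes in this commit call it from their getAll() methods
(`client` and `index` are assumed to be in scope as in those classes):

```scala
import org.apache.predictionio.data.storage.{EngineInstance, EngineInstanceSerializer}
import org.json4s.DefaultFormats

// Formats must know how to deserialize the target type, exactly as in
// ESEngineInstances above.
implicit val formats = DefaultFormats + new EngineInstanceSerializer

// Fetch every engine instance document in one Seq via scrolling.
val builder = client.prepareSearch(index).setTypes("engine_instances")
val all: Seq[EngineInstance] = ESUtils.getAll[EngineInstance](client, builder)
```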
http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/data/src/main/scala/org/apache/predictionio/data/storage/elasticsearch/StorageClient.scala
----------------------------------------------------------------------
diff --git a/data/src/main/scala/org/apache/predictionio/data/storage/elasticsearch/StorageClient.scala b/data/src/main/scala/org/apache/predictionio/data/storage/elasticsearch/StorageClient.scala
new file mode 100644
index 0000000..8b57620
--- /dev/null
+++ b/data/src/main/scala/org/apache/predictionio/data/storage/elasticsearch/StorageClient.scala
@@ -0,0 +1,47 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package org.apache.predictionio.data.storage.elasticsearch
+
+import grizzled.slf4j.Logging
+import org.apache.predictionio.data.storage.BaseStorageClient
+import org.apache.predictionio.data.storage.StorageClientConfig
+import org.apache.predictionio.data.storage.StorageClientException
+import org.elasticsearch.client.transport.TransportClient
+import org.elasticsearch.common.settings.ImmutableSettings
+import org.elasticsearch.common.transport.InetSocketTransportAddress
+import org.elasticsearch.transport.ConnectTransportException
+
+class StorageClient(val config: StorageClientConfig) extends BaseStorageClient
+    with Logging {
+  override val prefix = "ES"
+  val client = try {
+    val hosts = config.properties.get("HOSTS").
+      map(_.split(",").toSeq).getOrElse(Seq("localhost"))
+    val ports = config.properties.get("PORTS").
+      map(_.split(",").toSeq.map(_.toInt)).getOrElse(Seq(9300))
+    val settings = ImmutableSettings.settingsBuilder()
+      .put("cluster.name", config.properties.getOrElse("CLUSTERNAME", "elasticsearch"))
+    val transportClient = new TransportClient(settings)
+    (hosts zip ports) foreach { hp =>
+      transportClient.addTransportAddress(
+        new InetSocketTransportAddress(hp._1, hp._2))
+    }
+    transportClient
+  } catch {
+    case e: ConnectTransportException =>
+      throw new StorageClientException(e.getMessage, e)
+  }
+}

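For reference, the client above resolves its connection details from
config.properties; a hypothetical set of values for a two-node cluster
(the keys are the ones read in the code, the host names are made up):

```scala
// "HOSTS" and "PORTS" are comma-separated and are zipped pairwise into
// transport addresses; "CLUSTERNAME" defaults to "elasticsearch".
val props = Map(
  "CLUSTERNAME" -> "elasticsearch",
  "HOSTS"       -> "es1.example.com,es2.example.com",
  "PORTS"       -> "9300,9300")
```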
http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/data/src/main/scala/org/apache/predictionio/data/storage/elasticsearch/package.scala
----------------------------------------------------------------------
diff --git a/data/src/main/scala/org/apache/predictionio/data/storage/elasticsearch/package.scala b/data/src/main/scala/org/apache/predictionio/data/storage/elasticsearch/package.scala
new file mode 100644
index 0000000..404bdda
--- /dev/null
+++ b/data/src/main/scala/org/apache/predictionio/data/storage/elasticsearch/package.scala
@@ -0,0 +1,22 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package org.apache.predictionio.data.storage
+
+/** Elasticsearch implementation of storage traits, supporting meta data only
+  *
+  * @group Implementation
+  */
+package object elasticsearch {}

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/data/src/main/scala/org/apache/predictionio/data/storage/hbase/HBEventsUtil.scala
----------------------------------------------------------------------
diff --git a/data/src/main/scala/org/apache/predictionio/data/storage/hbase/HBEventsUtil.scala b/data/src/main/scala/org/apache/predictionio/data/storage/hbase/HBEventsUtil.scala
new file mode 100644
index 0000000..47f86a1
--- /dev/null
+++ b/data/src/main/scala/org/apache/predictionio/data/storage/hbase/HBEventsUtil.scala
@@ -0,0 +1,412 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package org.apache.predictionio.data.storage.hbase
+
+import org.apache.predictionio.data.storage.Event
+import org.apache.predictionio.data.storage.EventValidation
+import org.apache.predictionio.data.storage.DataMap
+
+import org.apache.hadoop.hbase.client.Result
+import org.apache.hadoop.hbase.client.Put
+import org.apache.hadoop.hbase.client.Scan
+import org.apache.hadoop.hbase.util.Bytes
+import org.apache.hadoop.hbase.filter.FilterList
+import org.apache.hadoop.hbase.filter.RegexStringComparator
+import org.apache.hadoop.hbase.filter.SingleColumnValueFilter
+import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp
+import org.apache.hadoop.hbase.filter.BinaryComparator
+import org.apache.hadoop.hbase.filter.QualifierFilter
+import org.apache.hadoop.hbase.filter.SkipFilter
+
+import org.json4s.DefaultFormats
+import org.json4s.JObject
+import org.json4s.native.Serialization.{ read, write }
+
+import org.joda.time.DateTime
+import org.joda.time.DateTimeZone
+
+import org.apache.commons.codec.binary.Base64
+import java.security.MessageDigest
+
+import java.util.UUID
+
+/* Common utility functions for accessing the events store in HBase */
+object HBEventsUtil {
+
+  implicit val formats = DefaultFormats
+
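+  // e.g. tableName("predictionio_eventdata", 12, Some(3))
+  //   == "predictionio_eventdata:events_12_3" (namespace value illustrative)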
+  def tableName(namespace: String, appId: Int, channelId: Option[Int] = None): String = {
+    channelId.map { ch =>
+      s"${namespace}:events_${appId}_${ch}"
+    }.getOrElse {
+      s"${namespace}:events_${appId}"
+    }
+  }
+
+  // column names for "e" column family
+  val colNames: Map[String, Array[Byte]] = Map(
+    "event" -> "e",
+    "entityType" -> "ety",
+    "entityId" -> "eid",
+    "targetEntityType" -> "tety",
+    "targetEntityId" -> "teid",
+    "properties" -> "p",
+    "prId" -> "prid",
+    "eventTime" -> "et",
+    "eventTimeZone" -> "etz",
+    "creationTime" -> "ct",
+    "creationTimeZone" -> "ctz"
+  ).mapValues(Bytes.toBytes(_))
+
+  def hash(entityType: String, entityId: String): Array[Byte] = {
+    val s = entityType + "-" + entityId
+    // get a new MessageDigest instance on each call for thread safety
+    val md5 = MessageDigest.getInstance("MD5")
+    md5.digest(Bytes.toBytes(s))
+  }
+
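+  // Row key layout (32 bytes total):
+  //   bytes  0-15: MD5 of "<entityType>-<entityId>"
+  //   bytes 16-23: event time in milliseconds (big-endian long)
+  //   bytes 24-31: least significant bits of a random UUID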
+  class RowKey(
+    val b: Array[Byte]
+  ) {
+    require((b.size == 32), s"Incorrect b size: ${b.size}")
+    lazy val entityHash: Array[Byte] = b.slice(0, 16)
+    lazy val millis: Long = Bytes.toLong(b.slice(16, 24))
+    lazy val uuidLow: Long = Bytes.toLong(b.slice(24, 32))
+
+    lazy val toBytes: Array[Byte] = b
+
+    override def toString: String = {
+      Base64.encodeBase64URLSafeString(toBytes)
+    }
+  }
+
+  object RowKey {
+    def apply(
+      entityType: String,
+      entityId: String,
+      millis: Long,
+      uuidLow: Long): RowKey = {
+        // append the UUID's least significant bits to disambiguate events
+        // recorded in the same millisecond (the UUID's most significant
+        // bits encode a timestamp, so we use eventTime instead).
+        val b = hash(entityType, entityId) ++
+          Bytes.toBytes(millis) ++ Bytes.toBytes(uuidLow)
+        new RowKey(b)
+      }
+
+    // get RowKey from string representation
+    def apply(s: String): RowKey = {
+      try {
+        apply(Base64.decodeBase64(s))
+      } catch {
+        case e: Exception => throw new RowKeyException(
+          s"Failed to convert String ${s} to RowKey because ${e}", e)
+      }
+    }
+
+    def apply(b: Array[Byte]): RowKey = {
+      if (b.size != 32) {
+        val bString = b.mkString(",")
+        throw new RowKeyException(
+          s"Incorrect byte array size. Bytes: ${bString}.")
+      }
+      new RowKey(b)
+    }
+
+  }
+
+  class RowKeyException(val msg: String, val cause: Exception)
+    extends Exception(msg, cause) {
+      def this(msg: String) = this(msg, null)
+    }
+
+  case class PartialRowKey(entityType: String, entityId: String,
+    millis: Option[Long] = None) {
+    val toBytes: Array[Byte] = {
+      hash(entityType, entityId) ++
+        (millis.map(Bytes.toBytes(_)).getOrElse(Array[Byte]()))
+    }
+  }
+
+  def eventToPut(event: Event, appId: Int): (Put, RowKey) = {
+    // generate new rowKey if eventId is None
+    val rowKey = event.eventId.map { id =>
+      RowKey(id) // create rowKey from eventId
+    }.getOrElse {
+      // TODO: use a real UUID, not a pseudo-random one
+      val uuidLow: Long = UUID.randomUUID().getLeastSignificantBits
+      RowKey(
+        entityType = event.entityType,
+        entityId = event.entityId,
+        millis = event.eventTime.getMillis,
+        uuidLow = uuidLow
+      )
+    }
+
+    val eBytes = Bytes.toBytes("e")
+    // use eventTime as HBase's cell timestamp
+    val put = new Put(rowKey.toBytes, event.eventTime.getMillis)
+
+    def addStringToE(col: Array[Byte], v: String): Put = {
+      put.add(eBytes, col, Bytes.toBytes(v))
+    }
+
+    def addLongToE(col: Array[Byte], v: Long): Put = {
+      put.add(eBytes, col, Bytes.toBytes(v))
+    }
+
+    addStringToE(colNames("event"), event.event)
+    addStringToE(colNames("entityType"), event.entityType)
+    addStringToE(colNames("entityId"), event.entityId)
+
+    event.targetEntityType.foreach { targetEntityType =>
+      addStringToE(colNames("targetEntityType"), targetEntityType)
+    }
+
+    event.targetEntityId.foreach { targetEntityId =>
+      addStringToE(colNames("targetEntityId"), targetEntityId)
+    }
+
+    // TODO: make properties Option[]
+    if (!event.properties.isEmpty) {
+      addStringToE(colNames("properties"), write(event.properties.toJObject))
+    }
+
+    event.prId.foreach { prId =>
+      addStringToE(colNames("prId"), prId)
+    }
+
+    addLongToE(colNames("eventTime"), event.eventTime.getMillis)
+    val eventTimeZone = event.eventTime.getZone
+    if (!eventTimeZone.equals(EventValidation.defaultTimeZone)) {
+      addStringToE(colNames("eventTimeZone"), eventTimeZone.getID)
+    }
+
+    addLongToE(colNames("creationTime"), event.creationTime.getMillis)
+    val creationTimeZone = event.creationTime.getZone
+    if (!creationTimeZone.equals(EventValidation.defaultTimeZone)) {
+      addStringToE(colNames("creationTimeZone"), creationTimeZone.getID)
+    }
+
+    // a zero-length byte array can be used as a tag cell value
+    (put, rowKey)
+  }
+
+  def resultToEvent(result: Result, appId: Int): Event = {
+    val rowKey = RowKey(result.getRow())
+
+    val eBytes = Bytes.toBytes("e")
+    // val e = result.getFamilyMap(eBytes)
+
+    def getStringCol(col: String): String = {
+      val r = result.getValue(eBytes, colNames(col))
+      require(r != null,
+        s"Failed to get value for column ${col}. " +
+        s"Rowkey: ${rowKey.toString} " +
+        s"StringBinary: ${Bytes.toStringBinary(result.getRow())}.")
+
+      Bytes.toString(r)
+    }
+
+    def getLongCol(col: String): Long = {
+      val r = result.getValue(eBytes, colNames(col))
+      require(r != null,
+        s"Failed to get value for column ${col}. " +
+        s"Rowkey: ${rowKey.toString} " +
+        s"StringBinary: ${Bytes.toStringBinary(result.getRow())}.")
+
+      Bytes.toLong(r)
+    }
+
+    def getOptStringCol(col: String): Option[String] = {
+      val r = result.getValue(eBytes, colNames(col))
+      if (r == null) {
+        None
+      } else {
+        Some(Bytes.toString(r))
+      }
+    }
+
+    def getTimestamp(col: String): Long = {
+      result.getColumnLatestCell(eBytes, colNames(col)).getTimestamp()
+    }
+
+    val event = getStringCol("event")
+    val entityType = getStringCol("entityType")
+    val entityId = getStringCol("entityId")
+    val targetEntityType = getOptStringCol("targetEntityType")
+    val targetEntityId = getOptStringCol("targetEntityId")
+    val properties: DataMap = getOptStringCol("properties")
+      .map(s => DataMap(read[JObject](s))).getOrElse(DataMap())
+    val prId = getOptStringCol("prId")
+    val eventTimeZone = getOptStringCol("eventTimeZone")
+      .map(DateTimeZone.forID(_))
+      .getOrElse(EventValidation.defaultTimeZone)
+    val eventTime = new DateTime(
+      getLongCol("eventTime"), eventTimeZone)
+    val creationTimeZone = getOptStringCol("creationTimeZone")
+      .map(DateTimeZone.forID(_))
+      .getOrElse(EventValidation.defaultTimeZone)
+    val creationTime: DateTime = new DateTime(
+      getLongCol("creationTime"), creationTimeZone)
+
+    Event(
+      eventId = Some(RowKey(result.getRow()).toString),
+      event = event,
+      entityType = entityType,
+      entityId = entityId,
+      targetEntityType = targetEntityType,
+      targetEntityId = targetEntityId,
+      properties = properties,
+      eventTime = eventTime,
+      tags = Seq(),
+      prId = prId,
+      creationTime = creationTime
+    )
+  }
+
+
+  // For a mandatory field, None means don't care.
+  // For an optional field, None means don't care,
+  //    Some(None) means the field must not exist, and
+  //    Some(Some(x)) means the field must match x.
+  def createScan(
+    startTime: Option[DateTime] = None,
+    untilTime: Option[DateTime] = None,
+    entityType: Option[String] = None,
+    entityId: Option[String] = None,
+    eventNames: Option[Seq[String]] = None,
+    targetEntityType: Option[Option[String]] = None,
+    targetEntityId: Option[Option[String]] = None,
+    reversed: Option[Boolean] = None): Scan = {
+
+    val scan: Scan = new Scan()
+
+    (entityType, entityId) match {
+      case (Some(et), Some(eid)) => {
+        val start = PartialRowKey(et, eid,
+          startTime.map(_.getMillis)).toBytes
+        // if no untilTime, stop at the end of this entityType/entityId range
+        val stop = PartialRowKey(et, eid,
+          untilTime.map(_.getMillis).orElse(Some(-1))).toBytes
+
+        if (reversed.getOrElse(false)) {
+          // Reversed order.
+          // If you specify a startRow and stopRow,
+          // to scan in reverse, the startRow needs to be lexicographically
+          // after the stopRow.
+          scan.setStartRow(stop)
+          scan.setStopRow(start)
+          scan.setReversed(true)
+        } else {
+          scan.setStartRow(start)
+          scan.setStopRow(stop)
+        }
+      }
+      case (_, _) => {
+        val minTime: Long = startTime.map(_.getMillis).getOrElse(0)
+        val maxTime: Long = untilTime.map(_.getMillis).getOrElse(Long.MaxValue)
+        scan.setTimeRange(minTime, maxTime)
+        if (reversed.getOrElse(false)) {
+          scan.setReversed(true)
+        }
+      }
+    }
+
+    val filters = new FilterList(FilterList.Operator.MUST_PASS_ALL)
+
+    val eBytes = Bytes.toBytes("e")
+
+    def createBinaryFilter(col: String, value: Array[Byte]): SingleColumnValueFilter = {
+      val comp = new BinaryComparator(value)
+      new SingleColumnValueFilter(
+        eBytes, colNames(col), CompareOp.EQUAL, comp)
+    }
+
+    // skip the row if the column exists
+    def createSkipRowIfColumnExistFilter(col: String): SkipFilter = {
+      val comp = new BinaryComparator(colNames(col))
+      val q = new QualifierFilter(CompareOp.NOT_EQUAL, comp)
+      // filters an entire row if any of the Cell checks do not pass
+      new SkipFilter(q)
+    }
+
+    entityType.foreach { et =>
+      val compType = new BinaryComparator(Bytes.toBytes(et))
+      val filterType = new SingleColumnValueFilter(
+        eBytes, colNames("entityType"), CompareOp.EQUAL, compType)
+      filters.addFilter(filterType)
+    }
+
+    entityId.foreach { eid =>
+      val compId = new BinaryComparator(Bytes.toBytes(eid))
+      val filterId = new SingleColumnValueFilter(
+        eBytes, colNames("entityId"), CompareOp.EQUAL, compId)
+      filters.addFilter(filterId)
+    }
+
+    eventNames.foreach { eventsList =>
+      // match any of event in the eventsList
+      val eventFilters = new FilterList(FilterList.Operator.MUST_PASS_ONE)
+      eventsList.foreach { e =>
+        val compEvent = new BinaryComparator(Bytes.toBytes(e))
+        val filterEvent = new SingleColumnValueFilter(
+          eBytes, colNames("event"), CompareOp.EQUAL, compEvent)
+        eventFilters.addFilter(filterEvent)
+      }
+      if (!eventFilters.getFilters().isEmpty) {
+        filters.addFilter(eventFilters)
+      }
+    }
+
+    targetEntityType.foreach { tetOpt =>
+      if (tetOpt.isEmpty) {
+        val filter = createSkipRowIfColumnExistFilter("targetEntityType")
+        filters.addFilter(filter)
+      } else {
+        tetOpt.foreach { tet =>
+          val filter = createBinaryFilter(
+            "targetEntityType", Bytes.toBytes(tet))
+          // the entire row will be skipped if the column is not found.
+          filter.setFilterIfMissing(true)
+          filters.addFilter(filter)
+        }
+      }
+    }
+
+    targetEntityId.foreach { teidOpt =>
+      if (teidOpt.isEmpty) {
+        val filter = createSkipRowIfColumnExistFilter("targetEntityId")
+        filters.addFilter(filter)
+      } else {
+        teidOpt.foreach { teid =>
+          val filter = createBinaryFilter(
+            "targetEntityId", Bytes.toBytes(teid))
+          // the entire row will be skipped if the column is not found.
+          filter.setFilterIfMissing(true)
+          filters.addFilter(filter)
+        }
+      }
+    }
+
+    if (!filters.getFilters().isEmpty) {
+      scan.setFilter(filters)
+    }
+
+    scan
+  }
+
+}

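A quick illustration of the row key and scan semantics above: the 32-byte key is the MD5 of "entityType-entityId" (16 bytes) followed by the event-time millis (8 bytes) and the UUID's low bits (8 bytes). A minimal sketch, assuming the classes in this diff are on the classpath (the time and uuidLow values are made up):

    import org.apache.predictionio.data.storage.hbase.HBEventsUtil
    import org.apache.predictionio.data.storage.hbase.HBEventsUtil.RowKey

    // 16 (MD5 of "user-u0") + 8 (millis) + 8 (uuidLow) = 32 bytes
    val rk = RowKey(
      entityType = "user",
      entityId = "u0",
      millis = 1420070400000L, // hypothetical event time
      uuidLow = 42L)
    assert(rk.toBytes.size == 32)

    // toString is URL-safe Base64; this string is what is exposed as the eventId
    val eventId = rk.toString
    assert(RowKey(eventId).millis == 1420070400000L)

    // None = don't care, Some(None) = must be absent, Some(Some(x)) = must equal x
    val scan = HBEventsUtil.createScan(
      entityType = Some("user"),
      entityId = Some("u0"),
      targetEntityType = Some(None)) // only events with no target entity
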
http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/data/src/main/scala/org/apache/predictionio/data/storage/hbase/HBLEvents.scala
----------------------------------------------------------------------
diff --git a/data/src/main/scala/org/apache/predictionio/data/storage/hbase/HBLEvents.scala b/data/src/main/scala/org/apache/predictionio/data/storage/hbase/HBLEvents.scala
new file mode 100644
index 0000000..7d7ed40
--- /dev/null
+++ b/data/src/main/scala/org/apache/predictionio/data/storage/hbase/HBLEvents.scala
@@ -0,0 +1,192 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package org.apache.predictionio.data.storage.hbase
+
+import grizzled.slf4j.Logging
+import org.apache.predictionio.data.storage.Event
+import org.apache.predictionio.data.storage.LEvents
+import org.apache.predictionio.data.storage.StorageClientConfig
+import org.apache.predictionio.data.storage.hbase.HBEventsUtil.RowKey
+import org.apache.hadoop.hbase.HColumnDescriptor
+import org.apache.hadoop.hbase.HTableDescriptor
+import org.apache.hadoop.hbase.NamespaceDescriptor
+import org.apache.hadoop.hbase.TableName
+import org.apache.hadoop.hbase.client._
+import org.joda.time.DateTime
+
+import scala.collection.JavaConversions._
+import scala.concurrent.ExecutionContext
+import scala.concurrent.Future
+
+class HBLEvents(val client: HBClient, config: StorageClientConfig, val namespace: String)
+  extends LEvents with Logging {
+
+  // implicit val formats = DefaultFormats + new EventJson4sSupport.DBSerializer
+
+  def resultToEvent(result: Result, appId: Int): Event =
+    HBEventsUtil.resultToEvent(result, appId)
+
+  def getTable(appId: Int, channelId: Option[Int] = None): HTableInterface =
+    client.connection.getTable(HBEventsUtil.tableName(namespace, appId, channelId))
+
+  override
+  def init(appId: Int, channelId: Option[Int] = None): Boolean = {
+    // check whether the namespace exists
+    val existingNamespace = client.admin.listNamespaceDescriptors()
+      .map(_.getName)
+    if (!existingNamespace.contains(namespace)) {
+      val nameDesc = NamespaceDescriptor.create(namespace).build()
+      info(s"The namespace ${namespace} doesn't exist yet. Creating now...")
+      client.admin.createNamespace(nameDesc)
+    }
+
+    val tableName = TableName.valueOf(HBEventsUtil.tableName(namespace, appId, channelId))
+    if (!client.admin.tableExists(tableName)) {
+      info(s"The table ${tableName.getNameAsString()} doesn't exist yet." +
+        " Creating now...")
+      val tableDesc = new HTableDescriptor(tableName)
+      tableDesc.addFamily(new HColumnDescriptor("e"))
+      tableDesc.addFamily(new HColumnDescriptor("r")) // reserved
+      client.admin.createTable(tableDesc)
+    }
+    true
+  }
+
+  override
+  def remove(appId: Int, channelId: Option[Int] = None): Boolean = {
+    val tableName = TableName.valueOf(HBEventsUtil.tableName(namespace, appId, channelId))
+    try {
+      if (client.admin.tableExists(tableName)) {
+        info(s"Removing table ${tableName.getNameAsString()}...")
+        client.admin.disableTable(tableName)
+        client.admin.deleteTable(tableName)
+      } else {
+        info(s"Table ${tableName.getNameAsString()} doesn't exist." +
+          s" Nothing is deleted.")
+      }
+      true
+    } catch {
+      case e: Exception => {
+        error(s"Fail to remove table for appId ${appId}. Exception: ${e}")
+        false
+      }
+    }
+  }
+
+  override
+  def close(): Unit = {
+    client.admin.close()
+    client.connection.close()
+  }
+
+  override
+  def futureInsert(
+    event: Event, appId: Int, channelId: Option[Int])(implicit ec: ExecutionContext):
+    Future[String] = {
+    Future {
+      val table = getTable(appId, channelId)
+      val (put, rowKey) = HBEventsUtil.eventToPut(event, appId)
+      table.put(put)
+      table.flushCommits()
+      table.close()
+      rowKey.toString
+    }
+  }
+
+  override
+  def futureGet(
+    eventId: String, appId: Int, channelId: Option[Int])(implicit ec: ExecutionContext):
+    Future[Option[Event]] = {
+      Future {
+        val table = getTable(appId, channelId)
+        val rowKey = RowKey(eventId)
+        val get = new Get(rowKey.toBytes)
+
+        val result = table.get(get)
+        table.close()
+
+        if (!result.isEmpty()) {
+          val event = resultToEvent(result, appId)
+          Some(event)
+        } else {
+          None
+        }
+      }
+    }
+
+  override
+  def futureDelete(
+    eventId: String, appId: Int, channelId: Option[Int])(implicit ec: ExecutionContext):
+    Future[Boolean] = {
+    Future {
+      val table = getTable(appId, channelId)
+      val rowKey = RowKey(eventId)
+      val exists = table.exists(new Get(rowKey.toBytes))
+      table.delete(new Delete(rowKey.toBytes))
+      table.close()
+      exists
+    }
+  }
+
+  override
+  def futureFind(
+    appId: Int,
+    channelId: Option[Int] = None,
+    startTime: Option[DateTime] = None,
+    untilTime: Option[DateTime] = None,
+    entityType: Option[String] = None,
+    entityId: Option[String] = None,
+    eventNames: Option[Seq[String]] = None,
+    targetEntityType: Option[Option[String]] = None,
+    targetEntityId: Option[Option[String]] = None,
+    limit: Option[Int] = None,
+    reversed: Option[Boolean] = None)(implicit ec: ExecutionContext):
+    Future[Iterator[Event]] = {
+      Future {
+
+        require(!((reversed == Some(true)) && (entityType.isEmpty || entityId.isEmpty)),
+          "the parameter reversed can only be used when both entityType and entityId are specified.")
+
+        val table = getTable(appId, channelId)
+
+        val scan = HBEventsUtil.createScan(
+          startTime = startTime,
+          untilTime = untilTime,
+          entityType = entityType,
+          entityId = entityId,
+          eventNames = eventNames,
+          targetEntityType = targetEntityType,
+          targetEntityId = targetEntityId,
+          reversed = reversed)
+        val scanner = table.getScanner(scan)
+        table.close()
+
+        val eventsIter = scanner.iterator()
+
+        // Get all events if None or Some(-1)
+        val results: Iterator[Result] = limit match {
+          case Some(-1) => eventsIter
+          case None => eventsIter
+          case Some(x) => eventsIter.take(x)
+        }
+
+        val eventsIt = results.map { resultToEvent(_, appId) }
+
+        eventsIt
+      }
+  }
+
+}

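For reference, a hedged usage sketch of futureFind above; events (an HBLEvents) and appId are assumed to be in scope. Note that reversed requires both entityType and entityId, and that limit = None or Some(-1) returns everything:

    import scala.concurrent.Await
    import scala.concurrent.ExecutionContext.Implicits.global
    import scala.concurrent.duration.Duration
    import org.joda.time.DateTime

    // the ten most recent events of one entity, newest first
    val latest = Await.result(
      events.futureFind(
        appId = appId,
        entityType = Some("user"),
        entityId = Some("u0"),
        untilTime = Some(new DateTime()),
        limit = Some(10),
        reversed = Some(true)),
      Duration.Inf)
    latest.foreach(println)
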
http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/data/src/main/scala/org/apache/predictionio/data/storage/hbase/HBPEvents.scala
----------------------------------------------------------------------
diff --git a/data/src/main/scala/org/apache/predictionio/data/storage/hbase/HBPEvents.scala b/data/src/main/scala/org/apache/predictionio/data/storage/hbase/HBPEvents.scala
new file mode 100644
index 0000000..72254e0
--- /dev/null
+++ b/data/src/main/scala/org/apache/predictionio/data/storage/hbase/HBPEvents.scala
@@ -0,0 +1,112 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package org.apache.predictionio.data.storage.hbase
+
+import org.apache.predictionio.data.storage.Event
+import org.apache.predictionio.data.storage.PEvents
+import org.apache.predictionio.data.storage.StorageClientConfig
+import org.apache.hadoop.hbase.HBaseConfiguration
+import org.apache.hadoop.hbase.client.Result
+import org.apache.hadoop.hbase.io.ImmutableBytesWritable
+import org.apache.hadoop.hbase.mapreduce.PIOHBaseUtil
+import org.apache.hadoop.hbase.mapreduce.TableInputFormat
+import org.apache.hadoop.hbase.mapreduce.TableOutputFormat
+import org.apache.hadoop.io.Writable
+import org.apache.hadoop.mapreduce.OutputFormat
+import org.apache.spark.SparkContext
+import org.apache.spark.rdd.RDD
+import org.joda.time.DateTime
+
+class HBPEvents(client: HBClient, config: StorageClientConfig, namespace: String) extends PEvents {
+
+  def checkTableExists(appId: Int, channelId: Option[Int]): Unit = {
+    if (!client.admin.tableExists(HBEventsUtil.tableName(namespace, appId, channelId))) {
+      if (channelId.nonEmpty) {
+        logger.error(s"The appId $appId with channelId $channelId does not exist." +
+          s" Please use valid appId and channelId.")
+        throw new Exception(s"HBase table not found for appId $appId" +
+          s" with channelId $channelId.")
+      } else {
+        logger.error(s"The appId $appId does not exist. Please use valid appId.")
+        throw new Exception(s"HBase table not found for appId $appId.")
+      }
+    }
+  }
+
+  override
+  def find(
+    appId: Int,
+    channelId: Option[Int] = None,
+    startTime: Option[DateTime] = None,
+    untilTime: Option[DateTime] = None,
+    entityType: Option[String] = None,
+    entityId: Option[String] = None,
+    eventNames: Option[Seq[String]] = None,
+    targetEntityType: Option[Option[String]] = None,
+    targetEntityId: Option[Option[String]] = None
+    )(sc: SparkContext): RDD[Event] = {
+
+    checkTableExists(appId, channelId)
+
+    val conf = HBaseConfiguration.create()
+    conf.set(TableInputFormat.INPUT_TABLE,
+      HBEventsUtil.tableName(namespace, appId, channelId))
+
+    val scan = HBEventsUtil.createScan(
+        startTime = startTime,
+        untilTime = untilTime,
+        entityType = entityType,
+        entityId = entityId,
+        eventNames = eventNames,
+        targetEntityType = targetEntityType,
+        targetEntityId = targetEntityId,
+        reversed = None)
+    scan.setCaching(500) // TODO
+    scan.setCacheBlocks(false) // TODO
+
+    conf.set(TableInputFormat.SCAN, PIOHBaseUtil.convertScanToString(scan))
+
+    // HBase is not accessed until this rdd is actually used.
+    val rdd = sc.newAPIHadoopRDD(conf, classOf[TableInputFormat],
+      classOf[ImmutableBytesWritable],
+      classOf[Result]).map {
+        case (key, row) => HBEventsUtil.resultToEvent(row, appId)
+      }
+
+    rdd
+  }
+
+  override
+  def write(
+    events: RDD[Event], appId: Int, channelId: Option[Int])(sc: SparkContext): Unit = {
+
+    checkTableExists(appId, channelId)
+
+    val conf = HBaseConfiguration.create()
+    conf.set(TableOutputFormat.OUTPUT_TABLE,
+      HBEventsUtil.tableName(namespace, appId, channelId))
+    conf.setClass("mapreduce.outputformat.class",
+      classOf[TableOutputFormat[Object]],
+      classOf[OutputFormat[Object, Writable]])
+
+    events.map { event =>
+      val (put, rowKey) = HBEventsUtil.eventToPut(event, appId)
+      (new ImmutableBytesWritable(rowKey.toBytes), put)
+    }.saveAsNewAPIHadoopDataset(conf)
+
+  }
+
+}

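A hedged sketch of reading events through this parallel interface; it assumes HBase is the configured event store and that Storage exposes getPEvents() analogously to the getLEvents() used elsewhere in this commit. The appId is hypothetical:

    import org.apache.spark.SparkConf
    import org.apache.spark.SparkContext
    import org.apache.predictionio.data.storage.Storage

    val sc = new SparkContext(new SparkConf().setAppName("pio-events-sketch"))
    val pEvents = Storage.getPEvents()
    // lazily builds an RDD[Event]; HBase is only hit when the RDD is used
    val rdd = pEvents.find(
      appId = 1,
      eventNames = Some(Seq("rate", "buy")))(sc)
    println(rdd.count())
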
http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/data/src/main/scala/org/apache/predictionio/data/storage/hbase/PIOHBaseUtil.scala
----------------------------------------------------------------------
diff --git a/data/src/main/scala/org/apache/predictionio/data/storage/hbase/PIOHBaseUtil.scala b/data/src/main/scala/org/apache/predictionio/data/storage/hbase/PIOHBaseUtil.scala
new file mode 100644
index 0000000..1027930
--- /dev/null
+++ b/data/src/main/scala/org/apache/predictionio/data/storage/hbase/PIOHBaseUtil.scala
@@ -0,0 +1,28 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package org.apache.hadoop.hbase.mapreduce
+
+/* Pretends to be in the hbase.mapreduce package in order to expose its
+ * package-private static function convertScanToString().
+ */
+
+import org.apache.hadoop.hbase.client.Scan
+
+object PIOHBaseUtil {
+  def convertScanToString(scan: Scan): String = {
+    TableMapReduceUtil.convertScanToString(scan)
+  }
+}

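This is the usual trick for reaching a package-private member: the object is compiled into org.apache.hadoop.hbase.mapreduce so it may call TableMapReduceUtil.convertScanToString directly. A minimal sketch of how HBPEvents consumes it:

    import org.apache.hadoop.hbase.HBaseConfiguration
    import org.apache.hadoop.hbase.client.Scan
    import org.apache.hadoop.hbase.mapreduce.PIOHBaseUtil
    import org.apache.hadoop.hbase.mapreduce.TableInputFormat

    val conf = HBaseConfiguration.create()
    // TableInputFormat deserializes this string back into a Scan on each executor
    conf.set(TableInputFormat.SCAN, PIOHBaseUtil.convertScanToString(new Scan()))
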
http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/data/src/main/scala/org/apache/predictionio/data/storage/hbase/StorageClient.scala
----------------------------------------------------------------------
diff --git a/data/src/main/scala/org/apache/predictionio/data/storage/hbase/StorageClient.scala b/data/src/main/scala/org/apache/predictionio/data/storage/hbase/StorageClient.scala
new file mode 100644
index 0000000..f25b14a
--- /dev/null
+++ b/data/src/main/scala/org/apache/predictionio/data/storage/hbase/StorageClient.scala
@@ -0,0 +1,83 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package org.apache.predictionio.data.storage.hbase
+
+import org.apache.predictionio.data.storage.BaseStorageClient
+import org.apache.predictionio.data.storage.StorageClientConfig
+
+import org.apache.hadoop.conf.Configuration
+import org.apache.hadoop.hbase.HBaseConfiguration
+import org.apache.hadoop.hbase.MasterNotRunningException
+import org.apache.hadoop.hbase.ZooKeeperConnectionException
+import org.apache.hadoop.hbase.client.HConnectionManager
+import org.apache.hadoop.hbase.client.HConnection
+import org.apache.hadoop.hbase.client.HBaseAdmin
+
+import grizzled.slf4j.Logging
+
+case class HBClient(
+  val conf: Configuration,
+  val connection: HConnection,
+  val admin: HBaseAdmin
+)
+
+class StorageClient(val config: StorageClientConfig)
+  extends BaseStorageClient with Logging {
+
+  val conf = HBaseConfiguration.create()
+
+  if (config.test) {
+    // use fewer retries and shorter timeout for test mode
+    conf.set("hbase.client.retries.number", "1")
+    conf.set("zookeeper.session.timeout", "30000");
+    conf.set("zookeeper.recovery.retry", "1")
+  }
+
+  try {
+    HBaseAdmin.checkHBaseAvailable(conf)
+  } catch {
+    case e: MasterNotRunningException =>
+      error("HBase master is not running (ZooKeeper ensemble: " +
+        conf.get("hbase.zookeeper.quorum") + "). Please make sure that HBase " +
+        "is running properly, and that the configuration is pointing at the " +
+        "correct ZooKeeper ensemble.")
+      throw e
+    case e: ZooKeeperConnectionException =>
+      error("Cannot connect to ZooKeeper (ZooKeeper ensemble: " +
+        conf.get("hbase.zookeeper.quorum") + "). Please make sure that the " +
+        "configuration is pointing at the correct ZooKeeper ensemble. By " +
+        "default, HBase manages its own ZooKeeper, so if you have not " +
+        "configured HBase to use an external ZooKeeper, that means your " +
+        "HBase is not started or configured properly.")
+      throw e
+    case e: Exception => {
+      error("Failed to connect to HBase." +
+        " Please check if HBase is running properly.")
+      throw e
+    }
+  }
+
+  val connection = HConnectionManager.createConnection(conf)
+
+  val client = HBClient(
+    conf = conf,
+    connection = connection,
+    admin = new HBaseAdmin(connection)
+  )
+
+  override
+  val prefix = "HB"
+}

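A hedged construction sketch; it assumes StorageClientConfig is a case class whose test and properties fields have defaults, as its usage in this diff suggests. Passing test = true applies the reduced retry and timeout settings above:

    import org.apache.predictionio.data.storage.StorageClientConfig
    import org.apache.predictionio.data.storage.hbase.StorageClient

    // test = true -> 1 client retry, 30s ZooKeeper session timeout (see above)
    val storageClient = new StorageClient(
      StorageClientConfig(test = true, properties = Map.empty))
    val admin = storageClient.client.admin // HBaseAdmin for table management
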
http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/data/src/main/scala/org/apache/predictionio/data/storage/hbase/package.scala
----------------------------------------------------------------------
diff --git a/data/src/main/scala/org/apache/predictionio/data/storage/hbase/package.scala b/data/src/main/scala/org/apache/predictionio/data/storage/hbase/package.scala
new file mode 100644
index 0000000..2f8c170
--- /dev/null
+++ b/data/src/main/scala/org/apache/predictionio/data/storage/hbase/package.scala
@@ -0,0 +1,22 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package org.apache.predictionio.data.storage
+
+/** HBase implementation of storage traits, supporting event data only
+  *
+  * @group Implementation
+  */
+package object hbase {}

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/data/src/main/scala/org/apache/predictionio/data/storage/hbase/upgrade/HB_0_8_0.scala
----------------------------------------------------------------------
diff --git a/data/src/main/scala/org/apache/predictionio/data/storage/hbase/upgrade/HB_0_8_0.scala b/data/src/main/scala/org/apache/predictionio/data/storage/hbase/upgrade/HB_0_8_0.scala
new file mode 100644
index 0000000..9dcfb79
--- /dev/null
+++ b/data/src/main/scala/org/apache/predictionio/data/storage/hbase/upgrade/HB_0_8_0.scala
@@ -0,0 +1,190 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package org.apache.predictionio.data.storage.hbase.upgrade
+
+import org.apache.predictionio.annotation.Experimental
+
+import org.apache.predictionio.data.storage.Event
+import org.apache.predictionio.data.storage.EventValidation
+import org.apache.predictionio.data.storage.DataMap
+
+import org.apache.hadoop.hbase.client.Scan
+import org.apache.hadoop.hbase.client.HConnection
+import org.apache.hadoop.hbase.client.Result
+import org.apache.hadoop.hbase.TableName
+import org.apache.hadoop.hbase.util.Bytes
+
+import org.joda.time.DateTime
+import org.joda.time.DateTimeZone
+
+import org.json4s.DefaultFormats
+import org.json4s.JObject
+import org.json4s.native.Serialization.{ read, write }
+
+import org.apache.commons.codec.binary.Base64
+
+import scala.collection.JavaConversions._
+
+/** :: Experimental :: */
+@Experimental
+object HB_0_8_0 {
+
+  implicit val formats = DefaultFormats
+
+  def getByAppId(
+    connection: HConnection,
+    namespace: String,
+    appId: Int): Iterator[Event] = {
+    val tableName = TableName.valueOf(namespace, "events")
+    val table = connection.getTable(tableName)
+    val start = PartialRowKey(appId)
+    val stop = PartialRowKey(appId + 1)
+    val scan = new Scan(start.toBytes, stop.toBytes)
+    val scanner = table.getScanner(scan)
+    table.close()
+    scanner.iterator().map { resultToEvent(_) }
+  }
+
+  val colNames: Map[String, Array[Byte]] = Map(
+    "event" -> "e",
+    "entityType" -> "ety",
+    "entityId" -> "eid",
+    "targetEntityType" -> "tety",
+    "targetEntityId" -> "teid",
+    "properties" -> "p",
+    "prId" -> "pk", // columna name is 'pk' in 0.8.0/0.8.1
+    "eventTimeZone" -> "etz",
+    "creationTimeZone" -> "ctz"
+  ).mapValues(Bytes.toBytes(_))
+
+
+  class RowKey(
+    val appId: Int,
+    val millis: Long,
+    val uuidLow: Long
+  ) {
+    lazy val toBytes: Array[Byte] = {
+      // append the UUID's least significant bits to disambiguate multiple
+      // actions at the same time (a UUID's most significant bits are
+      // essentially a timestamp, so eventTime is used instead).
+      Bytes.toBytes(appId) ++ Bytes.toBytes(millis) ++ Bytes.toBytes(uuidLow)
+    }
+    override def toString: String = {
+      Base64.encodeBase64URLSafeString(toBytes)
+    }
+  }
+
+  object RowKey {
+    // get RowKey from string representation
+    def apply(s: String): RowKey = {
+      try {
+        apply(Base64.decodeBase64(s))
+      } catch {
+        case e: Exception => throw new RowKeyException(
+          s"Failed to convert String ${s} to RowKey because ${e}", e)
+      }
+    }
+
+    def apply(b: Array[Byte]): RowKey = {
+      if (b.size != 20) {
+        val bString = b.mkString(",")
+        throw new RowKeyException(
+          s"Incorrect byte array size. Bytes: ${bString}.")
+      }
+
+      new RowKey(
+        appId = Bytes.toInt(b.slice(0, 4)),
+        millis = Bytes.toLong(b.slice(4, 12)),
+        uuidLow = Bytes.toLong(b.slice(12, 20))
+      )
+    }
+  }
+
+  class RowKeyException(msg: String, cause: Exception)
+    extends Exception(msg, cause) {
+      def this(msg: String) = this(msg, null)
+    }
+
+  case class PartialRowKey(val appId: Int, val millis: Option[Long] = None) {
+    val toBytes: Array[Byte] = {
+      Bytes.toBytes(appId) ++
+        (millis.map(Bytes.toBytes(_)).getOrElse(Array[Byte]()))
+    }
+  }
+
+  def resultToEvent(result: Result): Event = {
+    val rowKey = RowKey(result.getRow())
+
+    val eBytes = Bytes.toBytes("e")
+    // val e = result.getFamilyMap(eBytes)
+
+    def getStringCol(col: String): String = {
+      val r = result.getValue(eBytes, colNames(col))
+      require(r != null,
+        s"Failed to get value for column ${col}. " +
+        s"Rowkey: ${rowKey.toString} " +
+        s"StringBinary: ${Bytes.toStringBinary(result.getRow())}.")
+
+      Bytes.toString(r)
+    }
+
+    def getOptStringCol(col: String): Option[String] = {
+      val r = result.getValue(eBytes, colNames(col))
+      if (r == null) {
+        None
+      } else {
+        Some(Bytes.toString(r))
+      }
+    }
+
+    def getTimestamp(col: String): Long = {
+      result.getColumnLatestCell(eBytes, colNames(col)).getTimestamp()
+    }
+
+    val event = getStringCol("event")
+    val entityType = getStringCol("entityType")
+    val entityId = getStringCol("entityId")
+    val targetEntityType = getOptStringCol("targetEntityType")
+    val targetEntityId = getOptStringCol("targetEntityId")
+    val properties: DataMap = getOptStringCol("properties")
+      .map(s => DataMap(read[JObject](s))).getOrElse(DataMap())
+    val prId = getOptStringCol("prId")
+    val eventTimeZone = getOptStringCol("eventTimeZone")
+      .map(DateTimeZone.forID(_))
+      .getOrElse(EventValidation.defaultTimeZone)
+    val creationTimeZone = getOptStringCol("creationTimeZone")
+      .map(DateTimeZone.forID(_))
+      .getOrElse(EventValidation.defaultTimeZone)
+
+    val creationTime: DateTime = new DateTime(
+      getTimestamp("event"), creationTimeZone
+    )
+
+    Event(
+      eventId = Some(RowKey(result.getRow()).toString),
+      event = event,
+      entityType = entityType,
+      entityId = entityId,
+      targetEntityType = targetEntityType,
+      targetEntityId = targetEntityId,
+      properties = properties,
+      eventTime = new DateTime(rowKey.millis, eventTimeZone),
+      tags = Seq(),
+      prId = prId,
+      creationTime = creationTime
+    )
+  }
+}

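The pre-0.8.2 row key decoded above is appId (4 bytes) ++ millis (8 bytes) ++ uuidLow (8 bytes), 20 bytes total, with no entity hash. A minimal round-trip sketch with made-up values:

    import org.apache.hadoop.hbase.util.Bytes

    val bytes = Bytes.toBytes(7) ++        // appId
      Bytes.toBytes(1420070400000L) ++     // event-time millis
      Bytes.toBytes(42L)                   // uuidLow
    val rk = HB_0_8_0.RowKey(bytes)
    assert(rk.appId == 7 && rk.millis == 1420070400000L && rk.uuidLow == 42L)
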
http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/data/src/main/scala/org/apache/predictionio/data/storage/hbase/upgrade/Upgrade.scala
----------------------------------------------------------------------
diff --git a/data/src/main/scala/org/apache/predictionio/data/storage/hbase/upgrade/Upgrade.scala b/data/src/main/scala/org/apache/predictionio/data/storage/hbase/upgrade/Upgrade.scala
new file mode 100644
index 0000000..7ef5305
--- /dev/null
+++ b/data/src/main/scala/org/apache/predictionio/data/storage/hbase/upgrade/Upgrade.scala
@@ -0,0 +1,72 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package io.prediction.data.storage.hbase.upgrade
+
+import io.prediction.annotation.Experimental
+
+import io.prediction.data.storage.Storage
+import io.prediction.data.storage.hbase.HBLEvents
+import io.prediction.data.storage.hbase.HBEventsUtil
+
+import scala.collection.JavaConversions._
+
+/** :: Experimental :: */
+@Experimental
+object Upgrade {
+
+  def main(args: Array[String]) {
+    val fromAppId = args(0).toInt
+    val toAppId = args(1).toInt
+    val batchSize = args.lift(2).map(_.toInt).getOrElse(100)
+    val fromNamespace = args.lift(3).getOrElse("predictionio_eventdata")
+
+    upgrade(fromAppId, toAppId, batchSize, fromNamespace)
+  }
+
+  /* For upgrade from 0.8.0 or 0.8.1 to 0.8.2 only */
+  def upgrade(
+    fromAppId: Int,
+    toAppId: Int,
+    batchSize: Int,
+    fromNamespace: String) {
+
+    val events = Storage.getLEvents().asInstanceOf[HBLEvents]
+
+    // Assumes "pio app new <newapp>" has already been run (new app already created)
+    // TODO: check if new table empty and warn user if not
+    val newTable = events.getTable(toAppId)
+
+    val newTableName = newTable.getName().getNameAsString()
+    println(s"Copying data from ${fromNamespace}:events for app ID ${fromAppId}"
+      + s" to new HBase table ${newTableName}...")
+
+    HB_0_8_0.getByAppId(
+      events.client.connection,
+      fromNamespace,
+      fromAppId).grouped(batchSize).foreach { eventGroup =>
+        val puts = eventGroup.map{ e =>
+          val (put, rowkey) = HBEventsUtil.eventToPut(e, toAppId)
+          put
+        }
+        newTable.put(puts.toList)
+      }
+
+    newTable.flushCommits()
+    newTable.close()
+    println("Done.")
+  }
+
+}

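Usage is positional: fromAppId, toAppId, then an optional batch size and source namespace. Calling the method directly is equivalent; the app IDs here are hypothetical:

    // same as: Upgrade.main(Array("1", "2", "100", "predictionio_eventdata"))
    Upgrade.upgrade(
      fromAppId = 1,
      toAppId = 2,
      batchSize = 100,
      fromNamespace = "predictionio_eventdata")
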
http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/data/src/main/scala/org/apache/predictionio/data/storage/hbase/upgrade/Upgrade_0_8_3.scala
----------------------------------------------------------------------
diff --git a/data/src/main/scala/org/apache/predictionio/data/storage/hbase/upgrade/Upgrade_0_8_3.scala b/data/src/main/scala/org/apache/predictionio/data/storage/hbase/upgrade/Upgrade_0_8_3.scala
new file mode 100644
index 0000000..8b80b83
--- /dev/null
+++ b/data/src/main/scala/org/apache/predictionio/data/storage/hbase/upgrade/Upgrade_0_8_3.scala
@@ -0,0 +1,221 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package io.prediction.data.storage.hbase.upgrade
+
+import io.prediction.annotation.Experimental
+
+import grizzled.slf4j.Logger
+import io.prediction.data.storage.Storage
+import io.prediction.data.storage.DataMap
+import io.prediction.data.storage.hbase.HBLEvents
+import io.prediction.data.storage.hbase.HBEventsUtil
+
+import scala.collection.JavaConversions._
+
+import scala.concurrent._
+import ExecutionContext.Implicits.global
+import io.prediction.data.storage.LEvents
+import scala.concurrent.Await
+import scala.concurrent.duration.Duration
+import java.lang.Thread
+
+object CheckDistribution {
+  def entityType(eventClient: LEvents, appId: Int)
+  : Map[(String, Option[String]), Int] = {
+    eventClient
+    .find(appId = appId)
+    .foldLeft(Map[(String, Option[String]), Int]().withDefaultValue(0)) {
+      case (m, e) => {
+        val k = (e.entityType, e.targetEntityType)
+        m.updated(k, m(k) + 1)
+      }
+    }
+  }
+
+  def runMain(appId: Int) {
+    val eventClient = Storage.getLEvents().asInstanceOf[HBLEvents]
+
+    entityType(eventClient, appId)
+    .toSeq
+    .sortBy(-_._2)
+    .foreach { println }
+
+  }
+
+  def main(args: Array[String]) {
+    runMain(args(0).toInt)
+  }
+
+}
+
+/** :: Experimental :: */
+@Experimental
+object Upgrade_0_8_3 {
+  val NameMap = Map(
+    "pio_user" -> "user",
+    "pio_item" -> "item")
+  val RevNameMap = NameMap.toSeq.map(_.swap).toMap
+
+  val logger = Logger[this.type]
+
+  def main(args: Array[String]) {
+    val fromAppId = args(0).toInt
+    val toAppId = args(1).toInt
+
+    runMain(fromAppId, toAppId)
+  }
+
+  def runMain(fromAppId: Int, toAppId: Int): Unit = {
+    upgrade(fromAppId, toAppId)
+  }
+
+
+  val obsEntityTypes = Set("pio_user", "pio_item")
+  val obsProperties = Set(
+    "pio_itypes", "pio_starttime", "pio_endtime",
+    "pio_inactive", "pio_price", "pio_rating")
+
+  def hasPIOPrefix(eventClient: LEvents, appId: Int): Boolean = {
+    eventClient.find(appId = appId).filter( e =>
+      (obsEntityTypes.contains(e.entityType) ||
+       e.targetEntityType.map(obsEntityTypes.contains(_)).getOrElse(false) ||
+       e.properties.keySet.exists(obsProperties.contains(_))
+      )
+    ).hasNext
+  }
+
+  def isEmpty(eventClient: LEvents, appId: Int): Boolean =
+    !eventClient.find(appId = appId).hasNext
+
+
+  def upgradeCopy(eventClient: LEvents, fromAppId: Int, toAppId: Int) {
+    val fromDist = CheckDistribution.entityType(eventClient, fromAppId)
+
+    logger.info("FromAppId Distribution")
+    fromDist.toSeq.sortBy(-_._2).foreach { e => logger.info(e) }
+
+    val events = eventClient
+    .find(appId = fromAppId)
+    .zipWithIndex
+    .foreach { case (fromEvent, index) => {
+      if (index % 50000 == 0) {
+        // logger.info(s"Progress: $fromEvent $index")
+        logger.info(s"Progress: $index")
+      }
+
+
+      val fromEntityType = fromEvent.entityType
+      val toEntityType = NameMap.getOrElse(fromEntityType, fromEntityType)
+
+      val fromTargetEntityType = fromEvent.targetEntityType
+      val toTargetEntityType = fromTargetEntityType
+        .map { et => NameMap.getOrElse(et, et) }
+
+      val toProperties = DataMap(fromEvent.properties.fields.map {
+        case (k, v) =>
+          val newK = if (obsProperties.contains(k)) {
+            val nK = k.stripPrefix("pio_")
+            logger.info(s"property ${k} will be renamed to ${nK}")
+            nK
+          } else k
+          (newK, v)
+      })
+
+      val toEvent = fromEvent.copy(
+        entityType = toEntityType,
+        targetEntityType = toTargetEntityType,
+        properties = toProperties)
+
+      eventClient.insert(toEvent, toAppId)
+    }}
+
+
+    val toDist = CheckDistribution.entityType(eventClient, toAppId)
+
+    logger.info("Recap fromAppId Distribution")
+    fromDist.toSeq.sortBy(-_._2).foreach { e => logger.info(e) }
+
+    logger.info("ToAppId Distribution")
+    toDist.toSeq.sortBy(-_._2).foreach { e => logger.info(e) }
+
+    val fromGood = fromDist
+      .toSeq
+      .forall { case (k, c) => {
+        val (et, tet) = k
+        val net = NameMap.getOrElse(et, et)
+        val ntet = tet.map(tet => NameMap.getOrElse(tet, tet))
+        val nk = (net, ntet)
+        val nc = toDist.getOrElse(nk, -1)
+        val checkMatch = (c == nc)
+        if (!checkMatch) {
+          logger.info(s"${k} doesn't match: old has ${c}. new has ${nc}.")
+        }
+        checkMatch
+      }}
+
+    val toGood = toDist
+      .toSeq
+      .forall { case (k, c) => {
+        val (et, tet) = k
+        val oet = RevNameMap.getOrElse(et, et)
+        val otet = tet.map(tet => RevNameMap.getOrElse(tet, tet))
+        val ok = (oet, otet)
+        val oc = fromDist.getOrElse(ok, -1)
+        val checkMatch = (c == oc)
+        if (!checkMatch) {
+          logger.info(s"${k} doesn't match: new has ${c}. old has ${oc}.")
+        }
+        checkMatch
+      }}
+
+    if (!fromGood || !toGood) {
+      logger.error("Doesn't match!! There is an import error.")
+    } else {
+      logger.info("Count matches. Looks like we are good to go.")
+    }
+  }
+
+  /* For upgrade from 0.8.2 to 0.8.3 only */
+  def upgrade(fromAppId: Int, toAppId: Int) {
+
+    val eventClient = Storage.getLEvents().asInstanceOf[HBLEvents]
+
+    require(fromAppId != toAppId,
+      s"FromAppId: $fromAppId must be different from toAppId: $toAppId")
+
+    if (hasPIOPrefix(eventClient, fromAppId)) {
+      require(
+        isEmpty(eventClient, toAppId),
+        s"Target appId: $toAppId is not empty. Please run " +
+        "`pio app data-delete <app_name>` to clean the data before upgrading")
+
+      logger.info(s"$fromAppId isEmpty: " + isEmpty(eventClient, fromAppId))
+
+      upgradeCopy(eventClient, fromAppId, toAppId)
+
+    } else {
+      logger.info(s"From appId: ${fromAppId} doesn't contain"
+        + s" obsolete entityTypes ${obsEntityTypes} or"
+        + s" obsolete properties ${obsProperties}."
+        + " No need data migration."
+        + s" You can continue to use appId ${fromAppId}.")
+    }
+
+    logger.info("Done.")
+  }
+
+
+}

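To make the renaming in upgradeCopy concrete: entity types go through NameMap and obsolete "pio_"-prefixed property keys lose their prefix. A tiny illustration using the public values above:

    // entityType: "pio_user" -> "user"; unknown types pass through unchanged
    assert(Upgrade_0_8_3.NameMap.getOrElse("pio_user", "pio_user") == "user")
    assert(Upgrade_0_8_3.NameMap.getOrElse("widget", "widget") == "widget")
    // obsolete property keys: "pio_rating" -> "rating"
    assert("pio_rating".stripPrefix("pio_") == "rating")
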
http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/data/src/main/scala/org/apache/predictionio/data/storage/hdfs/HDFSModels.scala
----------------------------------------------------------------------
diff --git a/data/src/main/scala/org/apache/predictionio/data/storage/hdfs/HDFSModels.scala b/data/src/main/scala/org/apache/predictionio/data/storage/hdfs/HDFSModels.scala
new file mode 100644
index 0000000..ca967ae
--- /dev/null
+++ b/data/src/main/scala/org/apache/predictionio/data/storage/hdfs/HDFSModels.scala
@@ -0,0 +1,60 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package org.apache.predictionio.data.storage.hdfs
+
+import java.io.IOException
+
+import com.google.common.io.ByteStreams
+import grizzled.slf4j.Logging
+import org.apache.predictionio.data.storage.Model
+import org.apache.predictionio.data.storage.Models
+import org.apache.predictionio.data.storage.StorageClientConfig
+import org.apache.hadoop.fs.FileSystem
+import org.apache.hadoop.fs.Path
+
+class HDFSModels(fs: FileSystem, config: StorageClientConfig, prefix: String)
+  extends Models with Logging {
+
+  def insert(i: Model): Unit = {
+    try {
+      val fsdos = fs.create(new Path(s"$prefix${i.id}"))
+      fsdos.write(i.models)
+      fsdos.close
+    } catch {
+      case e: IOException => error(e.getMessage)
+    }
+  }
+
+  def get(id: String): Option[Model] = {
+    try {
+      val p = new Path(s"$prefix$id")
+      Some(Model(
+        id = id,
+        models = ByteStreams.toByteArray(fs.open(p))))
+    } catch {
+      case e: Throwable =>
+        error(e.getMessage)
+        None
+    }
+  }
+
+  def delete(id: String): Unit = {
+    val p = new Path(s"$prefix$id")
+    if (!fs.delete(p, false)) {
+      error(s"Unable to delete ${fs.makeQualified(p).toString}!")
+    }
+  }
+}

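A hedged usage sketch of the three operations above; models (an HDFSModels) is assumed to be in scope, and Model is assumed to take the id and models fields used here:

    import org.apache.predictionio.data.storage.Model

    models.insert(Model(id = "engine-instance-1", models = Array[Byte](1, 2, 3)))
    val loaded: Option[Model] = models.get("engine-instance-1") // None on failure
    models.delete("engine-instance-1")                          // logs on failure
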
http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/data/src/main/scala/org/apache/predictionio/data/storage/hdfs/StorageClient.scala
----------------------------------------------------------------------
diff --git a/data/src/main/scala/org/apache/predictionio/data/storage/hdfs/StorageClient.scala b/data/src/main/scala/org/apache/predictionio/data/storage/hdfs/StorageClient.scala
new file mode 100644
index 0000000..3382e12
--- /dev/null
+++ b/data/src/main/scala/org/apache/predictionio/data/storage/hdfs/StorageClient.scala
@@ -0,0 +1,33 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package org.apache.predictionio.data.storage.hdfs
+
+import grizzled.slf4j.Logging
+import org.apache.predictionio.data.storage.BaseStorageClient
+import org.apache.predictionio.data.storage.StorageClientConfig
+import org.apache.hadoop.conf.Configuration
+import org.apache.hadoop.fs.FileSystem
+import org.apache.hadoop.fs.Path
+
+class StorageClient(val config: StorageClientConfig) extends BaseStorageClient
+    with Logging {
+  override val prefix = "HDFS"
+  val conf = new Configuration
+  val fs = FileSystem.get(conf)
+  fs.setWorkingDirectory(
+    new Path(config.properties.getOrElse("PATH", config.properties("HOSTS"))))
+  val client = fs
+}

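The working directory resolves from the PATH property, falling back to HOSTS. A hedged sketch assuming the same StorageClientConfig shape as above; the path is hypothetical:

    import org.apache.predictionio.data.storage.StorageClientConfig
    import org.apache.predictionio.data.storage.hdfs.StorageClient

    val hdfsClient = new StorageClient(
      StorageClientConfig(properties = Map("PATH" -> "/tmp/pio-models")))
    println(hdfsClient.fs.getWorkingDirectory)
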
http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/data/src/main/scala/org/apache/predictionio/data/storage/hdfs/package.scala
----------------------------------------------------------------------
diff --git a/data/src/main/scala/org/apache/predictionio/data/storage/hdfs/package.scala b/data/src/main/scala/org/apache/predictionio/data/storage/hdfs/package.scala
new file mode 100644
index 0000000..63b34b4
--- /dev/null
+++ b/data/src/main/scala/org/apache/predictionio/data/storage/hdfs/package.scala
@@ -0,0 +1,22 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package org.apache.predictionio.data.storage
+
+/** HDFS implementation of storage traits, supporting model data only
+  *
+  * @group Implementation
+  */
+package object hdfs {}

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/data/src/main/scala/org/apache/predictionio/data/storage/jdbc/JDBCAccessKeys.scala
----------------------------------------------------------------------
diff --git a/data/src/main/scala/org/apache/predictionio/data/storage/jdbc/JDBCAccessKeys.scala b/data/src/main/scala/org/apache/predictionio/data/storage/jdbc/JDBCAccessKeys.scala
new file mode 100644
index 0000000..588cc60
--- /dev/null
+++ b/data/src/main/scala/org/apache/predictionio/data/storage/jdbc/JDBCAccessKeys.scala
@@ -0,0 +1,84 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package org.apache.predictionio.data.storage.jdbc
+
+import grizzled.slf4j.Logging
+import org.apache.predictionio.data.storage.AccessKey
+import org.apache.predictionio.data.storage.AccessKeys
+import org.apache.predictionio.data.storage.StorageClientConfig
+import scalikejdbc._
+
+import scala.util.Random
+
+/** JDBC implementation of [[AccessKeys]] */
+class JDBCAccessKeys(client: String, config: StorageClientConfig, prefix: String)
+  extends AccessKeys with Logging {
+  /** Database table name for this data access object */
+  val tableName = JDBCUtils.prefixTableName(prefix, "accesskeys")
+  DB autoCommit { implicit session =>
+    sql"""
+    create table if not exists $tableName (
+      accesskey varchar(64) not null primary key,
+      appid integer not null,
+      events text)""".execute().apply()
+  }
+
+  def insert(accessKey: AccessKey): Option[String] = DB localTx { implicit s =>
+    val key = if (accessKey.key.isEmpty) generateKey else accessKey.key
+    val events = if (accessKey.events.isEmpty) None else Some(accessKey.events.mkString(","))
+    sql"""
+    insert into $tableName values(
+      $key,
+      ${accessKey.appid},
+      $events)""".update().apply()
+    Some(key)
+  }
+
+  def get(key: String): Option[AccessKey] = DB readOnly { implicit session =>
+    sql"SELECT accesskey, appid, events FROM $tableName WHERE accesskey = $key".
+      map(resultToAccessKey).single().apply()
+  }
+
+  def getAll(): Seq[AccessKey] = DB readOnly { implicit session =>
+    sql"SELECT accesskey, appid, events FROM $tableName".map(resultToAccessKey).list().apply()
+  }
+
+  def getByAppid(appid: Int): Seq[AccessKey] = DB readOnly { implicit session =>
+    sql"SELECT accesskey, appid, events FROM $tableName WHERE appid = $appid".
+      map(resultToAccessKey).list().apply()
+  }
+
+  def update(accessKey: AccessKey): Unit = DB localTx { implicit session =>
+    val events = if (accessKey.events.isEmpty) None else Some(accessKey.events.mkString(","))
+    sql"""
+    UPDATE $tableName SET
+      appid = ${accessKey.appid},
+      events = $events
+    WHERE accesskey = ${accessKey.key}""".update().apply()
+  }
+
+  def delete(key: String): Unit = DB localTx { implicit session =>
+    sql"DELETE FROM $tableName WHERE accesskey = $key".update().apply()
+  }
+
+  /** Convert JDBC results to [[AccessKey]] */
+  def resultToAccessKey(rs: WrappedResultSet): AccessKey = {
+    AccessKey(
+      key = rs.string("accesskey"),
+      appid = rs.int("appid"),
+      events = rs.stringOpt("events").map(_.split(",").toSeq).getOrElse(Nil))
+  }
+}

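A hedged usage sketch of the DAO above; keys (a JDBCAccessKeys) is assumed to be in scope, and passing an empty key lets insert generate one:

    import org.apache.predictionio.data.storage.AccessKey

    val generated: Option[String] =
      keys.insert(AccessKey(key = "", appid = 1, events = Seq("rate", "buy")))
    generated.foreach { k =>
      println(keys.get(k)) // Some(AccessKey(<generated key>, 1, List(rate, buy)))
    }
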

[14/34] incubator-predictionio git commit: rename all except examples

Posted by do...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/data/src/main/scala/io/prediction/data/storage/hbase/upgrade/HB_0_8_0.scala
----------------------------------------------------------------------
diff --git a/data/src/main/scala/io/prediction/data/storage/hbase/upgrade/HB_0_8_0.scala b/data/src/main/scala/io/prediction/data/storage/hbase/upgrade/HB_0_8_0.scala
deleted file mode 100644
index e3edbc3..0000000
--- a/data/src/main/scala/io/prediction/data/storage/hbase/upgrade/HB_0_8_0.scala
+++ /dev/null
@@ -1,190 +0,0 @@
-/** Copyright 2015 TappingStone, Inc.
-  *
-  * Licensed under the Apache License, Version 2.0 (the "License");
-  * you may not use this file except in compliance with the License.
-  * You may obtain a copy of the License at
-  *
-  *     http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-package io.prediction.data.storage.hbase.upgrade
-
-import io.prediction.annotation.Experimental
-
-import io.prediction.data.storage.Event
-import io.prediction.data.storage.EventValidation
-import io.prediction.data.storage.DataMap
-
-import org.apache.hadoop.hbase.client.Scan
-import org.apache.hadoop.hbase.client.HConnection
-import org.apache.hadoop.hbase.client.Result
-import org.apache.hadoop.hbase.TableName
-import org.apache.hadoop.hbase.util.Bytes
-
-import org.joda.time.DateTime
-import org.joda.time.DateTimeZone
-
-import org.json4s.DefaultFormats
-import org.json4s.JObject
-import org.json4s.native.Serialization.{ read, write }
-
-import org.apache.commons.codec.binary.Base64
-
-import scala.collection.JavaConversions._
-
-/** :: Experimental :: */
-@Experimental
-object HB_0_8_0 {
-
-  implicit val formats = DefaultFormats
-
-  def getByAppId(
-    connection: HConnection,
-    namespace: String,
-    appId: Int): Iterator[Event] = {
-    val tableName = TableName.valueOf(namespace, "events")
-    val table = connection.getTable(tableName)
-    val start = PartialRowKey(appId)
-    val stop = PartialRowKey(appId + 1)
-    val scan = new Scan(start.toBytes, stop.toBytes)
-    val scanner = table.getScanner(scan)
-    table.close()
-    scanner.iterator().map { resultToEvent(_) }
-  }
-
-  val colNames: Map[String, Array[Byte]] = Map(
-    "event" -> "e",
-    "entityType" -> "ety",
-    "entityId" -> "eid",
-    "targetEntityType" -> "tety",
-    "targetEntityId" -> "teid",
-    "properties" -> "p",
-    "prId" -> "pk", // columna name is 'pk' in 0.8.0/0.8.1
-    "eventTimeZone" -> "etz",
-    "creationTimeZone" -> "ctz"
-  ).mapValues(Bytes.toBytes(_))
-
-
-  class RowKey(
-    val appId: Int,
-    val millis: Long,
-    val uuidLow: Long
-  ) {
-    lazy val toBytes: Array[Byte] = {
-      // Append the UUID's least significant bits to disambiguate multiple
-      // events occurring at the same time. (The UUID's most significant bits
-      // encode a timestamp; eventTime is used instead.)
-      Bytes.toBytes(appId) ++ Bytes.toBytes(millis) ++ Bytes.toBytes(uuidLow)
-    }
-    override def toString: String = {
-      Base64.encodeBase64URLSafeString(toBytes)
-    }
-  }
-
-  object RowKey {
-    // get RowKey from string representation
-    def apply(s: String): RowKey = {
-      try {
-        apply(Base64.decodeBase64(s))
-      } catch {
-        case e: Exception => throw new RowKeyException(
-          s"Failed to convert String ${s} to RowKey because ${e}", e)
-      }
-    }
-
-    def apply(b: Array[Byte]): RowKey = {
-      if (b.size != 20) {
-        val bString = b.mkString(",")
-        throw new RowKeyException(
-          s"Incorrect byte array size. Bytes: ${bString}.")
-      }
-
-      new RowKey(
-        appId = Bytes.toInt(b.slice(0, 4)),
-        millis = Bytes.toLong(b.slice(4, 12)),
-        uuidLow = Bytes.toLong(b.slice(12, 20))
-      )
-    }
-  }
-
-  class RowKeyException(msg: String, cause: Exception)
-    extends Exception(msg, cause) {
-      def this(msg: String) = this(msg, null)
-    }
-
-  case class PartialRowKey(val appId: Int, val millis: Option[Long] = None) {
-    val toBytes: Array[Byte] = {
-      Bytes.toBytes(appId) ++
-        (millis.map(Bytes.toBytes(_)).getOrElse(Array[Byte]()))
-    }
-  }
-
-  def resultToEvent(result: Result): Event = {
-    val rowKey = RowKey(result.getRow())
-
-    val eBytes = Bytes.toBytes("e")
-    // val e = result.getFamilyMap(eBytes)
-
-    def getStringCol(col: String): String = {
-      val r = result.getValue(eBytes, colNames(col))
-      require(r != null,
-        s"Failed to get value for column ${col}. " +
-        s"Rowkey: ${rowKey.toString} " +
-        s"StringBinary: ${Bytes.toStringBinary(result.getRow())}.")
-
-      Bytes.toString(r)
-    }
-
-    def getOptStringCol(col: String): Option[String] = {
-      val r = result.getValue(eBytes, colNames(col))
-      if (r == null) {
-        None
-      } else {
-        Some(Bytes.toString(r))
-      }
-    }
-
-    def getTimestamp(col: String): Long = {
-      result.getColumnLatestCell(eBytes, colNames(col)).getTimestamp()
-    }
-
-    val event = getStringCol("event")
-    val entityType = getStringCol("entityType")
-    val entityId = getStringCol("entityId")
-    val targetEntityType = getOptStringCol("targetEntityType")
-    val targetEntityId = getOptStringCol("targetEntityId")
-    val properties: DataMap = getOptStringCol("properties")
-      .map(s => DataMap(read[JObject](s))).getOrElse(DataMap())
-    val prId = getOptStringCol("prId")
-    val eventTimeZone = getOptStringCol("eventTimeZone")
-      .map(DateTimeZone.forID(_))
-      .getOrElse(EventValidation.defaultTimeZone)
-    val creationTimeZone = getOptStringCol("creationTimeZone")
-      .map(DateTimeZone.forID(_))
-      .getOrElse(EventValidation.defaultTimeZone)
-
-    val creationTime: DateTime = new DateTime(
-      getTimestamp("event"), creationTimeZone
-    )
-
-    Event(
-      eventId = Some(RowKey(result.getRow()).toString),
-      event = event,
-      entityType = entityType,
-      entityId = entityId,
-      targetEntityType = targetEntityType,
-      targetEntityId = targetEntityId,
-      properties = properties,
-      eventTime = new DateTime(rowKey.millis, eventTimeZone),
-      tags = Seq(),
-      prId = prId,
-      creationTime = creationTime
-    )
-  }
-}

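For reference, the row key deleted above is a fixed 20-byte, big-endian
concatenation: a 4-byte app ID, the event time in milliseconds as 8 bytes, and
the low 8 bytes of a UUID to disambiguate simultaneous events. A minimal,
dependency-free sketch of the same layout (using java.nio.ByteBuffer in place
of HBase's Bytes helpers):

    import java.nio.ByteBuffer

    // appId (4 bytes) ++ millis (8 bytes) ++ uuidLow (8 bytes) = 20 bytes,
    // matching RowKey.toBytes / RowKey.apply(b: Array[Byte]) above.
    def encode(appId: Int, millis: Long, uuidLow: Long): Array[Byte] =
      ByteBuffer.allocate(20).putInt(appId).putLong(millis).putLong(uuidLow).array()

    def decode(b: Array[Byte]): (Int, Long, Long) = {
      require(b.length == 20, s"Incorrect byte array size: ${b.length}")
      val buf = ByteBuffer.wrap(b)
      (buf.getInt(), buf.getLong(), buf.getLong())
    }

    // decode(encode(7, 1421000000000L, 42L)) == (7, 1421000000000L, 42L)

Because HBase sorts rows lexicographically by key, this layout clusters rows by
app ID and orders them by event time within an app, which is what makes the
PartialRowKey range scan in getByAppId possible.
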
http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/data/src/main/scala/io/prediction/data/storage/hbase/upgrade/Upgrade.scala
----------------------------------------------------------------------
diff --git a/data/src/main/scala/io/prediction/data/storage/hbase/upgrade/Upgrade.scala b/data/src/main/scala/io/prediction/data/storage/hbase/upgrade/Upgrade.scala
deleted file mode 100644
index 7ef5305..0000000
--- a/data/src/main/scala/io/prediction/data/storage/hbase/upgrade/Upgrade.scala
+++ /dev/null
@@ -1,72 +0,0 @@
-/** Copyright 2015 TappingStone, Inc.
-  *
-  * Licensed under the Apache License, Version 2.0 (the "License");
-  * you may not use this file except in compliance with the License.
-  * You may obtain a copy of the License at
-  *
-  *     http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-package io.prediction.data.storage.hbase.upgrade
-
-import io.prediction.annotation.Experimental
-
-import io.prediction.data.storage.Storage
-import io.prediction.data.storage.hbase.HBLEvents
-import io.prediction.data.storage.hbase.HBEventsUtil
-
-import scala.collection.JavaConversions._
-
-/** :: Experimental :: */
-@Experimental
-object Upgrade {
-
-  def main(args: Array[String]) {
-    val fromAppId = args(0).toInt
-    val toAppId = args(1).toInt
-    val batchSize = args.lift(2).map(_.toInt).getOrElse(100)
-    val fromNamespace = args.lift(3).getOrElse("predictionio_eventdata")
-
-    upgrade(fromAppId, toAppId, batchSize, fromNamespace)
-  }
-
-  /* For upgrade from 0.8.0 or 0.8.1 to 0.8.2 only */
-  def upgrade(
-    fromAppId: Int,
-    toAppId: Int,
-    batchSize: Int,
-    fromNamespace: String) {
-
-    val events = Storage.getLEvents().asInstanceOf[HBLEvents]
-
-    // Assumes "pio app new <newapp>" has already been run (new app already created)
-    // TODO: check if new table empty and warn user if not
-    val newTable = events.getTable(toAppId)
-
-    val newTableName = newTable.getName().getNameAsString()
-    println(s"Copying data from ${fromNamespace}:events for app ID ${fromAppId}"
-      + s" to new HBase table ${newTableName}...")
-
-    HB_0_8_0.getByAppId(
-      events.client.connection,
-      fromNamespace,
-      fromAppId).grouped(batchSize).foreach { eventGroup =>
-        val puts = eventGroup.map{ e =>
-          val (put, rowkey) = HBEventsUtil.eventToPut(e, toAppId)
-          put
-        }
-        newTable.put(puts.toList)
-      }
-
-    newTable.flushCommits()
-    newTable.close()
-    println("Done.")
-  }
-
-}

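The copy loop above leans on Iterator.grouped to stream events in fixed-size
batches, so at most batchSize events are materialized per HBase put. The
pattern in isolation:

    // grouped(n) wraps a lazy iterator and yields chunks of up to n elements.
    val events: Iterator[Int] = Iterator.range(0, 10)
    events.grouped(4).foreach { batch =>
      println(s"writing batch of ${batch.size}: ${batch.mkString(",")}")
    }
    // writing batch of 4: 0,1,2,3
    // writing batch of 4: 4,5,6,7
    // writing batch of 2: 8,9
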
http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/data/src/main/scala/io/prediction/data/storage/hbase/upgrade/Upgrade_0_8_3.scala
----------------------------------------------------------------------
diff --git a/data/src/main/scala/io/prediction/data/storage/hbase/upgrade/Upgrade_0_8_3.scala b/data/src/main/scala/io/prediction/data/storage/hbase/upgrade/Upgrade_0_8_3.scala
deleted file mode 100644
index 8b80b83..0000000
--- a/data/src/main/scala/io/prediction/data/storage/hbase/upgrade/Upgrade_0_8_3.scala
+++ /dev/null
@@ -1,221 +0,0 @@
-/** Copyright 2015 TappingStone, Inc.
-  *
-  * Licensed under the Apache License, Version 2.0 (the "License");
-  * you may not use this file except in compliance with the License.
-  * You may obtain a copy of the License at
-  *
-  *     http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-package io.prediction.data.storage.hbase.upgrade
-
-import io.prediction.annotation.Experimental
-
-import grizzled.slf4j.Logger
-import io.prediction.data.storage.Storage
-import io.prediction.data.storage.DataMap
-import io.prediction.data.storage.hbase.HBLEvents
-import io.prediction.data.storage.hbase.HBEventsUtil
-
-import scala.collection.JavaConversions._
-
-import scala.concurrent._
-import ExecutionContext.Implicits.global
-import io.prediction.data.storage.LEvents
-import scala.concurrent.Await
-import scala.concurrent.duration.Duration
-import java.lang.Thread
-
-object CheckDistribution {
-  def entityType(eventClient: LEvents, appId: Int)
-  : Map[(String, Option[String]), Int] = {
-    eventClient
-    .find(appId = appId)
-    .foldLeft(Map[(String, Option[String]), Int]().withDefaultValue(0)) {
-      case (m, e) => {
-        val k = (e.entityType, e.targetEntityType)
-        m.updated(k, m(k) + 1)
-      }
-    }
-  }
-
-  def runMain(appId: Int) {
-    val eventClient = Storage.getLEvents().asInstanceOf[HBLEvents]
-
-    entityType(eventClient, appId)
-    .toSeq
-    .sortBy(-_._2)
-    .foreach { println }
-
-  }
-
-  def main(args: Array[String]) {
-    runMain(args(0).toInt)
-  }
-
-}
-
-/** :: Experimental :: */
-@Experimental
-object Upgrade_0_8_3 {
-  val NameMap = Map(
-    "pio_user" -> "user",
-    "pio_item" -> "item")
-  val RevNameMap = NameMap.toSeq.map(_.swap).toMap
-
-  val logger = Logger[this.type]
-
-  def main(args: Array[String]) {
-    val fromAppId = args(0).toInt
-    val toAppId = args(1).toInt
-
-    runMain(fromAppId, toAppId)
-  }
-
-  def runMain(fromAppId: Int, toAppId: Int): Unit = {
-    upgrade(fromAppId, toAppId)
-  }
-
-
-  val obsEntityTypes = Set("pio_user", "pio_item")
-  val obsProperties = Set(
-    "pio_itypes", "pio_starttime", "pio_endtime",
-    "pio_inactive", "pio_price", "pio_rating")
-
-  def hasPIOPrefix(eventClient: LEvents, appId: Int): Boolean = {
-    eventClient.find(appId = appId).filter( e =>
-      (obsEntityTypes.contains(e.entityType) ||
-       e.targetEntityType.map(obsEntityTypes.contains(_)).getOrElse(false) ||
-       e.properties.keySet.exists(obsProperties.contains(_))
-      )
-    ).hasNext
-  }
-
-  def isEmpty(eventClient: LEvents, appId: Int): Boolean =
-    !eventClient.find(appId = appId).hasNext
-
-
-  def upgradeCopy(eventClient: LEvents, fromAppId: Int, toAppId: Int) {
-    val fromDist = CheckDistribution.entityType(eventClient, fromAppId)
-
-    logger.info("FromAppId Distribution")
-    fromDist.toSeq.sortBy(-_._2).foreach { e => logger.info(e) }
-
-    eventClient
-    .find(appId = fromAppId)
-    .zipWithIndex
-    .foreach { case (fromEvent, index) => {
-      if (index % 50000 == 0) {
-        // logger.info(s"Progress: $fromEvent $index")
-        logger.info(s"Progress: $index")
-      }
-
-
-      val fromEntityType = fromEvent.entityType
-      val toEntityType = NameMap.getOrElse(fromEntityType, fromEntityType)
-
-      val fromTargetEntityType = fromEvent.targetEntityType
-      val toTargetEntityType = fromTargetEntityType
-        .map { et => NameMap.getOrElse(et, et) }
-
-      val toProperties = DataMap(fromEvent.properties.fields.map {
-        case (k, v) =>
-          val newK = if (obsProperties.contains(k)) {
-            val nK = k.stripPrefix("pio_")
-            logger.info(s"property ${k} will be renamed to ${nK}")
-            nK
-          } else k
-          (newK, v)
-      })
-
-      val toEvent = fromEvent.copy(
-        entityType = toEntityType,
-        targetEntityType = toTargetEntityType,
-        properties = toProperties)
-
-      eventClient.insert(toEvent, toAppId)
-    }}
-
-
-    val toDist = CheckDistribution.entityType(eventClient, toAppId)
-
-    logger.info("Recap fromAppId Distribution")
-    fromDist.toSeq.sortBy(-_._2).foreach { e => logger.info(e) }
-
-    logger.info("ToAppId Distribution")
-    toDist.toSeq.sortBy(-_._2).foreach { e => logger.info(e) }
-
-    val fromGood = fromDist
-      .toSeq
-      .forall { case (k, c) => {
-        val (et, tet) = k
-        val net = NameMap.getOrElse(et, et)
-        val ntet = tet.map(tet => NameMap.getOrElse(tet, tet))
-        val nk = (net, ntet)
-        val nc = toDist.getOrElse(nk, -1)
-        val checkMatch = (c == nc)
-        if (!checkMatch) {
-          logger.info(s"${k} doesn't match: old has ${c}. new has ${nc}.")
-        }
-        checkMatch
-      }}
-
-    val toGood = toDist
-      .toSeq
-      .forall { case (k, c) => {
-        val (et, tet) = k
-        val oet = RevNameMap.getOrElse(et, et)
-        val otet = tet.map(tet => RevNameMap.getOrElse(tet, tet))
-        val ok = (oet, otet)
-        val oc = fromDist.getOrElse(ok, -1)
-        val checkMatch = (c == oc)
-        if (!checkMatch) {
-          logger.info(s"${k} doesn't match: new has ${c}. old has ${oc}.")
-        }
-        checkMatch
-      }}
-
-    if (!fromGood || !toGood) {
-      logger.error("Doesn't match!! There is an import error.")
-    } else {
-      logger.info("Count matches. Looks like we are good to go.")
-    }
-  }
-
-  /* For upgrade from 0.8.2 to 0.8.3 only */
-  def upgrade(fromAppId: Int, toAppId: Int) {
-
-    val eventClient = Storage.getLEvents().asInstanceOf[HBLEvents]
-
-    require(fromAppId != toAppId,
-      s"FromAppId: $fromAppId must be different from toAppId: $toAppId")
-
-    if (hasPIOPrefix(eventClient, fromAppId)) {
-      require(
-        isEmpty(eventClient, toAppId),
-        s"Target appId: $toAppId is not empty. Please run " +
-        "`pio app data-delete <app_name>` to clean the data before upgrading")
-
-      logger.info(s"$fromAppId isEmpty: " + isEmpty(eventClient, fromAppId))
-
-      upgradeCopy(eventClient, fromAppId, toAppId)
-
-    } else {
-      logger.info(s"From appId: ${fromAppId} doesn't contain"
-        + s" obsolete entityTypes ${obsEntityTypes} or"
-        + s" obsolete properties ${obsProperties}."
-        + " No need data migration."
-        + s" You can continue to use appId ${fromAppId}.")
-    }
-
-    logger.info("Done.")
-  }
-
-
-}

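The 0.8.3 migration above performs two renames: entity types are mapped through
NameMap (pio_user -> user, pio_item -> item), and the reserved pio_ prefix is
stripped from the obsolete property names. The core of that transformation,
distilled:

    val nameMap = Map("pio_user" -> "user", "pio_item" -> "item")
    val obsProperties = Set("pio_itypes", "pio_starttime", "pio_endtime",
      "pio_inactive", "pio_price", "pio_rating")

    // Unknown entity types and properties pass through unchanged.
    def renameEntityType(et: String): String = nameMap.getOrElse(et, et)
    def renameProperty(k: String): String =
      if (obsProperties.contains(k)) k.stripPrefix("pio_") else k

    // renameEntityType("pio_user") == "user"; renameEntityType("buyer") == "buyer"
    // renameProperty("pio_price") == "price"; renameProperty("color")   == "color"

The before/after distribution check then verifies, in both directions, that
every (entityType, targetEntityType) count in the source app maps to an equal
count in the target app.
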
http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/data/src/main/scala/io/prediction/data/storage/hdfs/HDFSModels.scala
----------------------------------------------------------------------
diff --git a/data/src/main/scala/io/prediction/data/storage/hdfs/HDFSModels.scala b/data/src/main/scala/io/prediction/data/storage/hdfs/HDFSModels.scala
deleted file mode 100644
index daef6bc..0000000
--- a/data/src/main/scala/io/prediction/data/storage/hdfs/HDFSModels.scala
+++ /dev/null
@@ -1,60 +0,0 @@
-/** Copyright 2015 TappingStone, Inc.
-  *
-  * Licensed under the Apache License, Version 2.0 (the "License");
-  * you may not use this file except in compliance with the License.
-  * You may obtain a copy of the License at
-  *
-  *     http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-package io.prediction.data.storage.hdfs
-
-import java.io.IOException
-
-import com.google.common.io.ByteStreams
-import grizzled.slf4j.Logging
-import io.prediction.data.storage.Model
-import io.prediction.data.storage.Models
-import io.prediction.data.storage.StorageClientConfig
-import org.apache.hadoop.fs.FileSystem
-import org.apache.hadoop.fs.Path
-
-class HDFSModels(fs: FileSystem, config: StorageClientConfig, prefix: String)
-  extends Models with Logging {
-
-  def insert(i: Model): Unit = {
-    try {
-      val fsdos = fs.create(new Path(s"$prefix${i.id}"))
-      fsdos.write(i.models)
-      fsdos.close
-    } catch {
-      case e: IOException => error(e.getMessage)
-    }
-  }
-
-  def get(id: String): Option[Model] = {
-    try {
-      val p = new Path(s"$prefix$id")
-      Some(Model(
-        id = id,
-        models = ByteStreams.toByteArray(fs.open(p))))
-    } catch {
-      case e: Throwable =>
-        error(e.getMessage)
-        None
-    }
-  }
-
-  def delete(id: String): Unit = {
-    val p = new Path(s"$prefix$id")
-    if (!fs.delete(p, false)) {
-      error(s"Unable to delete ${fs.makeQualified(p).toString}!")
-    }
-  }
-}

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/data/src/main/scala/io/prediction/data/storage/hdfs/StorageClient.scala
----------------------------------------------------------------------
diff --git a/data/src/main/scala/io/prediction/data/storage/hdfs/StorageClient.scala b/data/src/main/scala/io/prediction/data/storage/hdfs/StorageClient.scala
deleted file mode 100644
index 9a3e58d..0000000
--- a/data/src/main/scala/io/prediction/data/storage/hdfs/StorageClient.scala
+++ /dev/null
@@ -1,33 +0,0 @@
-/** Copyright 2015 TappingStone, Inc.
-  *
-  * Licensed under the Apache License, Version 2.0 (the "License");
-  * you may not use this file except in compliance with the License.
-  * You may obtain a copy of the License at
-  *
-  *     http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-package io.prediction.data.storage.hdfs
-
-import grizzled.slf4j.Logging
-import io.prediction.data.storage.BaseStorageClient
-import io.prediction.data.storage.StorageClientConfig
-import org.apache.hadoop.conf.Configuration
-import org.apache.hadoop.fs.FileSystem
-import org.apache.hadoop.fs.Path
-
-class StorageClient(val config: StorageClientConfig) extends BaseStorageClient
-    with Logging {
-  override val prefix = "HDFS"
-  val conf = new Configuration
-  val fs = FileSystem.get(conf)
-  fs.setWorkingDirectory(
-    new Path(config.properties.getOrElse("PATH", config.properties("HOSTS"))))
-  val client = fs
-}

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/data/src/main/scala/io/prediction/data/storage/hdfs/package.scala
----------------------------------------------------------------------
diff --git a/data/src/main/scala/io/prediction/data/storage/hdfs/package.scala b/data/src/main/scala/io/prediction/data/storage/hdfs/package.scala
deleted file mode 100644
index 01e60f4..0000000
--- a/data/src/main/scala/io/prediction/data/storage/hdfs/package.scala
+++ /dev/null
@@ -1,22 +0,0 @@
-/** Copyright 2015 TappingStone, Inc.
-  *
-  * Licensed under the Apache License, Version 2.0 (the "License");
-  * you may not use this file except in compliance with the License.
-  * You may obtain a copy of the License at
-  *
-  *     http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-package io.prediction.data.storage
-
-/** HDFS implementation of storage traits, supporting model data only
-  *
-  * @group Implementation
-  */
-package object hdfs {}

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/data/src/main/scala/io/prediction/data/storage/jdbc/JDBCAccessKeys.scala
----------------------------------------------------------------------
diff --git a/data/src/main/scala/io/prediction/data/storage/jdbc/JDBCAccessKeys.scala b/data/src/main/scala/io/prediction/data/storage/jdbc/JDBCAccessKeys.scala
deleted file mode 100644
index 0b1e0cb..0000000
--- a/data/src/main/scala/io/prediction/data/storage/jdbc/JDBCAccessKeys.scala
+++ /dev/null
@@ -1,84 +0,0 @@
-/** Copyright 2015 TappingStone, Inc.
-  *
-  * Licensed under the Apache License, Version 2.0 (the "License");
-  * you may not use this file except in compliance with the License.
-  * You may obtain a copy of the License at
-  *
-  *     http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-package io.prediction.data.storage.jdbc
-
-import grizzled.slf4j.Logging
-import io.prediction.data.storage.AccessKey
-import io.prediction.data.storage.AccessKeys
-import io.prediction.data.storage.StorageClientConfig
-import scalikejdbc._
-
-import scala.util.Random
-
-/** JDBC implementation of [[AccessKeys]] */
-class JDBCAccessKeys(client: String, config: StorageClientConfig, prefix: String)
-  extends AccessKeys with Logging {
-  /** Database table name for this data access object */
-  val tableName = JDBCUtils.prefixTableName(prefix, "accesskeys")
-  DB autoCommit { implicit session =>
-    sql"""
-    create table if not exists $tableName (
-      accesskey varchar(64) not null primary key,
-      appid integer not null,
-      events text)""".execute().apply()
-  }
-
-  def insert(accessKey: AccessKey): Option[String] = DB localTx { implicit s =>
-    val key = if (accessKey.key.isEmpty) generateKey else accessKey.key
-    val events = if (accessKey.events.isEmpty) None else Some(accessKey.events.mkString(","))
-    sql"""
-    insert into $tableName values(
-      $key,
-      ${accessKey.appid},
-      $events)""".update().apply()
-    Some(key)
-  }
-
-  def get(key: String): Option[AccessKey] = DB readOnly { implicit session =>
-    sql"SELECT accesskey, appid, events FROM $tableName WHERE accesskey = $key".
-      map(resultToAccessKey).single().apply()
-  }
-
-  def getAll(): Seq[AccessKey] = DB readOnly { implicit session =>
-    sql"SELECT accesskey, appid, events FROM $tableName".map(resultToAccessKey).list().apply()
-  }
-
-  def getByAppid(appid: Int): Seq[AccessKey] = DB readOnly { implicit session =>
-    sql"SELECT accesskey, appid, events FROM $tableName WHERE appid = $appid".
-      map(resultToAccessKey).list().apply()
-  }
-
-  def update(accessKey: AccessKey): Unit = DB localTx { implicit session =>
-    val events = if (accessKey.events.isEmpty) None else Some(accessKey.events.mkString(","))
-    sql"""
-    UPDATE $tableName SET
-      appid = ${accessKey.appid},
-      events = $events
-    WHERE accesskey = ${accessKey.key}""".update().apply()
-  }
-
-  def delete(key: String): Unit = DB localTx { implicit session =>
-    sql"DELETE FROM $tableName WHERE accesskey = $key".update().apply()
-  }
-
-  /** Convert JDBC results to [[AccessKey]] */
-  def resultToAccessKey(rs: WrappedResultSet): AccessKey = {
-    AccessKey(
-      key = rs.string("accesskey"),
-      appid = rs.int("appid"),
-      events = rs.stringOpt("events").map(_.split(",").toSeq).getOrElse(Nil))
-  }
-}

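Note the events column encoding used above: an empty event list is stored as
SQL NULL rather than the empty string, so the split on read never yields a
spurious empty element. The round trip in isolation:

    def eventsToColumn(events: Seq[String]): Option[String] =
      if (events.isEmpty) None else Some(events.mkString(","))

    def columnToEvents(col: Option[String]): Seq[String] =
      col.map(_.split(",").toSeq).getOrElse(Nil)

    // columnToEvents(eventsToColumn(Seq("view", "buy"))) == Seq("view", "buy")
    // columnToEvents(eventsToColumn(Nil)) == Nil
    // Storing "" instead of NULL would round-trip to Seq("") here.
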
http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/data/src/main/scala/io/prediction/data/storage/jdbc/JDBCApps.scala
----------------------------------------------------------------------
diff --git a/data/src/main/scala/io/prediction/data/storage/jdbc/JDBCApps.scala b/data/src/main/scala/io/prediction/data/storage/jdbc/JDBCApps.scala
deleted file mode 100644
index 498dbc4..0000000
--- a/data/src/main/scala/io/prediction/data/storage/jdbc/JDBCApps.scala
+++ /dev/null
@@ -1,86 +0,0 @@
-/** Copyright 2015 TappingStone, Inc.
-  *
-  * Licensed under the Apache License, Version 2.0 (the "License");
-  * you may not use this file except in compliance with the License.
-  * You may obtain a copy of the License at
-  *
-  *     http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-package io.prediction.data.storage.jdbc
-
-import grizzled.slf4j.Logging
-import io.prediction.data.storage.App
-import io.prediction.data.storage.Apps
-import io.prediction.data.storage.StorageClientConfig
-import scalikejdbc._
-
-/** JDBC implementation of [[Apps]] */
-class JDBCApps(client: String, config: StorageClientConfig, prefix: String)
-  extends Apps with Logging {
-  /** Database table name for this data access object */
-  val tableName = JDBCUtils.prefixTableName(prefix, "apps")
-  DB autoCommit { implicit session =>
-    sql"""
-    create table if not exists $tableName (
-      id serial not null primary key,
-      name text not null,
-      description text)""".execute.apply()
-  }
-
-  def insert(app: App): Option[Int] = DB localTx { implicit session =>
-    val q = if (app.id == 0) {
-      sql"""
-      insert into $tableName (name, description) values(${app.name}, ${app.description})
-      """
-    } else {
-      sql"""
-      insert into $tableName values(${app.id}, ${app.name}, ${app.description})
-      """
-    }
-    Some(q.updateAndReturnGeneratedKey().apply().toInt)
-  }
-
-  def get(id: Int): Option[App] = DB readOnly { implicit session =>
-    sql"SELECT id, name, description FROM $tableName WHERE id = ${id}".map(rs =>
-      App(
-        id = rs.int("id"),
-        name = rs.string("name"),
-        description = rs.stringOpt("description"))
-    ).single().apply()
-  }
-
-  def getByName(name: String): Option[App] = DB readOnly { implicit session =>
-    sql"SELECT id, name, description FROM $tableName WHERE name = ${name}".map(rs =>
-      App(
-        id = rs.int("id"),
-        name = rs.string("name"),
-        description = rs.stringOpt("description"))
-    ).single().apply()
-  }
-
-  def getAll(): Seq[App] = DB readOnly { implicit session =>
-    sql"SELECT id, name, description FROM $tableName".map(rs =>
-      App(
-        id = rs.int("id"),
-        name = rs.string("name"),
-        description = rs.stringOpt("description"))
-    ).list().apply()
-  }
-
-  def update(app: App): Unit = DB localTx { implicit session =>
-    sql"""
-    update $tableName set name = ${app.name}, description = ${app.description}
-    where id = ${app.id}""".update().apply()
-  }
-
-  def delete(id: Int): Unit = DB localTx { implicit session =>
-    sql"DELETE FROM $tableName WHERE id = $id".update().apply()
-  }
-}

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/data/src/main/scala/io/prediction/data/storage/jdbc/JDBCChannels.scala
----------------------------------------------------------------------
diff --git a/data/src/main/scala/io/prediction/data/storage/jdbc/JDBCChannels.scala b/data/src/main/scala/io/prediction/data/storage/jdbc/JDBCChannels.scala
deleted file mode 100644
index a5ed153..0000000
--- a/data/src/main/scala/io/prediction/data/storage/jdbc/JDBCChannels.scala
+++ /dev/null
@@ -1,66 +0,0 @@
-/** Copyright 2015 TappingStone, Inc.
-  *
-  * Licensed under the Apache License, Version 2.0 (the "License");
-  * you may not use this file except in compliance with the License.
-  * You may obtain a copy of the License at
-  *
-  *     http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-package io.prediction.data.storage.jdbc
-
-import grizzled.slf4j.Logging
-import io.prediction.data.storage.Channel
-import io.prediction.data.storage.Channels
-import io.prediction.data.storage.StorageClientConfig
-import scalikejdbc._
-
-/** JDBC implementation of [[Channels]] */
-class JDBCChannels(client: String, config: StorageClientConfig, prefix: String)
-  extends Channels with Logging {
-  /** Database table name for this data access object */
-  val tableName = JDBCUtils.prefixTableName(prefix, "channels")
-  DB autoCommit { implicit session =>
-    sql"""
-    create table if not exists $tableName (
-      id serial not null primary key,
-      name text not null,
-      appid integer not null)""".execute().apply()
-  }
-
-  def insert(channel: Channel): Option[Int] = DB localTx { implicit session =>
-    val q = if (channel.id == 0) {
-      sql"INSERT INTO $tableName (name, appid) VALUES(${channel.name}, ${channel.appid})"
-    } else {
-      sql"INSERT INTO $tableName VALUES(${channel.id}, ${channel.name}, ${channel.appid})"
-    }
-    Some(q.updateAndReturnGeneratedKey().apply().toInt)
-  }
-
-  def get(id: Int): Option[Channel] = DB localTx { implicit session =>
-    sql"SELECT id, name, appid FROM $tableName WHERE id = $id".
-      map(resultToChannel).single().apply()
-  }
-
-  def getByAppid(appid: Int): Seq[Channel] = DB localTx { implicit session =>
-    sql"SELECT id, name, appid FROM $tableName WHERE appid = $appid".
-      map(resultToChannel).list().apply()
-  }
-
-  def delete(id: Int): Unit = DB localTx { implicit session =>
-    sql"DELETE FROM $tableName WHERE id = $id".update().apply()
-  }
-
-  def resultToChannel(rs: WrappedResultSet): Channel = {
-    Channel(
-      id = rs.int("id"),
-      name = rs.string("name"),
-      appid = rs.int("appid"))
-  }
-}

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/data/src/main/scala/io/prediction/data/storage/jdbc/JDBCEngineInstances.scala
----------------------------------------------------------------------
diff --git a/data/src/main/scala/io/prediction/data/storage/jdbc/JDBCEngineInstances.scala b/data/src/main/scala/io/prediction/data/storage/jdbc/JDBCEngineInstances.scala
deleted file mode 100644
index 3bd3922..0000000
--- a/data/src/main/scala/io/prediction/data/storage/jdbc/JDBCEngineInstances.scala
+++ /dev/null
@@ -1,194 +0,0 @@
-/** Copyright 2015 TappingStone, Inc.
-  *
-  * Licensed under the Apache License, Version 2.0 (the "License");
-  * you may not use this file except in compliance with the License.
-  * You may obtain a copy of the License at
-  *
-  *     http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-package io.prediction.data.storage.jdbc
-
-import grizzled.slf4j.Logging
-import io.prediction.data.storage.EngineInstance
-import io.prediction.data.storage.EngineInstances
-import io.prediction.data.storage.StorageClientConfig
-import scalikejdbc._
-
-/** JDBC implementation of [[EngineInstances]] */
-class JDBCEngineInstances(client: String, config: StorageClientConfig, prefix: String)
-  extends EngineInstances with Logging {
-  /** Database table name for this data access object */
-  val tableName = JDBCUtils.prefixTableName(prefix, "engineinstances")
-  DB autoCommit { implicit session =>
-    sql"""
-    create table if not exists $tableName (
-      id varchar(100) not null primary key,
-      status text not null,
-      startTime timestamp DEFAULT CURRENT_TIMESTAMP,
-      endTime timestamp DEFAULT CURRENT_TIMESTAMP,
-      engineId text not null,
-      engineVersion text not null,
-      engineVariant text not null,
-      engineFactory text not null,
-      batch text not null,
-      env text not null,
-      sparkConf text not null,
-      datasourceParams text not null,
-      preparatorParams text not null,
-      algorithmsParams text not null,
-      servingParams text not null)""".execute().apply()
-  }
-
-  def insert(i: EngineInstance): String = DB localTx { implicit session =>
-    val id = java.util.UUID.randomUUID().toString
-    sql"""
-    INSERT INTO $tableName VALUES(
-      $id,
-      ${i.status},
-      ${i.startTime},
-      ${i.endTime},
-      ${i.engineId},
-      ${i.engineVersion},
-      ${i.engineVariant},
-      ${i.engineFactory},
-      ${i.batch},
-      ${JDBCUtils.mapToString(i.env)},
-      ${JDBCUtils.mapToString(i.sparkConf)},
-      ${i.dataSourceParams},
-      ${i.preparatorParams},
-      ${i.algorithmsParams},
-      ${i.servingParams})""".update().apply()
-    id
-  }
-
-  def get(id: String): Option[EngineInstance] = DB localTx { implicit session =>
-    sql"""
-    SELECT
-      id,
-      status,
-      startTime,
-      endTime,
-      engineId,
-      engineVersion,
-      engineVariant,
-      engineFactory,
-      batch,
-      env,
-      sparkConf,
-      datasourceParams,
-      preparatorParams,
-      algorithmsParams,
-      servingParams
-    FROM $tableName WHERE id = $id""".map(resultToEngineInstance).
-      single().apply()
-  }
-
-  def getAll(): Seq[EngineInstance] = DB localTx { implicit session =>
-    sql"""
-    SELECT
-      id,
-      status,
-      startTime,
-      endTime,
-      engineId,
-      engineVersion,
-      engineVariant,
-      engineFactory,
-      batch,
-      env,
-      sparkConf,
-      datasourceParams,
-      preparatorParams,
-      algorithmsParams,
-      servingParams
-    FROM $tableName""".map(resultToEngineInstance).list().apply()
-  }
-
-  def getLatestCompleted(
-    engineId: String,
-    engineVersion: String,
-    engineVariant: String): Option[EngineInstance] =
-    getCompleted(engineId, engineVersion, engineVariant).headOption
-
-  def getCompleted(
-    engineId: String,
-    engineVersion: String,
-    engineVariant: String): Seq[EngineInstance] = DB localTx { implicit s =>
-    sql"""
-    SELECT
-      id,
-      status,
-      startTime,
-      endTime,
-      engineId,
-      engineVersion,
-      engineVariant,
-      engineFactory,
-      batch,
-      env,
-      sparkConf,
-      datasourceParams,
-      preparatorParams,
-      algorithmsParams,
-      servingParams
-    FROM $tableName
-    WHERE
-      status = 'COMPLETED' AND
-      engineId = $engineId AND
-      engineVersion = $engineVersion AND
-      engineVariant = $engineVariant
-    ORDER BY startTime DESC""".
-      map(resultToEngineInstance).list().apply()
-  }
-
-  def update(i: EngineInstance): Unit = DB localTx { implicit session =>
-    sql"""
-    update $tableName set
-      status = ${i.status},
-      startTime = ${i.startTime},
-      endTime = ${i.endTime},
-      engineId = ${i.engineId},
-      engineVersion = ${i.engineVersion},
-      engineVariant = ${i.engineVariant},
-      engineFactory = ${i.engineFactory},
-      batch = ${i.batch},
-      env = ${JDBCUtils.mapToString(i.env)},
-      sparkConf = ${JDBCUtils.mapToString(i.sparkConf)},
-      datasourceParams = ${i.dataSourceParams},
-      preparatorParams = ${i.preparatorParams},
-      algorithmsParams = ${i.algorithmsParams},
-      servingParams = ${i.servingParams}
-    where id = ${i.id}""".update().apply()
-  }
-
-  def delete(id: String): Unit = DB localTx { implicit session =>
-    sql"DELETE FROM $tableName WHERE id = $id".update().apply()
-  }
-
-  /** Convert JDBC results to [[EngineInstance]] */
-  def resultToEngineInstance(rs: WrappedResultSet): EngineInstance = {
-    EngineInstance(
-      id = rs.string("id"),
-      status = rs.string("status"),
-      startTime = rs.jodaDateTime("startTime"),
-      endTime = rs.jodaDateTime("endTime"),
-      engineId = rs.string("engineId"),
-      engineVersion = rs.string("engineVersion"),
-      engineVariant = rs.string("engineVariant"),
-      engineFactory = rs.string("engineFactory"),
-      batch = rs.string("batch"),
-      env = JDBCUtils.stringToMap(rs.string("env")),
-      sparkConf = JDBCUtils.stringToMap(rs.string("sparkConf")),
-      dataSourceParams = rs.string("datasourceParams"),
-      preparatorParams = rs.string("preparatorParams"),
-      algorithmsParams = rs.string("algorithmsParams"),
-      servingParams = rs.string("servingParams"))
-  }
-}

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/data/src/main/scala/io/prediction/data/storage/jdbc/JDBCEngineManifests.scala
----------------------------------------------------------------------
diff --git a/data/src/main/scala/io/prediction/data/storage/jdbc/JDBCEngineManifests.scala b/data/src/main/scala/io/prediction/data/storage/jdbc/JDBCEngineManifests.scala
deleted file mode 100644
index a9f467b..0000000
--- a/data/src/main/scala/io/prediction/data/storage/jdbc/JDBCEngineManifests.scala
+++ /dev/null
@@ -1,111 +0,0 @@
-/** Copyright 2015 TappingStone, Inc.
-  *
-  * Licensed under the Apache License, Version 2.0 (the "License");
-  * you may not use this file except in compliance with the License.
-  * You may obtain a copy of the License at
-  *
-  *     http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-package io.prediction.data.storage.jdbc
-
-import grizzled.slf4j.Logging
-import io.prediction.data.storage.EngineManifest
-import io.prediction.data.storage.EngineManifests
-import io.prediction.data.storage.StorageClientConfig
-import scalikejdbc._
-
-/** JDBC implementation of [[EngineManifests]] */
-class JDBCEngineManifests(client: String, config: StorageClientConfig, prefix: String)
-  extends EngineManifests with Logging {
-  /** Database table name for this data access object */
-  val tableName = JDBCUtils.prefixTableName(prefix, "enginemanifests")
-  DB autoCommit { implicit session =>
-    sql"""
-    create table if not exists $tableName (
-      id varchar(100) not null primary key,
-      version text not null,
-      engineName text not null,
-      description text,
-      files text not null,
-      engineFactory text not null)""".execute().apply()
-  }
-
-  def insert(m: EngineManifest): Unit = DB localTx { implicit session =>
-    sql"""
-    INSERT INTO $tableName VALUES(
-      ${m.id},
-      ${m.version},
-      ${m.name},
-      ${m.description},
-      ${m.files.mkString(",")},
-      ${m.engineFactory})""".update().apply()
-  }
-
-  def get(id: String, version: String): Option[EngineManifest] = DB localTx { implicit session =>
-    sql"""
-    SELECT
-      id,
-      version,
-      engineName,
-      description,
-      files,
-      engineFactory
-    FROM $tableName WHERE id = $id AND version = $version""".
-      map(resultToEngineManifest).single().apply()
-  }
-
-  def getAll(): Seq[EngineManifest] = DB localTx { implicit session =>
-    sql"""
-    SELECT
-      id,
-      version,
-      engineName,
-      description,
-      files,
-      engineFactory
-    FROM $tableName""".map(resultToEngineManifest).list().apply()
-  }
-
-  def update(m: EngineManifest, upsert: Boolean = false): Unit = {
-    var r = 0
-    DB localTx { implicit session =>
-      r = sql"""
-      update $tableName set
-        engineName = ${m.name},
-        description = ${m.description},
-        files = ${m.files.mkString(",")},
-        engineFactory = ${m.engineFactory}
-      where id = ${m.id} and version = ${m.version}""".update().apply()
-    }
-    if (r == 0) {
-      if (upsert) {
-        insert(m)
-      } else {
-        error("Cannot find a record to update, and upsert is not enabled.")
-      }
-    }
-  }
-
-  def delete(id: String, version: String): Unit = DB localTx { implicit session =>
-    sql"DELETE FROM $tableName WHERE id = $id AND version = $version".
-      update().apply()
-  }
-
-  /** Convert JDBC results to [[EngineManifest]] */
-  def resultToEngineManifest(rs: WrappedResultSet): EngineManifest = {
-    EngineManifest(
-      id = rs.string("id"),
-      version = rs.string("version"),
-      name = rs.string("engineName"),
-      description = rs.stringOpt("description"),
-      files = rs.string("files").split(","),
-      engineFactory = rs.string("engineFactory"))
-  }
-}

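update above implements a portable update-then-insert upsert: it issues the
UPDATE first and falls back to INSERT only when zero rows were affected and
upsert is enabled. The control flow, reduced to a sketch (runUpdate and
runInsert are hypothetical stand-ins for the two SQL statements):

    // Vendor-neutral upsert: no ON CONFLICT / MERGE syntax required.
    def upsert(runUpdate: () => Int, runInsert: () => Unit, upsertEnabled: Boolean): Unit = {
      val affected = runUpdate()         // affected-row count from UPDATE
      if (affected == 0) {
        if (upsertEnabled) runInsert()   // no matching row: create it
        else println("Cannot find a record to update, and upsert is not enabled.")
      }
    }

This is not atomic: two concurrent callers can both observe zero affected rows
and both insert, so it relies on the table's primary key to reject the
duplicate.
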
http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/data/src/main/scala/io/prediction/data/storage/jdbc/JDBCEvaluationInstances.scala
----------------------------------------------------------------------
diff --git a/data/src/main/scala/io/prediction/data/storage/jdbc/JDBCEvaluationInstances.scala b/data/src/main/scala/io/prediction/data/storage/jdbc/JDBCEvaluationInstances.scala
deleted file mode 100644
index 78c2c93..0000000
--- a/data/src/main/scala/io/prediction/data/storage/jdbc/JDBCEvaluationInstances.scala
+++ /dev/null
@@ -1,162 +0,0 @@
-/** Copyright 2015 TappingStone, Inc.
-  *
-  * Licensed under the Apache License, Version 2.0 (the "License");
-  * you may not use this file except in compliance with the License.
-  * You may obtain a copy of the License at
-  *
-  *     http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-package io.prediction.data.storage.jdbc
-
-import grizzled.slf4j.Logging
-import io.prediction.data.storage.EvaluationInstance
-import io.prediction.data.storage.EvaluationInstances
-import io.prediction.data.storage.StorageClientConfig
-import scalikejdbc._
-
-/** JDBC implementations of [[EvaluationInstances]] */
-class JDBCEvaluationInstances(client: String, config: StorageClientConfig, prefix: String)
-  extends EvaluationInstances with Logging {
-  /** Database table name for this data access object */
-  val tableName = JDBCUtils.prefixTableName(prefix, "evaluationinstances")
-  DB autoCommit { implicit session =>
-    sql"""
-    create table if not exists $tableName (
-      id varchar(100) not null primary key,
-      status text not null,
-      startTime timestamp DEFAULT CURRENT_TIMESTAMP,
-      endTime timestamp DEFAULT CURRENT_TIMESTAMP,
-      evaluationClass text not null,
-      engineParamsGeneratorClass text not null,
-      batch text not null,
-      env text not null,
-      sparkConf text not null,
-      evaluatorResults text not null,
-      evaluatorResultsHTML text not null,
-      evaluatorResultsJSON text)""".execute().apply()
-  }
-
-  def insert(i: EvaluationInstance): String = DB localTx { implicit session =>
-    val id = java.util.UUID.randomUUID().toString
-    sql"""
-    INSERT INTO $tableName VALUES(
-      $id,
-      ${i.status},
-      ${i.startTime},
-      ${i.endTime},
-      ${i.evaluationClass},
-      ${i.engineParamsGeneratorClass},
-      ${i.batch},
-      ${JDBCUtils.mapToString(i.env)},
-      ${JDBCUtils.mapToString(i.sparkConf)},
-      ${i.evaluatorResults},
-      ${i.evaluatorResultsHTML},
-      ${i.evaluatorResultsJSON})""".update().apply()
-    id
-  }
-
-  def get(id: String): Option[EvaluationInstance] = DB localTx { implicit session =>
-    sql"""
-    SELECT
-      id,
-      status,
-      startTime,
-      endTime,
-      evaluationClass,
-      engineParamsGeneratorClass,
-      batch,
-      env,
-      sparkConf,
-      evaluatorResults,
-      evaluatorResultsHTML,
-      evaluatorResultsJSON
-    FROM $tableName WHERE id = $id
-    """.map(resultToEvaluationInstance).single().apply()
-  }
-
-  def getAll(): Seq[EvaluationInstance] = DB localTx { implicit session =>
-    sql"""
-    SELECT
-      id,
-      status,
-      startTime,
-      endTime,
-      evaluationClass,
-      engineParamsGeneratorClass,
-      batch,
-      env,
-      sparkConf,
-      evaluatorResults,
-      evaluatorResultsHTML,
-      evaluatorResultsJSON
-    FROM $tableName
-    """.map(resultToEvaluationInstance).list().apply()
-  }
-
-  def getCompleted(): Seq[EvaluationInstance] = DB localTx { implicit s =>
-    sql"""
-    SELECT
-      id,
-      status,
-      startTime,
-      endTime,
-      evaluationClass,
-      engineParamsGeneratorClass,
-      batch,
-      env,
-      sparkConf,
-      evaluatorResults,
-      evaluatorResultsHTML,
-      evaluatorResultsJSON
-    FROM $tableName
-    WHERE
-      status = 'EVALCOMPLETED'
-    ORDER BY starttime DESC
-    """.map(resultToEvaluationInstance).list().apply()
-  }
-
-  def update(i: EvaluationInstance): Unit = DB localTx { implicit session =>
-    sql"""
-    update $tableName set
-      status = ${i.status},
-      startTime = ${i.startTime},
-      endTime = ${i.endTime},
-      evaluationClass = ${i.evaluationClass},
-      engineParamsGeneratorClass = ${i.engineParamsGeneratorClass},
-      batch = ${i.batch},
-      env = ${JDBCUtils.mapToString(i.env)},
-      sparkConf = ${JDBCUtils.mapToString(i.sparkConf)},
-      evaluatorResults = ${i.evaluatorResults},
-      evaluatorResultsHTML = ${i.evaluatorResultsHTML},
-      evaluatorResultsJSON = ${i.evaluatorResultsJSON}
-    where id = ${i.id}""".update().apply()
-  }
-
-  def delete(id: String): Unit = DB localTx { implicit session =>
-    sql"DELETE FROM $tableName WHERE id = $id".update().apply()
-  }
-
-  /** Convert JDBC results to [[EvaluationInstance]] */
-  def resultToEvaluationInstance(rs: WrappedResultSet): EvaluationInstance = {
-    EvaluationInstance(
-      id = rs.string("id"),
-      status = rs.string("status"),
-      startTime = rs.jodaDateTime("startTime"),
-      endTime = rs.jodaDateTime("endTime"),
-      evaluationClass = rs.string("evaluationClass"),
-      engineParamsGeneratorClass = rs.string("engineParamsGeneratorClass"),
-      batch = rs.string("batch"),
-      env = JDBCUtils.stringToMap(rs.string("env")),
-      sparkConf = JDBCUtils.stringToMap(rs.string("sparkConf")),
-      evaluatorResults = rs.string("evaluatorResults"),
-      evaluatorResultsHTML = rs.string("evaluatorResultsHTML"),
-      evaluatorResultsJSON = rs.string("evaluatorResultsJSON"))
-  }
-}

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/data/src/main/scala/io/prediction/data/storage/jdbc/JDBCLEvents.scala
----------------------------------------------------------------------
diff --git a/data/src/main/scala/io/prediction/data/storage/jdbc/JDBCLEvents.scala b/data/src/main/scala/io/prediction/data/storage/jdbc/JDBCLEvents.scala
deleted file mode 100644
index 48a624f..0000000
--- a/data/src/main/scala/io/prediction/data/storage/jdbc/JDBCLEvents.scala
+++ /dev/null
@@ -1,241 +0,0 @@
-/** Copyright 2015 TappingStone, Inc.
-  *
-  * Licensed under the Apache License, Version 2.0 (the "License");
-  * you may not use this file except in compliance with the License.
-  * You may obtain a copy of the License at
-  *
-  *     http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-package io.prediction.data.storage.jdbc
-
-import grizzled.slf4j.Logging
-import io.prediction.data.storage.DataMap
-import io.prediction.data.storage.Event
-import io.prediction.data.storage.LEvents
-import io.prediction.data.storage.StorageClientConfig
-import org.joda.time.DateTime
-import org.joda.time.DateTimeZone
-import org.json4s.JObject
-import org.json4s.native.Serialization.read
-import org.json4s.native.Serialization.write
-import scalikejdbc._
-
-import scala.concurrent.ExecutionContext
-import scala.concurrent.Future
-
-/** JDBC implementation of [[LEvents]] */
-class JDBCLEvents(
-    client: String,
-    config: StorageClientConfig,
-    namespace: String) extends LEvents with Logging {
-  implicit private val formats = org.json4s.DefaultFormats
-
-  def init(appId: Int, channelId: Option[Int] = None): Boolean = {
-
-    // For the indexes to be usable, the indexed columns must be VARCHAR of fewer than 255 characters
-    val useIndex = config.properties.contains("INDEX") &&
-      config.properties("INDEX").equalsIgnoreCase("enabled")
-
-    val tableName = JDBCUtils.eventTableName(namespace, appId, channelId)
-    val entityIdIndexName = s"idx_${tableName}_ei"
-    val entityTypeIndexName = s"idx_${tableName}_et"
-    DB autoCommit { implicit session =>
-      if (useIndex) {
-        SQL(s"""
-      create table if not exists $tableName (
-        id varchar(32) not null primary key,
-        event varchar(255) not null,
-        entityType varchar(255) not null,
-        entityId varchar(255) not null,
-        targetEntityType text,
-        targetEntityId text,
-        properties text,
-        eventTime timestamp DEFAULT CURRENT_TIMESTAMP,
-        eventTimeZone varchar(50) not null,
-        tags text,
-        prId text,
-        creationTime timestamp DEFAULT CURRENT_TIMESTAMP,
-        creationTimeZone varchar(50) not null)""").execute().apply()
-
-        // create index
-        SQL(s"create index $entityIdIndexName on $tableName (entityId)").execute().apply()
-        SQL(s"create index $entityTypeIndexName on $tableName (entityType)").execute().apply()
-      } else {
-        SQL(s"""
-      create table if not exists $tableName (
-        id varchar(32) not null primary key,
-        event text not null,
-        entityType text not null,
-        entityId text not null,
-        targetEntityType text,
-        targetEntityId text,
-        properties text,
-        eventTime timestamp DEFAULT CURRENT_TIMESTAMP,
-        eventTimeZone varchar(50) not null,
-        tags text,
-        prId text,
-        creationTime timestamp DEFAULT CURRENT_TIMESTAMP,
-        creationTimeZone varchar(50) not null)""").execute().apply()
-      }
-      true
-    }
-  }
-
-  def remove(appId: Int, channelId: Option[Int] = None): Boolean =
-    DB autoCommit { implicit session =>
-      SQL(s"""
-      drop table ${JDBCUtils.eventTableName(namespace, appId, channelId)}
-      """).execute().apply()
-      true
-    }
-
-  def close(): Unit = ConnectionPool.closeAll()
-
-  def futureInsert(event: Event, appId: Int, channelId: Option[Int])(
-    implicit ec: ExecutionContext): Future[String] = Future {
-    DB localTx { implicit session =>
-      val id = event.eventId.getOrElse(JDBCUtils.generateId)
-      val tableName = sqls.createUnsafely(JDBCUtils.eventTableName(namespace, appId, channelId))
-      sql"""
-      insert into $tableName values(
-        $id,
-        ${event.event},
-        ${event.entityType},
-        ${event.entityId},
-        ${event.targetEntityType},
-        ${event.targetEntityId},
-        ${write(event.properties.toJObject)},
-        ${event.eventTime},
-        ${event.eventTime.getZone.getID},
-        ${if (event.tags.nonEmpty) Some(event.tags.mkString(",")) else None},
-        ${event.prId},
-        ${event.creationTime},
-        ${event.creationTime.getZone.getID}
-      )
-      """.update().apply()
-      id
-    }
-  }
-
-  def futureGet(eventId: String, appId: Int, channelId: Option[Int])(
-    implicit ec: ExecutionContext): Future[Option[Event]] = Future {
-    DB readOnly { implicit session =>
-      val tableName = sqls.createUnsafely(JDBCUtils.eventTableName(namespace, appId, channelId))
-      sql"""
-      select
-        id,
-        event,
-        entityType,
-        entityId,
-        targetEntityType,
-        targetEntityId,
-        properties,
-        eventTime,
-        eventTimeZone,
-        tags,
-        prId,
-        creationTime,
-        creationTimeZone
-      from $tableName
-      where id = $eventId
-      """.map(resultToEvent).single().apply()
-    }
-  }
-
-  def futureDelete(eventId: String, appId: Int, channelId: Option[Int])(
-    implicit ec: ExecutionContext): Future[Boolean] = Future {
-    DB localTx { implicit session =>
-      val tableName = sqls.createUnsafely(JDBCUtils.eventTableName(namespace, appId, channelId))
-      sql"""
-      delete from $tableName where id = $eventId
-      """.update().apply()
-      true
-    }
-  }
-
-  def futureFind(
-      appId: Int,
-      channelId: Option[Int] = None,
-      startTime: Option[DateTime] = None,
-      untilTime: Option[DateTime] = None,
-      entityType: Option[String] = None,
-      entityId: Option[String] = None,
-      eventNames: Option[Seq[String]] = None,
-      targetEntityType: Option[Option[String]] = None,
-      targetEntityId: Option[Option[String]] = None,
-      limit: Option[Int] = None,
-      reversed: Option[Boolean] = None
-    )(implicit ec: ExecutionContext): Future[Iterator[Event]] = Future {
-    DB readOnly { implicit session =>
-      val tableName = sqls.createUnsafely(JDBCUtils.eventTableName(namespace, appId, channelId))
-      val whereClause = sqls.toAndConditionOpt(
-        startTime.map(x => sqls"eventTime >= $x"),
-        untilTime.map(x => sqls"eventTime < $x"),
-        entityType.map(x => sqls"entityType = $x"),
-        entityId.map(x => sqls"entityId = $x"),
-        eventNames.map(x =>
-          sqls.toOrConditionOpt(x.map(y =>
-            Some(sqls"event = $y")
-          ): _*)
-        ).getOrElse(None),
-        targetEntityType.map(x => x.map(y => sqls"targetEntityType = $y")
-            .getOrElse(sqls"targetEntityType IS NULL")),
-        targetEntityId.map(x => x.map(y => sqls"targetEntityId = $y")
-            .getOrElse(sqls"targetEntityId IS NULL"))
-      ).map(sqls.where(_)).getOrElse(sqls"")
-      val orderByClause = reversed.map(x =>
-        if (x) sqls"eventTime desc" else sqls"eventTime asc"
-      ).getOrElse(sqls"eventTime asc")
-      val limitClause = limit.map(x =>
-        if (x < 0) sqls"" else sqls.limit(x)
-      ).getOrElse(sqls"")
-      val q = sql"""
-      select
-        id,
-        event,
-        entityType,
-        entityId,
-        targetEntityType,
-        targetEntityId,
-        properties,
-        eventTime,
-        eventTimeZone,
-        tags,
-        prId,
-        creationTime,
-        creationTimeZone
-      from $tableName
-      $whereClause
-      order by $orderByClause
-      $limitClause
-      """
-      q.map(resultToEvent).list().apply().toIterator
-    }
-  }
-
-  private[prediction] def resultToEvent(rs: WrappedResultSet): Event = {
-    Event(
-      eventId = rs.stringOpt("id"),
-      event = rs.string("event"),
-      entityType = rs.string("entityType"),
-      entityId = rs.string("entityId"),
-      targetEntityType = rs.stringOpt("targetEntityType"),
-      targetEntityId = rs.stringOpt("targetEntityId"),
-      properties = rs.stringOpt("properties").map(p =>
-        DataMap(read[JObject](p))).getOrElse(DataMap()),
-      eventTime = new DateTime(rs.jodaDateTime("eventTime"),
-        DateTimeZone.forID(rs.string("eventTimeZone"))),
-      tags = rs.stringOpt("tags").map(t => t.split(",").toList).getOrElse(Nil),
-      prId = rs.stringOpt("prId"),
-      creationTime = new DateTime(rs.jodaDateTime("creationTime"),
-        DateTimeZone.forID(rs.string("creationTimeZone")))
-    )
-  }
-}

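futureFind above assembles its WHERE clause from optional filters with
scalikejdbc's sqls.toAndConditionOpt, which discards None entries, ANDs the
remainder, and yields None when nothing is left, so the WHERE clause vanishes
entirely for an unfiltered query. A trimmed sketch of the same composition
(assuming scalikejdbc on the classpath, as in the file above):

    import scalikejdbc._

    def whereFor(entityType: Option[String], entityId: Option[String]): SQLSyntax =
      sqls.toAndConditionOpt(
        entityType.map(x => sqls"entityType = $x"),  // absent filters drop out
        entityId.map(x => sqls"entityId = $x")
      ).map(sqls.where(_)).getOrElse(sqls"")

    // whereFor(Some("user"), Some("u1")) -> where entityType = ? and entityId = ?
    // whereFor(None, None)               -> (empty: no WHERE clause at all)
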
http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/data/src/main/scala/io/prediction/data/storage/jdbc/JDBCModels.scala
----------------------------------------------------------------------
diff --git a/data/src/main/scala/io/prediction/data/storage/jdbc/JDBCModels.scala b/data/src/main/scala/io/prediction/data/storage/jdbc/JDBCModels.scala
deleted file mode 100644
index 45c9e31..0000000
--- a/data/src/main/scala/io/prediction/data/storage/jdbc/JDBCModels.scala
+++ /dev/null
@@ -1,52 +0,0 @@
-/** Copyright 2015 TappingStone, Inc.
-  *
-  * Licensed under the Apache License, Version 2.0 (the "License");
-  * you may not use this file except in compliance with the License.
-  * You may obtain a copy of the License at
-  *
-  *     http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-package io.prediction.data.storage.jdbc
-
-import grizzled.slf4j.Logging
-import io.prediction.data.storage.Model
-import io.prediction.data.storage.Models
-import io.prediction.data.storage.StorageClientConfig
-import scalikejdbc._
-
-/** JDBC implementation of [[Models]] */
-class JDBCModels(client: String, config: StorageClientConfig, prefix: String)
-  extends Models with Logging {
-  /** Database table name for this data access object */
-  val tableName = JDBCUtils.prefixTableName(prefix, "models")
-
-  /** Determines binary column type based on JDBC driver type */
-  val binaryColumnType = JDBCUtils.binaryColumnType(client)
-  DB autoCommit { implicit session =>
-    sql"""
-    create table if not exists $tableName (
-      id varchar(100) not null primary key,
-      models $binaryColumnType not null)""".execute().apply()
-  }
-
-  def insert(i: Model): Unit = DB localTx { implicit session =>
-    sql"insert into $tableName values(${i.id}, ${i.models})".update().apply()
-  }
-
-  def get(id: String): Option[Model] = DB readOnly { implicit session =>
-    sql"select id, models from $tableName where id = $id".map { r =>
-      Model(id = r.string("id"), models = r.bytes("models"))
-    }.single().apply()
-  }
-
-  def delete(id: String): Unit = DB localTx { implicit session =>
-    sql"delete from $tableName where id = $id".execute().apply()
-  }
-}
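
A hypothetical round trip through this DAO (the JDBC URL and credentials
are placeholders, and the connection pool must be initialized first, as
the StorageClient further below does):

    import scalikejdbc._
    import io.prediction.data.storage.{Model, StorageClientConfig}

    ConnectionPool.singleton("jdbc:mysql://localhost/pio", "user", "secret")

    val models = new JDBCModels("jdbc:mysql://localhost/pio", StorageClientConfig(), "pio")
    models.insert(Model(id = "m1", models = Array[Byte](1, 2, 3)))
    assert(models.get("m1").exists(_.models.sameElements(Array[Byte](1, 2, 3))))
    models.delete("m1")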

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/data/src/main/scala/io/prediction/data/storage/jdbc/JDBCPEvents.scala
----------------------------------------------------------------------
diff --git a/data/src/main/scala/io/prediction/data/storage/jdbc/JDBCPEvents.scala b/data/src/main/scala/io/prediction/data/storage/jdbc/JDBCPEvents.scala
deleted file mode 100644
index b9b26c5..0000000
--- a/data/src/main/scala/io/prediction/data/storage/jdbc/JDBCPEvents.scala
+++ /dev/null
@@ -1,160 +0,0 @@
-/** Copyright 2015 TappingStone, Inc.
-  *
-  * Licensed under the Apache License, Version 2.0 (the "License");
-  * you may not use this file except in compliance with the License.
-  * You may obtain a copy of the License at
-  *
-  *     http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-package io.prediction.data.storage.jdbc
-
-import java.sql.{DriverManager, ResultSet}
-
-import com.github.nscala_time.time.Imports._
-import io.prediction.data.storage.{DataMap, Event, PEvents, StorageClientConfig}
-import org.apache.spark.SparkContext
-import org.apache.spark.rdd.{JdbcRDD, RDD}
-import org.apache.spark.sql.{SQLContext, SaveMode}
-import org.json4s.JObject
-import org.json4s.native.Serialization
-
-/** JDBC implementation of [[PEvents]] */
-class JDBCPEvents(client: String, config: StorageClientConfig, namespace: String) extends PEvents {
-  @transient private implicit lazy val formats = org.json4s.DefaultFormats
-  def find(
-    appId: Int,
-    channelId: Option[Int] = None,
-    startTime: Option[DateTime] = None,
-    untilTime: Option[DateTime] = None,
-    entityType: Option[String] = None,
-    entityId: Option[String] = None,
-    eventNames: Option[Seq[String]] = None,
-    targetEntityType: Option[Option[String]] = None,
-    targetEntityId: Option[Option[String]] = None)(sc: SparkContext): RDD[Event] = {
-    val lower = startTime.map(_.getMillis).getOrElse(0.toLong)
-    /** Change the default upper bound from +100 years to +1 year because
-      * MySQL's FROM_UNIXTIME(t) returns NULL for timestamps that far out.
-      */
-    val upper = untilTime.map(_.getMillis).getOrElse((DateTime.now + 1.years).getMillis)
-    val par = scala.math.min(
-      new Duration(upper - lower).getStandardDays,
-      config.properties.getOrElse("PARTITIONS", "4").toLong).toInt
-    val entityTypeClause = entityType.map(x => s"and entityType = '$x'").getOrElse("")
-    val entityIdClause = entityId.map(x => s"and entityId = '$x'").getOrElse("")
-    val eventNamesClause =
-      eventNames.map("and (" + _.map(y => s"event = '$y'").mkString(" or ") + ")").getOrElse("")
-    val targetEntityTypeClause = targetEntityType.map(
-      _.map(x => s"and targetEntityType = '$x'"
-    ).getOrElse("and targetEntityType is null")).getOrElse("")
-    val targetEntityIdClause = targetEntityId.map(
-      _.map(x => s"and targetEntityId = '$x'"
-    ).getOrElse("and targetEntityId is null")).getOrElse("")
-    val q = s"""
-      select
-        id,
-        event,
-        entityType,
-        entityId,
-        targetEntityType,
-        targetEntityId,
-        properties,
-        eventTime,
-        eventTimeZone,
-        tags,
-        prId,
-        creationTime,
-        creationTimeZone
-      from ${JDBCUtils.eventTableName(namespace, appId, channelId)}
-      where
-        eventTime >= ${JDBCUtils.timestampFunction(client)}(?) and
-        eventTime < ${JDBCUtils.timestampFunction(client)}(?)
-      $entityTypeClause
-      $entityIdClause
-      $eventNamesClause
-      $targetEntityTypeClause
-      $targetEntityIdClause
-      """.replace("\n", " ")
-    new JdbcRDD(
-      sc,
-      () => {
-        DriverManager.getConnection(
-          client,
-          config.properties("USERNAME"),
-          config.properties("PASSWORD"))
-      },
-      q,
-      lower / 1000,
-      upper / 1000,
-      par,
-      (r: ResultSet) => {
-        Event(
-          eventId = Option(r.getString("id")),
-          event = r.getString("event"),
-          entityType = r.getString("entityType"),
-          entityId = r.getString("entityId"),
-          targetEntityType = Option(r.getString("targetEntityType")),
-          targetEntityId = Option(r.getString("targetEntityId")),
-          properties = Option(r.getString("properties")).map(x =>
-            DataMap(Serialization.read[JObject](x))).getOrElse(DataMap()),
-          eventTime = new DateTime(r.getTimestamp("eventTime").getTime,
-            DateTimeZone.forID(r.getString("eventTimeZone"))),
-          tags = Option(r.getString("tags")).map(x =>
-            x.split(",").toList).getOrElse(Nil),
-          prId = Option(r.getString("prId")),
-          creationTime = new DateTime(r.getTimestamp("creationTime").getTime,
-            DateTimeZone.forID(r.getString("creationTimeZone"))))
-      }).cache()
-  }
-
-  def write(events: RDD[Event], appId: Int, channelId: Option[Int])(sc: SparkContext): Unit = {
-    val sqlContext = new SQLContext(sc)
-
-    import sqlContext.implicits._
-
-    val tableName = JDBCUtils.eventTableName(namespace, appId, channelId)
-
-    val eventTableColumns = Seq[String](
-        "id"
-      , "event"
-      , "entityType"
-      , "entityId"
-      , "targetEntityType"
-      , "targetEntityId"
-      , "properties"
-      , "eventTime"
-      , "eventTimeZone"
-      , "tags"
-      , "prId"
-      , "creationTime"
-      , "creationTimeZone")
-
-    val eventDF = events.map { event =>
-      (event.eventId.getOrElse(JDBCUtils.generateId)
-        , event.event
-        , event.entityType
-        , event.entityId
-        , event.targetEntityType.orNull
-        , event.targetEntityId.orNull
-        , if (!event.properties.isEmpty) Serialization.write(event.properties.toJObject) else null
-        , new java.sql.Timestamp(event.eventTime.getMillis)
-        , event.eventTime.getZone.getID
-        , if (event.tags.nonEmpty) Some(event.tags.mkString(",")) else None
-        , event.prId
-        , new java.sql.Timestamp(event.creationTime.getMillis)
-        , event.creationTime.getZone.getID)
-    }.toDF(eventTableColumns:_*)
-
-    // spark version 1.4.0 or higher
-    val prop = new java.util.Properties
-    prop.setProperty("user", config.properties("USERNAME"))
-    prop.setProperty("password", config.properties("PASSWORD"))
-    eventDF.write.mode(SaveMode.Append).jdbc(client, tableName, prop)
-  }
-}
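
The find method above splits the query into JdbcRDD partitions bounded by
epoch seconds: the partition count is the number of days in the queried
range, capped by the PARTITIONS property (default 4). A worked sketch of
that rule with illustrative values:

    import org.joda.time.{DateTime, Duration}

    val lower = new DateTime(2015, 1, 1, 0, 0).getMillis
    val upper = new DateTime(2015, 1, 11, 0, 0).getMillis // a 10-day range
    val configured = "4".toLong // config.properties.getOrElse("PARTITIONS", "4")
    val par = math.min(new Duration(upper - lower).getStandardDays, configured).toInt
    // par == 4: ten days in range, capped at the configured maximum;
    // a 2-day range would instead give min(2, 4) = 2 partitions.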

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/data/src/main/scala/io/prediction/data/storage/jdbc/JDBCUtils.scala
----------------------------------------------------------------------
diff --git a/data/src/main/scala/io/prediction/data/storage/jdbc/JDBCUtils.scala b/data/src/main/scala/io/prediction/data/storage/jdbc/JDBCUtils.scala
deleted file mode 100644
index 56a7462..0000000
--- a/data/src/main/scala/io/prediction/data/storage/jdbc/JDBCUtils.scala
+++ /dev/null
@@ -1,103 +0,0 @@
-/** Copyright 2015 TappingStone, Inc.
-  *
-  * Licensed under the Apache License, Version 2.0 (the "License");
-  * you may not use this file except in compliance with the License.
-  * You may obtain a copy of the License at
-  *
-  *     http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-package io.prediction.data.storage.jdbc
-
-import scalikejdbc._
-
-/** JDBC related utilities */
-object JDBCUtils {
-  /** Extract JDBC driver type from URL
-    *
-    * @param url JDBC URL
-    * @return The driver type, e.g. postgresql
-    */
-  def driverType(url: String): String = {
-    val capture = """jdbc:([^:]+):""".r
-    capture findFirstIn url match {
-      case Some(capture(driverType)) => driverType
-      case None => ""
-    }
-  }
-
-  /** Determines binary column type from JDBC URL
-    *
-    * @param url JDBC URL
-    * @return Binary column type as SQLSyntax, e.g. LONGBLOB
-    */
-  def binaryColumnType(url: String): SQLSyntax = {
-    driverType(url) match {
-      case "postgresql" => sqls"bytea"
-      case "mysql" => sqls"longblob"
-      case _ => sqls"longblob"
-    }
-  }
-
-  /** Determines UNIX timestamp conversion function from JDBC URL
-    *
-    * @param url JDBC URL
-    * @return Timestamp conversion function, e.g. TO_TIMESTAMP
-    */
-  def timestampFunction(url: String): String = {
-    driverType(url) match {
-      case "postgresql" => "to_timestamp"
-      case "mysql" => "from_unixtime"
-      case _ => "from_unixtime"
-    }
-  }
-
-  /** Converts Map of String to String to comma-separated list of key=value
-    *
-    * @param m Map of String to String
-    * @return Comma-separated list, e.g. FOO=BAR,X=Y,...
-    */
-  def mapToString(m: Map[String, String]): String = {
-    m.map(t => s"${t._1}=${t._2}").mkString(",")
-  }
-
-  /** Inverse of mapToString
-    *
-    * @param str Comma-separated list, e.g. FOO=BAR,X=Y,...
-    * @return Map of String to String, e.g. Map("FOO" -> "BAR", "X" -> "Y", ...)
-    */
-  def stringToMap(str: String): Map[String, String] = {
-    str.split(",").map { x =>
-      val y = x.split("=")
-      y(0) -> y(1)
-    }.toMap[String, String]
-  }
-
-  /** Generate a 32-character random ID by stripping hyphens from a UUID */
-  def generateId: String = java.util.UUID.randomUUID().toString.replace("-", "")
-
-  /** Prefix a table name
-    *
-    * @param prefix Table prefix
-    * @param table Table name
-    * @return Prefixed table name
-    */
-  def prefixTableName(prefix: String, table: String): SQLSyntax =
-    sqls.createUnsafely(s"${prefix}_$table")
-
-  /** Derive event table name
-    *
-    * @param namespace Namespace of event tables
-    * @param appId App ID
-    * @param channelId Optional channel ID
-    * @return Full event table name
-    */
-  def eventTableName(namespace: String, appId: Int, channelId: Option[Int]): String =
-    s"${namespace}_${appId}${channelId.map("_" + _).getOrElse("")}"
-}
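
Illustrative inputs and outputs for the helpers above:

    JDBCUtils.driverType("jdbc:postgresql://localhost/pio") // "postgresql"
    JDBCUtils.mapToString(Map("FOO" -> "BAR", "X" -> "Y"))  // "FOO=BAR,X=Y"
    JDBCUtils.stringToMap("FOO=BAR,X=Y")                    // Map("FOO" -> "BAR", "X" -> "Y")
    JDBCUtils.eventTableName("pio_event", 1, Some(2))       // "pio_event_1_2"
    JDBCUtils.eventTableName("pio_event", 1, None)          // "pio_event_1"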

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/data/src/main/scala/io/prediction/data/storage/jdbc/StorageClient.scala
----------------------------------------------------------------------
diff --git a/data/src/main/scala/io/prediction/data/storage/jdbc/StorageClient.scala b/data/src/main/scala/io/prediction/data/storage/jdbc/StorageClient.scala
deleted file mode 100644
index 585ca71..0000000
--- a/data/src/main/scala/io/prediction/data/storage/jdbc/StorageClient.scala
+++ /dev/null
@@ -1,50 +0,0 @@
-/** Copyright 2015 TappingStone, Inc.
-  *
-  * Licensed under the Apache License, Version 2.0 (the "License");
-  * you may not use this file except in compliance with the License.
-  * You may obtain a copy of the License at
-  *
-  *     http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-package io.prediction.data.storage.jdbc
-
-import grizzled.slf4j.Logging
-import io.prediction.data.storage.BaseStorageClient
-import io.prediction.data.storage.StorageClientConfig
-import io.prediction.data.storage.StorageClientException
-import scalikejdbc._
-
-/** JDBC implementation of [[BaseStorageClient]] */
-class StorageClient(val config: StorageClientConfig)
-  extends BaseStorageClient with Logging {
-  override val prefix = "JDBC"
-
-  if (!config.properties.contains("URL")) {
-    throw new StorageClientException("The URL variable is not set!", null)
-  }
-  if (!config.properties.contains("USERNAME")) {
-    throw new StorageClientException("The USERNAME variable is not set!", null)
-  }
-  if (!config.properties.contains("PASSWORD")) {
-    throw new StorageClientException("The PASSWORD variable is not set!", null)
-  }
-
-  // set max size of connection pool
-  val maxSize: Int = config.properties.getOrElse("CONNECTIONS", "8").toInt
-  val settings = ConnectionPoolSettings(maxSize = maxSize)
-
-  ConnectionPool.singleton(
-    config.properties("URL"),
-    config.properties("USERNAME"),
-    config.properties("PASSWORD"),
-    settings)
-  /** JDBC connection URL. Connections are managed by ScalikeJDBC. */
-  val client = config.properties("URL")
-}
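
For reference, the URL, USERNAME, PASSWORD, and CONNECTIONS properties
checked above are populated from the storage source's environment
variables; a typical PostgreSQL source configuration looks like this
(the source name and all values are illustrative):

    PIO_STORAGE_SOURCES_PGSQL_TYPE=jdbc
    PIO_STORAGE_SOURCES_PGSQL_URL=jdbc:postgresql://localhost/pio
    PIO_STORAGE_SOURCES_PGSQL_USERNAME=pio
    PIO_STORAGE_SOURCES_PGSQL_PASSWORD=pio
    PIO_STORAGE_SOURCES_PGSQL_CONNECTIONS=8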

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/data/src/main/scala/io/prediction/data/storage/jdbc/package.scala
----------------------------------------------------------------------
diff --git a/data/src/main/scala/io/prediction/data/storage/jdbc/package.scala b/data/src/main/scala/io/prediction/data/storage/jdbc/package.scala
deleted file mode 100644
index df96508..0000000
--- a/data/src/main/scala/io/prediction/data/storage/jdbc/package.scala
+++ /dev/null
@@ -1,23 +0,0 @@
-/** Copyright 2015 TappingStone, Inc.
-  *
-  * Licensed under the Apache License, Version 2.0 (the "License");
-  * you may not use this file except in compliance with the License.
-  * You may obtain a copy of the License at
-  *
-  *     http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-package io.prediction.data.storage
-
-/** JDBC implementation of storage traits, supporting meta data, event data, and
-  * model data
-  *
-  * @group Implementation
-  */
-package object jdbc {}

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/data/src/main/scala/io/prediction/data/storage/localfs/LocalFSModels.scala
----------------------------------------------------------------------
diff --git a/data/src/main/scala/io/prediction/data/storage/localfs/LocalFSModels.scala b/data/src/main/scala/io/prediction/data/storage/localfs/LocalFSModels.scala
deleted file mode 100644
index ea3703f..0000000
--- a/data/src/main/scala/io/prediction/data/storage/localfs/LocalFSModels.scala
+++ /dev/null
@@ -1,59 +0,0 @@
-/** Copyright 2015 TappingStone, Inc.
-  *
-  * Licensed under the Apache License, Version 2.0 (the "License");
-  * you may not use this file except in compliance with the License.
-  * You may obtain a copy of the License at
-  *
-  *     http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-package io.prediction.data.storage.localfs
-
-import java.io.File
-import java.io.FileNotFoundException
-import java.io.FileOutputStream
-
-import grizzled.slf4j.Logging
-import io.prediction.data.storage.Model
-import io.prediction.data.storage.Models
-import io.prediction.data.storage.StorageClientConfig
-
-import scala.io.Source
-
-class LocalFSModels(f: File, config: StorageClientConfig, prefix: String)
-  extends Models with Logging {
-
-  def insert(i: Model): Unit = {
-    try {
-      val fos = new FileOutputStream(new File(f, s"${prefix}${i.id}"))
-      fos.write(i.models)
-      fos.close
-    } catch {
-      case e: FileNotFoundException => error(e.getMessage)
-    }
-  }
-
-  def get(id: String): Option[Model] = {
-    try {
-      Some(Model(
-        id = id,
-        models = Source.fromFile(new File(f, s"${prefix}${id}"))(
-          scala.io.Codec.ISO8859).map(_.toByte).toArray))
-    } catch {
-      case e: Throwable =>
-        error(e.getMessage)
-        None
-    }
-  }
-
-  def delete(id: String): Unit = {
-    val m = new File(f, s"${prefix}${id}")
-    if (!m.delete) error(s"Unable to delete ${m.getCanonicalPath}!")
-  }
-}
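
Note that get recovers raw bytes by reading with the ISO-8859-1 codec,
which maps bytes to characters one-to-one, so arbitrary binary models
survive the round trip. A hypothetical example against a temporary
directory:

    import java.io.File
    import io.prediction.data.storage.{Model, StorageClientConfig}

    val dir: File = java.nio.file.Files.createTempDirectory("pio-models").toFile
    val models = new LocalFSModels(dir, StorageClientConfig(), "pio_")

    models.insert(Model(id = "m1", models = Array[Byte](104, 105))) // bytes of "hi"
    models.get("m1").foreach(m => println(new String(m.models, "ISO-8859-1"))) // hi
    models.delete("m1")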

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/data/src/main/scala/io/prediction/data/storage/localfs/StorageClient.scala
----------------------------------------------------------------------
diff --git a/data/src/main/scala/io/prediction/data/storage/localfs/StorageClient.scala b/data/src/main/scala/io/prediction/data/storage/localfs/StorageClient.scala
deleted file mode 100644
index 1a38022..0000000
--- a/data/src/main/scala/io/prediction/data/storage/localfs/StorageClient.scala
+++ /dev/null
@@ -1,43 +0,0 @@
-/** Copyright 2015 TappingStone, Inc.
-  *
-  * Licensed under the Apache License, Version 2.0 (the "License");
-  * you may not use this file except in compliance with the License.
-  * You may obtain a copy of the License at
-  *
-  *     http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-package io.prediction.data.storage.localfs
-
-import java.io.File
-
-import grizzled.slf4j.Logging
-import io.prediction.data.storage.BaseStorageClient
-import io.prediction.data.storage.StorageClientConfig
-import io.prediction.data.storage.StorageClientException
-
-class StorageClient(val config: StorageClientConfig) extends BaseStorageClient
-    with Logging {
-  override val prefix = "LocalFS"
-  val f = new File(
-    config.properties.getOrElse("PATH", config.properties("HOSTS")))
-  if (f.exists) {
-    if (!f.isDirectory) throw new StorageClientException(
-      s"${f} already exists but it is not a directory!",
-      null)
-    if (!f.canWrite) throw new StorageClientException(
-      s"${f} already exists but it is not writable!",
-      null)
-  } else {
-    if (!f.mkdirs) throw new StorageClientException(
-      s"${f} does not exist and automatic creation failed!",
-      null)
-  }
-  val client = f
-}

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/data/src/main/scala/io/prediction/data/storage/localfs/package.scala
----------------------------------------------------------------------
diff --git a/data/src/main/scala/io/prediction/data/storage/localfs/package.scala b/data/src/main/scala/io/prediction/data/storage/localfs/package.scala
deleted file mode 100644
index 299ead7..0000000
--- a/data/src/main/scala/io/prediction/data/storage/localfs/package.scala
+++ /dev/null
@@ -1,22 +0,0 @@
-/** Copyright 2015 TappingStone, Inc.
-  *
-  * Licensed under the Apache License, Version 2.0 (the "License");
-  * you may not use this file except in compliance with the License.
-  * You may obtain a copy of the License at
-  *
-  *     http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-package io.prediction.data.storage
-
-/** Local file system implementation of storage traits, supporting model data only
-  *
-  * @group Implementation
-  */
-package object localfs {}

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/data/src/main/scala/io/prediction/data/storage/package.scala
----------------------------------------------------------------------
diff --git a/data/src/main/scala/io/prediction/data/storage/package.scala b/data/src/main/scala/io/prediction/data/storage/package.scala
deleted file mode 100644
index 4fa4e6d..0000000
--- a/data/src/main/scala/io/prediction/data/storage/package.scala
+++ /dev/null
@@ -1,26 +0,0 @@
-/** Copyright 2015 TappingStone, Inc.
-  *
-  * Licensed under the Apache License, Version 2.0 (the "License");
-  * you may not use this file except in compliance with the License.
-  * You may obtain a copy of the License at
-  *
-  *     http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-package io.prediction.data
-
-/** If you are an engine developer, please refer to the [[store]] package.
-  *
-  * This package provides convenient access to underlying data access objects.
-  * The common entry point is [[Storage]].
-  *
-  * Developer APIs are available to advanced developers to add support of other
-  * data store backends.
-  */
-package object storage {}

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/data/src/main/scala/io/prediction/data/store/Common.scala
----------------------------------------------------------------------
diff --git a/data/src/main/scala/io/prediction/data/store/Common.scala b/data/src/main/scala/io/prediction/data/store/Common.scala
deleted file mode 100644
index 713bfee..0000000
--- a/data/src/main/scala/io/prediction/data/store/Common.scala
+++ /dev/null
@@ -1,50 +0,0 @@
-/** Copyright 2015 TappingStone, Inc.
-  *
-  * Licensed under the Apache License, Version 2.0 (the "License");
-  * you may not use this file except in compliance with the License.
-  * You may obtain a copy of the License at
-  *
-  *     http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-package io.prediction.data.store
-
-import io.prediction.data.storage.Storage
-import grizzled.slf4j.Logger
-
-private[prediction] object Common {
-
-  @transient lazy val logger = Logger[this.type]
-  @transient lazy private val appsDb = Storage.getMetaDataApps()
-  @transient lazy private val channelsDb = Storage.getMetaDataChannels()
-
-  /* throw exception if invalid app name or channel name */
-  def appNameToId(appName: String, channelName: Option[String]): (Int, Option[Int]) = {
-    val appOpt = appsDb.getByName(appName)
-
-    appOpt.map { app =>
-      val channelMap: Map[String, Int] = channelsDb.getByAppid(app.id)
-        .map(c => (c.name, c.id)).toMap
-
-      val channelId: Option[Int] = channelName.map { ch =>
-        if (channelMap.contains(ch)) {
-          channelMap(ch)
-        } else {
-          logger.error(s"Invalid channel name ${ch}.")
-          throw new IllegalArgumentException(s"Invalid channel name ${ch}.")
-        }
-      }
-
-      (app.id, channelId)
-    }.getOrElse {
-      logger.error(s"Invalid app name ${appName}")
-      throw new IllegalArgumentException(s"Invalid app name ${appName}")
-    }
-  }
-}
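
A hypothetical call, resolving names to the numeric IDs used throughout
the event store; both lookups throw IllegalArgumentException on unknown
names:

    // App and channel names are illustrative.
    val (appId, channelId) = Common.appNameToId("MyApp", Some("web"))
    // Passing None for the channel yields channelId = None (default channel).
    val (sameAppId, noChannel) = Common.appNameToId("MyApp", None)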


[15/34] incubator-predictionio git commit: rename all except examples

Posted by do...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/data/src/main/scala/io/prediction/data/storage/Storage.scala
----------------------------------------------------------------------
diff --git a/data/src/main/scala/io/prediction/data/storage/Storage.scala b/data/src/main/scala/io/prediction/data/storage/Storage.scala
deleted file mode 100644
index 3ad1400..0000000
--- a/data/src/main/scala/io/prediction/data/storage/Storage.scala
+++ /dev/null
@@ -1,403 +0,0 @@
-/** Copyright 2015 TappingStone, Inc.
-  *
-  * Licensed under the Apache License, Version 2.0 (the "License");
-  * you may not use this file except in compliance with the License.
-  * You may obtain a copy of the License at
-  *
-  *     http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-package io.prediction.data.storage
-
-import java.lang.reflect.InvocationTargetException
-
-import grizzled.slf4j.Logging
-import io.prediction.annotation.DeveloperApi
-
-import scala.concurrent.ExecutionContext.Implicits.global
-import scala.language.existentials
-import scala.reflect.runtime.universe._
-
-/** :: DeveloperApi ::
-  * Any storage backend drivers will need to implement this trait with exactly
-  * '''StorageClient''' as the class name. PredictionIO storage layer will look
-  * for this class when it instantiates the actual backend for use by higher
-  * level storage access APIs.
-  *
-  * @group Storage System
-  */
-@DeveloperApi
-trait BaseStorageClient {
-  /** Configuration of the '''StorageClient''' */
-  val config: StorageClientConfig
-
-  /** The actual client object. This could be a database connection or any kind
-    * of database access object.
-    */
-  val client: AnyRef
-
-  /** Set a prefix for storage class discovery. As an example, if this prefix
-    * is set as ''JDBC'', when the storage layer instantiates an implementation
-    * of [[Apps]], it will try to look for a class named ''JDBCApps''.
-    */
-  val prefix: String = ""
-}
-
-/** :: DeveloperApi ::
-  * A wrapper of storage client configuration that will be populated by
-  * PredictionIO automatically, and passed to the StorageClient during
-  * instantiation.
-  *
-  * @param parallel This is set to true by PredictionIO when the storage client
-  *                 is instantiated in a parallel data source.
-  * @param test This is set to true by PredictionIO when tests are being run.
-  * @param properties This is populated by PredictionIO automatically from
-  *                   environment configuration variables. If you have these
-  *                   variables,
-  *                   - PIO_STORAGE_SOURCES_PGSQL_TYPE=jdbc
-  *                   - PIO_STORAGE_SOURCES_PGSQL_USERNAME=abc
-  *                   - PIO_STORAGE_SOURCES_PGSQL_PASSWORD=xyz
-  *
-  *                   this field will be filled as a map of string to string:
-  *                   - TYPE -> jdbc
-  *                   - USERNAME -> abc
-  *                   - PASSWORD -> xyz
-  *
-  * @group Storage System
-  */
-@DeveloperApi
-case class StorageClientConfig(
-  parallel: Boolean = false, // parallelized access (RDD)?
-  test: Boolean = false, // test mode config
-  properties: Map[String, String] = Map())
-
-/** :: DeveloperApi ::
-  * Thrown when a StorageClient runs into an exceptional condition
-  *
-  * @param message Exception error message
-  * @param cause The underlying exception that caused the exception
-  * @group Storage System
-  */
-@DeveloperApi
-class StorageClientException(message: String, cause: Throwable)
-  extends RuntimeException(message, cause)
-
-@deprecated("Use StorageException", "0.9.2")
-private[prediction] case class StorageError(message: String)
-
-/** :: DeveloperApi ::
-  * Thrown by data access objects when they run into exceptional conditions
-  *
-  * @param message Exception error message
-  * @param cause The underlying exception that caused the exception
-  *
-  * @group Storage System
-  */
-@DeveloperApi
-class StorageException(message: String, cause: Throwable)
-  extends Exception(message, cause) {
-
-  def this(message: String) = this(message, null)
-}
-
-/** Backend-agnostic data storage layer with lazy initialization. Use this
-  * object when you need to interface with Event Store in your engine.
-  *
-  * @group Storage System
-  */
-object Storage extends Logging {
-  private case class ClientMeta(
-    sourceType: String,
-    client: BaseStorageClient,
-    config: StorageClientConfig)
-
-  private case class DataObjectMeta(sourceName: String, namespace: String)
-
-  private var errors = 0
-
-  private val sourcesPrefix = "PIO_STORAGE_SOURCES"
-
-  private val sourceTypesRegex = """PIO_STORAGE_SOURCES_([^_]+)_TYPE""".r
-
-  private val sourceKeys: Seq[String] = sys.env.keys.toSeq.flatMap { k =>
-    sourceTypesRegex findFirstIn k match {
-      case Some(sourceTypesRegex(sourceType)) => Seq(sourceType)
-      case None => Nil
-    }
-  }
-
-  if (sourceKeys.size == 0) warn("There is no properly configured data source.")
-
-  private val s2cm = scala.collection.mutable.Map[String, Option[ClientMeta]]()
-
-  /** Reference to the app data repository. */
-  private val EventDataRepository = "EVENTDATA"
-  private val ModelDataRepository = "MODELDATA"
-  private val MetaDataRepository = "METADATA"
-
-  private val repositoriesPrefix = "PIO_STORAGE_REPOSITORIES"
-
-  private val repositoryNamesRegex =
-    """PIO_STORAGE_REPOSITORIES_([^_]+)_NAME""".r
-
-  private val repositoryKeys: Seq[String] = sys.env.keys.toSeq.flatMap { k =>
-    repositoryNamesRegex findFirstIn k match {
-      case Some(repositoryNamesRegex(repositoryName)) => Seq(repositoryName)
-      case None => Nil
-    }
-  }
-
-  if (repositoryKeys.size == 0) {
-    warn("There is no properly configured repository.")
-  }
-
-  private val requiredRepositories = Seq(MetaDataRepository)
-
-  requiredRepositories foreach { r =>
-    if (!repositoryKeys.contains(r)) {
-      error(s"Required repository (${r}) configuration is missing.")
-      errors += 1
-    }
-  }
-  private val repositoriesToDataObjectMeta: Map[String, DataObjectMeta] =
-    repositoryKeys.map(r =>
-      try {
-        val keyedPath = repositoriesPrefixPath(r)
-        val name = sys.env(prefixPath(keyedPath, "NAME"))
-        val sourceName = sys.env(prefixPath(keyedPath, "SOURCE"))
-        if (sourceKeys.contains(sourceName)) {
-          r -> DataObjectMeta(
-            sourceName = sourceName,
-            namespace = name)
-        } else {
-          error(s"$sourceName is not a configured storage source.")
-          r -> DataObjectMeta("", "")
-        }
-      } catch {
-        case e: Throwable =>
-          error(e.getMessage)
-          errors += 1
-          r -> DataObjectMeta("", "")
-      }
-    ).toMap
-
-  if (errors > 0) {
-    error(s"There were $errors configuration errors. Exiting.")
-    sys.exit(errors)
-  }
-
-  // End of constructor and field definitions and begin method definitions
-
-  private def prefixPath(prefix: String, body: String) = s"${prefix}_$body"
-
-  private def sourcesPrefixPath(body: String) = prefixPath(sourcesPrefix, body)
-
-  private def repositoriesPrefixPath(body: String) =
-    prefixPath(repositoriesPrefix, body)
-
-  private def sourcesToClientMeta(
-      source: String,
-      parallel: Boolean,
-      test: Boolean): Option[ClientMeta] = {
-    val sourceName = if (parallel) s"parallel-$source" else source
-    s2cm.getOrElseUpdate(sourceName, updateS2CM(source, parallel, test))
-  }
-
-  private def getClient(
-    clientConfig: StorageClientConfig,
-    pkg: String): BaseStorageClient = {
-    val className = "io.prediction.data.storage." + pkg + ".StorageClient"
-    try {
-      Class.forName(className).getConstructors()(0).newInstance(clientConfig).
-        asInstanceOf[BaseStorageClient]
-    } catch {
-      case e: ClassNotFoundException =>
-        val originalClassName = pkg + ".StorageClient"
-        Class.forName(originalClassName).getConstructors()(0).
-          newInstance(clientConfig).asInstanceOf[BaseStorageClient]
-      case e: java.lang.reflect.InvocationTargetException =>
-        throw e.getCause
-    }
-  }
-
-  /** Get the StorageClient config data from PIO Framework's environment variables */
-  def getConfig(sourceName: String): Option[StorageClientConfig] = {
-    if (s2cm.contains(sourceName) && s2cm.get(sourceName).nonEmpty
-      && s2cm.get(sourceName).get.nonEmpty) {
-      Some(s2cm.get(sourceName).get.get.config)
-    } else None
-  }
-
-  private def updateS2CM(k: String, parallel: Boolean, test: Boolean):
-  Option[ClientMeta] = {
-    try {
-      val keyedPath = sourcesPrefixPath(k)
-      val sourceType = sys.env(prefixPath(keyedPath, "TYPE"))
-      val props = sys.env.filter(t => t._1.startsWith(keyedPath)).map(
-        t => t._1.replace(s"${keyedPath}_", "") -> t._2)
-      val clientConfig = StorageClientConfig(
-        properties = props,
-        parallel = parallel,
-        test = test)
-      val client = getClient(clientConfig, sourceType)
-      Some(ClientMeta(sourceType, client, clientConfig))
-    } catch {
-      case e: Throwable =>
-        error(s"Error initializing storage client for source ${k}", e)
-        errors += 1
-        None
-    }
-  }
-
-  private[prediction]
-  def getDataObjectFromRepo[T](repo: String, test: Boolean = false)
-    (implicit tag: TypeTag[T]): T = {
-    val repoDOMeta = repositoriesToDataObjectMeta(repo)
-    val repoDOSourceName = repoDOMeta.sourceName
-    getDataObject[T](repoDOSourceName, repoDOMeta.namespace, test = test)
-  }
-
-  private[prediction]
-  def getPDataObject[T](repo: String)(implicit tag: TypeTag[T]): T = {
-    val repoDOMeta = repositoriesToDataObjectMeta(repo)
-    val repoDOSourceName = repoDOMeta.sourceName
-    getPDataObject[T](repoDOSourceName, repoDOMeta.namespace)
-  }
-
-  private[prediction] def getDataObject[T](
-      sourceName: String,
-      namespace: String,
-      parallel: Boolean = false,
-      test: Boolean = false)(implicit tag: TypeTag[T]): T = {
-    val clientMeta = sourcesToClientMeta(sourceName, parallel, test) getOrElse {
-      throw new StorageClientException(
-        s"Data source $sourceName was not properly initialized.", null)
-    }
-    val sourceType = clientMeta.sourceType
-    val ctorArgs = dataObjectCtorArgs(clientMeta.client, namespace)
-    val classPrefix = clientMeta.client.prefix
-    val originalClassName = tag.tpe.toString.split('.')
-    val rawClassName = sourceType + "." + classPrefix + originalClassName.last
-    val className = "io.prediction.data.storage." + rawClassName
-    val clazz = try {
-      Class.forName(className)
-    } catch {
-      case e: ClassNotFoundException =>
-        try {
-          Class.forName(rawClassName)
-        } catch {
-          case e: ClassNotFoundException =>
-            throw new StorageClientException("No storage backend " +
-              "implementation can be found (tried both " +
-              s"$className and $rawClassName)", e)
-        }
-    }
-    val constructor = clazz.getConstructors()(0)
-    try {
-      constructor.newInstance(ctorArgs: _*).
-        asInstanceOf[T]
-    } catch {
-      case e: IllegalArgumentException =>
-        error(
-          "Unable to instantiate data object with class '" +
-          constructor.getDeclaringClass.getName + "' because its constructor" +
-          " does not have the right number of arguments." +
-          " Number of required constructor arguments: " +
-          ctorArgs.size + "." +
-          " Number of existing constructor arguments: " +
-          constructor.getParameterTypes.size + "." +
-          s" Storage source name: ${sourceName}." +
-          s" Exception message: ${e.getMessage}).", e)
-        errors += 1
-        throw e
-      case e: java.lang.reflect.InvocationTargetException =>
-        throw e.getCause
-    }
-  }
-
-  private def getPDataObject[T](
-      sourceName: String,
-      databaseName: String)(implicit tag: TypeTag[T]): T =
-    getDataObject[T](sourceName, databaseName, true)
-
-  private def dataObjectCtorArgs(
-      client: BaseStorageClient,
-      namespace: String): Seq[AnyRef] = {
-    Seq(client.client, client.config, namespace)
-  }
-
-  private[prediction] def verifyAllDataObjects(): Unit = {
-    info("Verifying Meta Data Backend (Source: " +
-      s"${repositoriesToDataObjectMeta(MetaDataRepository).sourceName})...")
-    getMetaDataEngineManifests()
-    getMetaDataEngineInstances()
-    getMetaDataEvaluationInstances()
-    getMetaDataApps()
-    getMetaDataAccessKeys()
-    info("Verifying Model Data Backend (Source: " +
-      s"${repositoriesToDataObjectMeta(ModelDataRepository).sourceName})...")
-    getModelDataModels()
-    info("Verifying Event Data Backend (Source: " +
-      s"${repositoriesToDataObjectMeta(EventDataRepository).sourceName})...")
-    val eventsDb = getLEvents(test = true)
-    info("Test writing to Event Store (App Id 0)...")
-    // use appId=0 for testing purpose
-    eventsDb.init(0)
-    eventsDb.insert(Event(
-      event = "test",
-      entityType = "test",
-      entityId = "test"), 0)
-    eventsDb.remove(0)
-    eventsDb.close()
-  }
-
-  private[prediction] def getMetaDataEngineManifests(): EngineManifests =
-    getDataObjectFromRepo[EngineManifests](MetaDataRepository)
-
-  private[prediction] def getMetaDataEngineInstances(): EngineInstances =
-    getDataObjectFromRepo[EngineInstances](MetaDataRepository)
-
-  private[prediction] def getMetaDataEvaluationInstances(): EvaluationInstances =
-    getDataObjectFromRepo[EvaluationInstances](MetaDataRepository)
-
-  private[prediction] def getMetaDataApps(): Apps =
-    getDataObjectFromRepo[Apps](MetaDataRepository)
-
-  private[prediction] def getMetaDataAccessKeys(): AccessKeys =
-    getDataObjectFromRepo[AccessKeys](MetaDataRepository)
-
-  private[prediction] def getMetaDataChannels(): Channels =
-    getDataObjectFromRepo[Channels](MetaDataRepository)
-
-  private[prediction] def getModelDataModels(): Models =
-    getDataObjectFromRepo[Models](ModelDataRepository)
-
-  /** Obtains a data access object that returns [[Event]] related local data
-    * structure.
-    */
-  def getLEvents(test: Boolean = false): LEvents =
-    getDataObjectFromRepo[LEvents](EventDataRepository, test = test)
-
-  /** Obtains a data access object that returns [[Event]] related RDD data
-    * structure.
-    */
-  def getPEvents(): PEvents =
-    getPDataObject[PEvents](EventDataRepository)
-
-  def config: Map[String, Map[String, Map[String, String]]] = Map(
-    "sources" -> s2cm.toMap.map { case (source, clientMeta) =>
-      source -> clientMeta.map { cm =>
-        Map(
-          "type" -> cm.sourceType,
-          "config" -> cm.config.properties.map(t => s"${t._1} -> ${t._2}").mkString(", ")
-        )
-      }.getOrElse(Map.empty)
-    }
-  )
-}
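
A sketch of typical engine-side usage of the public entry points above,
assuming a properly configured PIO_STORAGE_* environment as described in
the doc comments earlier in this file:

    import io.prediction.data.storage.Storage

    val lEvents = Storage.getLEvents() // local (single-machine) data access
    // ... query or insert events through lEvents ...
    lEvents.close()

    // Inside a parallel data source, use the RDD-based variant instead:
    // val pEvents = Storage.getPEvents()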

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/data/src/main/scala/io/prediction/data/storage/Utils.scala
----------------------------------------------------------------------
diff --git a/data/src/main/scala/io/prediction/data/storage/Utils.scala b/data/src/main/scala/io/prediction/data/storage/Utils.scala
deleted file mode 100644
index bafc5e6..0000000
--- a/data/src/main/scala/io/prediction/data/storage/Utils.scala
+++ /dev/null
@@ -1,47 +0,0 @@
-/** Copyright 2015 TappingStone, Inc.
-  *
-  * Licensed under the Apache License, Version 2.0 (the "License");
-  * you may not use this file except in compliance with the License.
-  * You may obtain a copy of the License at
-  *
-  *     http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-package io.prediction.data.storage
-
-import org.joda.time.DateTime
-import org.joda.time.format.ISODateTimeFormat
-
-/** Backend-agnostic storage utilities. */
-private[prediction] object Utils {
-  /**
-   * Add prefix to custom attribute keys.
-   */
-  def addPrefixToAttributeKeys[T](
-      attributes: Map[String, T],
-      prefix: String = "ca_"): Map[String, T] = {
-    attributes map { case (k, v) => (prefix + k, v) }
-  }
-
-  /** Remove prefix from custom attribute keys. */
-  def removePrefixFromAttributeKeys[T](
-      attributes: Map[String, T],
-      prefix: String = "ca_"): Map[String, T] = {
-    attributes map { case (k, v) => (k.stripPrefix(prefix), v) }
-  }
-
-  /**
-   * Appends App ID to any ID.
-   * Used for distinguishing different app's data within a single collection.
-   */
-  def idWithAppid(appid: Int, id: String): String = appid + "_" + id
-
-  def stringToDateTime(dt: String): DateTime =
-    ISODateTimeFormat.dateTimeParser.parseDateTime(dt)
-}
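
Illustrative inputs and outputs for the helpers above:

    Utils.addPrefixToAttributeKeys(Map("color" -> "red"))         // Map("ca_color" -> "red")
    Utils.removePrefixFromAttributeKeys(Map("ca_color" -> "red")) // Map("color" -> "red")
    Utils.idWithAppid(7, "item42")                                // "7_item42"
    Utils.stringToDateTime("2015-01-01T00:00:00.000Z")            // a Joda DateTime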

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/data/src/main/scala/io/prediction/data/storage/elasticsearch/ESAccessKeys.scala
----------------------------------------------------------------------
diff --git a/data/src/main/scala/io/prediction/data/storage/elasticsearch/ESAccessKeys.scala b/data/src/main/scala/io/prediction/data/storage/elasticsearch/ESAccessKeys.scala
deleted file mode 100644
index 7da7605..0000000
--- a/data/src/main/scala/io/prediction/data/storage/elasticsearch/ESAccessKeys.scala
+++ /dev/null
@@ -1,116 +0,0 @@
-/** Copyright 2015 TappingStone, Inc.
-  *
-  * Licensed under the Apache License, Version 2.0 (the "License");
-  * you may not use this file except in compliance with the License.
-  * You may obtain a copy of the License at
-  *
-  *     http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-package io.prediction.data.storage.elasticsearch
-
-import grizzled.slf4j.Logging
-import io.prediction.data.storage.StorageClientConfig
-import io.prediction.data.storage.AccessKey
-import io.prediction.data.storage.AccessKeys
-import org.elasticsearch.ElasticsearchException
-import org.elasticsearch.client.Client
-import org.elasticsearch.index.query.FilterBuilders._
-import org.json4s.JsonDSL._
-import org.json4s._
-import org.json4s.native.JsonMethods._
-import org.json4s.native.Serialization.read
-import org.json4s.native.Serialization.write
-
-import scala.util.Random
-
-/** Elasticsearch implementation of AccessKeys. */
-class ESAccessKeys(client: Client, config: StorageClientConfig, index: String)
-    extends AccessKeys with Logging {
-  implicit val formats = DefaultFormats.lossless
-  private val estype = "accesskeys"
-
-  val indices = client.admin.indices
-  val indexExistResponse = indices.prepareExists(index).get
-  if (!indexExistResponse.isExists) {
-    indices.prepareCreate(index).get
-  }
-  val typeExistResponse = indices.prepareTypesExists(index).setTypes(estype).get
-  if (!typeExistResponse.isExists) {
-    val json =
-      (estype ->
-        ("properties" ->
-          ("key" -> ("type" -> "string") ~ ("index" -> "not_analyzed")) ~
-          ("events" -> ("type" -> "string") ~ ("index" -> "not_analyzed"))))
-    indices.preparePutMapping(index).setType(estype).
-      setSource(compact(render(json))).get
-  }
-
-  def insert(accessKey: AccessKey): Option[String] = {
-    val key = if (accessKey.key.isEmpty) generateKey else accessKey.key
-    update(accessKey.copy(key = key))
-    Some(key)
-  }
-
-  def get(key: String): Option[AccessKey] = {
-    try {
-      val response = client.prepareGet(
-        index,
-        estype,
-        key).get()
-      Some(read[AccessKey](response.getSourceAsString))
-    } catch {
-      case e: ElasticsearchException =>
-        error(e.getMessage)
-        None
-      case e: NullPointerException => None
-    }
-  }
-
-  def getAll(): Seq[AccessKey] = {
-    try {
-      val builder = client.prepareSearch(index).setTypes(estype)
-      ESUtils.getAll[AccessKey](client, builder)
-    } catch {
-      case e: ElasticsearchException =>
-        error(e.getMessage)
-        Seq[AccessKey]()
-    }
-  }
-
-  def getByAppid(appid: Int): Seq[AccessKey] = {
-    try {
-      val builder = client.prepareSearch(index).setTypes(estype).
-        setPostFilter(termFilter("appid", appid))
-      ESUtils.getAll[AccessKey](client, builder)
-    } catch {
-      case e: ElasticsearchException =>
-        error(e.getMessage)
-        Seq[AccessKey]()
-    }
-  }
-
-  def update(accessKey: AccessKey): Unit = {
-    try {
-      client.prepareIndex(index, estype, accessKey.key).setSource(write(accessKey)).get()
-    } catch {
-      case e: ElasticsearchException =>
-        error(e.getMessage)
-    }
-  }
-
-  def delete(key: String): Unit = {
-    try {
-      client.prepareDelete(index, estype, key).get
-    } catch {
-      case e: ElasticsearchException =>
-        error(e.getMessage)
-    }
-  }
-}
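
For clarity, the JsonDSL mapping built in the constructor above renders
(compacted) to the following Elasticsearch type mapping:

    {"accesskeys":{"properties":{"key":{"type":"string","index":"not_analyzed"},"events":{"type":"string","index":"not_analyzed"}}}}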

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/data/src/main/scala/io/prediction/data/storage/elasticsearch/ESApps.scala
----------------------------------------------------------------------
diff --git a/data/src/main/scala/io/prediction/data/storage/elasticsearch/ESApps.scala b/data/src/main/scala/io/prediction/data/storage/elasticsearch/ESApps.scala
deleted file mode 100644
index 9ea821e..0000000
--- a/data/src/main/scala/io/prediction/data/storage/elasticsearch/ESApps.scala
+++ /dev/null
@@ -1,127 +0,0 @@
-/** Copyright 2015 TappingStone, Inc.
-  *
-  * Licensed under the Apache License, Version 2.0 (the "License");
-  * you may not use this file except in compliance with the License.
-  * You may obtain a copy of the License at
-  *
-  *     http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-package io.prediction.data.storage.elasticsearch
-
-import grizzled.slf4j.Logging
-import io.prediction.data.storage.StorageClientConfig
-import io.prediction.data.storage.App
-import io.prediction.data.storage.Apps
-import org.elasticsearch.ElasticsearchException
-import org.elasticsearch.client.Client
-import org.elasticsearch.index.query.FilterBuilders._
-import org.json4s.JsonDSL._
-import org.json4s._
-import org.json4s.native.JsonMethods._
-import org.json4s.native.Serialization.read
-import org.json4s.native.Serialization.write
-
-/** Elasticsearch implementation of Apps. */
-class ESApps(client: Client, config: StorageClientConfig, index: String)
-  extends Apps with Logging {
-  implicit val formats = DefaultFormats.lossless
-  private val estype = "apps"
-  private val seq = new ESSequences(client, config, index)
-
-  val indices = client.admin.indices
-  val indexExistResponse = indices.prepareExists(index).get
-  if (!indexExistResponse.isExists) {
-    indices.prepareCreate(index).get
-  }
-  val typeExistResponse = indices.prepareTypesExists(index).setTypes(estype).get
-  if (!typeExistResponse.isExists) {
-    val json =
-      (estype ->
-        ("properties" ->
-          ("name" -> ("type" -> "string") ~ ("index" -> "not_analyzed"))))
-    indices.preparePutMapping(index).setType(estype).
-      setSource(compact(render(json))).get
-  }
-
-  def insert(app: App): Option[Int] = {
-    val id =
-      if (app.id == 0) {
-        var roll = seq.genNext("apps")
-        while (!get(roll).isEmpty) roll = seq.genNext("apps")
-        roll
-      }
-      else app.id
-    val realapp = app.copy(id = id)
-    update(realapp)
-    Some(id)
-  }
-
-  def get(id: Int): Option[App] = {
-    try {
-      val response = client.prepareGet(
-        index,
-        estype,
-        id.toString).get()
-      Some(read[App](response.getSourceAsString))
-    } catch {
-      case e: ElasticsearchException =>
-        error(e.getMessage)
-        None
-      case e: NullPointerException => None
-    }
-  }
-
-  def getByName(name: String): Option[App] = {
-    try {
-      val response = client.prepareSearch(index).setTypes(estype).
-        setPostFilter(termFilter("name", name)).get
-      val hits = response.getHits().hits()
-      if (hits.size > 0) {
-        Some(read[App](hits.head.getSourceAsString))
-      } else {
-        None
-      }
-    } catch {
-      case e: ElasticsearchException =>
-        error(e.getMessage)
-        None
-    }
-  }
-
-  def getAll(): Seq[App] = {
-    try {
-      val builder = client.prepareSearch(index).setTypes(estype)
-      ESUtils.getAll[App](client, builder)
-    } catch {
-      case e: ElasticsearchException =>
-        error(e.getMessage)
-        Seq[App]()
-    }
-  }
-
-  def update(app: App): Unit = {
-    try {
-      val response = client.prepareIndex(index, estype, app.id.toString).
-        setSource(write(app)).get()
-    } catch {
-      case e: ElasticsearchException =>
-        error(e.getMessage)
-    }
-  }
-
-  def delete(id: Int): Unit = {
-    try {
-      client.prepareDelete(index, estype, id.toString).get
-    } catch {
-      case e: ElasticsearchException =>
-        error(e.getMessage)
-    }
-  }
-}

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/data/src/main/scala/io/prediction/data/storage/elasticsearch/ESChannels.scala
----------------------------------------------------------------------
diff --git a/data/src/main/scala/io/prediction/data/storage/elasticsearch/ESChannels.scala b/data/src/main/scala/io/prediction/data/storage/elasticsearch/ESChannels.scala
deleted file mode 100644
index ee5e9e7..0000000
--- a/data/src/main/scala/io/prediction/data/storage/elasticsearch/ESChannels.scala
+++ /dev/null
@@ -1,114 +0,0 @@
-/** Copyright 2015 TappingStone, Inc.
-  *
-  * Licensed under the Apache License, Version 2.0 (the "License");
-  * you may not use this file except in compliance with the License.
-  * You may obtain a copy of the License at
-  *
-  *     http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-package io.prediction.data.storage.elasticsearch
-
-import grizzled.slf4j.Logging
-import io.prediction.data.storage.Channel
-import io.prediction.data.storage.Channels
-import io.prediction.data.storage.StorageClientConfig
-import org.elasticsearch.ElasticsearchException
-import org.elasticsearch.client.Client
-import org.elasticsearch.index.query.FilterBuilders.termFilter
-import org.json4s.DefaultFormats
-import org.json4s.JsonDSL._
-import org.json4s.native.JsonMethods._
-import org.json4s.native.Serialization.read
-import org.json4s.native.Serialization.write
-
-class ESChannels(client: Client, config: StorageClientConfig, index: String)
-    extends Channels with Logging {
-
-  implicit val formats = DefaultFormats.lossless
-  private val estype = "channels"
-  private val seq = new ESSequences(client, config, index)
-  private val seqName = "channels"
-
-  val indices = client.admin.indices
-  val indexExistResponse = indices.prepareExists(index).get
-  if (!indexExistResponse.isExists) {
-    indices.prepareCreate(index).get
-  }
-  val typeExistResponse = indices.prepareTypesExists(index).setTypes(estype).get
-  if (!typeExistResponse.isExists) {
-    val json =
-      (estype ->
-        ("properties" ->
-          ("name" -> ("type" -> "string") ~ ("index" -> "not_analyzed"))))
-    indices.preparePutMapping(index).setType(estype).
-      setSource(compact(render(json))).get
-  }
-
-  def insert(channel: Channel): Option[Int] = {
-    val id =
-      if (channel.id == 0) {
-        var roll = seq.genNext(seqName)
-        while (!get(roll).isEmpty) roll = seq.genNext(seqName)
-        roll
-      } else channel.id
-
-    val realChannel = channel.copy(id = id)
-    if (update(realChannel)) Some(id) else None
-  }
-
-  def get(id: Int): Option[Channel] = {
-    try {
-      val response = client.prepareGet(
-        index,
-        estype,
-        id.toString).get()
-      Some(read[Channel](response.getSourceAsString))
-    } catch {
-      case e: ElasticsearchException =>
-        error(e.getMessage)
-        None
-      case e: NullPointerException => None
-    }
-  }
-
-  def getByAppid(appid: Int): Seq[Channel] = {
-    try {
-      val builder = client.prepareSearch(index).setTypes(estype).
-        setPostFilter(termFilter("appid", appid))
-      ESUtils.getAll[Channel](client, builder)
-    } catch {
-      case e: ElasticsearchException =>
-        error(e.getMessage)
-        Seq[Channel]()
-    }
-  }
-
-  def update(channel: Channel): Boolean = {
-    try {
-      val response = client.prepareIndex(index, estype, channel.id.toString).
-        setSource(write(channel)).get()
-      true
-    } catch {
-      case e: ElasticsearchException =>
-        error(e.getMessage)
-        false
-    }
-  }
-
-  def delete(id: Int): Unit = {
-    try {
-      client.prepareDelete(index, estype, id.toString).get
-    } catch {
-      case e: ElasticsearchException =>
-        error(e.getMessage)
-    }
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/data/src/main/scala/io/prediction/data/storage/elasticsearch/ESEngineInstances.scala
----------------------------------------------------------------------
diff --git a/data/src/main/scala/io/prediction/data/storage/elasticsearch/ESEngineInstances.scala b/data/src/main/scala/io/prediction/data/storage/elasticsearch/ESEngineInstances.scala
deleted file mode 100644
index d9b0c39..0000000
--- a/data/src/main/scala/io/prediction/data/storage/elasticsearch/ESEngineInstances.scala
+++ /dev/null
@@ -1,155 +0,0 @@
-/** Copyright 2015 TappingStone, Inc.
-  *
-  * Licensed under the Apache License, Version 2.0 (the "License");
-  * you may not use this file except in compliance with the License.
-  * You may obtain a copy of the License at
-  *
-  *     http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-package io.prediction.data.storage.elasticsearch
-
-import grizzled.slf4j.Logging
-import io.prediction.data.storage.EngineInstance
-import io.prediction.data.storage.EngineInstanceSerializer
-import io.prediction.data.storage.EngineInstances
-import io.prediction.data.storage.StorageClientConfig
-import org.elasticsearch.ElasticsearchException
-import org.elasticsearch.client.Client
-import org.elasticsearch.index.query.FilterBuilders._
-import org.elasticsearch.search.sort.SortOrder
-import org.json4s.JsonDSL._
-import org.json4s._
-import org.json4s.native.JsonMethods._
-import org.json4s.native.Serialization.read
-import org.json4s.native.Serialization.write
-
-class ESEngineInstances(client: Client, config: StorageClientConfig, index: String)
-  extends EngineInstances with Logging {
-  implicit val formats = DefaultFormats + new EngineInstanceSerializer
-  private val estype = "engine_instances"
-
-  val indices = client.admin.indices
-  val indexExistResponse = indices.prepareExists(index).get
-  if (!indexExistResponse.isExists) {
-    indices.prepareCreate(index).get
-  }
-  val typeExistResponse = indices.prepareTypesExists(index).setTypes(estype).get
-  if (!typeExistResponse.isExists) {
-    val json =
-      (estype ->
-        ("properties" ->
-          ("status" -> ("type" -> "string") ~ ("index" -> "not_analyzed")) ~
-          ("startTime" -> ("type" -> "date")) ~
-          ("endTime" -> ("type" -> "date")) ~
-          ("engineId" -> ("type" -> "string") ~ ("index" -> "not_analyzed")) ~
-          ("engineVersion" ->
-            ("type" -> "string") ~ ("index" -> "not_analyzed")) ~
-          ("engineVariant" ->
-            ("type" -> "string") ~ ("index" -> "not_analyzed")) ~
-          ("engineFactory" ->
-            ("type" -> "string") ~ ("index" -> "not_analyzed")) ~
-          ("batch" ->
-            ("type" -> "string") ~ ("index" -> "not_analyzed")) ~
-          ("dataSourceParams" ->
-            ("type" -> "string") ~ ("index" -> "not_analyzed")) ~
-          ("preparatorParams" ->
-            ("type" -> "string") ~ ("index" -> "not_analyzed")) ~
-          ("algorithmsParams" ->
-            ("type" -> "string") ~ ("index" -> "not_analyzed")) ~
-          ("servingParams" ->
-            ("type" -> "string") ~ ("index" -> "not_analyzed")) ~
-          ("status" -> ("type" -> "string") ~ ("index" -> "not_analyzed"))))
-    indices.preparePutMapping(index).setType(estype).
-      setSource(compact(render(json))).get
-  }
-
-  def insert(i: EngineInstance): String = {
-    try {
-      val response = client.prepareIndex(index, estype).
-        setSource(write(i)).get
-      response.getId
-    } catch {
-      case e: ElasticsearchException =>
-        error(e.getMessage)
-        ""
-    }
-  }
-
-  def get(id: String): Option[EngineInstance] = {
-    try {
-      val response = client.prepareGet(index, estype, id).get
-      if (response.isExists) {
-        Some(read[EngineInstance](response.getSourceAsString))
-      } else {
-        None
-      }
-    } catch {
-      case e: ElasticsearchException =>
-        error(e.getMessage)
-        None
-    }
-  }
-
-  def getAll(): Seq[EngineInstance] = {
-    try {
-      val builder = client.prepareSearch(index).setTypes(estype)
-      ESUtils.getAll[EngineInstance](client, builder)
-    } catch {
-      case e: ElasticsearchException =>
-        error(e.getMessage)
-        Seq()
-    }
-  }
-
-  def getCompleted(
-      engineId: String,
-      engineVersion: String,
-      engineVariant: String): Seq[EngineInstance] = {
-    try {
-      val builder = client.prepareSearch(index).setTypes(estype).setPostFilter(
-        andFilter(
-          termFilter("status", "COMPLETED"),
-          termFilter("engineId", engineId),
-          termFilter("engineVersion", engineVersion),
-          termFilter("engineVariant", engineVariant))).
-        addSort("startTime", SortOrder.DESC)
-      ESUtils.getAll[EngineInstance](client, builder)
-    } catch {
-      case e: ElasticsearchException =>
-        error(e.getMessage)
-        Seq()
-    }
-  }
-
-  def getLatestCompleted(
-      engineId: String,
-      engineVersion: String,
-      engineVariant: String): Option[EngineInstance] =
-    getCompleted(
-      engineId,
-      engineVersion,
-      engineVariant).headOption
-
-  def update(i: EngineInstance): Unit = {
-    try {
-      client.prepareUpdate(index, estype, i.id).setDoc(write(i)).get
-    } catch {
-      case e: ElasticsearchException => error(e.getMessage)
-    }
-  }
-
-  def delete(id: String): Unit = {
-    try {
-      val response = client.prepareDelete(index, estype, id).get
-    } catch {
-      case e: ElasticsearchException => error(e.getMessage)
-    }
-  }
-}

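getCompleted above filters on status COMPLETED plus engine ID, version, and variant, sorted by startTime descending, so getLatestCompleted is simply its head. A hedged lookup sketch (the engineInstances instance and all argument values are illustrative):

    val latest: Option[EngineInstance] = engineInstances.getLatestCompleted(
      engineId = "com.example.RecommendationEngine",  // hypothetical values
      engineVersion = "0.1.0",
      engineVariant = "default")
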
http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/data/src/main/scala/io/prediction/data/storage/elasticsearch/ESEngineManifests.scala
----------------------------------------------------------------------
diff --git a/data/src/main/scala/io/prediction/data/storage/elasticsearch/ESEngineManifests.scala b/data/src/main/scala/io/prediction/data/storage/elasticsearch/ESEngineManifests.scala
deleted file mode 100644
index a5333b5..0000000
--- a/data/src/main/scala/io/prediction/data/storage/elasticsearch/ESEngineManifests.scala
+++ /dev/null
@@ -1,81 +0,0 @@
-/** Copyright 2015 TappingStone, Inc.
-  *
-  * Licensed under the Apache License, Version 2.0 (the "License");
-  * you may not use this file except in compliance with the License.
-  * You may obtain a copy of the License at
-  *
-  *     http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-package io.prediction.data.storage.elasticsearch
-
-import grizzled.slf4j.Logging
-import io.prediction.data.storage.EngineManifestSerializer
-import io.prediction.data.storage.StorageClientConfig
-import io.prediction.data.storage.EngineManifest
-import io.prediction.data.storage.EngineManifests
-import org.elasticsearch.ElasticsearchException
-import org.elasticsearch.client.Client
-import org.json4s._
-import org.json4s.native.Serialization.read
-import org.json4s.native.Serialization.write
-
-class ESEngineManifests(client: Client, config: StorageClientConfig, index: String)
-  extends EngineManifests with Logging {
-  implicit val formats = DefaultFormats + new EngineManifestSerializer
-  private val estype = "engine_manifests"
-  private def esid(id: String, version: String) = s"$id $version"
-
-  def insert(engineManifest: EngineManifest): Unit = {
-    val json = write(engineManifest)
-    val response = client.prepareIndex(
-      index,
-      estype,
-      esid(engineManifest.id, engineManifest.version)).
-      setSource(json).execute().actionGet()
-  }
-
-  def get(id: String, version: String): Option[EngineManifest] = {
-    try {
-      val response = client.prepareGet(index, estype, esid(id, version)).
-        execute().actionGet()
-      if (response.isExists) {
-        Some(read[EngineManifest](response.getSourceAsString))
-      } else {
-        None
-      }
-    } catch {
-      case e: ElasticsearchException =>
-        error(e.getMessage)
-        None
-    }
-  }
-
-  def getAll(): Seq[EngineManifest] = {
-    try {
-      val builder = client.prepareSearch()
-      ESUtils.getAll[EngineManifest](client, builder)
-    } catch {
-      case e: ElasticsearchException =>
-        error(e.getMessage)
-        Seq()
-    }
-  }
-
-  def update(engineManifest: EngineManifest, upsert: Boolean = false): Unit =
-    insert(engineManifest)
-
-  def delete(id: String, version: String): Unit = {
-    try {
-      client.prepareDelete(index, estype, esid(id, version)).execute().actionGet()
-    } catch {
-      case e: ElasticsearchException => error(e.getMessage)
-    }
-  }
-}

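Manifests are addressed by the composite document ID built by esid, so get(id, version) reads the document "<id> <version>" directly, and update can simply delegate to insert because re-indexing the same ID overwrites in place. An illustrative call, with hypothetical values:

    // Looks up the document whose ES ID is
    // "com.example.RecommendationEngine 0.1.0".
    val manifest: Option[EngineManifest] =
      manifests.get("com.example.RecommendationEngine", "0.1.0")
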
http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/data/src/main/scala/io/prediction/data/storage/elasticsearch/ESEvaluationInstances.scala
----------------------------------------------------------------------
diff --git a/data/src/main/scala/io/prediction/data/storage/elasticsearch/ESEvaluationInstances.scala b/data/src/main/scala/io/prediction/data/storage/elasticsearch/ESEvaluationInstances.scala
deleted file mode 100644
index ae33417..0000000
--- a/data/src/main/scala/io/prediction/data/storage/elasticsearch/ESEvaluationInstances.scala
+++ /dev/null
@@ -1,133 +0,0 @@
-/** Copyright 2015 TappingStone, Inc.
-  *
-  * Licensed under the Apache License, Version 2.0 (the "License");
-  * you may not use this file except in compliance with the License.
-  * You may obtain a copy of the License at
-  *
-  *     http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-package io.prediction.data.storage.elasticsearch
-
-import grizzled.slf4j.Logging
-import io.prediction.data.storage.EvaluationInstance
-import io.prediction.data.storage.EvaluationInstanceSerializer
-import io.prediction.data.storage.EvaluationInstances
-import io.prediction.data.storage.StorageClientConfig
-import org.elasticsearch.ElasticsearchException
-import org.elasticsearch.client.Client
-import org.elasticsearch.index.query.FilterBuilders._
-import org.elasticsearch.search.sort.SortOrder
-import org.json4s.JsonDSL._
-import org.json4s._
-import org.json4s.native.JsonMethods._
-import org.json4s.native.Serialization.read
-import org.json4s.native.Serialization.write
-
-class ESEvaluationInstances(client: Client, config: StorageClientConfig, index: String)
-  extends EvaluationInstances with Logging {
-  implicit val formats = DefaultFormats + new EvaluationInstanceSerializer
-  private val estype = "evaluation_instances"
-
-  val indices = client.admin.indices
-  val indexExistResponse = indices.prepareExists(index).get
-  if (!indexExistResponse.isExists) {
-    indices.prepareCreate(index).get
-  }
-  val typeExistResponse = indices.prepareTypesExists(index).setTypes(estype).get
-  if (!typeExistResponse.isExists) {
-    val json =
-      (estype ->
-        ("properties" ->
-          ("status" -> ("type" -> "string") ~ ("index" -> "not_analyzed")) ~
-          ("startTime" -> ("type" -> "date")) ~
-          ("endTime" -> ("type" -> "date")) ~
-          ("evaluationClass" ->
-            ("type" -> "string") ~ ("index" -> "not_analyzed")) ~
-          ("engineParamsGeneratorClass" ->
-            ("type" -> "string") ~ ("index" -> "not_analyzed")) ~
-          ("batch" ->
-            ("type" -> "string") ~ ("index" -> "not_analyzed")) ~
-          ("evaluatorResults" ->
-            ("type" -> "string") ~ ("index" -> "no")) ~
-          ("evaluatorResultsHTML" ->
-            ("type" -> "string") ~ ("index" -> "no")) ~
-          ("evaluatorResultsJSON" ->
-            ("type" -> "string") ~ ("index" -> "no"))))
-    indices.preparePutMapping(index).setType(estype).
-      setSource(compact(render(json))).get
-  }
-
-  def insert(i: EvaluationInstance): String = {
-    try {
-      val response = client.prepareIndex(index, estype).
-        setSource(write(i)).get
-      response.getId
-    } catch {
-      case e: ElasticsearchException =>
-        error(e.getMessage)
-        ""
-    }
-  }
-
-  def get(id: String): Option[EvaluationInstance] = {
-    try {
-      val response = client.prepareGet(index, estype, id).get
-      if (response.isExists) {
-        Some(read[EvaluationInstance](response.getSourceAsString))
-      } else {
-        None
-      }
-    } catch {
-      case e: ElasticsearchException =>
-        error(e.getMessage)
-        None
-    }
-  }
-
-  def getAll(): Seq[EvaluationInstance] = {
-    try {
-      val builder = client.prepareSearch(index).setTypes(estype)
-      ESUtils.getAll[EvaluationInstance](client, builder)
-    } catch {
-      case e: ElasticsearchException =>
-        error(e.getMessage)
-        Seq()
-    }
-  }
-
-  def getCompleted(): Seq[EvaluationInstance] = {
-    try {
-      val builder = client.prepareSearch(index).setTypes(estype).setPostFilter(
-        termFilter("status", "EVALCOMPLETED")).
-        addSort("startTime", SortOrder.DESC)
-      ESUtils.getAll[EvaluationInstance](client, builder)
-    } catch {
-      case e: ElasticsearchException =>
-        error(e.getMessage)
-        Seq()
-    }
-  }
-
-  def update(i: EvaluationInstance): Unit = {
-    try {
-      client.prepareUpdate(index, estype, i.id).setDoc(write(i)).get
-    } catch {
-      case e: ElasticsearchException => error(e.getMessage)
-    }
-  }
-
-  def delete(id: String): Unit = {
-    try {
-      client.prepareDelete(index, estype, id).get
-    } catch {
-      case e: ElasticsearchException => error(e.getMessage)
-    }
-  }
-}

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/data/src/main/scala/io/prediction/data/storage/elasticsearch/ESSequences.scala
----------------------------------------------------------------------
diff --git a/data/src/main/scala/io/prediction/data/storage/elasticsearch/ESSequences.scala b/data/src/main/scala/io/prediction/data/storage/elasticsearch/ESSequences.scala
deleted file mode 100644
index 99ab253..0000000
--- a/data/src/main/scala/io/prediction/data/storage/elasticsearch/ESSequences.scala
+++ /dev/null
@@ -1,61 +0,0 @@
-/** Copyright 2015 TappingStone, Inc.
-  *
-  * Licensed under the Apache License, Version 2.0 (the "License");
-  * you may not use this file except in compliance with the License.
-  * You may obtain a copy of the License at
-  *
-  *     http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-package io.prediction.data.storage.elasticsearch
-
-import grizzled.slf4j.Logging
-import io.prediction.data.storage.StorageClientConfig
-import org.elasticsearch.ElasticsearchException
-import org.elasticsearch.client.Client
-import org.json4s.JsonDSL._
-import org.json4s._
-import org.json4s.native.JsonMethods._
-
-class ESSequences(client: Client, config: StorageClientConfig, index: String) extends Logging {
-  implicit val formats = DefaultFormats
-  private val estype = "sequences"
-
-  val indices = client.admin.indices
-  val indexExistResponse = indices.prepareExists(index).get
-  if (!indexExistResponse.isExists) {
-    // val settingsJson =
-    //   ("number_of_shards" -> 1) ~
-    //   ("auto_expand_replicas" -> "0-all")
-    indices.prepareCreate(index).get
-  }
-  val typeExistResponse = indices.prepareTypesExists(index).setTypes(estype).get
-  if (!typeExistResponse.isExists) {
-    val mappingJson =
-      (estype ->
-        ("_source" -> ("enabled" -> 0)) ~
-        ("_all" -> ("enabled" -> 0)) ~
-        ("_type" -> ("index" -> "no")) ~
-        ("enabled" -> 0))
-    indices.preparePutMapping(index).setType(estype).
-      setSource(compact(render(mappingJson))).get
-  }
-
-  def genNext(name: String): Int = {
-    try {
-      val response = client.prepareIndex(index, estype, name).
-        setSource(compact(render("n" -> name))).get
-      response.getVersion().toInt
-    } catch {
-      case e: ElasticsearchException =>
-        error(e.getMessage)
-        0
-    }
-  }
-}

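ESSequences stores no counter value at all: genNext re-indexes a document whose ID is the sequence name, and Elasticsearch's per-document versioning supplies the monotonically increasing number. A small sketch, assuming the client, config, and index wiring from the surrounding storage code:

    val seq = new ESSequences(client, config, index)
    // Each call re-indexes the "channels" document, so its version, and
    // hence the returned value, advances: 1, 2, 3, ...
    val first = seq.genNext("channels")
    val second = seq.genNext("channels") // first + 1, barring concurrent writers
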
http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/data/src/main/scala/io/prediction/data/storage/elasticsearch/ESUtils.scala
----------------------------------------------------------------------
diff --git a/data/src/main/scala/io/prediction/data/storage/elasticsearch/ESUtils.scala b/data/src/main/scala/io/prediction/data/storage/elasticsearch/ESUtils.scala
deleted file mode 100644
index 7cf693c..0000000
--- a/data/src/main/scala/io/prediction/data/storage/elasticsearch/ESUtils.scala
+++ /dev/null
@@ -1,45 +0,0 @@
-/** Copyright 2015 TappingStone, Inc.
-  *
-  * Licensed under the Apache License, Version 2.0 (the "License");
-  * you may not use this file except in compliance with the License.
-  * You may obtain a copy of the License at
-  *
-  *     http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-package io.prediction.data.storage.elasticsearch
-
-import org.elasticsearch.action.search.SearchRequestBuilder
-import org.elasticsearch.client.Client
-import org.elasticsearch.common.unit.TimeValue
-import org.json4s.Formats
-import org.json4s.native.Serialization.read
-
-import scala.collection.mutable.ArrayBuffer
-
-object ESUtils {
-  val scrollLife = new TimeValue(60000)
-
-  def getAll[T : Manifest](
-      client: Client,
-      builder: SearchRequestBuilder)(
-      implicit formats: Formats): Seq[T] = {
-    val results = ArrayBuffer[T]()
-    var response = builder.setScroll(scrollLife).get
-    var hits = response.getHits().hits()
-    results ++= hits.map(h => read[T](h.getSourceAsString))
-    while (hits.size > 0) {
-      response = client.prepareSearchScroll(response.getScrollId).
-        setScroll(scrollLife).get
-      hits = response.getHits().hits()
-      results ++= hits.map(h => read[T](h.getSourceAsString))
-    }
-    results
-  }
-}

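getAll drains a scrolling search: it opens a scroll with a 60-second lifetime, appends each page of hits, and keeps pulling until a page comes back empty. A hedged usage sketch, reusing the names from ESEngineInstances above (an implicit org.json4s.Formats and a Manifest for the element type must be in scope):

    val builder = client.prepareSearch(index).setTypes("engine_instances")
    val instances: Seq[EngineInstance] =
      ESUtils.getAll[EngineInstance](client, builder)
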
http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/data/src/main/scala/io/prediction/data/storage/elasticsearch/StorageClient.scala
----------------------------------------------------------------------
diff --git a/data/src/main/scala/io/prediction/data/storage/elasticsearch/StorageClient.scala b/data/src/main/scala/io/prediction/data/storage/elasticsearch/StorageClient.scala
deleted file mode 100644
index 8f550c2..0000000
--- a/data/src/main/scala/io/prediction/data/storage/elasticsearch/StorageClient.scala
+++ /dev/null
@@ -1,47 +0,0 @@
-/** Copyright 2015 TappingStone, Inc.
-  *
-  * Licensed under the Apache License, Version 2.0 (the "License");
-  * you may not use this file except in compliance with the License.
-  * You may obtain a copy of the License at
-  *
-  *     http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-package io.prediction.data.storage.elasticsearch
-
-import grizzled.slf4j.Logging
-import io.prediction.data.storage.BaseStorageClient
-import io.prediction.data.storage.StorageClientConfig
-import io.prediction.data.storage.StorageClientException
-import org.elasticsearch.client.transport.TransportClient
-import org.elasticsearch.common.settings.ImmutableSettings
-import org.elasticsearch.common.transport.InetSocketTransportAddress
-import org.elasticsearch.transport.ConnectTransportException
-
-class StorageClient(val config: StorageClientConfig) extends BaseStorageClient
-    with Logging {
-  override val prefix = "ES"
-  val client = try {
-    val hosts = config.properties.get("HOSTS").
-      map(_.split(",").toSeq).getOrElse(Seq("localhost"))
-    val ports = config.properties.get("PORTS").
-      map(_.split(",").toSeq.map(_.toInt)).getOrElse(Seq(9300))
-    val settings = ImmutableSettings.settingsBuilder()
-      .put("cluster.name", config.properties.getOrElse("CLUSTERNAME", "elasticsearch"))
-    val transportClient = new TransportClient(settings)
-    (hosts zip ports) foreach { hp =>
-      transportClient.addTransportAddress(
-        new InetSocketTransportAddress(hp._1, hp._2))
-    }
-    transportClient
-  } catch {
-    case e: ConnectTransportException =>
-      throw new StorageClientException(e.getMessage, e)
-  }
-}

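One subtlety in the client construction: hosts and ports are zipped positionally, so a host without a matching port entry is silently dropped. For example:

    val hosts = Seq("es1", "es2", "es3")
    val ports = Seq(9300, 9300)
    (hosts zip ports)  // Seq(("es1", 9300), ("es2", 9300)); "es3" is ignored
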
http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/data/src/main/scala/io/prediction/data/storage/elasticsearch/package.scala
----------------------------------------------------------------------
diff --git a/data/src/main/scala/io/prediction/data/storage/elasticsearch/package.scala b/data/src/main/scala/io/prediction/data/storage/elasticsearch/package.scala
deleted file mode 100644
index daa3bc3..0000000
--- a/data/src/main/scala/io/prediction/data/storage/elasticsearch/package.scala
+++ /dev/null
@@ -1,22 +0,0 @@
-/** Copyright 2015 TappingStone, Inc.
-  *
-  * Licensed under the Apache License, Version 2.0 (the "License");
-  * you may not use this file except in compliance with the License.
-  * You may obtain a copy of the License at
-  *
-  *     http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-package io.prediction.data.storage
-
-/** Elasticsearch implementation of storage traits, supporting meta data only
-  *
-  * @group Implementation
-  */
-package object elasticsearch {}

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/data/src/main/scala/io/prediction/data/storage/hbase/HBEventsUtil.scala
----------------------------------------------------------------------
diff --git a/data/src/main/scala/io/prediction/data/storage/hbase/HBEventsUtil.scala b/data/src/main/scala/io/prediction/data/storage/hbase/HBEventsUtil.scala
deleted file mode 100644
index 294961f..0000000
--- a/data/src/main/scala/io/prediction/data/storage/hbase/HBEventsUtil.scala
+++ /dev/null
@@ -1,412 +0,0 @@
-/** Copyright 2015 TappingStone, Inc.
-  *
-  * Licensed under the Apache License, Version 2.0 (the "License");
-  * you may not use this file except in compliance with the License.
-  * You may obtain a copy of the License at
-  *
-  *     http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-package io.prediction.data.storage.hbase
-
-import io.prediction.data.storage.Event
-import io.prediction.data.storage.EventValidation
-import io.prediction.data.storage.DataMap
-
-import org.apache.hadoop.hbase.client.Result
-import org.apache.hadoop.hbase.client.Put
-import org.apache.hadoop.hbase.client.Scan
-import org.apache.hadoop.hbase.util.Bytes
-import org.apache.hadoop.hbase.filter.FilterList
-import org.apache.hadoop.hbase.filter.RegexStringComparator
-import org.apache.hadoop.hbase.filter.SingleColumnValueFilter
-import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp
-import org.apache.hadoop.hbase.filter.BinaryComparator
-import org.apache.hadoop.hbase.filter.QualifierFilter
-import org.apache.hadoop.hbase.filter.SkipFilter
-
-import org.json4s.DefaultFormats
-import org.json4s.JObject
-import org.json4s.native.Serialization.{ read, write }
-
-import org.joda.time.DateTime
-import org.joda.time.DateTimeZone
-
-import org.apache.commons.codec.binary.Base64
-import java.security.MessageDigest
-
-import java.util.UUID
-
-/* Common utility functions for accessing the events store in HBase */
-object HBEventsUtil {
-
-  implicit val formats = DefaultFormats
-
-  def tableName(namespace: String, appId: Int, channelId: Option[Int] = None): String = {
-    channelId.map { ch =>
-      s"${namespace}:events_${appId}_${ch}"
-    }.getOrElse {
-      s"${namespace}:events_${appId}"
-    }
-  }
-
-  // column names for "e" column family
-  val colNames: Map[String, Array[Byte]] = Map(
-    "event" -> "e",
-    "entityType" -> "ety",
-    "entityId" -> "eid",
-    "targetEntityType" -> "tety",
-    "targetEntityId" -> "teid",
-    "properties" -> "p",
-    "prId" -> "prid",
-    "eventTime" -> "et",
-    "eventTimeZone" -> "etz",
-    "creationTime" -> "ct",
-    "creationTimeZone" -> "ctz"
-  ).mapValues(Bytes.toBytes(_))
-
-  def hash(entityType: String, entityId: String): Array[Byte] = {
-    val s = entityType + "-" + entityId
-    // get a new MessageDigest object each time for thread safety
-    val md5 = MessageDigest.getInstance("MD5")
-    md5.digest(Bytes.toBytes(s))
-  }
-
-  class RowKey(
-    val b: Array[Byte]
-  ) {
-    require((b.size == 32), s"Incorrect b size: ${b.size}")
-    lazy val entityHash: Array[Byte] = b.slice(0, 16)
-    lazy val millis: Long = Bytes.toLong(b.slice(16, 24))
-    lazy val uuidLow: Long = Bytes.toLong(b.slice(24, 32))
-
-    lazy val toBytes: Array[Byte] = b
-
-    override def toString: String = {
-      Base64.encodeBase64URLSafeString(toBytes)
-    }
-  }
-
-  object RowKey {
-    def apply(
-      entityType: String,
-      entityId: String,
-      millis: Long,
-      uuidLow: Long): RowKey = {
-        // Append the UUID's least significant bits to disambiguate multiple
-        // events at the same millisecond (the most significant bits encode a
-        // timestamp, which eventTime already provides).
-        val b = hash(entityType, entityId) ++
-          Bytes.toBytes(millis) ++ Bytes.toBytes(uuidLow)
-        new RowKey(b)
-      }
-
-    // get RowKey from string representation
-    def apply(s: String): RowKey = {
-      try {
-        apply(Base64.decodeBase64(s))
-      } catch {
-        case e: Exception => throw new RowKeyException(
-          s"Failed to convert String ${s} to RowKey because ${e}", e)
-      }
-    }
-
-    def apply(b: Array[Byte]): RowKey = {
-      if (b.size != 32) {
-        val bString = b.mkString(",")
-        throw new RowKeyException(
-          s"Incorrect byte array size. Bytes: ${bString}.")
-      }
-      new RowKey(b)
-    }
-
-  }
-
-  class RowKeyException(val msg: String, val cause: Exception)
-    extends Exception(msg, cause) {
-      def this(msg: String) = this(msg, null)
-    }
-
-  case class PartialRowKey(entityType: String, entityId: String,
-    millis: Option[Long] = None) {
-    val toBytes: Array[Byte] = {
-      hash(entityType, entityId) ++
-        (millis.map(Bytes.toBytes(_)).getOrElse(Array[Byte]()))
-    }
-  }
-
-  def eventToPut(event: Event, appId: Int): (Put, RowKey) = {
-    // generate new rowKey if eventId is None
-    val rowKey = event.eventId.map { id =>
-      RowKey(id) // create rowKey from eventId
-    }.getOrElse {
-      // TODO: use a real UUID, not pseudo-random bits
-      val uuidLow: Long = UUID.randomUUID().getLeastSignificantBits
-      RowKey(
-        entityType = event.entityType,
-        entityId = event.entityId,
-        millis = event.eventTime.getMillis,
-        uuidLow = uuidLow
-      )
-    }
-
-    val eBytes = Bytes.toBytes("e")
-    // use eventTime as HBase's cell timestamp
-    val put = new Put(rowKey.toBytes, event.eventTime.getMillis)
-
-    def addStringToE(col: Array[Byte], v: String): Put = {
-      put.add(eBytes, col, Bytes.toBytes(v))
-    }
-
-    def addLongToE(col: Array[Byte], v: Long): Put = {
-      put.add(eBytes, col, Bytes.toBytes(v))
-    }
-
-    addStringToE(colNames("event"), event.event)
-    addStringToE(colNames("entityType"), event.entityType)
-    addStringToE(colNames("entityId"), event.entityId)
-
-    event.targetEntityType.foreach { targetEntityType =>
-      addStringToE(colNames("targetEntityType"), targetEntityType)
-    }
-
-    event.targetEntityId.foreach { targetEntityId =>
-      addStringToE(colNames("targetEntityId"), targetEntityId)
-    }
-
-    // TODO: make properties Option[]
-    if (!event.properties.isEmpty) {
-      addStringToE(colNames("properties"), write(event.properties.toJObject))
-    }
-
-    event.prId.foreach { prId =>
-      addStringToE(colNames("prId"), prId)
-    }
-
-    addLongToE(colNames("eventTime"), event.eventTime.getMillis)
-    val eventTimeZone = event.eventTime.getZone
-    if (!eventTimeZone.equals(EventValidation.defaultTimeZone)) {
-      addStringToE(colNames("eventTimeZone"), eventTimeZone.getID)
-    }
-
-    addLongToE(colNames("creationTime"), event.creationTime.getMillis)
-    val creationTimeZone = event.creationTime.getZone
-    if (!creationTimeZone.equals(EventValidation.defaultTimeZone)) {
-      addStringToE(colNames("creationTimeZone"), creationTimeZone.getID)
-    }
-
-    // can use zero-length byte array for tag cell value
-    (put, rowKey)
-  }
-
-  def resultToEvent(result: Result, appId: Int): Event = {
-    val rowKey = RowKey(result.getRow())
-
-    val eBytes = Bytes.toBytes("e")
-    // val e = result.getFamilyMap(eBytes)
-
-    def getStringCol(col: String): String = {
-      val r = result.getValue(eBytes, colNames(col))
-      require(r != null,
-        s"Failed to get value for column ${col}. " +
-        s"Rowkey: ${rowKey.toString} " +
-        s"StringBinary: ${Bytes.toStringBinary(result.getRow())}.")
-
-      Bytes.toString(r)
-    }
-
-    def getLongCol(col: String): Long = {
-      val r = result.getValue(eBytes, colNames(col))
-      require(r != null,
-        s"Failed to get value for column ${col}. " +
-        s"Rowkey: ${rowKey.toString} " +
-        s"StringBinary: ${Bytes.toStringBinary(result.getRow())}.")
-
-      Bytes.toLong(r)
-    }
-
-    def getOptStringCol(col: String): Option[String] = {
-      val r = result.getValue(eBytes, colNames(col))
-      if (r == null) {
-        None
-      } else {
-        Some(Bytes.toString(r))
-      }
-    }
-
-    def getTimestamp(col: String): Long = {
-      result.getColumnLatestCell(eBytes, colNames(col)).getTimestamp()
-    }
-
-    val event = getStringCol("event")
-    val entityType = getStringCol("entityType")
-    val entityId = getStringCol("entityId")
-    val targetEntityType = getOptStringCol("targetEntityType")
-    val targetEntityId = getOptStringCol("targetEntityId")
-    val properties: DataMap = getOptStringCol("properties")
-      .map(s => DataMap(read[JObject](s))).getOrElse(DataMap())
-    val prId = getOptStringCol("prId")
-    val eventTimeZone = getOptStringCol("eventTimeZone")
-      .map(DateTimeZone.forID(_))
-      .getOrElse(EventValidation.defaultTimeZone)
-    val eventTime = new DateTime(
-      getLongCol("eventTime"), eventTimeZone)
-    val creationTimeZone = getOptStringCol("creationTimeZone")
-      .map(DateTimeZone.forID(_))
-      .getOrElse(EventValidation.defaultTimeZone)
-    val creationTime: DateTime = new DateTime(
-      getLongCol("creationTime"), creationTimeZone)
-
-    Event(
-      eventId = Some(RowKey(result.getRow()).toString),
-      event = event,
-      entityType = entityType,
-      entityId = entityId,
-      targetEntityType = targetEntityType,
-      targetEntityId = targetEntityId,
-      properties = properties,
-      eventTime = eventTime,
-      tags = Seq(),
-      prId = prId,
-      creationTime = creationTime
-    )
-  }
-
-
-  // For a mandatory field, None means don't care.
-  // For an optional field: None means don't care,
-  //    Some(None) means the field must not exist, and
-  //    Some(Some(x)) means the field must match x.
-  def createScan(
-    startTime: Option[DateTime] = None,
-    untilTime: Option[DateTime] = None,
-    entityType: Option[String] = None,
-    entityId: Option[String] = None,
-    eventNames: Option[Seq[String]] = None,
-    targetEntityType: Option[Option[String]] = None,
-    targetEntityId: Option[Option[String]] = None,
-    reversed: Option[Boolean] = None): Scan = {
-
-    val scan: Scan = new Scan()
-
-    (entityType, entityId) match {
-      case (Some(et), Some(eid)) => {
-        val start = PartialRowKey(et, eid,
-          startTime.map(_.getMillis)).toBytes
-        // if no untilTime, stop upon reaching the next bytes after entityTypeAndId
-        val stop = PartialRowKey(et, eid,
-          untilTime.map(_.getMillis).orElse(Some(-1))).toBytes
-
-        if (reversed.getOrElse(false)) {
-          // Reversed order.
-          // If you specify a startRow and stopRow,
-          // to scan in reverse, the startRow needs to be lexicographically
-          // after the stopRow.
-          scan.setStartRow(stop)
-          scan.setStopRow(start)
-          scan.setReversed(true)
-        } else {
-          scan.setStartRow(start)
-          scan.setStopRow(stop)
-        }
-      }
-      case (_, _) => {
-        val minTime: Long = startTime.map(_.getMillis).getOrElse(0)
-        val maxTime: Long = untilTime.map(_.getMillis).getOrElse(Long.MaxValue)
-        scan.setTimeRange(minTime, maxTime)
-        if (reversed.getOrElse(false)) {
-          scan.setReversed(true)
-        }
-      }
-    }
-
-    val filters = new FilterList(FilterList.Operator.MUST_PASS_ALL)
-
-    val eBytes = Bytes.toBytes("e")
-
-    def createBinaryFilter(col: String, value: Array[Byte]): SingleColumnValueFilter = {
-      val comp = new BinaryComparator(value)
-      new SingleColumnValueFilter(
-        eBytes, colNames(col), CompareOp.EQUAL, comp)
-    }
-
-    // skip the row if the column exists
-    def createSkipRowIfColumnExistFilter(col: String): SkipFilter = {
-      val comp = new BinaryComparator(colNames(col))
-      val q = new QualifierFilter(CompareOp.NOT_EQUAL, comp)
-      // filters an entire row if any of the Cell checks do not pass
-      new SkipFilter(q)
-    }
-
-    entityType.foreach { et =>
-      val compType = new BinaryComparator(Bytes.toBytes(et))
-      val filterType = new SingleColumnValueFilter(
-        eBytes, colNames("entityType"), CompareOp.EQUAL, compType)
-      filters.addFilter(filterType)
-    }
-
-    entityId.foreach { eid =>
-      val compId = new BinaryComparator(Bytes.toBytes(eid))
-      val filterId = new SingleColumnValueFilter(
-        eBytes, colNames("entityId"), CompareOp.EQUAL, compId)
-      filters.addFilter(filterId)
-    }
-
-    eventNames.foreach { eventsList =>
-      // match any event in the eventsList
-      val eventFilters = new FilterList(FilterList.Operator.MUST_PASS_ONE)
-      eventsList.foreach { e =>
-        val compEvent = new BinaryComparator(Bytes.toBytes(e))
-        val filterEvent = new SingleColumnValueFilter(
-          eBytes, colNames("event"), CompareOp.EQUAL, compEvent)
-        eventFilters.addFilter(filterEvent)
-      }
-      if (!eventFilters.getFilters().isEmpty) {
-        filters.addFilter(eventFilters)
-      }
-    }
-
-    targetEntityType.foreach { tetOpt =>
-      if (tetOpt.isEmpty) {
-        val filter = createSkipRowIfColumnExistFilter("targetEntityType")
-        filters.addFilter(filter)
-      } else {
-        tetOpt.foreach { tet =>
-          val filter = createBinaryFilter(
-            "targetEntityType", Bytes.toBytes(tet))
-          // the entire row will be skipped if the column is not found.
-          filter.setFilterIfMissing(true)
-          filters.addFilter(filter)
-        }
-      }
-    }
-
-    targetEntityId.foreach { teidOpt =>
-      if (teidOpt.isEmpty) {
-        val filter = createSkipRowIfColumnExistFilter("targetEntityId")
-        filters.addFilter(filter)
-      } else {
-        teidOpt.foreach { teid =>
-          val filter = createBinaryFilter(
-            "targetEntityId", Bytes.toBytes(teid))
-          // the entire row will be skipped if the column is not found.
-          filter.setFilterIfMissing(true)
-          filters.addFilter(filter)
-        }
-      }
-    }
-
-    if (!filters.getFilters().isEmpty) {
-      scan.setFilter(filters)
-    }
-
-    scan
-  }
-
-}

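The row key packs everything needed for entity-scoped time-range scans into 32 bytes: md5(entityType + "-" + entityId) in the first 16, event-time millis in the next 8, and low-order UUID bits in the last 8. Its URL-safe Base64 form doubles as the public event ID. A small round-trip sketch with illustrative values:

    import java.util.UUID
    val rowKey = HBEventsUtil.RowKey(
      entityType = "user",
      entityId = "u1",
      millis = 1437000000000L,  // illustrative event time
      uuidLow = UUID.randomUUID().getLeastSignificantBits)
    val eventId = rowKey.toString              // URL-safe Base64 of the 32 bytes
    val decoded = HBEventsUtil.RowKey(eventId) // parses back to the same key
    assert(decoded.millis == 1437000000000L)
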
http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/data/src/main/scala/io/prediction/data/storage/hbase/HBLEvents.scala
----------------------------------------------------------------------
diff --git a/data/src/main/scala/io/prediction/data/storage/hbase/HBLEvents.scala b/data/src/main/scala/io/prediction/data/storage/hbase/HBLEvents.scala
deleted file mode 100644
index 6985ebe..0000000
--- a/data/src/main/scala/io/prediction/data/storage/hbase/HBLEvents.scala
+++ /dev/null
@@ -1,192 +0,0 @@
-/** Copyright 2015 TappingStone, Inc.
-  *
-  * Licensed under the Apache License, Version 2.0 (the "License");
-  * you may not use this file except in compliance with the License.
-  * You may obtain a copy of the License at
-  *
-  *     http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-package io.prediction.data.storage.hbase
-
-import grizzled.slf4j.Logging
-import io.prediction.data.storage.Event
-import io.prediction.data.storage.LEvents
-import io.prediction.data.storage.StorageClientConfig
-import io.prediction.data.storage.hbase.HBEventsUtil.RowKey
-import org.apache.hadoop.hbase.HColumnDescriptor
-import org.apache.hadoop.hbase.HTableDescriptor
-import org.apache.hadoop.hbase.NamespaceDescriptor
-import org.apache.hadoop.hbase.TableName
-import org.apache.hadoop.hbase.client._
-import org.joda.time.DateTime
-
-import scala.collection.JavaConversions._
-import scala.concurrent.ExecutionContext
-import scala.concurrent.Future
-
-class HBLEvents(val client: HBClient, config: StorageClientConfig, val namespace: String)
-  extends LEvents with Logging {
-
-  // implicit val formats = DefaultFormats + new EventJson4sSupport.DBSerializer
-
-  def resultToEvent(result: Result, appId: Int): Event =
-    HBEventsUtil.resultToEvent(result, appId)
-
-  def getTable(appId: Int, channelId: Option[Int] = None): HTableInterface =
-    client.connection.getTable(HBEventsUtil.tableName(namespace, appId, channelId))
-
-  override
-  def init(appId: Int, channelId: Option[Int] = None): Boolean = {
-    // check whether the namespace exists
-    val existingNamespace = client.admin.listNamespaceDescriptors()
-      .map(_.getName)
-    if (!existingNamespace.contains(namespace)) {
-      val nameDesc = NamespaceDescriptor.create(namespace).build()
-      info(s"The namespace ${namespace} doesn't exist yet. Creating now...")
-      client.admin.createNamespace(nameDesc)
-    }
-
-    val tableName = TableName.valueOf(HBEventsUtil.tableName(namespace, appId, channelId))
-    if (!client.admin.tableExists(tableName)) {
-      info(s"The table ${tableName.getNameAsString()} doesn't exist yet." +
-        " Creating now...")
-      val tableDesc = new HTableDescriptor(tableName)
-      tableDesc.addFamily(new HColumnDescriptor("e"))
-      tableDesc.addFamily(new HColumnDescriptor("r")) // reserved
-      client.admin.createTable(tableDesc)
-    }
-    true
-  }
-
-  override
-  def remove(appId: Int, channelId: Option[Int] = None): Boolean = {
-    val tableName = TableName.valueOf(HBEventsUtil.tableName(namespace, appId, channelId))
-    try {
-      if (client.admin.tableExists(tableName)) {
-        info(s"Removing table ${tableName.getNameAsString()}...")
-        client.admin.disableTable(tableName)
-        client.admin.deleteTable(tableName)
-      } else {
-        info(s"Table ${tableName.getNameAsString()} doesn't exist." +
-          s" Nothing is deleted.")
-      }
-      true
-    } catch {
-      case e: Exception => {
-        error(s"Fail to remove table for appId ${appId}. Exception: ${e}")
-        false
-      }
-    }
-  }
-
-  override
-  def close(): Unit = {
-    client.admin.close()
-    client.connection.close()
-  }
-
-  override
-  def futureInsert(
-    event: Event, appId: Int, channelId: Option[Int])(implicit ec: ExecutionContext):
-    Future[String] = {
-    Future {
-      val table = getTable(appId, channelId)
-      val (put, rowKey) = HBEventsUtil.eventToPut(event, appId)
-      table.put(put)
-      table.flushCommits()
-      table.close()
-      rowKey.toString
-    }
-  }
-
-  override
-  def futureGet(
-    eventId: String, appId: Int, channelId: Option[Int])(implicit ec: ExecutionContext):
-    Future[Option[Event]] = {
-      Future {
-        val table = getTable(appId, channelId)
-        val rowKey = RowKey(eventId)
-        val get = new Get(rowKey.toBytes)
-
-        val result = table.get(get)
-        table.close()
-
-        if (!result.isEmpty()) {
-          val event = resultToEvent(result, appId)
-          Some(event)
-        } else {
-          None
-        }
-      }
-    }
-
-  override
-  def futureDelete(
-    eventId: String, appId: Int, channelId: Option[Int])(implicit ec: ExecutionContext):
-    Future[Boolean] = {
-    Future {
-      val table = getTable(appId, channelId)
-      val rowKey = RowKey(eventId)
-      val exists = table.exists(new Get(rowKey.toBytes))
-      table.delete(new Delete(rowKey.toBytes))
-      table.close()
-      exists
-    }
-  }
-
-  override
-  def futureFind(
-    appId: Int,
-    channelId: Option[Int] = None,
-    startTime: Option[DateTime] = None,
-    untilTime: Option[DateTime] = None,
-    entityType: Option[String] = None,
-    entityId: Option[String] = None,
-    eventNames: Option[Seq[String]] = None,
-    targetEntityType: Option[Option[String]] = None,
-    targetEntityId: Option[Option[String]] = None,
-    limit: Option[Int] = None,
-    reversed: Option[Boolean] = None)(implicit ec: ExecutionContext):
-    Future[Iterator[Event]] = {
-      Future {
-
-        require(!((reversed == Some(true)) && (entityType.isEmpty || entityId.isEmpty)),
-          "the parameter reversed can only be used with both entityType and entityId specified.")
-
-        val table = getTable(appId, channelId)
-
-        val scan = HBEventsUtil.createScan(
-          startTime = startTime,
-          untilTime = untilTime,
-          entityType = entityType,
-          entityId = entityId,
-          eventNames = eventNames,
-          targetEntityType = targetEntityType,
-          targetEntityId = targetEntityId,
-          reversed = reversed)
-        val scanner = table.getScanner(scan)
-        table.close()
-
-        val eventsIter = scanner.iterator()
-
-        // Get all events if None or Some(-1)
-        val results: Iterator[Result] = limit match {
-          case Some(-1) => eventsIter
-          case None => eventsIter
-          case Some(x) => eventsIter.take(x)
-        }
-
-        val eventsIt = results.map { resultToEvent(_, appId) }
-
-        eventsIt
-      }
-  }
-
-}

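Every operation above wraps a blocking HBase call in a Future, so callers can compose them. A hedged sketch, assuming an HBLEvents instance named events and an Event value named event wired up elsewhere:

    import scala.concurrent.Future
    import scala.concurrent.ExecutionContext.Implicits.global
    val roundTrip: Future[Option[Event]] = for {
      id    <- events.futureInsert(event, appId = 1, channelId = None)
      found <- events.futureGet(id, appId = 1, channelId = None)
    } yield found
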
http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/data/src/main/scala/io/prediction/data/storage/hbase/HBPEvents.scala
----------------------------------------------------------------------
diff --git a/data/src/main/scala/io/prediction/data/storage/hbase/HBPEvents.scala b/data/src/main/scala/io/prediction/data/storage/hbase/HBPEvents.scala
deleted file mode 100644
index 9d72529..0000000
--- a/data/src/main/scala/io/prediction/data/storage/hbase/HBPEvents.scala
+++ /dev/null
@@ -1,112 +0,0 @@
-/** Copyright 2015 TappingStone, Inc.
-  *
-  * Licensed under the Apache License, Version 2.0 (the "License");
-  * you may not use this file except in compliance with the License.
-  * You may obtain a copy of the License at
-  *
-  *     http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-package io.prediction.data.storage.hbase
-
-import io.prediction.data.storage.Event
-import io.prediction.data.storage.PEvents
-import io.prediction.data.storage.StorageClientConfig
-import org.apache.hadoop.hbase.HBaseConfiguration
-import org.apache.hadoop.hbase.client.Result
-import org.apache.hadoop.hbase.io.ImmutableBytesWritable
-import org.apache.hadoop.hbase.mapreduce.PIOHBaseUtil
-import org.apache.hadoop.hbase.mapreduce.TableInputFormat
-import org.apache.hadoop.hbase.mapreduce.TableOutputFormat
-import org.apache.hadoop.io.Writable
-import org.apache.hadoop.mapreduce.OutputFormat
-import org.apache.spark.SparkContext
-import org.apache.spark.rdd.RDD
-import org.joda.time.DateTime
-
-class HBPEvents(client: HBClient, config: StorageClientConfig, namespace: String) extends PEvents {
-
-  def checkTableExists(appId: Int, channelId: Option[Int]): Unit = {
-    if (!client.admin.tableExists(HBEventsUtil.tableName(namespace, appId, channelId))) {
-      if (channelId.nonEmpty) {
-        logger.error(s"The appId $appId with channelId $channelId does not exist." +
-          s" Please use valid appId and channelId.")
-        throw new Exception(s"HBase table not found for appId $appId" +
-          s" with channelId $channelId.")
-      } else {
-        logger.error(s"The appId $appId does not exist. Please use valid appId.")
-        throw new Exception(s"HBase table not found for appId $appId.")
-      }
-    }
-  }
-
-  override
-  def find(
-    appId: Int,
-    channelId: Option[Int] = None,
-    startTime: Option[DateTime] = None,
-    untilTime: Option[DateTime] = None,
-    entityType: Option[String] = None,
-    entityId: Option[String] = None,
-    eventNames: Option[Seq[String]] = None,
-    targetEntityType: Option[Option[String]] = None,
-    targetEntityId: Option[Option[String]] = None
-    )(sc: SparkContext): RDD[Event] = {
-
-    checkTableExists(appId, channelId)
-
-    val conf = HBaseConfiguration.create()
-    conf.set(TableInputFormat.INPUT_TABLE,
-      HBEventsUtil.tableName(namespace, appId, channelId))
-
-    val scan = HBEventsUtil.createScan(
-        startTime = startTime,
-        untilTime = untilTime,
-        entityType = entityType,
-        entityId = entityId,
-        eventNames = eventNames,
-        targetEntityType = targetEntityType,
-        targetEntityId = targetEntityId,
-        reversed = None)
-    scan.setCaching(500) // TODO
-    scan.setCacheBlocks(false) // TODO
-
-    conf.set(TableInputFormat.SCAN, PIOHBaseUtil.convertScanToString(scan))
-
-    // HBase is not accessed until this rdd is actually used.
-    val rdd = sc.newAPIHadoopRDD(conf, classOf[TableInputFormat],
-      classOf[ImmutableBytesWritable],
-      classOf[Result]).map {
-        case (key, row) => HBEventsUtil.resultToEvent(row, appId)
-      }
-
-    rdd
-  }
-
-  override
-  def write(
-    events: RDD[Event], appId: Int, channelId: Option[Int])(sc: SparkContext): Unit = {
-
-    checkTableExists(appId, channelId)
-
-    val conf = HBaseConfiguration.create()
-    conf.set(TableOutputFormat.OUTPUT_TABLE,
-      HBEventsUtil.tableName(namespace, appId, channelId))
-    conf.setClass("mapreduce.outputformat.class",
-      classOf[TableOutputFormat[Object]],
-      classOf[OutputFormat[Object, Writable]])
-
-    events.map { event =>
-      val (put, rowKey) = HBEventsUtil.eventToPut(event, appId)
-      (new ImmutableBytesWritable(rowKey.toBytes), put)
-    }.saveAsNewAPIHadoopDataset(conf)
-
-  }
-
-}

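Because find builds on newAPIHadoopRDD, no HBase I/O happens until an action runs on the returned RDD. A hedged usage sketch, assuming an active SparkContext sc and a wired-up HBPEvents instance pevents:

    val rdd = pevents.find(appId = 1)(sc)  // lazy; nothing is scanned yet
    val views = rdd.filter(_.event == "view").count()  // the scan happens here
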
http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/data/src/main/scala/io/prediction/data/storage/hbase/PIOHBaseUtil.scala
----------------------------------------------------------------------
diff --git a/data/src/main/scala/io/prediction/data/storage/hbase/PIOHBaseUtil.scala b/data/src/main/scala/io/prediction/data/storage/hbase/PIOHBaseUtil.scala
deleted file mode 100644
index 1027930..0000000
--- a/data/src/main/scala/io/prediction/data/storage/hbase/PIOHBaseUtil.scala
+++ /dev/null
@@ -1,28 +0,0 @@
-/** Copyright 2015 TappingStone, Inc.
-  *
-  * Licensed under the Apache License, Version 2.0 (the "License");
-  * you may not use this file except in compliance with the License.
-  * You may obtain a copy of the License at
-  *
-  *     http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-package org.apache.hadoop.hbase.mapreduce
-
-/* Pretends to live in the hbase.mapreduce package in order to expose its
- * package-private static function convertScanToString().
- */
-
-import org.apache.hadoop.hbase.client.Scan
-
-object PIOHBaseUtil {
-  def convertScanToString(scan: Scan): String = {
-    TableMapReduceUtil.convertScanToString(scan)
-  }
-}

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/data/src/main/scala/io/prediction/data/storage/hbase/StorageClient.scala
----------------------------------------------------------------------
diff --git a/data/src/main/scala/io/prediction/data/storage/hbase/StorageClient.scala b/data/src/main/scala/io/prediction/data/storage/hbase/StorageClient.scala
deleted file mode 100644
index bfede39..0000000
--- a/data/src/main/scala/io/prediction/data/storage/hbase/StorageClient.scala
+++ /dev/null
@@ -1,83 +0,0 @@
-/** Copyright 2015 TappingStone, Inc.
-  *
-  * Licensed under the Apache License, Version 2.0 (the "License");
-  * you may not use this file except in compliance with the License.
-  * You may obtain a copy of the License at
-  *
-  *     http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-package io.prediction.data.storage.hbase
-
-import io.prediction.data.storage.BaseStorageClient
-import io.prediction.data.storage.StorageClientConfig
-
-import org.apache.hadoop.conf.Configuration
-import org.apache.hadoop.hbase.HBaseConfiguration
-import org.apache.hadoop.hbase.MasterNotRunningException
-import org.apache.hadoop.hbase.ZooKeeperConnectionException
-import org.apache.hadoop.hbase.client.HConnectionManager
-import org.apache.hadoop.hbase.client.HConnection
-import org.apache.hadoop.hbase.client.HBaseAdmin
-
-import grizzled.slf4j.Logging
-
-case class HBClient(
-  val conf: Configuration,
-  val connection: HConnection,
-  val admin: HBaseAdmin
-)
-
-class StorageClient(val config: StorageClientConfig)
-  extends BaseStorageClient with Logging {
-
-  val conf = HBaseConfiguration.create()
-
-  if (config.test) {
-    // use fewer retries and shorter timeout for test mode
-    conf.set("hbase.client.retries.number", "1")
-    conf.set("zookeeper.session.timeout", "30000");
-    conf.set("zookeeper.recovery.retry", "1")
-  }
-
-  try {
-    HBaseAdmin.checkHBaseAvailable(conf)
-  } catch {
-    case e: MasterNotRunningException =>
-      error("HBase master is not running (ZooKeeper ensemble: " +
-        conf.get("hbase.zookeeper.quorum") + "). Please make sure that HBase " +
-        "is running properly, and that the configuration is pointing at the " +
-        "correct ZooKeeper ensemble.")
-      throw e
-    case e: ZooKeeperConnectionException =>
-      error("Cannot connect to ZooKeeper (ZooKeeper ensemble: " +
-        conf.get("hbase.zookeeper.quorum") + "). Please make sure that the " +
-        "configuration is pointing at the correct ZooKeeper ensemble. By " +
-        "default, HBase manages its own ZooKeeper, so if you have not " +
-        "configured HBase to use an external ZooKeeper, that means your " +
-        "HBase is not started or configured properly.")
-      throw e
-    case e: Exception => {
-      error("Failed to connect to HBase." +
-        " Please check if HBase is running properly.")
-      throw e
-    }
-  }
-
-  val connection = HConnectionManager.createConnection(conf)
-
-  val client = HBClient(
-    conf = conf,
-    connection = connection,
-    admin = new HBaseAdmin(connection)
-  )
-
-  override
-  val prefix = "HB"
-}

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/data/src/main/scala/io/prediction/data/storage/hbase/package.scala
----------------------------------------------------------------------
diff --git a/data/src/main/scala/io/prediction/data/storage/hbase/package.scala b/data/src/main/scala/io/prediction/data/storage/hbase/package.scala
deleted file mode 100644
index 46aa10c..0000000
--- a/data/src/main/scala/io/prediction/data/storage/hbase/package.scala
+++ /dev/null
@@ -1,22 +0,0 @@
-/** Copyright 2015 TappingStone, Inc.
-  *
-  * Licensed under the Apache License, Version 2.0 (the "License");
-  * you may not use this file except in compliance with the License.
-  * You may obtain a copy of the License at
-  *
-  *     http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-package io.prediction.data.storage
-
-/** HBase implementation of storage traits, supporting event data only
-  *
-  * @group Implementation
-  */
-package object hbase {}


[25/34] incubator-predictionio git commit: rename all except examples

Posted by do...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/core/src/main/scala/io/prediction/controller/EngineParams.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/io/prediction/controller/EngineParams.scala b/core/src/main/scala/io/prediction/controller/EngineParams.scala
deleted file mode 100644
index 32f5de7..0000000
--- a/core/src/main/scala/io/prediction/controller/EngineParams.scala
+++ /dev/null
@@ -1,149 +0,0 @@
-/** Copyright 2015 TappingStone, Inc.
-  *
-  * Licensed under the Apache License, Version 2.0 (the "License");
-  * you may not use this file except in compliance with the License.
-  * You may obtain a copy of the License at
-  *
-  *     http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-package io.prediction.controller
-
-import io.prediction.core.BaseDataSource
-import io.prediction.core.BaseAlgorithm
-
-import scala.collection.JavaConversions
-import scala.language.implicitConversions
-
-/** This class serves as a logical grouping of all of an engine's required parameters.
-  *
-  * @param dataSourceParams Data Source name-parameters tuple.
-  * @param preparatorParams Preparator name-parameters tuple.
-  * @param algorithmParamsList List of algorithm name-parameter pairs.
-  * @param servingParams Serving name-parameters tuple.
-  * @group Engine
-  */
-class EngineParams(
-    val dataSourceParams: (String, Params) = ("", EmptyParams()),
-    val preparatorParams: (String, Params) = ("", EmptyParams()),
-    val algorithmParamsList: Seq[(String, Params)] = Seq(),
-    val servingParams: (String, Params) = ("", EmptyParams()))
-  extends Serializable {
-
-  /** Java-friendly constructor
-    *
-    * @param dataSourceName Data Source name
-    * @param dataSourceParams Data Source parameters
-    * @param preparatorName Preparator name
-    * @param preparatorParams Preparator parameters
-    * @param algorithmParamsList Map of algorithm name-parameters
-    * @param servingName Serving name
-    * @param servingParams Serving parameters
-    */
-  def this(
-    dataSourceName: String,
-    dataSourceParams: Params,
-    preparatorName: String,
-    preparatorParams: Params,
-    algorithmParamsList: _root_.java.util.Map[String, _ <: Params],
-    servingName: String,
-    servingParams: Params) = {
-
-    // To work around a weird json4s limitation, the parameter names cannot be changed
-    this(
-      (dataSourceName, dataSourceParams),
-      (preparatorName, preparatorParams),
-      JavaConversions.mapAsScalaMap(algorithmParamsList).toSeq,
-      (servingName, servingParams)
-    )
-  }
-
-  // A case class style copy method.
-  def copy(
-    dataSourceParams: (String, Params) = dataSourceParams,
-    preparatorParams: (String, Params) = preparatorParams,
-    algorithmParamsList: Seq[(String, Params)] = algorithmParamsList,
-    servingParams: (String, Params) = servingParams): EngineParams = {
-
-    new EngineParams(
-      dataSourceParams,
-      preparatorParams,
-      algorithmParamsList,
-      servingParams)
-  }
-}
-
-/** Companion object for creating [[EngineParams]] instances.
-  *
-  * @group Engine
-  */
-object EngineParams {
-  /** Create EngineParams.
-    *
-    * @param dataSourceName Data Source name
-    * @param dataSourceParams Data Source parameters
-    * @param preparatorName Preparator name
-    * @param preparatorParams Preparator parameters
-    * @param algorithmParamsList List of algorithm name-parameter pairs.
-    * @param servingName Serving name
-    * @param servingParams Serving parameters
-    */
-  def apply(
-    dataSourceName: String = "",
-    dataSourceParams: Params = EmptyParams(),
-    preparatorName: String = "",
-    preparatorParams: Params = EmptyParams(),
-    algorithmParamsList: Seq[(String, Params)] = Seq(),
-    servingName: String = "",
-    servingParams: Params = EmptyParams()): EngineParams = {
-      new EngineParams(
-        dataSourceParams = (dataSourceName, dataSourceParams),
-        preparatorParams = (preparatorName, preparatorParams),
-        algorithmParamsList = algorithmParamsList,
-        servingParams = (servingName, servingParams)
-      )
-    }
-}
-
-/** SimpleEngine has only one algorithm, and uses the default preparator and
-  * serving layer. The current default preparator is `IdentityPreparator` and
-  * the default serving layer is `FirstServing`.
-  *
-  * @tparam TD Training data class.
-  * @tparam EI Evaluation info class.
-  * @tparam Q Input query class.
-  * @tparam P Output prediction class.
-  * @tparam A Actual value class.
-  * @param dataSourceClass Data source class.
-  * @param algorithmClass Algorithm class.
-  * @group Engine
-  */
-class SimpleEngine[TD, EI, Q, P, A](
-    dataSourceClass: Class[_ <: BaseDataSource[TD, EI, Q, A]],
-    algorithmClass: Class[_ <: BaseAlgorithm[TD, _, Q, P]])
-  extends Engine(
-    dataSourceClass,
-    IdentityPreparator(dataSourceClass),
-    Map("" -> algorithmClass),
-    LFirstServing(algorithmClass))
-
-/** This shorthand class provides parameters for the `SimpleEngine` class.
-  *
-  * @param dataSourceParams Data source parameters.
-  * @param algorithmParams List of algorithm name-parameter pairs.
-  * @group Engine
-  */
-class SimpleEngineParams(
-    dataSourceParams: Params = EmptyParams(),
-    algorithmParams: Params = EmptyParams())
-  extends EngineParams(
-    dataSourceParams = ("", dataSourceParams),
-    algorithmParamsList = Seq(("", algorithmParams)))
-
-
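
For reference, a minimal sketch of constructing an EngineParams instance via the companion object's apply() above. MyDataSourceParams and MyAlgoParams are hypothetical stand-ins; any Params subclass works.

    import io.prediction.controller.{EngineParams, Params}

    // Hypothetical parameter case classes (not part of the sources above).
    case class MyDataSourceParams(appName: String) extends Params
    case class MyAlgoParams(rank: Int) extends Params

    // Component names default to "", matching a single unnamed component
    // (cf. SimpleEngine's Map("" -> algorithmClass)).
    val ep = EngineParams(
      dataSourceParams = MyDataSourceParams("MyApp"),
      algorithmParamsList = Seq(("", MyAlgoParams(rank = 10))))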

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/core/src/main/scala/io/prediction/controller/EngineParamsGenerator.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/io/prediction/controller/EngineParamsGenerator.scala b/core/src/main/scala/io/prediction/controller/EngineParamsGenerator.scala
deleted file mode 100644
index a9bf3eb..0000000
--- a/core/src/main/scala/io/prediction/controller/EngineParamsGenerator.scala
+++ /dev/null
@@ -1,43 +0,0 @@
-/** Copyright 2015 TappingStone, Inc.
-  *
-  * Licensed under the Apache License, Version 2.0 (the "License");
-  * you may not use this file except in compliance with the License.
-  * You may obtain a copy of the License at
-  *
-  *     http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-package io.prediction.controller
-
-import scala.language.implicitConversions
-
-/** Defines an engine parameters generator.
-  *
-  * Implementations of this trait can be supplied to "pio eval" as the second
-  * command line argument.
-  *
-  * @group Evaluation
-  */
-trait EngineParamsGenerator {
-  protected[this] var epList: Seq[EngineParams] = _
-  protected[this] var epListSet: Boolean = false
-
-  /** Returns the list of [[EngineParams]] of this [[EngineParamsGenerator]]. */
-  def engineParamsList: Seq[EngineParams] = {
-    assert(epListSet, "EngineParamsList not set")
-    epList
-  }
-
-  /** Sets the list of [[EngineParams]] of this [[EngineParamsGenerator]]. */
-  def engineParamsList_=(l: Seq[EngineParams]) {
-    assert(!epListSet, "EngineParamsList can be set at most once")
-    epList = Seq(l:_*)
-    epListSet = true
-  }
-}
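
A minimal sketch of a generator that could be passed to "pio eval" as the second argument, reusing the hypothetical MyDataSourceParams and MyAlgoParams from the previous sketch. Note that engineParamsList may be assigned at most once, per the assertion above.

    import io.prediction.controller.{EngineParams, EngineParamsGenerator}

    object RankSweep extends EngineParamsGenerator {
      private[this] val base = EngineParams(
        dataSourceParams = MyDataSourceParams("MyApp"))
      // One EngineParams per candidate rank, built with the copy() method.
      engineParamsList = Seq(5, 10, 20).map { r =>
        base.copy(algorithmParamsList = Seq(("", MyAlgoParams(rank = r))))
      }
    }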

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/core/src/main/scala/io/prediction/controller/Evaluation.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/io/prediction/controller/Evaluation.scala b/core/src/main/scala/io/prediction/controller/Evaluation.scala
deleted file mode 100644
index a6ee9a7..0000000
--- a/core/src/main/scala/io/prediction/controller/Evaluation.scala
+++ /dev/null
@@ -1,122 +0,0 @@
-/** Copyright 2015 TappingStone, Inc.
-  *
-  * Licensed under the Apache License, Version 2.0 (the "License");
-  * you may not use this file except in compliance with the License.
-  * You may obtain a copy of the License at
-  *
-  *     http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-package io.prediction.controller
-
-import io.prediction.core.BaseEngine
-import io.prediction.core.BaseEvaluator
-import io.prediction.core.BaseEvaluatorResult
-
-import scala.language.implicitConversions
-
-/** Defines an evaluation that contains an engine and a metric.
-  *
-  * Implementations of this trait can be supplied to "pio eval" as the first
-  * argument.
-  *
-  * @group Evaluation
-  */
-trait Evaluation extends Deployment {
-  protected [this] var _evaluatorSet: Boolean = false
-  protected [this] var _evaluator: BaseEvaluator[_, _, _, _, _ <: BaseEvaluatorResult] = _
-
-  private [prediction]
-  def evaluator: BaseEvaluator[_, _, _, _, _ <: BaseEvaluatorResult] = {
-    assert(_evaluatorSet, "Evaluator not set")
-    _evaluator
-  }
-
-  /** Gets the tuple of the [[Engine]] and the implementation of
-    * [[io.prediction.core.BaseEvaluator]]
-    */
-  def engineEvaluator
-  : (BaseEngine[_, _, _, _], BaseEvaluator[_, _, _, _, _]) = {
-    assert(_evaluatorSet, "Evaluator not set")
-    (engine, _evaluator)
-  }
-
-  /** Sets both an [[Engine]] and an implementation of
-    * [[io.prediction.core.BaseEvaluator]] for this [[Evaluation]]
-    *
-    * @param engineEvaluator A tuple of an [[Engine]] and an implementation of
-    *                        [[io.prediction.core.BaseEvaluator]]
-    * @tparam EI Evaluation information class
-    * @tparam Q Query class
-    * @tparam P Predicted result class
-    * @tparam A Actual result class
-    * @tparam R Metric result class
-    */
-  def engineEvaluator_=[EI, Q, P, A, R <: BaseEvaluatorResult](
-    engineEvaluator: (
-      BaseEngine[EI, Q, P, A],
-      BaseEvaluator[EI, Q, P, A, R])) {
-    assert(!_evaluatorSet, "Evaluator can be set at most once")
-    engine = engineEvaluator._1
-    _evaluator = engineEvaluator._2
-    _evaluatorSet = true
-  }
-
-  /** Returns both the [[Engine]] and the implementation of [[Metric]] for this
-    * [[Evaluation]]
-    */
-  def engineMetric: (BaseEngine[_, _, _, _], Metric[_, _, _, _, _]) = {
-    throw new NotImplementedError("This method is to keep the compiler happy")
-  }
-
-  /** Sets both an [[Engine]] and an implementation of [[Metric]] for this
-    * [[Evaluation]]
-    *
-    * @param engineMetric A tuple of [[Engine]] and an implementation of
-    *                     [[Metric]]
-    * @tparam EI Evaluation information class
-    * @tparam Q Query class
-    * @tparam P Predicted result class
-    * @tparam A Actual result class
-    */
-  def engineMetric_=[EI, Q, P, A](
-    engineMetric: (BaseEngine[EI, Q, P, A], Metric[EI, Q, P, A, _])) {
-    engineEvaluator = (
-      engineMetric._1,
-      MetricEvaluator(
-        metric = engineMetric._2,
-        otherMetrics = Seq[Metric[EI, Q, P, A, _]](),
-        outputPath = "best.json"))
-  }
-
-  private [prediction]
-  def engineMetrics: (BaseEngine[_, _, _, _], Metric[_, _, _, _, _]) = {
-    throw new NotImplementedError("This method is to keep the compiler happy")
-  }
-
-  /** Sets an [[Engine]], an implementation of [[Metric]], and a sequence of
-    * implementations of [[Metric]] for this [[Evaluation]]
-    *
-    * @param engineMetrics A tuple of [[Engine]], an implementation of
-    *                      [[Metric]], and a sequence of implementations of [[Metric]]
-    * @tparam EI Evaluation information class
-    * @tparam Q Query class
-    * @tparam P Predicted result class
-    * @tparam A Actual result class
-    */
-  def engineMetrics_=[EI, Q, P, A](
-    engineMetrics: (
-      BaseEngine[EI, Q, P, A],
-      Metric[EI, Q, P, A, _],
-      Seq[Metric[EI, Q, P, A, _]])) {
-    engineEvaluator = (
-      engineMetrics._1,
-      MetricEvaluator(engineMetrics._2, engineMetrics._3))
-  }
-}
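
A sketch of the typical wiring: assigning an (engine, metric) tuple to engineMetric, which wraps the metric in a MetricEvaluator writing to best.json, as shown above. MyEngineFactory is hypothetical, and Accuracy is the hypothetical metric sketched after Metric.scala further below.

    import io.prediction.controller.Evaluation

    object MyEvaluation extends Evaluation {
      // MyEngineFactory() and Accuracy are hypothetical; the engine's EI/Q/P/A
      // type parameters must line up with the metric's.
      engineMetric = (MyEngineFactory(), new Accuracy)
    }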

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/core/src/main/scala/io/prediction/controller/FastEvalEngine.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/io/prediction/controller/FastEvalEngine.scala b/core/src/main/scala/io/prediction/controller/FastEvalEngine.scala
deleted file mode 100644
index 8e9727e..0000000
--- a/core/src/main/scala/io/prediction/controller/FastEvalEngine.scala
+++ /dev/null
@@ -1,343 +0,0 @@
-/** Copyright 2015 TappingStone, Inc.
-  *
-  * Licensed under the Apache License, Version 2.0 (the "License");
-  * you may not use this file except in compliance with the License.
-  * You may obtain a copy of the License at
-  *
-  *     http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-package io.prediction.controller
-
-import io.prediction.core.BaseDataSource
-import io.prediction.core.BasePreparator
-import io.prediction.core.BaseAlgorithm
-import io.prediction.core.BaseServing
-import io.prediction.core.Doer
-import io.prediction.annotation.Experimental
-
-import grizzled.slf4j.Logger
-import io.prediction.workflow.WorkflowParams
-import org.apache.spark.SparkContext
-import org.apache.spark.SparkContext._
-import org.apache.spark.rdd.RDD
-
-import scala.language.implicitConversions
-
-import _root_.java.util.NoSuchElementException
-
-import scala.collection.mutable.{ HashMap => MutableHashMap }
-
-/** :: Experimental ::
-  * Workflow based on [[FastEvalEngine]]
-  *
-  * @group Evaluation
-  */
-@Experimental
-object FastEvalEngineWorkflow  {
-  @transient lazy val logger = Logger[this.type]
-
-  type EX = Int
-  type AX = Int
-  type QX = Long
-
-  case class DataSourcePrefix(dataSourceParams: (String, Params)) {
-    def this(pp: PreparatorPrefix) = this(pp.dataSourceParams)
-    def this(ap: AlgorithmsPrefix) = this(ap.dataSourceParams)
-    def this(sp: ServingPrefix) = this(sp.dataSourceParams)
-  }
-
-  case class PreparatorPrefix(
-    dataSourceParams: (String, Params),
-    preparatorParams: (String, Params)) {
-    def this(ap: AlgorithmsPrefix) = {
-      this(ap.dataSourceParams, ap.preparatorParams)
-    }
-  }
-
-  case class AlgorithmsPrefix(
-    dataSourceParams: (String, Params),
-    preparatorParams: (String, Params),
-    algorithmParamsList: Seq[(String, Params)]) {
-    def this(sp: ServingPrefix) = {
-      this(sp.dataSourceParams, sp.preparatorParams, sp.algorithmParamsList)
-    }
-  }
-
-  case class ServingPrefix(
-    dataSourceParams: (String, Params),
-    preparatorParams: (String, Params),
-    algorithmParamsList: Seq[(String, Params)],
-    servingParams: (String, Params)) {
-    def this(ep: EngineParams) = this(
-      ep.dataSourceParams,
-      ep.preparatorParams,
-      ep.algorithmParamsList,
-      ep.servingParams)
-  }
-
-  def getDataSourceResult[TD, EI, PD, Q, P, A](
-    workflow: FastEvalEngineWorkflow[TD, EI, PD, Q, P, A],
-    prefix: DataSourcePrefix)
-  : Map[EX, (TD, EI, RDD[(QX, (Q, A))])] = {
-    val cache = workflow.dataSourceCache
-
-    if (!cache.contains(prefix)) {
-      val dataSource = Doer(
-        workflow.engine.dataSourceClassMap(prefix.dataSourceParams._1),
-        prefix.dataSourceParams._2)
-
-      val result = dataSource
-      .readEvalBase(workflow.sc)
-      .map { case (td, ei, qaRDD) => {
-        (td, ei, qaRDD.zipWithUniqueId().map(_.swap))
-      }}
-      .zipWithIndex
-      .map(_.swap)
-      .toMap
-
-      cache += Tuple2(prefix, result)
-    }
-    cache(prefix)
-  }
-
-  def getPreparatorResult[TD, EI, PD, Q, P, A](
-    workflow: FastEvalEngineWorkflow[TD, EI, PD, Q, P, A],
-    prefix: PreparatorPrefix): Map[EX, PD] = {
-    val cache = workflow.preparatorCache
-
-    if (!cache.contains(prefix)) {
-      val preparator = Doer(
-        workflow.engine.preparatorClassMap(prefix.preparatorParams._1),
-        prefix.preparatorParams._2)
-
-      val result = getDataSourceResult(
-        workflow = workflow,
-        prefix = new DataSourcePrefix(prefix))
-      .mapValues { case (td, _, _) => preparator.prepareBase(workflow.sc, td) }
-
-      cache += Tuple2(prefix, result)
-    }
-    cache(prefix)
-  }
-
-  def computeAlgorithmsResult[TD, EI, PD, Q, P, A](
-    workflow: FastEvalEngineWorkflow[TD, EI, PD, Q, P, A],
-    prefix: AlgorithmsPrefix): Map[EX, RDD[(QX, Seq[P])]] = {
-
-    val algoMap: Map[AX, BaseAlgorithm[PD, _, Q, P]] = prefix.algorithmParamsList
-      .map { case (algoName, algoParams) => {
-        try {
-          Doer(workflow.engine.algorithmClassMap(algoName), algoParams)
-        } catch {
-          case e: NoSuchElementException => {
-            val algorithmClassMap = workflow.engine.algorithmClassMap
-            if (algoName == "") {
-              logger.error("Empty algorithm name supplied but it could not " +
-                "match with any algorithm in the engine's definition. " +
-                "Existing algorithm name(s) are: " +
-                s"${algorithmClassMap.keys.mkString(", ")}. Aborting.")
-            } else {
-              logger.error(s"${algoName} cannot be found in the engine's " +
-                "definition. Existing algorithm name(s) are: " +
-                s"${algorithmClassMap.keys.mkString(", ")}. Aborting.")
-            }
-            sys.exit(1)
-          }
-        }
-      }}
-      .zipWithIndex
-      .map(_.swap)
-      .toMap
-
-    val algoCount = algoMap.size
-
-    // Model Train
-    val algoModelsMap: Map[EX, Map[AX, Any]] = getPreparatorResult(
-      workflow,
-      new PreparatorPrefix(prefix))
-    .mapValues {
-      pd => algoMap.mapValues(_.trainBase(workflow.sc,pd))
-    }
-
-    // Predict
-    val dataSourceResult =
-      FastEvalEngineWorkflow.getDataSourceResult(
-        workflow = workflow,
-        prefix = new DataSourcePrefix(prefix))
-
-    val algoResult: Map[EX, RDD[(QX, Seq[P])]] = dataSourceResult
-    .par
-    .map { case (ex, (td, ei, iqaRDD)) => {
-      val modelsMap: Map[AX, Any] = algoModelsMap(ex)
-      val qs: RDD[(QX, Q)] = iqaRDD.mapValues(_._1)
-
-      val algoPredicts: Seq[RDD[(QX, (AX, P))]] = (0 until algoCount)
-      .map { ax => {
-        val algo = algoMap(ax)
-        val model = modelsMap(ax)
-        val rawPredicts: RDD[(QX, P)] = algo.batchPredictBase(
-          workflow.sc,
-          model,
-          qs)
-
-        val predicts: RDD[(QX, (AX, P))] = rawPredicts.map {
-          case (qx, p) => (qx, (ax, p))
-        }
-        predicts
-      }}
-
-      val unionAlgoPredicts: RDD[(QX, Seq[P])] = workflow.sc
-      .union(algoPredicts)
-      .groupByKey
-      .mapValues { ps => {
-        assert (ps.size == algoCount, "Must have same length as algoCount")
-        // TODO. Check size == algoCount
-        ps.toSeq.sortBy(_._1).map(_._2)
-      }}
-      (ex, unionAlgoPredicts)
-    }}
-    .seq
-    .toMap
-
-    algoResult
-  }
-
-  def getAlgorithmsResult[TD, EI, PD, Q, P, A](
-    workflow: FastEvalEngineWorkflow[TD, EI, PD, Q, P, A],
-    prefix: AlgorithmsPrefix): Map[EX, RDD[(QX, Seq[P])]] = {
-    val cache = workflow.algorithmsCache
-    if (!cache.contains(prefix)) {
-      val result = computeAlgorithmsResult(workflow, prefix)
-      cache += Tuple2(prefix, result)
-    }
-    cache(prefix)
-  }
-
-  def getServingResult[TD, EI, PD, Q, P, A](
-    workflow: FastEvalEngineWorkflow[TD, EI, PD, Q, P, A],
-    prefix: ServingPrefix)
-  : Seq[(EI, RDD[(Q, P, A)])] = {
-    val cache = workflow.servingCache
-    if (!cache.contains(prefix)) {
-      val serving = Doer(
-        workflow.engine.servingClassMap(prefix.servingParams._1),
-        prefix.servingParams._2)
-
-      val algoPredictsMap = getAlgorithmsResult(
-        workflow = workflow,
-        prefix = new AlgorithmsPrefix(prefix))
-
-      val dataSourceResult = getDataSourceResult(
-        workflow = workflow,
-        prefix = new DataSourcePrefix(prefix))
-
-      val evalQAsMap = dataSourceResult.mapValues(_._3)
-      val evalInfoMap = dataSourceResult.mapValues(_._2)
-
-      val servingQPAMap: Map[EX, RDD[(Q, P, A)]] = algoPredictsMap
-      .map { case (ex, psMap) => {
-        val qasMap: RDD[(QX, (Q, A))] = evalQAsMap(ex)
-        val qpsaMap: RDD[(QX, Q, Seq[P], A)] = psMap.join(qasMap)
-        .map { case (qx, t) => (qx, t._2._1, t._1, t._2._2) }
-
-        val qpaMap: RDD[(Q, P, A)] = qpsaMap.map {
-          case (qx, q, ps, a) => (q, serving.serveBase(q, ps), a)
-        }
-        (ex, qpaMap)
-      }}
-
-      val servingResult = (0 until evalQAsMap.size).map { ex => {
-        (evalInfoMap(ex), servingQPAMap(ex))
-      }}
-      .toSeq
-
-      cache += Tuple2(prefix, servingResult)
-    }
-    cache(prefix)
-  }
-
-  def get[TD, EI, PD, Q, P, A](
-    workflow: FastEvalEngineWorkflow[TD, EI, PD, Q, P, A],
-    engineParamsList: Seq[EngineParams])
-  : Seq[(EngineParams, Seq[(EI, RDD[(Q, P, A)])])] = {
-    engineParamsList.map { engineParams => {
-      (engineParams,
-        getServingResult(workflow, new ServingPrefix(engineParams)))
-    }}
-  }
-}
-
-/** :: Experimental ::
-  * Workflow based on [[FastEvalEngine]]
-  *
-  * @group Evaluation
-  */
-@Experimental
-class FastEvalEngineWorkflow[TD, EI, PD, Q, P, A](
-  val engine: FastEvalEngine[TD, EI, PD, Q, P, A],
-  val sc: SparkContext,
-  val workflowParams: WorkflowParams) extends Serializable {
-
-  import io.prediction.controller.FastEvalEngineWorkflow._
-
-  type DataSourceResult = Map[EX, (TD, EI, RDD[(QX, (Q, A))])]
-  type PreparatorResult = Map[EX, PD]
-  type AlgorithmsResult = Map[EX, RDD[(QX, Seq[P])]]
-  type ServingResult = Seq[(EI, RDD[(Q, P, A)])]
-
-  val dataSourceCache = MutableHashMap[DataSourcePrefix, DataSourceResult]()
-  val preparatorCache = MutableHashMap[PreparatorPrefix, PreparatorResult]()
-  val algorithmsCache = MutableHashMap[AlgorithmsPrefix, AlgorithmsResult]()
-  val servingCache = MutableHashMap[ServingPrefix, ServingResult]()
-}
-
-
-
-/** :: Experimental ::
-  * FastEvalEngine is a subclass of [[Engine]] that exploits the immutability of
-  * controllers to optimize the evaluation process
-  *
-  * @group Evaluation
-  */
-@Experimental
-class FastEvalEngine[TD, EI, PD, Q, P, A](
-    dataSourceClassMap: Map[String, Class[_ <: BaseDataSource[TD, EI, Q, A]]],
-    preparatorClassMap: Map[String, Class[_ <: BasePreparator[TD, PD]]],
-    algorithmClassMap: Map[String, Class[_ <: BaseAlgorithm[PD, _, Q, P]]],
-    servingClassMap: Map[String, Class[_ <: BaseServing[Q, P]]])
-  extends Engine[TD, EI, PD, Q, P, A](
-    dataSourceClassMap,
-    preparatorClassMap,
-    algorithmClassMap,
-    servingClassMap) {
-  @transient override lazy val logger = Logger[this.type]
-
-  override def eval(
-    sc: SparkContext,
-    engineParams: EngineParams,
-    params: WorkflowParams): Seq[(EI, RDD[(Q, P, A)])] = {
-    logger.info("FastEvalEngine.eval")
-    batchEval(sc, Seq(engineParams), params).head._2
-  }
-
-  override def batchEval(
-    sc: SparkContext,
-    engineParamsList: Seq[EngineParams],
-    params: WorkflowParams)
-  : Seq[(EngineParams, Seq[(EI, RDD[(Q, P, A)])])] = {
-
-    val fastEngineWorkflow = new FastEvalEngineWorkflow(
-      this, sc, params)
-
-    FastEvalEngineWorkflow.get(
-      fastEngineWorkflow,
-      engineParamsList)
-  }
-}
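
The prefix case classes above serve as memoization keys: engine parameter sets that share, say, identical data source and preparator parameters hit the same cache entries and skip recomputation of those stages. A sketch of opting in, assuming the hypothetical My* controller classes used elsewhere in these sketches:

    import io.prediction.controller.{EngineFactory, FastEvalEngine}

    object FastEngineFactory extends EngineFactory {
      // Same class maps as a regular Engine; only batchEval's execution
      // strategy changes.
      def apply() = new FastEvalEngine(
        Map("" -> classOf[MyDataSource]),
        Map("" -> classOf[MyPreparator]),
        Map("" -> classOf[MyAlgorithm]),
        Map("" -> classOf[MyServing]))
    }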

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/core/src/main/scala/io/prediction/controller/IdentityPreparator.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/io/prediction/controller/IdentityPreparator.scala b/core/src/main/scala/io/prediction/controller/IdentityPreparator.scala
deleted file mode 100644
index 0bf3cb0..0000000
--- a/core/src/main/scala/io/prediction/controller/IdentityPreparator.scala
+++ /dev/null
@@ -1,92 +0,0 @@
-/** Copyright 2015 TappingStone, Inc.
-  *
-  * Licensed under the Apache License, Version 2.0 (the "License");
-  * you may not use this file except in compliance with the License.
-  * You may obtain a copy of the License at
-  *
-  *     http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-package io.prediction.controller
-
-import io.prediction.core.BaseDataSource
-import io.prediction.core.BasePreparator
-import org.apache.spark.SparkContext
-
-import scala.reflect._
-
-/** A helper concrete implementation of [[io.prediction.core.BasePreparator]]
-  * that passes training data through without any special preparation. It can
-  * be used in place of both [[PPreparator]] and [[LPreparator]].
-  *
-  * @tparam TD Training data class.
-  * @group Preparator
-  */
-class IdentityPreparator[TD] extends BasePreparator[TD, TD] {
-  def prepareBase(sc: SparkContext, td: TD): TD = td
-}
-
-/** Companion object of [[IdentityPreparator]] that conveniently returns an
-  * instance of the class of [[IdentityPreparator]] for use with
-  * [[EngineFactory]].
-  *
-  * @group Preparator
-  */
-object IdentityPreparator {
-  /** Produces an instance of the class of [[IdentityPreparator]].
-    *
-    * @param ds Instance of the class of the data source for this preparator.
-    */
-  def apply[TD](ds: Class[_ <: BaseDataSource[TD, _, _, _]]): Class[IdentityPreparator[TD]] =
-    classOf[IdentityPreparator[TD]]
-}
-
-/** DEPRECATED. Use [[IdentityPreparator]] instead.
-  *
-  * @tparam TD Training data class.
-  * @group Preparator
-  */
-@deprecated("Use IdentityPreparator instead.", "0.9.2")
-class PIdentityPreparator[TD] extends IdentityPreparator[TD]
-
-/** DEPRECATED. Use [[IdentityPreparator]] instead.
-  *
-  * @group Preparator
-  */
-@deprecated("Use IdentityPreparator instead.", "0.9.2")
-object PIdentityPreparator {
-  /** Produces an instance of the class of [[IdentityPreparator]].
-    *
-    * @param ds Instance of the class of the data source for this preparator.
-    */
-  def apply[TD](ds: Class[_ <: BaseDataSource[TD, _, _, _]]): Class[IdentityPreparator[TD]] =
-    classOf[IdentityPreparator[TD]]
-}
-
-/** DEPRECATED. Use [[IdentityPreparator]] instead.
-  *
-  * @tparam TD Training data class.
-  * @group Preparator
-  */
-@deprecated("Use IdentityPreparator instead.", "0.9.2")
-class LIdentityPreparator[TD] extends IdentityPreparator[TD]
-
-/** DEPRECATED. Use [[IdentityPreparator]] instead.
-  *
-  * @group Preparator
-  */
-@deprecated("Use IdentityPreparator instead.", "0.9.2")
-object LIdentityPreparator {
-  /** Produces an instance of the class of [[IdentityPreparator]].
-    *
-    * @param ds Instance of the class of the data source for this preparator.
-    */
-  def apply[TD](ds: Class[_ <: BaseDataSource[TD, _, _, _]]): Class[IdentityPreparator[TD]] =
-    classOf[IdentityPreparator[TD]]
-}
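
A sketch of the usual call site, mirroring the SimpleEngine definition earlier in this commit: the algorithm must consume the training data type directly, since no transformation happens. MyDataSource and RawAlgorithm are hypothetical.

    import io.prediction.controller.{Engine, EngineFactory, IdentityPreparator, LFirstServing}

    object IdentityEngineFactory extends EngineFactory {
      def apply() = new Engine(
        classOf[MyDataSource],
        // Training data flows to the algorithm unchanged.
        IdentityPreparator(classOf[MyDataSource]),
        Map("" -> classOf[RawAlgorithm]),
        LFirstServing(classOf[RawAlgorithm]))
    }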

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/core/src/main/scala/io/prediction/controller/LAlgorithm.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/io/prediction/controller/LAlgorithm.scala b/core/src/main/scala/io/prediction/controller/LAlgorithm.scala
deleted file mode 100644
index 467a4a0..0000000
--- a/core/src/main/scala/io/prediction/controller/LAlgorithm.scala
+++ /dev/null
@@ -1,130 +0,0 @@
-/** Copyright 2015 TappingStone, Inc.
-  *
-  * Licensed under the Apache License, Version 2.0 (the "License");
-  * you may not use this file except in compliance with the License.
-  * You may obtain a copy of the License at
-  *
-  *     http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-package io.prediction.controller
-
-import _root_.io.prediction.annotation.DeveloperApi
-import io.prediction.core.BaseAlgorithm
-import io.prediction.workflow.PersistentModelManifest
-import org.apache.spark.SparkContext
-import org.apache.spark.rdd.RDD
-
-import scala.reflect._
-
-/** Base class of a local algorithm.
-  *
-  * A local algorithm runs locally within a single machine and produces a model
-  * that can fit within a single machine.
-  *
-  * If your input query class requires custom JSON4S serialization, the most
-  * idiomatic way is to implement a trait that extends [[CustomQuerySerializer]],
-  * and mix that into your algorithm class, instead of overriding
-  * [[querySerializer]] directly.
-  *
-  * @tparam PD Prepared data class.
-  * @tparam M Trained model class.
-  * @tparam Q Input query class.
-  * @tparam P Output prediction class.
-  * @group Algorithm
-  */
-abstract class LAlgorithm[PD, M : ClassTag, Q, P]
-  extends BaseAlgorithm[RDD[PD], RDD[M], Q, P] {
-
-  def trainBase(sc: SparkContext, pd: RDD[PD]): RDD[M] = pd.map(train)
-
-  /** Implement this method to produce a model from prepared data.
-    *
-    * @param pd Prepared data for model training.
-    * @return Trained model.
-    */
-  def train(pd: PD): M
-
-  def batchPredictBase(sc: SparkContext, bm: Any, qs: RDD[(Long, Q)])
-  : RDD[(Long, P)] = {
-    val mRDD = bm.asInstanceOf[RDD[M]]
-    batchPredict(mRDD, qs)
-  }
-
-  /** This is a default implementation to perform batch prediction. Override
-    * this method for a custom implementation.
-    *
-    * @param mRDD A single model wrapped inside an RDD
-    * @param qs An RDD of index-query tuples. The index is used to keep track of
-    *           predicted results with corresponding queries.
-    * @return Batch of predicted results
-    */
-  def batchPredict(mRDD: RDD[M], qs: RDD[(Long, Q)]): RDD[(Long, P)] = {
-    val glomQs: RDD[Array[(Long, Q)]] = qs.glom()
-    val cartesian: RDD[(M, Array[(Long, Q)])] = mRDD.cartesian(glomQs)
-    cartesian.flatMap { case (m, qArray) =>
-      qArray.map { case (qx, q) => (qx, predict(m, q)) }
-    }
-  }
-
-  def predictBase(localBaseModel: Any, q: Q): P = {
-    predict(localBaseModel.asInstanceOf[M], q)
-  }
-
-  /** Implement this method to produce a prediction from a query and trained
-    * model.
-    *
-    * @param m Trained model produced by [[train]].
-    * @param q An input query.
-    * @return A prediction.
-    */
-  def predict(m: M, q: Q): P
-
-  /** :: DeveloperApi ::
-    * Engine developers should not use this directly (read on to see how local
-    * algorithm models are persisted).
-    *
-    * Local algorithms produce local models. By default, models will be
-    * serialized and stored automatically. Engine developers can override this behavior by
-    * mixing the [[PersistentModel]] trait into the model class, and
-    * PredictionIO will call [[PersistentModel.save]] instead. If it returns
-    * true, a [[io.prediction.workflow.PersistentModelManifest]] will be
-    * returned so that during deployment, PredictionIO will use
-    * [[PersistentModelLoader]] to retrieve the model. Otherwise, Unit will be
-    * returned and the model will be re-trained on-the-fly.
-    *
-    * @param sc Spark context
-    * @param modelId Model ID
-    * @param algoParams Algorithm parameters that trained this model
-    * @param bm Model
-    * @return The model itself for automatic persistence, an instance of
-    *         [[io.prediction.workflow.PersistentModelManifest]] for manual
-    *         persistence, or Unit for re-training on deployment
-    */
-  @DeveloperApi
-  override
-  def makePersistentModel(
-    sc: SparkContext,
-    modelId: String,
-    algoParams: Params,
-    bm: Any): Any = {
-    // Check RDD[M].count == 1
-    val m = bm.asInstanceOf[RDD[M]].first()
-    if (m.isInstanceOf[PersistentModel[_]]) {
-      if (m.asInstanceOf[PersistentModel[Params]].save(
-        modelId, algoParams, sc)) {
-        PersistentModelManifest(className = m.getClass.getName)
-      } else {
-        Unit
-      }
-    } else {
-      m
-    }
-  }
-}
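
A minimal concrete local algorithm, under hypothetical types: prepared data is a bag of (feature, label) pairs, the model is a single Double, and queries and predictions are Doubles.

    import io.prediction.controller.LAlgorithm

    // Hypothetical prepared-data class (reused by later sketches).
    case class MyPreparedData(points: Seq[(Double, Double)])

    class MeanAlgorithm extends LAlgorithm[MyPreparedData, Double, Double, Double] {
      // train() runs once per RDD element; trainBase above wraps it as pd.map(train).
      def train(pd: MyPreparedData): Double =
        pd.points.map(_._2).sum / pd.points.size

      // predict() ignores the query and always returns the learned mean.
      def predict(m: Double, q: Double): Double = m
    }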

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/core/src/main/scala/io/prediction/controller/LAverageServing.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/io/prediction/controller/LAverageServing.scala b/core/src/main/scala/io/prediction/controller/LAverageServing.scala
deleted file mode 100644
index 80981ab..0000000
--- a/core/src/main/scala/io/prediction/controller/LAverageServing.scala
+++ /dev/null
@@ -1,41 +0,0 @@
-/** Copyright 2015 TappingStone, Inc.
-  *
-  * Licensed under the Apache License, Version 2.0 (the "License");
-  * you may not use this file except in compliance with the License.
-  * You may obtain a copy of the License at
-  *
-  *     http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-package io.prediction.controller
-
-import io.prediction.core.BaseAlgorithm
-
-/** A concrete implementation of [[LServing]] returning the average of all
-  * algorithms' predictions, where their classes are expected to be all Double.
-  *
-  * @group Serving
-  */
-class LAverageServing[Q] extends LServing[Q, Double] {
-  /** Returns the average of all algorithms' predictions. */
-  def serve(query: Q, predictions: Seq[Double]): Double = {
-    predictions.sum / predictions.length
-  }
-}
-
-/** A concrete implementation of [[LServing]] returning the average of all
-  * algorithms' predictions, where their classes are expected to be all Double.
-  *
-  * @group Serving
-  */
-object LAverageServing {
-  /** Returns an instance of [[LAverageServing]]. */
-  def apply[Q](a: Class[_ <: BaseAlgorithm[_, _, Q, _]]): Class[LAverageServing[Q]] =
-    classOf[LAverageServing[Q]]
-}
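
A quick illustration of the behavior in isolation, not tied to any engine: serve() simply averages whatever the algorithms produced for a query.

    import io.prediction.controller.LAverageServing

    val avg = new LAverageServing[Double]
    assert(avg.serve(query = 1.0, predictions = Seq(2.0, 4.0)) == 3.0)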

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/core/src/main/scala/io/prediction/controller/LDataSource.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/io/prediction/controller/LDataSource.scala b/core/src/main/scala/io/prediction/controller/LDataSource.scala
deleted file mode 100644
index aa53c8f..0000000
--- a/core/src/main/scala/io/prediction/controller/LDataSource.scala
+++ /dev/null
@@ -1,67 +0,0 @@
-/** Copyright 2015 TappingStone, Inc.
-  *
-  * Licensed under the Apache License, Version 2.0 (the "License");
-  * you may not use this file except in compliance with the License.
-  * You may obtain a copy of the License at
-  *
-  *     http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-package io.prediction.controller
-
-import io.prediction.core.BaseDataSource
-import org.apache.spark.SparkContext
-import org.apache.spark.rdd.RDD
-
-import scala.reflect._
-
-/** Base class of a local data source.
-  *
-  * A local data source runs locally within a single machine and returns data
-  * that can fit within a single machine.
-  *
-  * @tparam TD Training data class.
-  * @tparam EI Evaluation Info class.
-  * @tparam Q Input query class.
-  * @tparam A Actual value class.
-  * @group Data Source
-  */
-abstract class LDataSource[TD: ClassTag, EI, Q, A]
-  extends BaseDataSource[RDD[TD], EI, Q, A] {
-
-  def readTrainingBase(sc: SparkContext): RDD[TD] = {
-    sc.parallelize(Seq(None)).map(_ => readTraining())
-  }
-
-  /** Implement this method to only return training data from a data source */
-  def readTraining(): TD
-
-  def readEvalBase(sc: SparkContext): Seq[(RDD[TD], EI, RDD[(Q, A)])] = {
-    val localEvalData: Seq[(TD, EI, Seq[(Q, A)])] = readEval()
-
-    localEvalData.map { case (td, ei, qaSeq) => {
-      val tdRDD = sc.parallelize(Seq(None)).map(_ => td)
-      val qaRDD = sc.parallelize(qaSeq)
-      (tdRDD, ei, qaRDD)
-    }}
-  }
-
-  /** To provide evaluation features for your engine, you must override this
-    * method to return data for evaluation from a data source. The returned data
-    * can optionally include a sequence of query and actual value pairs for
-    * evaluation purposes.
-    *
-    * The default implementation returns an empty sequence as a stub, so that
-    * an engine can be compiled without implementing evaluation.
-    */
-  def readEval(): Seq[(TD, EI, Seq[(Q, A)])] = Seq[(TD, EI, Seq[(Q, A)])]()
-
-  @deprecated("Use readEval() instead.", "0.9.0")
-  def read(): Seq[(TD, EI, Seq[(Q, A)])] = readEval()
-}
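
A minimal concrete local data source, with hypothetical training data (the same MyTrainingData is assumed by the LPreparator sketch below). Overriding readEval() is only needed when evaluation support is desired.

    import io.prediction.controller.LDataSource

    // Hypothetical raw training data.
    case class MyTrainingData(points: Seq[(Double, Double)])

    class MyDataSource extends LDataSource[MyTrainingData, Unit, Double, Double] {
      // Stand-in for a real read from the event store or a file.
      def readTraining(): MyTrainingData =
        MyTrainingData(Seq((1.0, 2.0), (2.0, 4.0)))
    }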

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/core/src/main/scala/io/prediction/controller/LFirstServing.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/io/prediction/controller/LFirstServing.scala b/core/src/main/scala/io/prediction/controller/LFirstServing.scala
deleted file mode 100644
index 970815e..0000000
--- a/core/src/main/scala/io/prediction/controller/LFirstServing.scala
+++ /dev/null
@@ -1,39 +0,0 @@
-/** Copyright 2015 TappingStone, Inc.
-  *
-  * Licensed under the Apache License, Version 2.0 (the "License");
-  * you may not use this file except in compliance with the License.
-  * You may obtain a copy of the License at
-  *
-  *     http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-package io.prediction.controller
-
-import io.prediction.core.BaseAlgorithm
-
-/** A concrete implementation of [[LServing]] returning the first algorithm's
-  * prediction result directly without any modification.
-  *
-  * @group Serving
-  */
-class LFirstServing[Q, P] extends LServing[Q, P] {
-  /** Returns the first algorithm's prediction. */
-  def serve(query: Q, predictions: Seq[P]): P = predictions.head
-}
-
-/** A concrete implementation of [[LServing]] returning the first algorithm's
-  * prediction result directly without any modification.
-  *
-  * @group Serving
-  */
-object LFirstServing {
-  /** Returns an instance of [[LFirstServing]]. */
-  def apply[Q, P](a: Class[_ <: BaseAlgorithm[_, _, Q, P]]): Class[LFirstServing[Q, P]] =
-    classOf[LFirstServing[Q, P]]
-}
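
For contrast with LAverageServing above, the behavior in isolation: only the first algorithm's prediction survives.

    import io.prediction.controller.LFirstServing

    val first = new LFirstServing[Double, Double]
    assert(first.serve(1.0, Seq(10.0, 20.0)) == 10.0)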

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/core/src/main/scala/io/prediction/controller/LPreparator.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/io/prediction/controller/LPreparator.scala b/core/src/main/scala/io/prediction/controller/LPreparator.scala
deleted file mode 100644
index f66dfc0..0000000
--- a/core/src/main/scala/io/prediction/controller/LPreparator.scala
+++ /dev/null
@@ -1,46 +0,0 @@
-/** Copyright 2015 TappingStone, Inc.
-  *
-  * Licensed under the Apache License, Version 2.0 (the "License");
-  * you may not use this file except in compliance with the License.
-  * You may obtain a copy of the License at
-  *
-  *     http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-package io.prediction.controller
-
-import io.prediction.core.BasePreparator
-import org.apache.spark.SparkContext
-import org.apache.spark.rdd.RDD
-
-import scala.reflect._
-
-/** Base class of a local preparator.
-  *
-  * A local preparator runs locally within a single machine and produces
-  * prepared data that can fit within a single machine.
-  *
-  * @tparam TD Training data class.
-  * @tparam PD Prepared data class.
-  * @group Preparator
-  */
-abstract class LPreparator[TD, PD : ClassTag]
-  extends BasePreparator[RDD[TD], RDD[PD]] {
-
-  def prepareBase(sc: SparkContext, rddTd: RDD[TD]): RDD[PD] = {
-    rddTd.map(prepare)
-  }
-
-  /** Implement this method to produce prepared data that is ready for model
-    * training.
-    *
-    * @param trainingData Training data to be prepared.
-    */
-  def prepare(trainingData: TD): PD
-}
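
A minimal concrete local preparator, bridging the hypothetical MyTrainingData and MyPreparedData classes from the earlier sketches.

    import io.prediction.controller.LPreparator

    class MyPreparator extends LPreparator[MyTrainingData, MyPreparedData] {
      // Hypothetical cleaning step: drop points with NaN labels.
      def prepare(trainingData: MyTrainingData): MyPreparedData =
        MyPreparedData(trainingData.points.filterNot(_._2.isNaN))
    }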

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/core/src/main/scala/io/prediction/controller/LServing.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/io/prediction/controller/LServing.scala b/core/src/main/scala/io/prediction/controller/LServing.scala
deleted file mode 100644
index accee48..0000000
--- a/core/src/main/scala/io/prediction/controller/LServing.scala
+++ /dev/null
@@ -1,52 +0,0 @@
-/** Copyright 2015 TappingStone, Inc.
-  *
-  * Licensed under the Apache License, Version 2.0 (the "License");
-  * you may not use this file except in compliance with the License.
-  * You may obtain a copy of the License at
-  *
-  *     http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-package io.prediction.controller
-
-import io.prediction.annotation.Experimental
-import io.prediction.core.BaseServing
-
-/** Base class of serving.
-  *
-  * @tparam Q Input query class.
-  * @tparam P Output prediction class.
-  * @group Serving
-  */
-abstract class LServing[Q, P] extends BaseServing[Q, P] {
-  def supplementBase(q: Q): Q = supplement(q)
-
-  /** :: Experimental ::
-    * Implement this method to supplement the query before sending it to
-    * algorithms.
-    *
-    * @param q Query
-    * @return A supplemented Query
-    */
-  @Experimental
-  def supplement(q: Q): Q = q
-
-  def serveBase(q: Q, ps: Seq[P]): P = {
-    serve(q, ps)
-  }
-
-  /** Implement this method to combine multiple algorithms' predictions to
-    * produce a single final prediction. The query is the original query sent to
-    * the engine, not the supplemented one produced by [[LServing.supplement]].
-    *
-    * @param query Original input query.
-    * @param predictions A list of algorithms' predictions.
-    */
-  def serve(query: Q, predictions: Seq[P]): P
-}
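
A minimal concrete serving layer under the same hypothetical Double query/prediction types: it averages and rounds the algorithms' outputs.

    import io.prediction.controller.LServing

    class RoundedServing extends LServing[Double, Double] {
      def serve(query: Double, predictions: Seq[Double]): Double =
        math.round(predictions.sum / predictions.size).toDouble
    }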

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/core/src/main/scala/io/prediction/controller/LocalFileSystemPersistentModel.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/io/prediction/controller/LocalFileSystemPersistentModel.scala b/core/src/main/scala/io/prediction/controller/LocalFileSystemPersistentModel.scala
deleted file mode 100644
index e9f0592..0000000
--- a/core/src/main/scala/io/prediction/controller/LocalFileSystemPersistentModel.scala
+++ /dev/null
@@ -1,74 +0,0 @@
-/** Copyright 2015 TappingStone, Inc.
-  *
-  * Licensed under the Apache License, Version 2.0 (the "License");
-  * you may not use this file except in compliance with the License.
-  * You may obtain a copy of the License at
-  *
-  *     http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-package io.prediction.controller
-
-import org.apache.spark.SparkContext
-
-/** This trait is a convenience helper for persisting your model to the local
-  * filesystem. This trait and [[LocalFileSystemPersistentModelLoader]] provide
-  * concrete implementations and need not be implemented by the engine developer.
-  *
-  * The underlying implementation is [[Utils.save]].
-  *
-  * {{{
-  * class MyModel extends LocalFileSystemPersistentModel[MyParams] {
-  *   ...
-  * }
-  *
-  * object MyModel extends LocalFileSystemPersistentModelLoader[MyParams, MyModel] {
-  *   ...
-  * }
-  * }}}
-  *
-  * @tparam AP Algorithm parameters class.
-  * @see [[LocalFileSystemPersistentModelLoader]]
-  * @group Algorithm
-  */
-trait LocalFileSystemPersistentModel[AP <: Params] extends PersistentModel[AP] {
-  def save(id: String, params: AP, sc: SparkContext): Boolean = {
-    Utils.save(id, this)
-    true
-  }
-}
-
-/** Implement an object that extends this trait for PredictionIO to support
-  * loading a persisted model from the local filesystem during serving deployment.
-  *
-  * The underlying implementation is [[Utils.load]].
-  *
-  * @tparam AP Algorithm parameters class.
-  * @tparam M Model class.
-  * @see [[LocalFileSystemPersistentModel]]
-  * @group Algorithm
-  */
-trait LocalFileSystemPersistentModelLoader[AP <: Params, M]
-  extends PersistentModelLoader[AP, M] {
-  def apply(id: String, params: AP, sc: Option[SparkContext]): M = {
-    Utils.load(id).asInstanceOf[M]
-  }
-}
-
-/** DEPRECATED. Use [[LocalFileSystemPersistentModel]] instead.
-  *
-  * @group Algorithm */
-@deprecated("Use LocalFileSystemPersistentModel instead.", "0.9.2")
-trait IFSPersistentModel[AP <: Params] extends LocalFileSystemPersistentModel[AP]
-
-/** DEPRECATED. Use [[LocalFileSystemPersistentModelLoader]] instead.
-  *
-  * @group Algorithm */
-@deprecated("Use LocalFileSystemPersistentModelLoader instead.", "0.9.2")
-trait IFSPersistentModelLoader[AP <: Params, M] extends LocalFileSystemPersistentModelLoader[AP, M]
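
A sketch of the pattern described in the Scaladoc above, with hypothetical params and model classes; save() and apply() come from the traits, which delegate to Utils.save and Utils.load.

    import io.prediction.controller.{LocalFileSystemPersistentModel,
      LocalFileSystemPersistentModelLoader, Params}

    case class MyModelParams(rank: Int) extends Params  // hypothetical

    // A case class keeps the model serializable for Utils.save.
    case class MyModel(weights: Array[Double])
      extends LocalFileSystemPersistentModel[MyModelParams]

    object MyModel
      extends LocalFileSystemPersistentModelLoader[MyModelParams, MyModel]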

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/core/src/main/scala/io/prediction/controller/Metric.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/io/prediction/controller/Metric.scala b/core/src/main/scala/io/prediction/controller/Metric.scala
deleted file mode 100644
index 9e56125..0000000
--- a/core/src/main/scala/io/prediction/controller/Metric.scala
+++ /dev/null
@@ -1,266 +0,0 @@
-/** Copyright 2015 TappingStone, Inc.
-  *
-  * Licensed under the Apache License, Version 2.0 (the "License");
-  * you may not use this file except in compliance with the License.
-  * You may obtain a copy of the License at
-  *
-  *     http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-package io.prediction.controller
-
-import _root_.io.prediction.controller.java.SerializableComparator
-import io.prediction.core.BaseEngine
-import org.apache.spark.SparkContext
-import org.apache.spark.rdd.RDD
-import org.apache.spark.util.StatCounter
-
-import scala.Numeric.Implicits._
-import scala.reflect._
-
-/** Base class of a [[Metric]].
-  *
-  * @tparam EI Evaluation information
-  * @tparam Q Query
-  * @tparam P Predicted result
-  * @tparam A Actual result
-  * @tparam R Metric result
-  * @group Evaluation
-  */
-abstract class Metric[EI, Q, P, A, R](implicit rOrder: Ordering[R])
-extends Serializable {
-  /** Java friendly constructor
-    *
-    * @param comparator A serializable comparator for sorting the metric results.
-    *
-    */
-  def this(comparator: SerializableComparator[R]) = {
-    this()(Ordering.comparatorToOrdering(comparator))
-  }
-
-  /** Class name of this [[Metric]]. */
-  def header: String = this.getClass.getSimpleName
-
-  /** Calculates the result of this [[Metric]]. */
-  def calculate(sc: SparkContext, evalDataSet: Seq[(EI, RDD[(Q, P, A)])]): R
-
-  /** Comparison function for R's ordering. */
-  def compare(r0: R, r1: R): Int = rOrder.compare(r0, r1)
-}
-
-private [prediction] trait StatsMetricHelper[EI, Q, P, A] {
-  def calculate(q: Q, p: P, a: A): Double
-
-  def calculateStats(sc: SparkContext, evalDataSet: Seq[(EI, RDD[(Q, P, A)])])
-  : StatCounter = {
-    val doubleRDD = sc.union(
-      evalDataSet.map { case (_, qpaRDD) =>
-        qpaRDD.map { case (q, p, a) => calculate(q, p, a) }
-      }
-    )
-
-    doubleRDD.stats()
-  }
-}
-
-private [prediction] trait StatsOptionMetricHelper[EI, Q, P, A] {
-  def calculate(q: Q, p: P, a: A): Option[Double]
-
-  def calculateStats(sc: SparkContext, evalDataSet: Seq[(EI, RDD[(Q, P, A)])])
-  : StatCounter = {
-    val doubleRDD = sc.union(
-      evalDataSet.map { case (_, qpaRDD) =>
-        qpaRDD.flatMap { case (q, p, a) => calculate(q, p, a) }
-      }
-    )
-
-    doubleRDD.stats()
-  }
-}
-
-/** Returns the global average of the score returned by the calculate method.
-  *
-  * @tparam EI Evaluation information
-  * @tparam Q Query
-  * @tparam P Predicted result
-  * @tparam A Actual result
-  *
-  * @group Evaluation
-  */
-abstract class AverageMetric[EI, Q, P, A]
-    extends Metric[EI, Q, P, A, Double]
-    with StatsMetricHelper[EI, Q, P, A]
-    with QPAMetric[Q, P, A, Double] {
-  /** Implement this method to return a score that will be used for averaging
-    * across all QPA tuples.
-    */
-  def calculate(q: Q, p: P, a: A): Double
-
-  def calculate(sc: SparkContext, evalDataSet: Seq[(EI, RDD[(Q, P, A)])])
-  : Double = {
-    calculateStats(sc, evalDataSet).mean
-  }
-}
-
-/** Returns the global average of the non-None score returned by the calculate
-  * method.
-  *
-  * @tparam EI Evaluation information
-  * @tparam Q Query
-  * @tparam P Predicted result
-  * @tparam A Actual result
-  *
-  * @group Evaluation
-  */
-abstract class OptionAverageMetric[EI, Q, P, A]
-    extends Metric[EI, Q, P, A, Double]
-    with StatsOptionMetricHelper[EI, Q, P, A]
-    with QPAMetric[Q, P, A, Option[Double]] {
-  /** Implement this method to return a score that will be used for averaging
-    * across all QPA tuples.
-    */
-  def calculate(q: Q, p: P, a: A): Option[Double]
-
-  def calculate(sc: SparkContext, evalDataSet: Seq[(EI, RDD[(Q, P, A)])])
-  : Double = {
-    calculateStats(sc, evalDataSet).mean
-  }
-}
-
-/** Returns the global standard deviation of the score returned by the calculate method
-  *
-  * This class uses org.apache.spark.util.StatCounter, which computes the
-  * standard deviation in a single pass over the data.
-  *
-  * @tparam EI Evaluation information
-  * @tparam Q Query
-  * @tparam P Predicted result
-  * @tparam A Actual result
-  *
-  * @group Evaluation
-  */
-abstract class StdevMetric[EI, Q, P, A]
-    extends Metric[EI, Q, P, A, Double]
-    with StatsMetricHelper[EI, Q, P, A]
-    with QPAMetric[Q, P, A, Double] {
-  /** Implement this method to return a score that will be used for calculating
-    * the stdev
-    * across all QPA tuples.
-    */
-  def calculate(q: Q, p: P, a: A): Double
-
-  def calculate(sc: SparkContext, evalDataSet: Seq[(EI, RDD[(Q, P, A)])])
-  : Double = {
-    calculateStats(sc, evalDataSet).stdev
-  }
-}
-
-/** Returns the global standard deviation of the non-None score returned by the calculate method
-  *
-  * This class uses org.apache.spark.util.StatCounter, which computes the
-  * standard deviation in a single pass over the data.
-  *
-  * @tparam EI Evaluation information
-  * @tparam Q Query
-  * @tparam P Predicted result
-  * @tparam A Actual result
-  *
-  * @group Evaluation
-  */
-abstract class OptionStdevMetric[EI, Q, P, A]
-    extends Metric[EI, Q, P, A, Double]
-    with StatsOptionMetricHelper[EI, Q, P, A]
-    with QPAMetric[Q, P, A, Option[Double]] {
-  /** Implement this method to return a score that will be used for calculating
-    * the stdev
-    * across all QPA tuples.
-    */
-  def calculate(q: Q, p: P, a: A): Option[Double]
-
-  def calculate(sc: SparkContext, evalDataSet: Seq[(EI, RDD[(Q, P, A)])])
-  : Double = {
-    calculateStats(sc, evalDataSet).stdev
-  }
-}
-
-/** Returns the sum of the score returned by the calculate method.
-  *
-  * @tparam EI Evaluation information
-  * @tparam Q Query
-  * @tparam P Predicted result
-  * @tparam A Actual result
-  * @tparam R Result, output of the function calculate, must be Numeric
-  *
-  * @group Evaluation
-  */
-abstract class SumMetric[EI, Q, P, A, R: ClassTag](implicit num: Numeric[R])
-    extends Metric[EI, Q, P, A, R]()(num)
-    with QPAMetric[Q, P, A, R] {
-  /** Implement this method to return a score that will be used for summing
-    * across all QPA tuples.
-    */
-  def calculate(q: Q, p: P, a: A): R
-
-  def calculate(sc: SparkContext, evalDataSet: Seq[(EI, RDD[(Q, P, A)])])
-  : R = {
-    val union: RDD[R] = sc.union(
-      evalDataSet.map { case (_, qpaRDD) =>
-        qpaRDD.map { case (q, p, a) => calculate(q, p, a) }
-      }
-    )
-
-    union.aggregate[R](num.zero)(_ + _, _ + _)
-  }
-}
-
-/** Returns zero. Useful as a placeholder during evaluation development when not all components are
-  * implemented.
-  * @tparam EI Evaluation information
-  * @tparam Q Query
-  * @tparam P Predicted result
-  * @tparam A Actual result
-  *
-  * @group Evaluation
-  */
-class ZeroMetric[EI, Q, P, A] extends Metric[EI, Q, P, A, Double]() {
-   def calculate(sc: SparkContext, evalDataSet: Seq[(EI, RDD[(Q, P, A)])]): Double = 0.0
-}
-
-/** Companion object of [[ZeroMetric]]
-  *
-  * @group Evaluation
-  */
-object ZeroMetric {
-  /** Returns a ZeroMetric instance using Engine's type parameters. */
-  def apply[EI, Q, P, A](engine: BaseEngine[EI, Q, P, A]): ZeroMetric[EI, Q, P, A] = {
-    new ZeroMetric[EI, Q, P, A]()
-  }
-}
-
-
-/** Trait for metric which returns a score based on Query, PredictedResult,
-  * and ActualResult
-  *
-  * @tparam Q Query class
-  * @tparam P Predicted result class
-  * @tparam A Actual result class
-  * @tparam R Metric result class
-  * @group Evaluation
-  */
-trait QPAMetric[Q, P, A, R] {
-  /** Calculate a metric result based on query, predicted result, and actual
-    * result
-    *
-    * @param q Query
-    * @param p Predicted result
-    * @param a Actual result
-    * @return Metric result
-    */
-  def calculate(q: Q, p: P, a: A): R
-}
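
A minimal concrete metric built on AverageMetric above: a hypothetical exact-match accuracy with Unit evaluation info and Double queries, predictions, and actuals.

    import io.prediction.controller.AverageMetric

    class Accuracy extends AverageMetric[Unit, Double, Double, Double] {
      // Per-tuple score; AverageMetric averages these across all QPA tuples.
      def calculate(q: Double, p: Double, a: Double): Double =
        if (p == a) 1.0 else 0.0
    }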

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/core/src/main/scala/io/prediction/controller/MetricEvaluator.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/io/prediction/controller/MetricEvaluator.scala b/core/src/main/scala/io/prediction/controller/MetricEvaluator.scala
deleted file mode 100644
index 41ccc9c..0000000
--- a/core/src/main/scala/io/prediction/controller/MetricEvaluator.scala
+++ /dev/null
@@ -1,260 +0,0 @@
-/** Copyright 2015 TappingStone, Inc.
-  *
-  * Licensed under the Apache License, Version 2.0 (the "License");
-  * you may not use this file except in compliance with the License.
-  * You may obtain a copy of the License at
-  *
-  *     http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-package io.prediction.controller
-
-import _root_.java.io.File
-import _root_.java.io.PrintWriter
-
-import com.github.nscala_time.time.Imports.DateTime
-import grizzled.slf4j.Logger
-import io.prediction.annotation.DeveloperApi
-import io.prediction.core.BaseEvaluator
-import io.prediction.core.BaseEvaluatorResult
-import io.prediction.data.storage.Storage
-import io.prediction.workflow.JsonExtractor
-import io.prediction.workflow.JsonExtractorOption.Both
-import io.prediction.workflow.NameParamsSerializer
-import io.prediction.workflow.WorkflowParams
-import org.apache.spark.SparkContext
-import org.apache.spark.rdd.RDD
-import org.json4s.native.Serialization.write
-import org.json4s.native.Serialization.writePretty
-
-import scala.language.existentials
-
-/** Case class storing a primary score, and other scores
-  *
-  * @param score Primary metric score
-  * @param otherScores Other scores this metric might have
-  * @tparam R Type of the primary metric score
-  * @group Evaluation
-  */
-case class MetricScores[R](
-  score: R,
-  otherScores: Seq[Any])
-
-/** Contains all results of a [[MetricEvaluator]]
-  *
-  * @param bestScore The best score among all iterations
-  * @param bestEngineParams The set of engine parameters that yielded the best score
-  * @param bestIdx The index of iteration that yielded the best score
-  * @param metricHeader Brief description of the primary metric score
-  * @param otherMetricHeaders Brief descriptions of other metric scores
-  * @param engineParamsScores All sets of engine parameters and corresponding metric scores
-  * @param outputPath An optional output path where scores are saved
-  * @tparam R Type of the primary metric score
-  * @group Evaluation
-  */
-case class MetricEvaluatorResult[R](
-  bestScore: MetricScores[R],
-  bestEngineParams: EngineParams,
-  bestIdx: Int,
-  metricHeader: String,
-  otherMetricHeaders: Seq[String],
-  engineParamsScores: Seq[(EngineParams, MetricScores[R])],
-  outputPath: Option[String])
-extends BaseEvaluatorResult {
-
-  override def toOneLiner(): String = {
-    val idx = engineParamsScores.map(_._1).indexOf(bestEngineParams)
-    s"Best Params Index: $idx Score: ${bestScore.score}"
-  }
-
-  override def toJSON(): String = {
-    implicit lazy val formats = Utils.json4sDefaultFormats +
-      new NameParamsSerializer
-    write(this)
-  }
-
-  override def toHTML(): String = html.metric_evaluator().toString()
-
-  override def toString: String = {
-    implicit lazy val formats = Utils.json4sDefaultFormats +
-      new NameParamsSerializer
-
-    val bestEPStr = JsonExtractor.engineParamstoPrettyJson(Both, bestEngineParams)
-
-    val strings = Seq(
-      "MetricEvaluatorResult:",
-      s"  # engine params evaluated: ${engineParamsScores.size}") ++
-      Seq(
-        "Optimal Engine Params:",
-        s"  $bestEPStr",
-        "Metrics:",
-        s"  $metricHeader: ${bestScore.score}") ++
-      otherMetricHeaders.zip(bestScore.otherScores).map {
-        case (h, s) => s"  $h: $s"
-      } ++
-      outputPath.toSeq.map {
-        p => s"The best variant params can be found in $p"
-      }
-
-    strings.mkString("\n")
-  }
-}
-
-/** Companion object of [[MetricEvaluator]]
-  *
-  * @group Evaluation
-  */
-object MetricEvaluator {
-  def apply[EI, Q, P, A, R](
-    metric: Metric[EI, Q, P, A, R],
-    otherMetrics: Seq[Metric[EI, Q, P, A, _]],
-    outputPath: String): MetricEvaluator[EI, Q, P, A, R] = {
-    new MetricEvaluator[EI, Q, P, A, R](
-      metric,
-      otherMetrics,
-      Some(outputPath))
-  }
-
-  def apply[EI, Q, P, A, R](
-    metric: Metric[EI, Q, P, A, R],
-    otherMetrics: Seq[Metric[EI, Q, P, A, _]])
-  : MetricEvaluator[EI, Q, P, A, R] = {
-    new MetricEvaluator[EI, Q, P, A, R](
-      metric,
-      otherMetrics,
-      None)
-  }
-
-  def apply[EI, Q, P, A, R](metric: Metric[EI, Q, P, A, R])
-  : MetricEvaluator[EI, Q, P, A, R] = {
-    new MetricEvaluator[EI, Q, P, A, R](
-      metric,
-      Seq[Metric[EI, Q, P, A, _]](),
-      None)
-  }
-
-  case class NameParams(name: String, params: Params) {
-    def this(np: (String, Params)) = this(np._1, np._2)
-  }
-
-  case class EngineVariant(
-    id: String,
-    description: String,
-    engineFactory: String,
-    datasource: NameParams,
-    preparator: NameParams,
-    algorithms: Seq[NameParams],
-    serving: NameParams) {
-
-    def this(evaluation: Evaluation, engineParams: EngineParams) = this(
-      id = "",
-      description = "",
-      engineFactory = evaluation.getClass.getName,
-      datasource = new NameParams(engineParams.dataSourceParams),
-      preparator = new NameParams(engineParams.preparatorParams),
-      algorithms = engineParams.algorithmParamsList.map(np => new NameParams(np)),
-      serving = new NameParams(engineParams.servingParams))
-  }
-}
-
-/** :: DeveloperApi ::
-  * Do not use this directly. Use [[MetricEvaluator$]] instead. This is an
-  * implementation of [[io.prediction.core.BaseEvaluator]] that evaluates
-  * prediction performance based on metric scores.
-  *
-  * @param metric Primary metric
-  * @param otherMetrics Other metrics
-  * @param outputPath Optional output path to save evaluation results
-  * @tparam EI Evaluation information type
-  * @tparam Q Query class
-  * @tparam P Predicted result class
-  * @tparam A Actual result class
-  * @tparam R Metric result class
-  * @group Evaluation
-  */
-@DeveloperApi
-class MetricEvaluator[EI, Q, P, A, R] (
-  val metric: Metric[EI, Q, P, A, R],
-  val otherMetrics: Seq[Metric[EI, Q, P, A, _]],
-  val outputPath: Option[String])
-  extends BaseEvaluator[EI, Q, P, A, MetricEvaluatorResult[R]] {
-  @transient lazy val logger = Logger[this.type]
-  @transient val engineInstances = Storage.getMetaDataEngineInstances()
-
-  def saveEngineJson(
-    evaluation: Evaluation,
-    engineParams: EngineParams,
-    outputPath: String) {
-
-    val now = DateTime.now
-    val evalClassName = evaluation.getClass.getName
-
-    val variant = MetricEvaluator.EngineVariant(
-      id = s"$evalClassName $now",
-      description = "",
-      engineFactory = evalClassName,
-      datasource = new MetricEvaluator.NameParams(engineParams.dataSourceParams),
-      preparator = new MetricEvaluator.NameParams(engineParams.preparatorParams),
-      algorithms = engineParams.algorithmParamsList.map(np => new MetricEvaluator.NameParams(np)),
-      serving = new MetricEvaluator.NameParams(engineParams.servingParams))
-
-    implicit lazy val formats = Utils.json4sDefaultFormats
-
-    logger.info(s"Writing best variant params to disk ($outputPath)...")
-    val writer = new PrintWriter(new File(outputPath))
-    writer.write(writePretty(variant))
-    writer.close()
-  }
-
-  def evaluateBase(
-    sc: SparkContext,
-    evaluation: Evaluation,
-    engineEvalDataSet: Seq[(EngineParams, Seq[(EI, RDD[(Q, P, A)])])],
-    params: WorkflowParams): MetricEvaluatorResult[R] = {
-
-    val evalResultList: Seq[(EngineParams, MetricScores[R])] = engineEvalDataSet
-    .zipWithIndex
-    .par
-    .map { case ((engineParams, evalDataSet), idx) =>
-      val metricScores = MetricScores[R](
-        metric.calculate(sc, evalDataSet),
-        otherMetrics.map(_.calculate(sc, evalDataSet)))
-      (engineParams, metricScores)
-    }
-    .seq
-
-    implicit lazy val formats = Utils.json4sDefaultFormats +
-      new NameParamsSerializer
-
-    evalResultList.zipWithIndex.foreach { case ((ep, r), idx) =>
-      logger.info(s"Iteration $idx")
-      logger.info(s"EngineParams: ${JsonExtractor.engineParamsToJson(Both, ep)}")
-      logger.info(s"Result: $r")
-    }
-
-    // Take the maximum score, using the implicit ordering from Metric.
-    val ((bestEngineParams, bestScore), bestIdx) = evalResultList
-    .zipWithIndex
-    .reduce { (x, y) =>
-      if (metric.compare(x._1._2.score, y._1._2.score) >= 0) x else y
-    }
-
-    // Save the best engine params to disk if an output path is set.
-    outputPath.foreach { path => saveEngineJson(evaluation, bestEngineParams, path) }
-
-    MetricEvaluatorResult(
-      bestScore = bestScore,
-      bestEngineParams = bestEngineParams,
-      bestIdx = bestIdx,
-      metricHeader = metric.header,
-      otherMetricHeaders = otherMetrics.map(_.header),
-      engineParamsScores = evalResultList,
-      outputPath = outputPath)
-  }
-}
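
As a usage sketch of the companion object overloads above (Metric0 and
Metric1 refer to the sample metrics defined later in this change):

  // Primary metric only.
  val evaluator0 = MetricEvaluator(new Metric0())

  // Primary metric, auxiliary metrics, and an output path where the JSON
  // of the best engine variant will be written.
  val evaluator1 = MetricEvaluator(
    metric = new Metric0(),
    otherMetrics = Seq(new Metric1()),
    outputPath = "best.json")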

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/core/src/main/scala/io/prediction/controller/P2LAlgorithm.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/io/prediction/controller/P2LAlgorithm.scala b/core/src/main/scala/io/prediction/controller/P2LAlgorithm.scala
deleted file mode 100644
index c59b9af..0000000
--- a/core/src/main/scala/io/prediction/controller/P2LAlgorithm.scala
+++ /dev/null
@@ -1,121 +0,0 @@
-/** Copyright 2015 TappingStone, Inc.
-  *
-  * Licensed under the Apache License, Version 2.0 (the "License");
-  * you may not use this file except in compliance with the License.
-  * You may obtain a copy of the License at
-  *
-  *     http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-package io.prediction.controller
-
-import _root_.io.prediction.annotation.DeveloperApi
-import io.prediction.core.BaseAlgorithm
-import io.prediction.workflow.PersistentModelManifest
-import org.apache.spark.SparkContext
-import org.apache.spark.SparkContext._
-import org.apache.spark.rdd.RDD
-
-import scala.reflect._
-
-/** Base class of a parallel-to-local algorithm.
-  *
-  * A parallel-to-local algorithm can be run in parallel on a cluster and
-  * produces a model that can fit within a single machine.
-  *
-  * If your input query class requires custom JSON4S serialization, the most
-  * idiomatic way is to implement a trait that extends [[CustomQuerySerializer]],
-  * and mix that into your algorithm class, instead of overriding
-  * [[querySerializer]] directly.
-  *
-  * @tparam PD Prepared data class.
-  * @tparam M Trained model class.
-  * @tparam Q Input query class.
-  * @tparam P Output prediction class.
-  * @group Algorithm
-  */
-abstract class P2LAlgorithm[PD, M: ClassTag, Q: ClassTag, P]
-  extends BaseAlgorithm[PD, M, Q, P] {
-
-  def trainBase(sc: SparkContext, pd: PD): M = train(sc, pd)
-
-  /** Implement this method to produce a model from prepared data.
-    *
-    * @param pd Prepared data for model training.
-    * @return Trained model.
-    */
-  def train(sc: SparkContext, pd: PD): M
-
-  def batchPredictBase(sc: SparkContext, bm: Any, qs: RDD[(Long, Q)])
-  : RDD[(Long, P)] = batchPredict(bm.asInstanceOf[M], qs)
-
-  /** This is a default implementation to perform batch prediction. Override
-    * this method for a custom implementation.
-    *
-    * @param m A model
-    * @param qs An RDD of index-query tuples. The index is used to keep track of
-    *           predicted results with corresponding queries.
-    * @return Batch of predicted results
-    */
-  def batchPredict(m: M, qs: RDD[(Long, Q)]): RDD[(Long, P)] = {
-    qs.mapValues { q => predict(m, q) }
-  }
-
-  def predictBase(bm: Any, q: Q): P = predict(bm.asInstanceOf[M], q)
-
-  /** Implement this method to produce a prediction from a query and trained
-    * model.
-    *
-    * @param model Trained model produced by [[train]].
-    * @param query An input query.
-    * @return A prediction.
-    */
-  def predict(model: M, query: Q): P
-
-  /** :: DeveloperApi ::
-    * Engine developers should not use this directly (read on to see how
-    * parallel-to-local algorithm models are persisted).
-    *
-    * Parallel-to-local algorithms produce local models. By default, models will be
-    * serialized and stored automatically. Engine developers can override this behavior by
-    * mixing the [[PersistentModel]] trait into the model class, and
-    * PredictionIO will call [[PersistentModel.save]] instead. If it returns
-    * true, a [[io.prediction.workflow.PersistentModelManifest]] will be
-    * returned so that during deployment, PredictionIO will use
-    * [[PersistentModelLoader]] to retrieve the model. Otherwise, Unit will be
-    * returned and the model will be re-trained on-the-fly.
-    *
-    * @param sc Spark context
-    * @param modelId Model ID
-    * @param algoParams Algorithm parameters that trained this model
-    * @param bm Model
-    * @return The model itself for automatic persistence, an instance of
-    *         [[io.prediction.workflow.PersistentModelManifest]] for manual
-    *         persistence, or Unit for re-training on deployment
-    */
-  @DeveloperApi
-  override
-  def makePersistentModel(
-    sc: SparkContext,
-    modelId: String,
-    algoParams: Params,
-    bm: Any): Any = {
-    val m = bm.asInstanceOf[M]
-    if (m.isInstanceOf[PersistentModel[_]]) {
-      if (m.asInstanceOf[PersistentModel[Params]].save(
-        modelId, algoParams, sc)) {
-        PersistentModelManifest(className = m.getClass.getName)
-      } else {
-        Unit
-      }
-    } else {
-      m
-    }
-  }
-}
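
For reference, a minimal P2LAlgorithm only needs to implement train and
predict; a sketch with hypothetical types:

  import org.apache.spark.SparkContext

  // Hypothetical types, for illustration only.
  case class PreparedData(means: Map[String, Double])
  case class Model(means: Map[String, Double])
  case class Query(item: String)
  case class PredictedResult(score: Double)

  class MeanAlgorithm
    extends P2LAlgorithm[PreparedData, Model, Query, PredictedResult] {

    // Trained on the cluster; the resulting model is a plain local object.
    def train(sc: SparkContext, pd: PreparedData): Model = Model(pd.means)

    // Serve a single query from the local model.
    def predict(model: Model, query: Query): PredictedResult =
      PredictedResult(model.means.getOrElse(query.item, 0.0))
  }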

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/core/src/main/scala/io/prediction/controller/PAlgorithm.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/io/prediction/controller/PAlgorithm.scala b/core/src/main/scala/io/prediction/controller/PAlgorithm.scala
deleted file mode 100644
index e9916be..0000000
--- a/core/src/main/scala/io/prediction/controller/PAlgorithm.scala
+++ /dev/null
@@ -1,126 +0,0 @@
-/** Copyright 2015 TappingStone, Inc.
-  *
-  * Licensed under the Apache License, Version 2.0 (the "License");
-  * you may not use this file except in compliance with the License.
-  * You may obtain a copy of the License at
-  *
-  *     http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-package io.prediction.controller
-
-import io.prediction.annotation.DeveloperApi
-import io.prediction.core.BaseAlgorithm
-import io.prediction.workflow.PersistentModelManifest
-import org.apache.spark.SparkContext
-import org.apache.spark.rdd.RDD
-
-/** Base class of a parallel algorithm.
-  *
-  * A parallel algorithm can be run in parallel on a cluster and produces a
-  * model that can also be distributed across a cluster.
-  *
-  * If your input query class requires custom JSON4S serialization, the most
-  * idiomatic way is to implement a trait that extends [[CustomQuerySerializer]],
-  * and mix that into your algorithm class, instead of overriding
-  * [[querySerializer]] directly.
-  *
-  * To provide evaluation support, one must override and implement the
-  * [[batchPredict]] method. Otherwise, an exception will be thrown when
-  * `pio eval` is used.
-  *
-  * @tparam PD Prepared data class.
-  * @tparam M Trained model class.
-  * @tparam Q Input query class.
-  * @tparam P Output prediction class.
-  * @group Algorithm
-  */
-abstract class PAlgorithm[PD, M, Q, P]
-  extends BaseAlgorithm[PD, M, Q, P] {
-
-  def trainBase(sc: SparkContext, pd: PD): M = train(sc, pd)
-
-  /** Implement this method to produce a model from prepared data.
-    *
-    * @param pd Prepared data for model training.
-    * @return Trained model.
-    */
-  def train(sc: SparkContext, pd: PD): M
-
-  def batchPredictBase(sc: SparkContext, bm: Any, qs: RDD[(Long, Q)])
-  : RDD[(Long, P)] = batchPredict(bm.asInstanceOf[M], qs)
-
-  /** To provide evaluation support, one must override and implement this method
-    * to generate many predictions in batch. Otherwise, an exception will be
-    * thrown when `pio eval` is used.
-    *
-    * The default implementation throws an exception.
-    *
-    * @param m Trained model produced by [[train]].
-    * @param qs An RDD of index-query tuples. The index is used to keep track of
-    *           predicted results with corresponding queries.
-    */
-  def batchPredict(m: M, qs: RDD[(Long, Q)]): RDD[(Long, P)] =
-    throw new NotImplementedError("batchPredict not implemented")
-
-  def predictBase(baseModel: Any, query: Q): P = {
-    predict(baseModel.asInstanceOf[M], query)
-  }
-
-  /** Implement this method to produce a prediction from a query and trained
-    * model.
-    *
-    * @param model Trained model produced by [[train]].
-    * @param query An input query.
-    * @return A prediction.
-    */
-  def predict(model: M, query: Q): P
-
-  /** :: DeveloperApi ::
-    * Engine developers should not use this directly (read on to see how parallel
-    * algorithm models are persisted).
-    *
-    * In general, parallel models may contain multiple RDDs. It is not easy to
-    * infer and persist them programmatically since these RDDs may be
-    * huge. To persist these models, engine developers need to mix
-    * the [[PersistentModel]] trait into the model class and implement
-    * [[PersistentModel.save]]. If it returns true, a
-    * [[io.prediction.workflow.PersistentModelManifest]] will be
-    * returned so that during deployment, PredictionIO will use
-    * [[PersistentModelLoader]] to retrieve the model. Otherwise, Unit will be
-    * returned and the model will be re-trained on-the-fly.
-    *
-    * @param sc Spark context
-    * @param modelId Model ID
-    * @param algoParams Algorithm parameters that trained this model
-    * @param bm Model
-    * @return The model itself for automatic persistence, an instance of
-    *         [[io.prediction.workflow.PersistentModelManifest]] for manual
-    *         persistence, or Unit for re-training on deployment
-    */
-  @DeveloperApi
-  override
-  def makePersistentModel(
-    sc: SparkContext,
-    modelId: String,
-    algoParams: Params,
-    bm: Any): Any = {
-    val m = bm.asInstanceOf[M]
-    if (m.isInstanceOf[PersistentModel[_]]) {
-      if (m.asInstanceOf[PersistentModel[Params]].save(
-        modelId, algoParams, sc)) {
-        PersistentModelManifest(className = m.getClass.getName)
-      } else {
-        Unit
-      }
-    } else {
-      Unit
-    }
-  }
-}
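
Because the default batchPredict above throws, a PAlgorithm meant to work
with `pio eval` must override it. A sketch with hypothetical types, assuming
the model's score table fits in driver memory:

  import org.apache.spark.SparkContext
  import org.apache.spark.rdd.RDD

  // Hypothetical types, for illustration only.
  case class PreparedData(ratings: RDD[(String, Double)])
  case class Model(scores: RDD[(String, Double)])
  case class Query(item: String)
  case class PredictedResult(score: Double)

  class DistributedAlgorithm
    extends PAlgorithm[PreparedData, Model, Query, PredictedResult] {

    def train(sc: SparkContext, pd: PreparedData): Model = Model(pd.ratings)

    // Required for evaluation: score a whole batch of indexed queries.
    override def batchPredict(
      m: Model, qs: RDD[(Long, Query)]): RDD[(Long, PredictedResult)] = {
      val scores = m.scores.collectAsMap() // assumes it fits on the driver
      qs.mapValues(q => PredictedResult(scores.getOrElse(q.item, 0.0)))
    }

    def predict(model: Model, query: Query): PredictedResult =
      PredictedResult(
        model.scores.lookup(query.item).headOption.getOrElse(0.0))
  }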

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/core/src/main/scala/io/prediction/controller/PDataSource.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/io/prediction/controller/PDataSource.scala b/core/src/main/scala/io/prediction/controller/PDataSource.scala
deleted file mode 100644
index 55a2cf9..0000000
--- a/core/src/main/scala/io/prediction/controller/PDataSource.scala
+++ /dev/null
@@ -1,57 +0,0 @@
-/** Copyright 2015 TappingStone, Inc.
-  *
-  * Licensed under the Apache License, Version 2.0 (the "License");
-  * you may not use this file except in compliance with the License.
-  * You may obtain a copy of the License at
-  *
-  *     http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-package io.prediction.controller
-
-import io.prediction.core.BaseDataSource
-import org.apache.spark.SparkContext
-import org.apache.spark.rdd.RDD
-
-/** Base class of a parallel data source.
-  *
-  * A parallel data source runs locally within a single machine, or in parallel
-  * on a cluster, to return data that is distributed across a cluster.
-  *
-  * @tparam TD Training data class.
-  * @tparam EI Evaluation Info class.
-  * @tparam Q Input query class.
-  * @tparam A Actual value class.
-  * @group Data Source
-  */
-
-abstract class PDataSource[TD, EI, Q, A]
-  extends BaseDataSource[TD, EI, Q, A] {
-
-  def readTrainingBase(sc: SparkContext): TD = readTraining(sc)
-
-  /** Implement this method to only return training data from a data source */
-  def readTraining(sc: SparkContext): TD
-
-  def readEvalBase(sc: SparkContext): Seq[(TD, EI, RDD[(Q, A)])] = readEval(sc)
-
-  /** To provide evaluation support for your engine, you must override this
-    * method to return data for evaluation from a data source. Returned data can
-    * optionally include a sequence of query and actual value pairs for
-    * evaluation purposes.
-    *
-    * The default implementation returns an empty sequence as a stub, so that
-    * an engine can be compiled without implementing evaluation.
-    */
-  def readEval(sc: SparkContext): Seq[(TD, EI, RDD[(Q, A)])] =
-    Seq[(TD, EI, RDD[(Q, A)])]()
-
-  @deprecated("Use readEval() instead.", "0.9.0")
-  def read(sc: SparkContext): Seq[(TD, EI, RDD[(Q, A)])] = readEval(sc)
-}
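
A minimal parallel data source, with hypothetical types, might look like the
sketch below; overriding readEval is only necessary when evaluation support
is desired:

  import org.apache.spark.SparkContext
  import org.apache.spark.rdd.RDD

  // Hypothetical types, for illustration only.
  case class TrainingData(ratings: RDD[(String, Double)])
  case class EvalInfo(name: String)
  case class Query(item: String)
  case class ActualResult(score: Double)

  class MyDataSource
    extends PDataSource[TrainingData, EvalInfo, Query, ActualResult] {

    def readTraining(sc: SparkContext): TrainingData =
      TrainingData(sc.parallelize(Seq(("a", 1.0), ("b", 2.0))))

    // Optional: supply (query, actual) pairs so `pio eval` has ground truth.
    override def readEval(sc: SparkContext)
    : Seq[(TrainingData, EvalInfo, RDD[(Query, ActualResult)])] = {
      val qa = sc.parallelize(Seq((Query("a"), ActualResult(1.0))))
      Seq((readTraining(sc), EvalInfo("fold-0"), qa))
    }
  }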

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/core/src/main/scala/io/prediction/controller/PPreparator.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/io/prediction/controller/PPreparator.scala b/core/src/main/scala/io/prediction/controller/PPreparator.scala
deleted file mode 100644
index 154560b..0000000
--- a/core/src/main/scala/io/prediction/controller/PPreparator.scala
+++ /dev/null
@@ -1,44 +0,0 @@
-/** Copyright 2015 TappingStone, Inc.
-  *
-  * Licensed under the Apache License, Version 2.0 (the "License");
-  * you may not use this file except in compliance with the License.
-  * You may obtain a copy of the License at
-  *
-  *     http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-package io.prediction.controller
-
-import io.prediction.core.BasePreparator
-import org.apache.spark.SparkContext
-
-/** Base class of a parallel preparator.
-  *
-  * A parallel preparator can be run in parallel on a cluster and produces a
-  * prepared data that is distributed across a cluster.
-  *
-  * @tparam TD Training data class.
-  * @tparam PD Prepared data class.
-  * @group Preparator
-  */
-abstract class PPreparator[TD, PD]
-  extends BasePreparator[TD, PD] {
-
-  def prepareBase(sc: SparkContext, td: TD): PD = {
-    prepare(sc, td)
-  }
-
-  /** Implement this method to produce prepared data that is ready for model
-    * training.
-    *
-    * @param sc An Apache Spark context.
-    * @param trainingData Training data to be prepared.
-    */
-  def prepare(sc: SparkContext, trainingData: TD): PD
-}
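
A minimal parallel preparator, again with hypothetical types; the
normalization stands in for real feature engineering:

  import org.apache.spark.SparkContext
  import org.apache.spark.rdd.RDD

  // Hypothetical types, for illustration only.
  case class TrainingData(ratings: RDD[(String, Double)])
  case class PreparedData(normalized: RDD[(String, Double)])

  class MyPreparator extends PPreparator[TrainingData, PreparedData] {
    // Scale all scores into [0, 1] relative to the maximum observed score.
    def prepare(sc: SparkContext, td: TrainingData): PreparedData = {
      val max = td.ratings.values.max()
      PreparedData(td.ratings.mapValues(_ / max))
    }
  }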

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/core/src/main/scala/io/prediction/controller/Params.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/io/prediction/controller/Params.scala b/core/src/main/scala/io/prediction/controller/Params.scala
deleted file mode 100644
index 0d5d149..0000000
--- a/core/src/main/scala/io/prediction/controller/Params.scala
+++ /dev/null
@@ -1,31 +0,0 @@
-/** Copyright 2015 TappingStone, Inc.
-  *
-  * Licensed under the Apache License, Version 2.0 (the "License");
-  * you may not use this file except in compliance with the License.
-  * You may obtain a copy of the License at
-  *
-  *     http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-package io.prediction.controller
-
-/** Base trait for all kinds of parameters that will be passed to constructors
-  * of different controller classes.
-  *
-  * @group Helper
-  */
-trait Params extends Serializable {}
-
-/** A concrete implementation of [[Params]] representing empty parameters.
-  *
-  * @group Helper
-  */
-case class EmptyParams() extends Params {
-  override def toString(): String = "Empty"
-}
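
In practice, controller parameters are declared as plain serializable case
classes; a hypothetical example:

  // Passed to an algorithm constructor and populated from engine.json.
  case class MyAlgorithmParams(rank: Int, iterations: Int = 10) extends Params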

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/core/src/main/scala/io/prediction/controller/PersistentModel.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/io/prediction/controller/PersistentModel.scala b/core/src/main/scala/io/prediction/controller/PersistentModel.scala
deleted file mode 100644
index 5d0ec41..0000000
--- a/core/src/main/scala/io/prediction/controller/PersistentModel.scala
+++ /dev/null
@@ -1,112 +0,0 @@
-/** Copyright 2015 TappingStone, Inc.
-  *
-  * Licensed under the Apache License, Version 2.0 (the "License");
-  * you may not use this file except in compliance with the License.
-  * You may obtain a copy of the License at
-  *
-  *     http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-package io.prediction.controller
-
-import org.apache.spark.SparkContext
-
-/** Mix in and implement this trait if your model cannot be persisted by
-  * PredictionIO automatically. A companion object extending
-  * [[PersistentModelLoader]] is required for PredictionIO to load the persisted
-  * model automatically during deployment.
-  *
-  * Notice that models generated by [[PAlgorithm]] by nature cannot be
-  * persisted automatically, and must implement these traits if model
-  * persistence is desired.
-  *
-  * {{{
-  * class MyModel extends PersistentModel[MyParams] {
-  *   def save(id: String, params: MyParams, sc: SparkContext): Boolean = {
-  *     ...
-  *   }
-  * }
-  *
-  * object MyModel extends PersistentModelLoader[MyParams, MyModel] {
-  *   def apply(id: String, params: MyParams, sc: Option[SparkContext]): MyModel = {
-  *     ...
-  *   }
-  * }
-  * }}}
-  *
-  * In Java, all you need to do is implement this interface and add a static
-  * method with 3 arguments of type String, [[Params]], and SparkContext.
-  *
-  * {{{
-  * public class MyModel implements PersistentModel<MyParams>, Serializable {
-  *   ...
-  *   public boolean save(String id, MyParams params, SparkContext sc) {
-  *     ...
-  *   }
-  *
-  *   public static MyModel load(String id, Params params, SparkContext sc) {
-  *     ...
-  *   }
-  *   ...
-  * }
-  * }}}
-  *
-  * @tparam AP Algorithm parameters class.
-  * @see [[PersistentModelLoader]]
-  * @group Algorithm
-  */
-trait PersistentModel[AP <: Params] {
-  /** Save the model to some persistent storage.
-    *
-    * This method should return true if the model has been saved successfully so
-    * that PredictionIO knows that it can be restored later during deployment.
-    * This method should return false if the model cannot be saved (or should
-    * not be saved due to configuration) so that PredictionIO will re-train the
-    * model during deployment. All arguments of this method are provided
-    * automatically by PredictionIO.
-    *
-    * @param id ID of the run that trained this model.
-    * @param params Algorithm parameters that were used to train this model.
-    * @param sc An Apache Spark context.
-    */
-  def save(id: String, params: AP, sc: SparkContext): Boolean
-}
-
-/** Implement an object that extends this trait for PredictionIO to support
-  * loading a persisted model during serving deployment.
-  *
-  * @tparam AP Algorithm parameters class.
-  * @tparam M Model class.
-  * @see [[PersistentModel]]
-  * @group Algorithm
-  */
-trait PersistentModelLoader[AP <: Params, M] {
-  /** Implement this method to restore a persisted model that extends the
-    * [[PersistentModel]] trait. All arguments of this method are provided
-    * automatically by PredictionIO.
-    *
-    * @param id ID of the run that trained this model.
-    * @param params Algorithm parameters that were used to train this model.
-    * @param sc An optional Apache Spark context. This will be injected if the
-    *           model was generated by a [[PAlgorithm]].
-    */
-  def apply(id: String, params: AP, sc: Option[SparkContext]): M
-}
-
-/** DEPRECATED. Use [[PersistentModel]] instead.
-  *
-  * @group Algorithm */
-@deprecated("Use PersistentModel instead.", "0.9.2")
-trait IPersistentModel[AP <: Params] extends PersistentModel[AP]
-
-/** DEPRECATED. Use [[PersistentModelLoader]] instead.
-  *
-  * @group Algorithm */
-@deprecated("Use PersistentModelLoader instead.", "0.9.2")
-trait IPersistentModelLoader[AP <: Params, M] extends PersistentModelLoader[AP, M]
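
Filling in the bodies elided in the Scaladoc example above, one possible
sketch persists the model with plain Java serialization to a path taken from
a hypothetical MyParams:

  import java.io._
  import org.apache.spark.SparkContext

  case class MyParams(path: String) extends Params

  class MyModel(val weights: Map[String, Double])
    extends PersistentModel[MyParams] with Serializable {
    def save(id: String, params: MyParams, sc: SparkContext): Boolean = {
      val oos = new ObjectOutputStream(
        new FileOutputStream(s"${params.path}/$id.bin"))
      try { oos.writeObject(weights); true } finally { oos.close() }
    }
  }

  object MyModel extends PersistentModelLoader[MyParams, MyModel] {
    def apply(id: String, params: MyParams, sc: Option[SparkContext]): MyModel = {
      val ois = new ObjectInputStream(
        new FileInputStream(s"${params.path}/$id.bin"))
      try new MyModel(ois.readObject().asInstanceOf[Map[String, Double]])
      finally ois.close()
    }
  }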

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/core/src/main/scala/io/prediction/controller/SanityCheck.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/io/prediction/controller/SanityCheck.scala b/core/src/main/scala/io/prediction/controller/SanityCheck.scala
deleted file mode 100644
index bb5342f..0000000
--- a/core/src/main/scala/io/prediction/controller/SanityCheck.scala
+++ /dev/null
@@ -1,30 +0,0 @@
-/** Copyright 2015 TappingStone, Inc.
-*
-* Licensed under the Apache License, Version 2.0 (the "License");
-* you may not use this file except in compliance with the License.
-* You may obtain a copy of the License at
-*
-*     http://www.apache.org/licenses/LICENSE-2.0
-*
-* Unless required by applicable law or agreed to in writing, software
-* distributed under the License is distributed on an "AS IS" BASIS,
-* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-* See the License for the specific language governing permissions and
-* limitations under the License.
-*/
-
-package io.prediction.controller
-
-/** Extend a data class with this trait if you want PredictionIO to
-  * automatically perform a sanity check on your data classes during training.
-  * This is very useful when you need to debug your engine.
-  *
-  * @group Helper
-  */
-trait SanityCheck {
-  /** Implement this method to perform checks on your data. This method should
-    * contain assertions that throw exceptions when your data does not meet
-    * your pre-defined requirements.
-    */
-  def sanityCheck(): Unit
-}
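
A typical implementation asserts invariants on the training data; a sketch
with a hypothetical data class:

  import org.apache.spark.rdd.RDD

  case class TrainingData(ratings: RDD[(String, Double)]) extends SanityCheck {
    def sanityCheck(): Unit = {
      // Fail fast during training if the data source returned nothing.
      require(!ratings.isEmpty(), "ratings cannot be empty")
    }
  }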



[18/34] incubator-predictionio git commit: rename all except examples

Posted by do...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/core/src/test/scala/io/prediction/controller/SampleEngine.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/io/prediction/controller/SampleEngine.scala b/core/src/test/scala/io/prediction/controller/SampleEngine.scala
deleted file mode 100644
index 3a28ca9..0000000
--- a/core/src/test/scala/io/prediction/controller/SampleEngine.scala
+++ /dev/null
@@ -1,472 +0,0 @@
-package io.prediction.controller
-
-import io.prediction.controller.{Params => PIOParams}
-import io.prediction.core._
-
-import grizzled.slf4j.Logger
-import io.prediction.workflow.WorkflowParams
-import org.apache.spark.SparkContext
-import org.apache.spark.SparkContext._
-import org.apache.spark.rdd.RDD
-
-object Engine0 {
-  @transient lazy val logger = Logger[this.type] 
-
-  case class TrainingData(id: Int, error: Boolean = false) extends SanityCheck {
-    def sanityCheck(): Unit = {
-      Predef.assert(!error, "Not Error")
-    }
-  }
-
-  case class EvalInfo(id: Int)
-  case class ProcessedData(id: Int, td: TrainingData)
-
-  case class Query(id: Int, ex: Int = 0, qx: Int = 0, supp: Boolean = false)
-  case class Actual(id: Int, ex: Int = 0, qx: Int = 0)
-  case class Prediction(
-    id: Int, q: Query, models: Option[Any] = None, 
-    ps: Seq[Prediction] = Seq[Prediction]())
-
-  class PDataSource0(id: Int = 0) 
-  extends PDataSource[TrainingData, EvalInfo, Query, Actual] {
-    def readTraining(sc: SparkContext): TrainingData = {
-      TrainingData(id)
-    }
-  }
-  
-  class PDataSource1(id: Int = 0, en: Int = 0, qn: Int = 0)
-  extends PDataSource[TrainingData, EvalInfo, Query, Actual] {
-    def readTraining(sc: SparkContext): TrainingData = TrainingData(id)
-    
-    override
-    def readEval(sc: SparkContext)
-    : Seq[(TrainingData, EvalInfo, RDD[(Query, Actual)])] = {
-      (0 until en).map { ex => {
-        val qaSeq: Seq[(Query, Actual)] = (0 until qn).map { qx => {
-          (Query(id, ex=ex, qx=qx), Actual(id, ex, qx))
-        }}
-        (TrainingData(id), EvalInfo(id), sc.parallelize(qaSeq))
-      }}
-    }
-  }
-
-  object PDataSource2 {
-    case class Params(id: Int, en: Int = 0, qn: Int = 0) extends PIOParams
-  }
-  
-  class PDataSource2(params: PDataSource2.Params)
-  extends PDataSource[TrainingData, EvalInfo, Query, Actual] {
-    val id = params.id
-    def readTraining(sc: SparkContext): TrainingData = TrainingData(id)
-    
-    override
-    def readEval(sc: SparkContext)
-    : Seq[(TrainingData, EvalInfo, RDD[(Query, Actual)])] = {
-      (0 until params.en).map { ex => {
-        val qaSeq: Seq[(Query, Actual)] = (0 until params.qn).map { qx => {
-          (Query(id, ex=ex, qx=qx), Actual(id, ex, qx))
-        }}
-        (TrainingData(id), EvalInfo(id), sc.parallelize(qaSeq))
-      }}
-    }
-  }
-  
-  class PDataSource3(id: Int = 0, error: Boolean = false) 
-  extends PDataSource[TrainingData, EvalInfo, Query, Actual] {
-    def readTraining(sc: SparkContext): TrainingData = {
-      TrainingData(id = id, error = error)
-    }
-  }
-  
-  object PDataSource4 {
-    class Params(val id: Int, val en: Int = 0, val qn: Int = 0) 
-      extends PIOParams
-  }
-  
-  class PDataSource4(params: PDataSource4.Params)
-  extends PDataSource[TrainingData, EvalInfo, Query, Actual] {
-    val id = params.id
-    def readTraining(sc: SparkContext): TrainingData = TrainingData(id)
-    
-    override
-    def readEval(sc: SparkContext)
-    : Seq[(TrainingData, EvalInfo, RDD[(Query, Actual)])] = {
-      (0 until params.en).map { ex => {
-        val qaSeq: Seq[(Query, Actual)] = (0 until params.qn).map { qx => {
-          (Query(id, ex=ex, qx=qx), Actual(id, ex, qx))
-        }}
-        (TrainingData(id), EvalInfo(id), sc.parallelize(qaSeq))
-      }}
-    }
-  }
-  
-  class LDataSource0(id: Int, en: Int = 0, qn: Int = 0) 
-    extends LDataSource[TrainingData, EvalInfo, Query, Actual] {
-    def readTraining(): TrainingData = TrainingData(id)
-   
-    override
-    def readEval()
-    : Seq[(TrainingData, EvalInfo, Seq[(Query, Actual)])] = {
-      (0 until en).map { ex => {
-        val qaSeq: Seq[(Query, Actual)] = (0 until qn).map { qx => {
-          (Query(id, ex=ex, qx=qx), Actual(id, ex, qx))
-        }}
-        (TrainingData(id), EvalInfo(id), qaSeq)
-      }}
-    }
-  }
-  
-  object LDataSource1 {
-    case class Params(id: Int, en: Int = 0, qn: Int = 0) extends PIOParams
-  }
-  
-  class LDataSource1(params: LDataSource1.Params)
-  extends LDataSource[TrainingData, EvalInfo, Query, Actual] {
-    val id = params.id
-    def readTraining(): TrainingData = TrainingData(id)
-    
-    override
-    def readEval(): Seq[(TrainingData, EvalInfo, Seq[(Query, Actual)])] = {
-      (0 until params.en).map { ex => {
-        val qaSeq: Seq[(Query, Actual)] = (0 until params.qn).map { qx => {
-          (Query(id, ex=ex, qx=qx), Actual(id, ex, qx))
-        }}
-        (TrainingData(id), EvalInfo(id), qaSeq)
-      }}
-    }
-  }
-  
-  class PPreparator0(id: Int = 0)
-  extends PPreparator[TrainingData, ProcessedData] {
-    def prepare(sc: SparkContext, td: TrainingData): ProcessedData = {
-      ProcessedData(id, td)
-    }
-  }
-
-  object PPreparator1 {
-    case class Params(id: Int = 0) extends PIOParams
-  }
-
-  class PPreparator1(params: PPreparator1.Params)
-  extends PPreparator[TrainingData, ProcessedData] {
-    def prepare(sc: SparkContext, td: TrainingData): ProcessedData = {
-      ProcessedData(params.id, td)
-    }
-  }
-
-  class LPreparator0(id: Int = 0) 
-  extends LPreparator[TrainingData, ProcessedData] {
-    def prepare(td: TrainingData): ProcessedData = {
-      ProcessedData(id, td)
-    }
-  }
-  
-  object LPreparator1 {
-    case class Params(id: Int = 0) extends PIOParams
-  }
-
-  class LPreparator1(params: LPreparator1.Params)
-  extends LPreparator[TrainingData, ProcessedData] {
-    def prepare(td: TrainingData): ProcessedData = {
-      ProcessedData(params.id, td)
-    }
-  }
-
-  object PAlgo0 {
-    case class Model(id: Int, pd: ProcessedData)
-  }
-
-  class PAlgo0(id: Int = 0)
-  extends PAlgorithm[ProcessedData, PAlgo0.Model, Query, Prediction] {
-    def train(sc: SparkContext, pd: ProcessedData)
-    : PAlgo0.Model = PAlgo0.Model(id, pd)
-
-    override
-    def batchPredict(m: PAlgo0.Model, qs: RDD[(Long, Query)])
-    : RDD[(Long, Prediction)] = {
-      qs.mapValues(q => Prediction(id, q, Some(m)))
-    }
-    
-    def predict(m: PAlgo0.Model, q: Query): Prediction = {
-      Prediction(id, q, Some(m))
-    }
-  }
-
-  object PAlgo1 {
-    case class Model(id: Int, pd: ProcessedData)
-  }
-
-  class PAlgo1(id: Int = 0)
-  extends PAlgorithm[ProcessedData, PAlgo1.Model, Query, Prediction] {
-    def train(sc: SparkContext, pd: ProcessedData)
-    : PAlgo1.Model = PAlgo1.Model(id, pd)
-
-    override
-    def batchPredict(m: PAlgo1.Model, qs: RDD[(Long, Query)])
-    : RDD[(Long, Prediction)] = {
-      qs.mapValues(q => Prediction(id, q, Some(m)))
-    }
-
-    def predict(m: PAlgo1.Model, q: Query): Prediction = {
-      Prediction(id, q, Some(m))
-    }
-  }
-  
-  object PAlgo2 {
-    case class Model(id: Int, pd: ProcessedData)
-    case class Params(id: Int) extends PIOParams
-  }
-
-  class PAlgo2(params: PAlgo2.Params)
-  extends PAlgorithm[ProcessedData, PAlgo2.Model, Query, Prediction] {
-    val id = params.id
-
-    def train(sc: SparkContext, pd: ProcessedData)
-    : PAlgo2.Model = PAlgo2.Model(id, pd)
-
-    override
-    def batchPredict(m: PAlgo2.Model, qs: RDD[(Long, Query)])
-    : RDD[(Long, Prediction)] = {
-      qs.mapValues(q => Prediction(id, q, Some(m)))
-    }
-
-    def predict(m: PAlgo2.Model, q: Query): Prediction = {
-      Prediction(id, q, Some(m))
-    }
-  }
-  
-  object PAlgo3 {
-    case class Model(id: Int, pd: ProcessedData)
-    extends LocalFileSystemPersistentModel[Params]
-    
-    object Model extends LocalFileSystemPersistentModelLoader[Params, Model]
-
-    case class Params(id: Int) extends PIOParams
-  }
-
-  class PAlgo3(params: PAlgo3.Params)
-  extends PAlgorithm[ProcessedData, PAlgo3.Model, Query, Prediction] {
-    val id = params.id
-
-    def train(sc: SparkContext, pd: ProcessedData)
-    : PAlgo3.Model = PAlgo3.Model(id, pd)
-
-    override
-    def batchPredict(m: PAlgo3.Model, qs: RDD[(Long, Query)])
-    : RDD[(Long, Prediction)] = {
-      qs.mapValues(q => Prediction(id, q, Some(m)))
-    }
-
-    def predict(m: PAlgo3.Model, q: Query): Prediction = {
-      Prediction(id, q, Some(m))
-    }
-  }
-  
-  object LAlgo0 {
-    case class Model(id: Int, pd: ProcessedData)
-  }
-
-  class LAlgo0(id: Int = 0) 
-  extends LAlgorithm[ProcessedData, LAlgo0.Model, Query, Prediction] {
-    def train(pd: ProcessedData): LAlgo0.Model = LAlgo0.Model(id, pd)
-
-    def predict(m: LAlgo0.Model, q: Query): Prediction = {
-      Prediction(id, q, Some(m))
-    }
-  }
-  
-  object LAlgo1 {
-    case class Model(id: Int, pd: ProcessedData)
-  }
-
-  class LAlgo1(id: Int = 0) 
-  extends LAlgorithm[ProcessedData, LAlgo1.Model, Query, Prediction] {
-    def train(pd: ProcessedData): LAlgo1.Model = LAlgo1.Model(id, pd)
-    
-    def predict(m: LAlgo1.Model, q: Query): Prediction = {
-      Prediction(id, q, Some(m))
-    }
-  }
-  
-  object LAlgo2 {
-    case class Params(id: Int) extends PIOParams
-
-    case class Model(id: Int, pd: ProcessedData)
-    extends LocalFileSystemPersistentModel[EmptyParams]
-    
-    object Model extends LocalFileSystemPersistentModelLoader[EmptyParams, Model]
-  }
-
-  class LAlgo2(params: LAlgo2.Params) 
-  extends LAlgorithm[ProcessedData, LAlgo2.Model, Query, Prediction] {
-    def train(pd: ProcessedData): LAlgo2.Model = LAlgo2.Model(params.id, pd)
-    
-    def predict(m: LAlgo2.Model, q: Query): Prediction = {
-      Prediction(params.id, q, Some(m))
-    }
-  }
-
-  object LAlgo3 {
-    case class Params(id: Int) extends PIOParams
-
-    case class Model(id: Int, pd: ProcessedData)
-  }
-
-  class LAlgo3(params: LAlgo3.Params) 
-  extends LAlgorithm[ProcessedData, LAlgo3.Model, Query, Prediction] {
-    def train(pd: ProcessedData): LAlgo3.Model = LAlgo3.Model(params.id, pd)
-    
-    def predict(m: LAlgo3.Model, q: Query): Prediction = {
-      Prediction(params.id, q, Some(m))
-    }
-  }
-
-  // N : P2L, as "N" sits in the middle between "P" and "L".
-  object NAlgo0 {
-    case class Model(id: Int, pd: ProcessedData)
-  }
-
-  class NAlgo0 (id: Int = 0)
-  extends P2LAlgorithm[ProcessedData, NAlgo0.Model, Query, Prediction] {
-    def train(sc: SparkContext, pd: ProcessedData)
-    : NAlgo0.Model = NAlgo0.Model(id, pd)
-  
-    def predict(m: NAlgo0.Model, q: Query): Prediction = {
-      Prediction(id, q, Some(m))
-    }
-  }
-
-  object NAlgo1 {
-    case class Model(id: Int, pd: ProcessedData)
-  }
-
-  class NAlgo1 (id: Int = 0)
-  extends P2LAlgorithm[ProcessedData, NAlgo1.Model, Query, Prediction] {
-    def train(sc: SparkContext, pd: ProcessedData)
-    : NAlgo1.Model = NAlgo1.Model(id, pd)
-   
-    def predict(m: NAlgo1.Model, q: Query): Prediction = {
-      Prediction(id, q, Some(m))
-    }
-  }
-  
-  object NAlgo2 {
-    case class Params(id: Int) extends PIOParams
-
-    case class Model(id: Int, pd: ProcessedData)
-    extends LocalFileSystemPersistentModel[EmptyParams]
-    
-    object Model extends LocalFileSystemPersistentModelLoader[EmptyParams, Model]
-  }
-
-  class NAlgo2(params: NAlgo2.Params) 
-  extends P2LAlgorithm[ProcessedData, NAlgo2.Model, Query, Prediction] {
-    def train(sc: SparkContext, pd: ProcessedData)
-    : NAlgo2.Model = NAlgo2.Model(params.id, pd)
-    
-    def predict(m: NAlgo2.Model, q: Query): Prediction = {
-      Prediction(params.id, q, Some(m))
-    }
-  }
-
-  object NAlgo3 {
-    case class Params(id: Int) extends PIOParams
-
-    case class Model(id: Int, pd: ProcessedData)
-  }
-
-  class NAlgo3(params: NAlgo3.Params) 
-  extends P2LAlgorithm[ProcessedData, NAlgo3.Model, Query, Prediction] {
-    def train(sc: SparkContext, pd: ProcessedData)
-    : NAlgo3.Model = NAlgo3.Model(params.id, pd)
-    
-    def predict(m: NAlgo3.Model, q: Query): Prediction = {
-      Prediction(params.id, q, Some(m))
-    }
-  }
-
-  class LServing0(id: Int = 0) extends LServing[Query, Prediction] {
-    def serve(q: Query, ps: Seq[Prediction]): Prediction = {
-      Prediction(id, q, ps=ps)
-    }
-  }
-
-  object LServing1 {
-    case class Params(id: Int) extends PIOParams
-  }
-  
-  class LServing1(params: LServing1.Params) extends LServing[Query, Prediction] {
-    def serve(q: Query, ps: Seq[Prediction]): Prediction = {
-      Prediction(params.id, q, ps=ps)
-    }
-  }
-  
-  class LServing2(id: Int) extends LServing[Query, Prediction] {
-    override
-    def supplement(q: Query): Query = q.copy(supp = true)
-
-    def serve(q: Query, ps: Seq[Prediction]): Prediction = {
-      Prediction(id, q, ps=ps)
-    }
-  }
-}
-
-object Engine1 {
-  case class EvalInfo(v: Double) extends Serializable
-  case class Query() extends Serializable
-  case class Prediction() extends Serializable
-  case class Actual() extends Serializable
-  case class DSP(v: Double) extends Params
-}
-
-class Engine1 
-extends BaseEngine[
-  Engine1.EvalInfo, Engine1.Query, Engine1.Prediction, Engine1.Actual] {
-
-  def train(
-    sc: SparkContext, 
-    engineParams: EngineParams,
-    engineInstanceId: String = "",
-    params: WorkflowParams = WorkflowParams()): Seq[Any] = Seq[Any]()
-
-  def eval(sc: SparkContext, engineParams: EngineParams, params: WorkflowParams)
-  : Seq[(Engine1.EvalInfo, 
-      RDD[(Engine1.Query, Engine1.Prediction, Engine1.Actual)])] = {
-    val dsp = engineParams.dataSourceParams._2.asInstanceOf[Engine1.DSP]
-    Seq(
-      (Engine1.EvalInfo(dsp.v),
-        sc.emptyRDD[(Engine1.Query, Engine1.Prediction, Engine1.Actual)]))
-  }
-}
-
-
-class Metric0
-extends Metric[Engine1.EvalInfo, Engine1.Query, Engine1.Prediction,
-Engine1.Actual, Double] {
-  override def header: String = "Metric0"
-
-  def calculate(
-    sc: SparkContext, 
-    evalDataSet: Seq[(Engine1.EvalInfo, RDD[(Engine1.Query, Engine1.Prediction,
-    Engine1.Actual)])]): Double = {
-    evalDataSet.head._1.v
-  }
-}
-
-object Metric1 {
-  case class Result(c: Int, v: Double) extends Serializable
-}
-
-class Metric1
-extends Metric[Engine1.EvalInfo, Engine1.Query, Engine1.Prediction,
-Engine1.Actual, Metric1.Result]()(Ordering.by[Metric1.Result, Double](_.v)) {
-  override def header: String = "Metric1"
-
-  def calculate(
-    sc: SparkContext, 
-    evalDataSet: Seq[(Engine1.EvalInfo, RDD[(Engine1.Query, Engine1.Prediction,
-    Engine1.Actual)])]): Metric1.Result = {
-    Metric1.Result(0, evalDataSet.head._1.v)
-  }
-}
-

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/core/src/test/scala/io/prediction/workflow/BaseTest.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/io/prediction/workflow/BaseTest.scala b/core/src/test/scala/io/prediction/workflow/BaseTest.scala
deleted file mode 100644
index 4925558..0000000
--- a/core/src/test/scala/io/prediction/workflow/BaseTest.scala
+++ /dev/null
@@ -1,75 +0,0 @@
-//package org.apache.spark
-package io.prediction.workflow
-
-import _root_.io.netty.util.internal.logging.{Slf4JLoggerFactory, InternalLoggerFactory}
-import org.scalatest.BeforeAndAfterAll
-import org.scalatest.BeforeAndAfterEach
-import org.scalatest.Suite
-import org.apache.spark.SparkContext
-import org.apache.spark.SparkConf
-
-
-/** Manages a local `sc` [[SparkContext]] variable, correctly stopping it
-  * after each test. */
-trait LocalSparkContext 
-extends BeforeAndAfterEach with BeforeAndAfterAll { self: Suite =>
-
-  @transient var sc: SparkContext = _
-
-  override def beforeAll() {
-    InternalLoggerFactory.setDefaultFactory(new Slf4JLoggerFactory())
-    super.beforeAll()
-  }
-
-  override def afterEach() {
-    resetSparkContext()
-    super.afterEach()
-  }
-
-  def resetSparkContext() = {
-    LocalSparkContext.stop(sc)
-    sc = null
-  }
-
-}
-
-object LocalSparkContext {
-  def stop(sc: SparkContext) {
-    if (sc != null) {
-      sc.stop()
-    }
-    // To avoid Akka rebinding to the same port, since it doesn't unbind immediately on shutdown
-    System.clearProperty("spark.driver.port")
-  }
-
-  /** Runs `f` by passing in `sc` and ensures that `sc` is stopped. */
-  def withSpark[T](sc: SparkContext)(f: SparkContext => T) = {
-    try {
-      f(sc)
-    } finally {
-      stop(sc)
-    }
-  }
-
-}
-/** Shares a local `SparkContext` between all tests in a suite and closes it at the end */
-trait SharedSparkContext extends BeforeAndAfterAll { self: Suite =>
-
-  @transient private var _sc: SparkContext = _
-
-  def sc: SparkContext = _sc
-
-  var conf = new SparkConf(false)
-
-  override def beforeAll() {
-    _sc = new SparkContext("local[4]", "test", conf)
-    super.beforeAll()
-  }
-
-  override def afterAll() {
-    LocalSparkContext.stop(_sc)
-    _sc = null
-    super.afterAll()
-  }
-}
-
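
For example, withSpark can wrap a one-off context outside the shared-suite
pattern (a sketch):

  import org.apache.spark.SparkContext

  // Create a throwaway context, run a job, and guarantee cleanup.
  val result = LocalSparkContext.withSpark(
    new SparkContext("local[2]", "test")) { sc =>
    sc.parallelize(1 to 10).sum()
  }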

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/core/src/test/scala/io/prediction/workflow/EngineWorkflowTest.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/io/prediction/workflow/EngineWorkflowTest.scala b/core/src/test/scala/io/prediction/workflow/EngineWorkflowTest.scala
deleted file mode 100644
index e69de29..0000000

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/core/src/test/scala/io/prediction/workflow/EvaluationWorkflowTest.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/io/prediction/workflow/EvaluationWorkflowTest.scala b/core/src/test/scala/io/prediction/workflow/EvaluationWorkflowTest.scala
deleted file mode 100644
index 7a50d33..0000000
--- a/core/src/test/scala/io/prediction/workflow/EvaluationWorkflowTest.scala
+++ /dev/null
@@ -1,61 +0,0 @@
-package io.prediction.workflow
-
-import io.prediction.controller._
-
-import org.scalatest.FunSuite
-import org.scalatest.Matchers._
-
-class EvaluationWorkflowSuite extends FunSuite with SharedSparkContext {
-
-  test("Evaluation return best engine params, simple result type: Double") {
-    val engine = new Engine1()
-    val ep0 = EngineParams(dataSourceParams = Engine1.DSP(0.2))
-    val ep1 = EngineParams(dataSourceParams = Engine1.DSP(0.3))
-    val ep2 = EngineParams(dataSourceParams = Engine1.DSP(0.3))
-    val ep3 = EngineParams(dataSourceParams = Engine1.DSP(-0.2))
-    val engineParamsList = Seq(ep0, ep1, ep2, ep3)
-
-    val evaluator = MetricEvaluator(new Metric0())
-  
-    object Eval extends Evaluation {
-      engineEvaluator = (new Engine1(), MetricEvaluator(new Metric0()))
-    }
-
-    val result = EvaluationWorkflow.runEvaluation(
-      sc,
-      Eval,
-      engine,
-      engineParamsList,
-      evaluator,
-      WorkflowParams())
-
-    result.bestScore.score shouldBe 0.3
-    result.bestEngineParams shouldBe ep1
-  }
-
-  test("Evaluation return best engine params, complex result type") {
-    val engine = new Engine1()
-    val ep0 = EngineParams(dataSourceParams = Engine1.DSP(0.2))
-    val ep1 = EngineParams(dataSourceParams = Engine1.DSP(0.3))
-    val ep2 = EngineParams(dataSourceParams = Engine1.DSP(0.3))
-    val ep3 = EngineParams(dataSourceParams = Engine1.DSP(-0.2))
-    val engineParamsList = Seq(ep0, ep1, ep2, ep3)
-
-    val evaluator = MetricEvaluator(new Metric1())
-    
-    object Eval extends Evaluation {
-      engineEvaluator = (new Engine1(), MetricEvaluator(new Metric1()))
-    }
-
-    val result = EvaluationWorkflow.runEvaluation(
-      sc,
-      Eval,
-      engine,
-      engineParamsList,
-      evaluator,
-      WorkflowParams())
-  
-    result.bestScore.score shouldBe Metric1.Result(0, 0.3)
-    result.bestEngineParams shouldBe ep1
-  }
-}

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/core/src/test/scala/io/prediction/workflow/JsonExtractorSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/io/prediction/workflow/JsonExtractorSuite.scala b/core/src/test/scala/io/prediction/workflow/JsonExtractorSuite.scala
deleted file mode 100644
index 34ff751..0000000
--- a/core/src/test/scala/io/prediction/workflow/JsonExtractorSuite.scala
+++ /dev/null
@@ -1,383 +0,0 @@
-/** Copyright 2015 TappingStone, Inc.
-  *
-  * Licensed under the Apache License, Version 2.0 (the "License");
-  * you may not use this file except in compliance with the License.
-  * You may obtain a copy of the License at
-  *
-  *     http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-package io.prediction.workflow
-
-import io.prediction.controller.EngineParams
-import io.prediction.controller.Params
-import io.prediction.controller.Utils
-import org.json4s.CustomSerializer
-import org.json4s.JsonAST.JField
-import org.json4s.JsonAST.JObject
-import org.json4s.JsonAST.JString
-import org.json4s.MappingException
-import org.json4s.native.JsonMethods.compact
-import org.json4s.native.JsonMethods.render
-import org.scalatest.FunSuite
-import org.scalatest.Matchers
-
-class JsonExtractorSuite extends FunSuite with Matchers {
-
-  test("Extract Scala object using option Json4sNative works with optional and default value " +
-    "provided") {
-
-    val json = """{"string": "query string", "optional": "optional string", "default": "d"}"""
-
-    val query = JsonExtractor.extract(
-      JsonExtractorOption.Json4sNative,
-      json,
-      classOf[ScalaQuery])
-
-    query should be (ScalaQuery("query string", Some("optional string"), "d"))
-  }
-
-  test("Extract Scala object using option Json4sNative works with no optional and no default " +
-    "value provided") {
-
-    val json = """{"string": "query string"}"""
-
-    val query = JsonExtractor.extract(
-      JsonExtractorOption.Json4sNative,
-      json,
-      classOf[ScalaQuery])
-
-    query should be (ScalaQuery("query string", None, "default"))
-  }
-
-  test("Extract Scala object using option Json4sNative works with null optional and null default" +
-    " value") {
-
-    val json = """{"string": "query string", "optional": null, "default": null}"""
-
-    val query = JsonExtractor.extract(
-      JsonExtractorOption.Json4sNative,
-      json,
-      classOf[ScalaQuery])
-
-    query should be (ScalaQuery("query string", None, "default"))
-  }
-
-  test("Extract Scala object using option Both works with optional and default value provided") {
-
-    val json = """{"string": "query string", "optional": "optional string", "default": "d"}"""
-
-    val query = JsonExtractor.extract(
-      JsonExtractorOption.Both,
-      json,
-      classOf[ScalaQuery])
-
-    query should be (ScalaQuery("query string", Some("optional string"), "d"))
-  }
-
-  test("Extract Scala object using option Both works with no optional and no default value " +
-    "provided") {
-
-    val json = """{"string": "query string"}"""
-
-    val query = JsonExtractor.extract(
-      JsonExtractorOption.Both,
-      json,
-      classOf[ScalaQuery])
-
-    query should be (ScalaQuery("query string", None, "default"))
-  }
-
-  test("Extract Scala object using option Both works with null optional and null default value") {
-
-    val json = """{"string": "query string", "optional": null, "default": null}"""
-
-    val query = JsonExtractor.extract(
-      JsonExtractorOption.Both,
-      json,
-      classOf[ScalaQuery])
-
-    query should be (ScalaQuery("query string", None, "default"))
-  }
-
-  test("Extract Scala object using option Gson should not get default value and optional none" +
-    " value") {
-
-    val json = """{"string": "query string"}"""
-    val query = JsonExtractor.extract(
-      JsonExtractorOption.Gson,
-      json,
-      classOf[ScalaQuery])
-
-    query should be (ScalaQuery("query string", null, null))
-  }
-
-  test("Extract Scala object using option Gson should throw an exception with optional " +
-    "value provided") {
-
-    val json = """{"string": "query string", "optional": "o", "default": "d"}"""
-    intercept[RuntimeException] {
-      JsonExtractor.extract(
-        JsonExtractorOption.Gson,
-        json,
-        classOf[ScalaQuery])
-    }
-  }
-
-  test("Extract Java object using option Gson works") {
-
-    val json = """{"q": "query string"}"""
-
-    val query = JsonExtractor.extract(
-      JsonExtractorOption.Gson,
-      json,
-      classOf[JavaQuery])
-
-    query should be (new JavaQuery("query string"))
-  }
-
-  test("Extract Java object using option Both works") {
-
-    val json = """{"q": "query string"}"""
-
-    val query = JsonExtractor.extract(
-      JsonExtractorOption.Both,
-      json,
-      classOf[JavaQuery])
-
-    query should be (new JavaQuery("query string"))
-  }
-
-  test("Extract Java object using option Json4sNative should throw an exception") {
-
-    val json = """{"q": "query string"}"""
-
-    intercept[MappingException] {
-      JsonExtractor.extract(
-        JsonExtractorOption.Json4sNative,
-        json,
-        classOf[JavaQuery])
-    }
-  }
-
-  test("Extract Scala object using option Json4sNative with custom deserializer") {
-    val json = """{"string": "query string", "optional": "o", "default": "d"}"""
-
-    val query = JsonExtractor.extract(
-      JsonExtractorOption.Json4sNative,
-      json,
-      classOf[ScalaQuery],
-      Utils.json4sDefaultFormats + new UpperCaseFormat
-    )
-
-    query should be(ScalaQuery("QUERY STRING", Some("O"), "D"))
-  }
-
-  test("Extract Java object usingoption Gson with custom deserializer") {
-    val json = """{"q": "query string"}"""
-
-    val query = JsonExtractor.extract(
-      extractorOption = JsonExtractorOption.Gson,
-      json = json,
-      clazz = classOf[JavaQuery],
-      gsonTypeAdapterFactories = Seq(new JavaQueryTypeAdapterFactory)
-    )
-
-    query should be(new JavaQuery("QUERY STRING"))
-  }
-
-  test("Java object to JValue using option Both works") {
-    val query = new JavaQuery("query string")
-    val jValue = JsonExtractor.toJValue(JsonExtractorOption.Both, query)
-
-    compact(render(jValue)) should be ("""{"q":"query string"}""")
-  }
-
-  test("Java object to JValue using option Gson works") {
-    val query = new JavaQuery("query string")
-    val jValue = JsonExtractor.toJValue(JsonExtractorOption.Gson, query)
-
-    compact(render(jValue)) should be ("""{"q":"query string"}""")
-  }
-
-  test("Java object to JValue using option Json4sNative results in empty Json") {
-    val query = new JavaQuery("query string")
-    val jValue = JsonExtractor.toJValue(JsonExtractorOption.Json4sNative, query)
-
-    compact(render(jValue)) should be ("""{}""")
-  }
-
-  test("Scala object to JValue using option Both works") {
-    val query = new ScalaQuery("query string", Some("option"))
-    val jValue = JsonExtractor.toJValue(JsonExtractorOption.Both, query)
-
-    compact(render(jValue)) should
-      be ("""{"string":"query string","optional":"option","default":"default"}""")
-  }
-
-  test("Scala object to JValue using option Gson does not serialize optional") {
-    val query = new ScalaQuery("query string", Some("option"))
-    val jValue = JsonExtractor.toJValue(JsonExtractorOption.Gson, query)
-
-    compact(render(jValue)) should
-      be ("""{"string":"query string","optional":{},"default":"default"}""")
-  }
-
-  test("Scala object to JValue using option Json4sNative works") {
-    val query = new ScalaQuery("query string", Some("option"))
-    val jValue = JsonExtractor.toJValue(JsonExtractorOption.Json4sNative, query)
-
-    compact(render(jValue)) should
-      be ("""{"string":"query string","optional":"option","default":"default"}""")
-  }
-
-  test("Scala object to JValue using option Json4sNative with custom serializer") {
-    val query = new ScalaQuery("query string", Some("option"))
-    val jValue = JsonExtractor.toJValue(
-      JsonExtractorOption.Json4sNative,
-      query,
-      Utils.json4sDefaultFormats + new UpperCaseFormat
-    )
-
-    compact(render(jValue)) should
-      be ("""{"string":"QUERY STRING","optional":"OPTION","default":"DEFAULT"}""")
-  }
-
-  test("Java object to JValue using option Gson with custom serializer") {
-    val query = new JavaQuery("query string")
-    val jValue = JsonExtractor.toJValue(
-      extractorOption = JsonExtractorOption.Gson,
-      o = query,
-      gsonTypeAdapterFactories = Seq(new JavaQueryTypeAdapterFactory)
-    )
-
-    compact(render(jValue)) should be ("""{"q":"QUERY STRING"}""")
-  }
-
-  test("Java Param to Json using option Both") {
-    val param = ("algo", new JavaParams("parameter"))
-    val json = JsonExtractor.paramToJson(JsonExtractorOption.Both, param)
-
-    json should be ("""{"algo":{"p":"parameter"}}""")
-  }
-
-  test("Java Param to Json using option Gson") {
-    val param = ("algo", new JavaParams("parameter"))
-    val json = JsonExtractor.paramToJson(JsonExtractorOption.Gson, param)
-
-    json should be ("""{"algo":{"p":"parameter"}}""")
-  }
-
-  test("Scala Param to Json using option Both") {
-    val param = ("algo", AlgorithmParams("parameter"))
-    val json = JsonExtractor.paramToJson(JsonExtractorOption.Both, param)
-
-    json should be ("""{"algo":{"a":"parameter"}}""")
-  }
-
-  test("Scala Param to Json using option Json4sNative") {
-    val param = ("algo", AlgorithmParams("parameter"))
-    val json = JsonExtractor.paramToJson(JsonExtractorOption.Json4sNative, param)
-
-    json should be ("""{"algo":{"a":"parameter"}}""")
-  }
-
-  test("Java Params to Json using option Both") {
-    val params = Seq(("algo", new JavaParams("parameter")), ("algo2", new JavaParams("parameter2")))
-    val json = JsonExtractor.paramsToJson(JsonExtractorOption.Both, params)
-
-    json should be ("""[{"algo":{"p":"parameter"}},{"algo2":{"p":"parameter2"}}]""")
-  }
-
-  test("Java Params to Json using option Gson") {
-    val params = Seq(("algo", new JavaParams("parameter")), ("algo2", new JavaParams("parameter2")))
-    val json = JsonExtractor.paramsToJson(JsonExtractorOption.Gson, params)
-
-    json should be ("""[{"algo":{"p":"parameter"}},{"algo2":{"p":"parameter2"}}]""")
-  }
-
-  test("Scala Params to Json using option Both") {
-    val params =
-      Seq(("algo", AlgorithmParams("parameter")), ("algo2", AlgorithmParams("parameter2")))
-    val json = JsonExtractor.paramsToJson(JsonExtractorOption.Both, params)
-
-    json should be (org.json4s.native.Serialization.write(params)(Utils.json4sDefaultFormats))
-  }
-
-  test("Scala Params to Json using option Json4sNative") {
-    val params =
-      Seq(("algo", AlgorithmParams("parameter")), ("algo2", AlgorithmParams("parameter2")))
-    val json = JsonExtractor.paramsToJson(JsonExtractorOption.Json4sNative, params)
-
-    json should be (org.json4s.native.Serialization.write(params)(Utils.json4sDefaultFormats))
-  }
-
-  test("Mixed Java and Scala Params to Json using option Both") {
-    val params =
-      Seq(("scala", AlgorithmParams("parameter")), ("java", new JavaParams("parameter2")))
-    val json = JsonExtractor.paramsToJson(JsonExtractorOption.Both, params)
-
-    json should be ("""[{"scala":{"a":"parameter"}},{"java":{"p":"parameter2"}}]""")
-  }
-
-  test("Serializing Scala EngineParams works using option Json4sNative") {
-    val ep = new EngineParams(
-      dataSourceParams = ("ds", DataSourceParams("dsp")),
-      algorithmParamsList = Seq(("a0", AlgorithmParams("ap"))))
-
-    val json = JsonExtractor.engineParamsToJson(JsonExtractorOption.Json4sNative, ep)
-
-    json should be (
-      """{"dataSourceParams":{"ds":{"a":"dsp"}},"preparatorParams":{"":{}},""" +
-        """"algorithmParamsList":[{"a0":{"a":"ap"}}],"servingParams":{"":{}}}""")
-  }
-
-  test("Serializing Java EngineParams works using option Gson") {
-    val ep = new EngineParams(
-      dataSourceParams = ("ds", new JavaParams("dsp")),
-      algorithmParamsList = Seq(("a0", new JavaParams("ap")), ("a1", new JavaParams("ap2"))))
-
-    val json = JsonExtractor.engineParamsToJson(JsonExtractorOption.Gson, ep)
-
-    json should be (
-      """{"dataSourceParams":{"ds":{"p":"dsp"}},"preparatorParams":{"":{}},""" +
-        """"algorithmParamsList":[{"a0":{"p":"ap"}},{"a1":{"p":"ap2"}}],"servingParams":{"":{}}}""")
-  }
-
-  test("Serializing Java EngineParams works using option Both") {
-    val ep = new EngineParams(
-      dataSourceParams = ("ds", new JavaParams("dsp")),
-      algorithmParamsList = Seq(("a0", new JavaParams("ap")), ("a1", new JavaParams("ap2"))))
-
-    val json = JsonExtractor.engineParamsToJson(JsonExtractorOption.Both, ep)
-
-    json should be (
-      """{"dataSourceParams":{"ds":{"p":"dsp"}},"preparatorParams":{"":{}},""" +
-        """"algorithmParamsList":[{"a0":{"p":"ap"}},{"a1":{"p":"ap2"}}],"servingParams":{"":{}}}""")
-  }
-}
-
-private case class AlgorithmParams(a: String) extends Params
-
-private case class DataSourceParams(a: String) extends Params
-
-private case class ScalaQuery(string: String, optional: Option[String], default: String = "default")
-
-private class UpperCaseFormat extends CustomSerializer[ScalaQuery](format => ( {
-  case JObject(JField("string", JString(string)) ::
-    JField("optional", JString(optional)) ::
-    JField("default", JString(default)) ::
-    Nil) => ScalaQuery(string.toUpperCase, Some(optional.toUpperCase), default.toUpperCase)
-}, {
-  case x: ScalaQuery =>
-    JObject(
-      JField("string", JString(x.string.toUpperCase)),
-      JField("optional", JString(x.optional.get.toUpperCase)),
-      JField("default", JString(x.default.toUpperCase)))
-}))
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/core/src/test/scala/org/apache/predictionio/controller/EngineTest.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/predictionio/controller/EngineTest.scala b/core/src/test/scala/org/apache/predictionio/controller/EngineTest.scala
new file mode 100644
index 0000000..eebe0af
--- /dev/null
+++ b/core/src/test/scala/org/apache/predictionio/controller/EngineTest.scala
@@ -0,0 +1,615 @@
+package org.apache.predictionio.controller
+
+import org.apache.predictionio.workflow.PersistentModelManifest
+import org.apache.predictionio.workflow.SharedSparkContext
+import org.apache.predictionio.workflow.StopAfterPrepareInterruption
+import org.apache.predictionio.workflow.StopAfterReadInterruption
+
+import grizzled.slf4j.Logger
+import org.apache.predictionio.workflow.WorkflowParams
+import org.apache.spark.rdd.RDD
+import org.scalatest.Inspectors._
+import org.scalatest.Matchers._
+import org.scalatest.FunSuite
+import org.scalatest.Inside
+
+import scala.util.Random
+
+class EngineSuite
+extends FunSuite with Inside with SharedSparkContext {
+  import org.apache.predictionio.controller.Engine0._
+  @transient lazy val logger = Logger[this.type] 
+
+  test("Engine.train") {
+    val engine = new Engine(
+      classOf[PDataSource2],
+      classOf[PPreparator1],
+      Map("" -> classOf[PAlgo2]),
+      classOf[LServing1])
+
+    val engineParams = EngineParams(
+      dataSourceParams = PDataSource2.Params(0),
+      preparatorParams = PPreparator1.Params(1),
+      algorithmParamsList = Seq(("", PAlgo2.Params(2))),
+      servingParams = LServing1.Params(3))
+
+    val models = engine.train(
+      sc, 
+      engineParams, 
+      engineInstanceId = "",
+      params = WorkflowParams())
+    
+    val pd = ProcessedData(1, TrainingData(0))
+
+    // PAlgo2.Model does not implement the IPersistentModel trait, hence the
+    // model extracted after training is Unit.
+    models should contain theSameElementsAs Seq(Unit)
+  }
+
+  test("Engine.train persisting PAlgo.Model") {
+    val engine = new Engine(
+      classOf[PDataSource2],
+      classOf[PPreparator1],
+      Map(
+        "PAlgo2" -> classOf[PAlgo2],
+        "PAlgo3" -> classOf[PAlgo3]
+      ),
+      classOf[LServing1])
+
+    val engineParams = EngineParams(
+      dataSourceParams = PDataSource2.Params(0),
+      preparatorParams = PPreparator1.Params(1),
+      algorithmParamsList = Seq(
+        ("PAlgo2", PAlgo2.Params(2)),
+        ("PAlgo3", PAlgo3.Params(21)),
+        ("PAlgo3", PAlgo3.Params(22))
+      ),
+      servingParams = LServing1.Params(3))
+
+    val pd = ProcessedData(1, TrainingData(0))
+    val model21 = PAlgo3.Model(21, pd)
+    val model22 = PAlgo3.Model(22, pd)
+
+    val models = engine.train(
+      sc, 
+      engineParams, 
+      engineInstanceId = "",
+      params = WorkflowParams())
+
+    val pModel21 = PersistentModelManifest(model21.getClass.getName)
+    val pModel22 = PersistentModelManifest(model22.getClass.getName)
+    
+    models should contain theSameElementsAs Seq(Unit, pModel21, pModel22)
+  }
+
+  test("Engine.train persisting LAlgo.Model") {
+    val engine = Engine(
+      classOf[LDataSource1],
+      classOf[LPreparator1],
+      Map(
+        "LAlgo1" -> classOf[LAlgo1],
+        "LAlgo2" -> classOf[LAlgo2],
+        "LAlgo3" -> classOf[LAlgo3]
+      ),
+      classOf[LServing1])
+
+    val engineParams = EngineParams(
+      dataSourceParams = LDataSource1.Params(0),
+      preparatorParams = LPreparator1.Params(1),
+      algorithmParamsList = Seq(
+        ("LAlgo2", LAlgo2.Params(20)),
+        ("LAlgo2", LAlgo2.Params(21)),
+        ("LAlgo3", LAlgo3.Params(22))),
+      servingParams = LServing1.Params(3))
+
+    val pd = ProcessedData(1, TrainingData(0))
+    val model20 = LAlgo2.Model(20, pd)
+    val model21 = LAlgo2.Model(21, pd)
+    val model22 = LAlgo3.Model(22, pd)
+
+    val models = engine.train(
+      sc, 
+      engineParams, 
+      engineInstanceId = "",
+      params = WorkflowParams())
+
+    val pModel20 = PersistentModelManifest(model20.getClass.getName)
+    val pModel21 = PersistentModelManifest(model21.getClass.getName)
+    
+    models should contain theSameElementsAs Seq(pModel20, pModel21, model22)
+  }
+  
+  test("Engine.train persisting P&NAlgo.Model") {
+    val engine = new Engine(
+      classOf[PDataSource2],
+      classOf[PPreparator1],
+      Map(
+        "PAlgo2" -> classOf[PAlgo2],
+        "PAlgo3" -> classOf[PAlgo3],
+        "NAlgo2" -> classOf[NAlgo2],
+        "NAlgo3" -> classOf[NAlgo3]
+      ),
+      classOf[LServing1])
+
+    val engineParams = EngineParams(
+      dataSourceParams = PDataSource2.Params(0),
+      preparatorParams = PPreparator1.Params(1),
+      algorithmParamsList = Seq(
+        ("PAlgo2", PAlgo2.Params(20)),
+        ("PAlgo3", PAlgo3.Params(21)),
+        ("PAlgo3", PAlgo3.Params(22)),
+        ("NAlgo2", NAlgo2.Params(23)),
+        ("NAlgo3", NAlgo3.Params(24)),
+        ("NAlgo3", NAlgo3.Params(25))
+      ),
+      servingParams = LServing1.Params(3))
+
+    val pd = ProcessedData(1, TrainingData(0))
+    val model21 = PAlgo3.Model(21, pd)
+    val model22 = PAlgo3.Model(22, pd)
+    val model23 = NAlgo2.Model(23, pd)
+    val model24 = NAlgo3.Model(24, pd)
+    val model25 = NAlgo3.Model(25, pd)
+
+    val models = engine.train(
+      sc, 
+      engineParams, 
+      engineInstanceId = "",
+      params = WorkflowParams())
+
+    val pModel21 = PersistentModelManifest(model21.getClass.getName)
+    val pModel22 = PersistentModelManifest(model22.getClass.getName)
+    val pModel23 = PersistentModelManifest(model23.getClass.getName)
+    
+    models should contain theSameElementsAs Seq(
+      Unit, pModel21, pModel22, pModel23, model24, model25)
+  }
+
+  test("Engine.eval") {
+    val engine = new Engine(
+      classOf[PDataSource2],
+      classOf[PPreparator1],
+      Map("" -> classOf[PAlgo2]),
+      classOf[LServing1])
+
+    val qn = 10
+    val en = 3
+
+    val engineParams = EngineParams(
+      dataSourceParams = PDataSource2.Params(id = 0, en = en, qn = qn),
+      preparatorParams = PPreparator1.Params(1),
+      algorithmParamsList = Seq(("", PAlgo2.Params(2))),
+      servingParams = LServing1.Params(3))
+
+    val algoCount = engineParams.algorithmParamsList.size
+    val pd = ProcessedData(1, TrainingData(0))
+    val model0 = PAlgo2.Model(2, pd)
+
+    val evalDataSet = engine.eval(sc, engineParams, WorkflowParams())
+
+    evalDataSet should have size en
+
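+    // Each of the `en` evaluation sets is tagged with its index `ex`; queries
+    // and actuals must agree on id and indices, and the serving layer (id 3)
+    // wraps exactly one prediction per algorithm.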
+    forAll(evalDataSet.zipWithIndex) { case (evalData, ex) => {
+      val (evalInfo, qpaRDD) = evalData
+      evalInfo shouldBe EvalInfo(0)
+
+      val qpaSeq: Seq[(Query, Prediction, Actual)] = qpaRDD.collect
+
+      qpaSeq should have size qn
+
+      forAll (qpaSeq) { case (q, p, a) => 
+        val Query(qId, qEx, qQx, _) = q
+        val Actual(aId, aEx, aQx) = a
+        qId shouldBe aId
+        qEx shouldBe ex
+        aEx shouldBe ex
+        qQx shouldBe aQx
+
+        inside (p) { case Prediction(pId, pQ, pModels, pPs) => {
+          pId shouldBe 3
+          pQ shouldBe q
+          pModels shouldBe None
+          pPs should have size algoCount
+          pPs shouldBe Seq(
+            Prediction(id = 2, q = q, models = Some(model0)))
+        }}
+      }
+    }}
+  }
+
+  test("Engine.prepareDeploy PAlgo") {
+    val engine = new Engine(
+      classOf[PDataSource2],
+      classOf[PPreparator1],
+      Map(
+        "PAlgo2" -> classOf[PAlgo2],
+        "PAlgo3" -> classOf[PAlgo3],
+        "NAlgo2" -> classOf[NAlgo2],
+        "NAlgo3" -> classOf[NAlgo3]
+      ),
+      classOf[LServing1])
+
+    val engineParams = EngineParams(
+      dataSourceParams = PDataSource2.Params(0),
+      preparatorParams = PPreparator1.Params(1),
+      algorithmParamsList = Seq(
+        ("PAlgo2", PAlgo2.Params(20)),
+        ("PAlgo3", PAlgo3.Params(21)),
+        ("PAlgo3", PAlgo3.Params(22)),
+        ("NAlgo2", NAlgo2.Params(23)),
+        ("NAlgo3", NAlgo3.Params(24)),
+        ("NAlgo3", NAlgo3.Params(25))
+      ),
+      servingParams = LServing1.Params(3))
+
+    val pd = ProcessedData(1, TrainingData(0))
+    val model20 = PAlgo2.Model(20, pd)
+    val model21 = PAlgo3.Model(21, pd)
+    val model22 = PAlgo3.Model(22, pd)
+    val model23 = NAlgo2.Model(23, pd)
+    val model24 = NAlgo3.Model(24, pd)
+    val model25 = NAlgo3.Model(25, pd)
+
+    val rand = new Random()
+
+    val fakeEngineInstanceId = s"FakeInstanceId-${rand.nextLong()}"
+
+    val persistedModels = engine.train(
+      sc,
+      engineParams,
+      engineInstanceId = fakeEngineInstanceId,
+      params = WorkflowParams()
+    )
+
+    val deployableModels = engine.prepareDeploy(
+      sc,
+      engineParams,
+      fakeEngineInstanceId,
+      persistedModels,
+      params = WorkflowParams()
+    )
+
+    deployableModels should contain theSameElementsAs Seq(
+      model20, model21, model22, model23, model24, model25)
+  }
+}
+
+class EngineTrainSuite extends FunSuite with SharedSparkContext {
+  import org.apache.predictionio.controller.Engine0._
+  val defaultWorkflowParams: WorkflowParams = WorkflowParams()
+
+  test("Parallel DS/P/Algos") {
+    val models = Engine.train(
+      sc,
+      new PDataSource0(0),
+      new PPreparator0(1),
+      Seq(
+        new PAlgo0(2),
+        new PAlgo1(3),
+        new PAlgo0(4)),
+      defaultWorkflowParams
+    )
+
+    val pd = ProcessedData(1, TrainingData(0))
+
+    models should contain theSameElementsAs Seq(
+      PAlgo0.Model(2, pd), PAlgo1.Model(3, pd), PAlgo0.Model(4, pd))
+  }
+
+  test("Local DS/P/Algos") {
+    val models = Engine.train(
+      sc,
+      new LDataSource0(0),
+      new LPreparator0(1),
+      Seq(
+        new LAlgo0(2),
+        new LAlgo1(3),
+        new LAlgo0(4)),
+      defaultWorkflowParams
+    )
+    
+    val pd = ProcessedData(1, TrainingData(0))
+
+    val expectedResults = Seq(
+      LAlgo0.Model(2, pd),
+      LAlgo1.Model(3, pd),
+      LAlgo0.Model(4, pd))
+
+    forAll(models.zip(expectedResults)) { case (model, expected) => 
+      model shouldBe a [RDD[_]]
+      val localModel = model.asInstanceOf[RDD[_]].collect
+      localModel should contain theSameElementsAs Seq(expected)
+    }
+  }
+
+  test("P2L DS/P/Algos") {
+    val models = Engine.train(
+      sc,
+      new PDataSource0(0),
+      new PPreparator0(1),
+      Seq(
+        new NAlgo0(2),
+        new NAlgo1(3),
+        new NAlgo0(4)),
+      defaultWorkflowParams
+    )
+
+    val pd = ProcessedData(1, TrainingData(0))
+    
+    models should contain theSameElementsAs Seq(
+      NAlgo0.Model(2, pd), NAlgo1.Model(3, pd), NAlgo0.Model(4, pd))
+  }
+  
+  test("Parallel DS/P/Algos Stop-After-Read") {
+    val workflowParams = defaultWorkflowParams.copy(
+      stopAfterRead = true)
+
+    an [StopAfterReadInterruption] should be thrownBy Engine.train(
+      sc,
+      new PDataSource0(0),
+      new PPreparator0(1),
+      Seq(
+        new PAlgo0(2),
+        new PAlgo1(3),
+        new PAlgo0(4)),
+      workflowParams
+    )
+  }
+  
+  test("Parallel DS/P/Algos Stop-After-Prepare") {
+    val workflowParams = defaultWorkflowParams.copy(
+      stopAfterPrepare = true)
+
+    an [StopAfterPrepareInterruption] should be thrownBy Engine.train(
+      sc,
+      new PDataSource0(0),
+      new PPreparator0(1),
+      Seq(
+        new PAlgo0(2),
+        new PAlgo1(3),
+        new PAlgo0(4)),
+      workflowParams
+    )
+  }
+  
+  test("Parallel DS/P/Algos Dirty TrainingData") {
+    val workflowParams = defaultWorkflowParams.copy(
+      skipSanityCheck = false)
+
+    an [AssertionError] should be thrownBy Engine.train(
+      sc,
+      new PDataSource3(0, error = true),
+      new PPreparator0(1),
+      Seq(
+        new PAlgo0(2),
+        new PAlgo1(3),
+        new PAlgo0(4)),
+      workflowParams
+    )
+  }
+  
+  test("Parallel DS/P/Algos Dirty TrainingData But Skip Check") {
+    val workflowParams = defaultWorkflowParams.copy(
+      skipSanityCheck = true)
+
+    val models = Engine.train(
+      sc,
+      new PDataSource3(0, error = true),
+      new PPreparator0(1),
+      Seq(
+        new PAlgo0(2),
+        new PAlgo1(3),
+        new PAlgo0(4)),
+      workflowParams
+    )
+    
+    val pd = ProcessedData(1, TrainingData(0, error = true))
+
+    models should contain theSameElementsAs Seq(
+      PAlgo0.Model(2, pd), PAlgo1.Model(3, pd), PAlgo0.Model(4, pd))
+  }
+}
+
+
+class EngineEvalSuite
+extends FunSuite with Inside with SharedSparkContext {
+  import org.apache.predictionio.controller.Engine0._
+
+  @transient lazy val logger = Logger[this.type] 
+  
+  test("Simple Parallel DS/P/A/S") {
+    val en = 2
+    val qn = 5
+
+    val evalDataSet: Seq[(EvalInfo, RDD[(Query, Prediction, Actual)])] = 
+    Engine.eval(
+      sc,
+      new PDataSource1(id = 1, en = en, qn = qn),
+      new PPreparator0(id = 2),
+      Seq(new PAlgo0(id = 3)),
+      new LServing0(id = 10))
+
+    val pd = ProcessedData(2, TrainingData(1))
+    val model0 = PAlgo0.Model(3, pd)
+
+    forAll(evalDataSet.zipWithIndex) { case (evalData, ex) => {
+      val (evalInfo, qpaRDD) = evalData
+      evalInfo shouldBe EvalInfo(1)
+
+      val qpaSeq: Seq[(Query, Prediction, Actual)] = qpaRDD.collect
+      forAll (qpaSeq) { case (q, p, a) => 
+        val Query(qId, qEx, qQx, _) = q
+        val Actual(aId, aEx, aQx) = a
+        qId shouldBe aId
+        qEx shouldBe ex
+        aEx shouldBe ex
+        qQx shouldBe aQx
+
+        inside (p) { case Prediction(pId, pQ, pModels, pPs) => {
+          pId shouldBe 10
+          pQ shouldBe q
+          pModels shouldBe None
+          pPs should have size 1
+          pPs shouldBe Seq(
+            Prediction(id = 3, q = q, models = Some(model0)))
+        }}
+      }
+
+    }}
+
+  }
+
+  test("Parallel DS/P/A/S") {
+    val en = 2
+    val qn = 5
+
+    val evalDataSet: Seq[(EvalInfo, RDD[(Query, Prediction, Actual)])] = 
+    Engine.eval(
+      sc,
+      new PDataSource1(id = 1, en = en, qn = qn),
+      new PPreparator0(id = 2),
+      Seq(
+        new PAlgo0(id = 3), 
+        new PAlgo1(id = 4),
+        new NAlgo1(id = 5)),
+      new LServing0(id = 10))
+
+    val pd = ProcessedData(2, TrainingData(1))
+    val model0 = PAlgo0.Model(3, pd)
+    val model1 = PAlgo1.Model(4, pd)
+    val model2 = NAlgo1.Model(5, pd)
+
+    forAll(evalDataSet.zipWithIndex) { case (evalData, ex) => {
+      val (evalInfo, qpaRDD) = evalData
+      evalInfo shouldBe EvalInfo(1)
+
+      val qpaSeq: Seq[(Query, Prediction, Actual)] = qpaRDD.collect
+      forAll (qpaSeq) { case (q, p, a) => 
+        val Query(qId, qEx, qQx, _) = q
+        val Actual(aId, aEx, aQx) = a
+        qId shouldBe aId
+        qEx shouldBe ex
+        aEx shouldBe ex
+        qQx shouldBe aQx
+
+        inside (p) { case Prediction(pId, pQ, pModels, pPs) => {
+          pId shouldBe 10
+          pQ shouldBe q
+          pModels shouldBe None
+          pPs should have size 3
+          pPs shouldBe Seq(
+            Prediction(id = 3, q = q, models = Some(model0)),
+            Prediction(id = 4, q = q, models = Some(model1)),
+            Prediction(id = 5, q = q, models = Some(model2))
+          )
+        }}
+      }
+    }}
+  }
+  
+  test("Parallel DS/P/A/S with Supplemented Query") {
+    val en = 2
+    val qn = 5
+
+    val evalDataSet: Seq[(EvalInfo, RDD[(Query, Prediction, Actual)])] = 
+    Engine.eval(
+      sc,
+      new PDataSource1(id = 1, en = en, qn = qn),
+      new PPreparator0(id = 2),
+      Seq(
+        new PAlgo0(id = 3), 
+        new PAlgo1(id = 4),
+        new NAlgo1(id = 5)),
+      new LServing2(id = 10))
+
+    val pd = ProcessedData(2, TrainingData(1))
+    val model0 = PAlgo0.Model(3, pd)
+    val model1 = PAlgo1.Model(4, pd)
+    val model2 = NAlgo1.Model(5, pd)
+
+    forAll(evalDataSet.zipWithIndex) { case (evalData, ex) => {
+      val (evalInfo, qpaRDD) = evalData
+      evalInfo shouldBe EvalInfo(1)
+
+      val qpaSeq: Seq[(Query, Prediction, Actual)] = qpaRDD.collect
+      forAll (qpaSeq) { case (q, p, a) => 
+        val Query(qId, qEx, qQx, qSupp) = q
+        val Actual(aId, aEx, aQx) = a
+        qId shouldBe aId
+        qEx shouldBe ex
+        aEx shouldBe ex
+        qQx shouldBe aQx
+        qSupp shouldBe false
+
+        inside (p) { case Prediction(pId, pQ, pModels, pPs) => {
+          pId shouldBe 10
+          pQ shouldBe q
+          pModels shouldBe None
+          pPs should have size 3
+          // queries inside prediction should have supp set to true, since it
+          // represents what the algorithms see.
+          val qSupp = q.copy(supp = true)
+          pPs shouldBe Seq(
+            Prediction(id = 3, q = qSupp, models = Some(model0)),
+            Prediction(id = 4, q = qSupp, models = Some(model1)),
+            Prediction(id = 5, q = qSupp, models = Some(model2))
+          )
+        }}
+      }
+    }}
+  }
+  
+  test("Local DS/P/A/S") {
+    val en = 2
+    val qn = 5
+
+    val evalDataSet: Seq[(EvalInfo, RDD[(Query, Prediction, Actual)])] = 
+    Engine.eval(
+      sc,
+      new LDataSource0(id = 1, en = en, qn = qn),
+      new LPreparator0(id = 2),
+      Seq(
+        new LAlgo0(id = 3), 
+        new LAlgo1(id = 4),
+        new LAlgo1(id = 5)),
+      new LServing0(id = 10))
+
+    val pd = ProcessedData(2, TrainingData(1))
+    val model0 = LAlgo0.Model(3, pd)
+    val model1 = LAlgo1.Model(4, pd)
+    val model2 = LAlgo1.Model(5, pd)
+
+    forAll(evalDataSet.zipWithIndex) { case (evalData, ex) => {
+      val (evalInfo, qpaRDD) = evalData
+      evalInfo shouldBe EvalInfo(1)
+
+      val qpaSeq: Seq[(Query, Prediction, Actual)] = qpaRDD.collect
+      forAll (qpaSeq) { case (q, p, a) => 
+        val Query(qId, qEx, qQx, _) = q
+        val Actual(aId, aEx, aQx) = a
+        qId shouldBe aId
+        qEx shouldBe ex
+        aEx shouldBe ex
+        qQx shouldBe aQx
+
+        inside (p) { case Prediction(pId, pQ, pModels, pPs) => {
+          pId shouldBe 10
+          pQ shouldBe q
+          pModels shouldBe None
+          pPs should have size 3
+          pPs shouldBe Seq(
+            Prediction(id = 3, q = q, models = Some(model0)),
+            Prediction(id = 4, q = q, models = Some(model1)),
+            Prediction(id = 5, q = q, models = Some(model2))
+          )
+        }}
+      }
+
+    }}
+
+  }
+}
+
+

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/core/src/test/scala/org/apache/predictionio/controller/EvaluationTest.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/predictionio/controller/EvaluationTest.scala b/core/src/test/scala/org/apache/predictionio/controller/EvaluationTest.scala
new file mode 100644
index 0000000..86fe68c
--- /dev/null
+++ b/core/src/test/scala/org/apache/predictionio/controller/EvaluationTest.scala
@@ -0,0 +1,46 @@
+package org.apache.predictionio.controller
+
+import org.apache.predictionio.workflow.SharedSparkContext
+
+import org.scalatest.FunSuite
+import org.scalatest.Inside
+import org.scalatest.Matchers._
+
+import org.apache.spark.SparkContext
+import org.apache.spark.rdd.RDD
+
+object EvaluationSuite {
+  import org.apache.predictionio.controller.TestEvaluator._
+
+  class Metric0 extends Metric[EvalInfo, Query, Prediction, Actual, Int] {
+    def calculate(
+      sc: SparkContext,
+      evalDataSet: Seq[(EvalInfo, RDD[(Query, Prediction, Actual)])]): Int = 1
+  }
+
+  object Evaluation0 extends Evaluation {
+    engineMetric = (new FakeEngine(1, 1, 1), new Metric0())
+  }
+}
+
+
+class EvaluationSuite
+extends FunSuite with Inside with SharedSparkContext {
+  import org.apache.predictionio.controller.EvaluationSuite._
+
+  test("Evaluation makes MetricEvaluator") {
+    // MetricEvaluator is typed [EvalInfo, Query, Prediction, Actual, Int],
+    // but that type information is erased on the JVM, so the ScalaTest docs
+    // recommend matching with wildcards.
+    Evaluation0.evaluator shouldBe a [MetricEvaluator[_, _, _, _, _]]
+  }
+
+  test("Load from class path") {
+    val r = org.apache.predictionio.workflow.WorkflowUtils.getEvaluation(
+      "org.apache.predictionio.controller.EvaluationSuite.Evaluation0",
+      getClass.getClassLoader)
+
+    r._2 shouldBe EvaluationSuite.Evaluation0
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/core/src/test/scala/org/apache/predictionio/controller/EvaluatorTest.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/predictionio/controller/EvaluatorTest.scala b/core/src/test/scala/org/apache/predictionio/controller/EvaluatorTest.scala
new file mode 100644
index 0000000..c2668ac
--- /dev/null
+++ b/core/src/test/scala/org/apache/predictionio/controller/EvaluatorTest.scala
@@ -0,0 +1,93 @@
+package org.apache.predictionio.controller
+
+import org.apache.predictionio.core._
+import org.apache.predictionio.workflow.WorkflowParams
+
+import org.apache.spark.SparkContext
+import org.apache.spark.rdd.RDD
+
+object TestEvaluator {
+  case class EvalInfo(id: Int, ex: Int)
+  case class Query(id: Int, ex: Int, qx: Int)
+  case class Prediction(id: Int, ex: Int, qx: Int)
+  case class Actual(id: Int, ex: Int, qx: Int)
+
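+  // Engine stub: train is a no-op, and eval produces `en` evaluation sets of
+  // `qn` (Query, Prediction, Actual) tuples each.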
+  class FakeEngine(val id: Int, val en: Int, val qn: Int)
+  extends BaseEngine[EvalInfo, Query, Prediction, Actual] {
+    def train(
+      sc: SparkContext, 
+      engineParams: EngineParams,
+      instanceId: String = "",
+      params: WorkflowParams = WorkflowParams()
+    ): Seq[Any] = {
+      Seq[Any]()
+    }
+
+    def eval(
+      sc: SparkContext, 
+      engineParams: EngineParams, 
+      params: WorkflowParams)
+    : Seq[(EvalInfo, RDD[(Query, Prediction, Actual)])] = {
+      (0 until en).map { ex => {
+        val qpas = (0 until qn).map { qx => {
+          (Query(id, ex, qx), Prediction(id, ex, qx), Actual(id, ex, qx))
+        }}
+  
+        (EvalInfo(id = id, ex = ex), sc.parallelize(qpas))
+      }}
+    }
+  
+  }
+
+  /*
+  class Evaluator0 extends Evaluator[EvalInfo, Query, Prediction, Actual,
+      (Query, Prediction, Actual), 
+      (EvalInfo, Seq[(Query, Prediction, Actual)]),
+      Seq[(EvalInfo, (EvalInfo, Seq[(Query, Prediction, Actual)]))]
+      ] {
+
+    def evaluateUnit(q: Query, p: Prediction, a: Actual)
+    : (Query, Prediction, Actual) = (q, p, a)
+
+    def evaluateSet(
+        evalInfo: EvalInfo, 
+        eus: Seq[(Query, Prediction, Actual)])
+    : (EvalInfo, Seq[(Query, Prediction, Actual)]) = (evalInfo, eus)
+
+    def evaluateAll(
+      input: Seq[(EvalInfo, (EvalInfo, Seq[(Query, Prediction, Actual)]))]) 
+    = input
+  }
+  */
+
+}
+
+/*
+class EvaluatorSuite
+extends FunSuite with Inside with SharedSparkContext {
+  import org.apache.predictionio.controller.TestEvaluator._
+  @transient lazy val logger = Logger[this.type] 
+
+  test("Evaluator.evaluate") {
+    val engine = new FakeEngine(1, 3, 10)
+    val evaluator = new Evaluator0()
+  
+    val evalDataSet = engine.eval(sc, null.asInstanceOf[EngineParams])
+    val er: Seq[(EvalInfo, (EvalInfo, Seq[(Query, Prediction, Actual)]))] =
+      evaluator.evaluateBase(sc, evalDataSet)
+
+    evalDataSet.zip(er).map { case (input, output) => {
+      val (inputEvalInfo, inputQpaRDD) = input
+      val (outputEvalInfo, (outputEvalInfo2, outputQpaSeq)) = output
+      
+      inputEvalInfo shouldBe outputEvalInfo
+      inputEvalInfo shouldBe outputEvalInfo2
+      
+      val inputQpaSeq: Array[(Query, Prediction, Actual)] = inputQpaRDD.collect
+
+      inputQpaSeq.size should be (outputQpaSeq.size)
+      // TODO. match inputQpa and outputQpa content.
+    }}
+  }
+}
+*/

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/core/src/test/scala/org/apache/predictionio/controller/FastEvalEngineTest.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/predictionio/controller/FastEvalEngineTest.scala b/core/src/test/scala/org/apache/predictionio/controller/FastEvalEngineTest.scala
new file mode 100644
index 0000000..a4dc42f
--- /dev/null
+++ b/core/src/test/scala/org/apache/predictionio/controller/FastEvalEngineTest.scala
@@ -0,0 +1,181 @@
+package org.apache.predictionio.controller
+
+import org.apache.predictionio.workflow.WorkflowParams
+import org.scalatest.FunSuite
+import org.scalatest.Inside
+import org.scalatest.Matchers._
+import org.scalatest.Inspectors._
+
+import org.apache.predictionio.workflow.SharedSparkContext
+
+class FastEngineSuite
+extends FunSuite with Inside with SharedSparkContext {
+  import org.apache.predictionio.controller.Engine0._
+  
+  test("Single Evaluation") {
+    val engine = new FastEvalEngine(
+      Map("" -> classOf[PDataSource2]),
+      Map("" -> classOf[PPreparator1]),
+      Map(
+        "PAlgo2" -> classOf[PAlgo2],
+        "PAlgo3" -> classOf[PAlgo3]
+      ),
+      Map("" -> classOf[LServing1]))
+
+    val qn = 10
+    val en = 3
+
+    val engineParams = EngineParams(
+      dataSourceParams = PDataSource2.Params(id = 0, en = en, qn = qn),
+      preparatorParams = PPreparator1.Params(1),
+      algorithmParamsList = Seq(
+        ("PAlgo2", PAlgo2.Params(20)),
+        ("PAlgo2", PAlgo2.Params(21)),
+        ("PAlgo3", PAlgo3.Params(22))
+      ),
+      servingParams = LServing1.Params(3))
+
+    val algoCount = engineParams.algorithmParamsList.size
+    val pd = ProcessedData(1, TrainingData(0))
+    val model0 = PAlgo2.Model(20, pd)
+    val model1 = PAlgo2.Model(21, pd)
+    val model2 = PAlgo3.Model(22, pd)
+
+    val evalDataSet = engine.eval(sc, engineParams, WorkflowParams())
+
+    evalDataSet should have size en
+
+    forAll(evalDataSet.zipWithIndex) { case (evalData, ex) => {
+      val (evalInfo, qpaRDD) = evalData
+      evalInfo shouldBe EvalInfo(0)
+
+      val qpaSeq: Seq[(Query, Prediction, Actual)] = qpaRDD.collect
+
+      qpaSeq should have size qn
+
+      forAll (qpaSeq) { case (q, p, a) => 
+        val Query(qId, qEx, qQx, _) = q
+        val Actual(aId, aEx, aQx) = a
+        qId shouldBe aId
+        qEx shouldBe ex
+        aEx shouldBe ex
+        qQx shouldBe aQx
+
+        inside (p) { case Prediction(pId, pQ, pModels, pPs) => {
+          pId shouldBe 3
+          pQ shouldBe q
+          pModels shouldBe None
+          pPs should have size algoCount
+          pPs shouldBe Seq(
+            Prediction(id = 20, q = q, models = Some(model0)),
+            Prediction(id = 21, q = q, models = Some(model1)),
+            Prediction(id = 22, q = q, models = Some(model2))
+          )
+        }}
+      }
+    }}
+  }
+
+  test("Batch Evaluation") {
+    val engine = new FastEvalEngine(
+      Map("" -> classOf[PDataSource2]),
+      Map("" -> classOf[PPreparator1]),
+      Map("" -> classOf[PAlgo2]),
+      Map("" -> classOf[LServing1]))
+
+    val qn = 10
+    val en = 3
+
+    val baseEngineParams = EngineParams(
+      dataSourceParams = PDataSource2.Params(id = 0, en = en, qn = qn),
+      preparatorParams = PPreparator1.Params(1),
+      algorithmParamsList = Seq(("", PAlgo2.Params(2))),
+      servingParams = LServing1.Params(3))
+
+    val ep0 = baseEngineParams
+    val ep1 = baseEngineParams.copy(
+      algorithmParamsList = Seq(("", PAlgo2.Params(2))))
+    val ep2 = baseEngineParams.copy(
+      algorithmParamsList = Seq(("", PAlgo2.Params(20))))
+
+    val engineEvalDataSet = engine.batchEval(
+      sc,
+      Seq(ep0, ep1, ep2),
+      WorkflowParams())
+
+    val evalDataSet0 = engineEvalDataSet(0)._2
+    val evalDataSet1 = engineEvalDataSet(1)._2
+    val evalDataSet2 = engineEvalDataSet(2)._2
+
+    evalDataSet0 shouldBe evalDataSet1
+    evalDataSet0 should not be evalDataSet2
+    evalDataSet1 should not be evalDataSet2
+
+    // evalDataSet0._1 should be theSameInstanceAs evalDataSet1._1
+    // When things are cached correctly, evalDataSet0 and 1 should share the
+    // same EI
+    evalDataSet0.zip(evalDataSet1).foreach { case (e0, e1) => {
+      e0._1 should be theSameInstanceAs e1._1
+      e0._2 should be theSameInstanceAs e1._2
+    }}
+   
+    // Set1 and set2 likewise share the same EI instance; their QPA RDDs,
+    // however, should differ.
+    evalDataSet1.zip(evalDataSet2).foreach { case (e1, e2) => {
+      e1._1 should be theSameInstanceAs e2._1
+      val e1Qpa = e1._2
+      val e2Qpa = e2._2
+      e1Qpa should not be theSameInstanceAs (e2Qpa)
+    }}
+  }
+  
+  test("Not cached when isEqual not implemented") {
+    // PDataSource4.Params is a plain class, not a case class, so equality
+    // and hashing fall back to reference identity and results are not cached.
+    val engine = new FastEvalEngine(
+      Map("" -> classOf[PDataSource4]),
+      Map("" -> classOf[PPreparator1]),
+      Map("" -> classOf[PAlgo2]),
+      Map("" -> classOf[LServing1]))
+
+    val qn = 10
+    val en = 3
+
+    val baseEngineParams = EngineParams(
+      dataSourceParams = new PDataSource4.Params(id = 0, en = en, qn = qn),
+      preparatorParams = PPreparator1.Params(1),
+      algorithmParamsList = Seq(("", PAlgo2.Params(2))),
+      servingParams = LServing1.Params(3))
+
+    val ep0 = baseEngineParams
+    val ep1 = baseEngineParams.copy(
+      algorithmParamsList = Seq(("", PAlgo2.Params(3))))
+    // ep2.dataSource is different from ep0.
+    val ep2 = baseEngineParams.copy(
+      dataSourceParams = ("", new PDataSource4.Params(id = 0, en = en, qn = qn)),
+      algorithmParamsList = Seq(("", PAlgo2.Params(3))))
+
+    val engineEvalDataSet = engine.batchEval(
+      sc,
+      Seq(ep0, ep1, ep2),
+      WorkflowParams())
+
+    val evalDataSet0 = engineEvalDataSet(0)._2
+    val evalDataSet1 = engineEvalDataSet(1)._2
+    val evalDataSet2 = engineEvalDataSet(2)._2
+
+    evalDataSet0 should not be evalDataSet1
+    evalDataSet0 should not be evalDataSet2
+    evalDataSet1 should not be evalDataSet2
+
+    // Set0 should have the same EI as Set1, since their dsp is the same
+    // instance.
+    evalDataSet0.zip(evalDataSet1).foreach { case (e0, e1) => {
+      e0._1 should be theSameInstanceAs (e1._1)
+    }}
+  
+    // Set1 should have a different EI from Set2, since Set2's dsp is a
+    // separate instance.
+    evalDataSet1.zip(evalDataSet2).foreach { case (e1, e2) => {
+      e1._1 should not be theSameInstanceAs (e2._1)
+    }}
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/core/src/test/scala/org/apache/predictionio/controller/MetricEvaluatorTest.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/predictionio/controller/MetricEvaluatorTest.scala b/core/src/test/scala/org/apache/predictionio/controller/MetricEvaluatorTest.scala
new file mode 100644
index 0000000..a7e397a
--- /dev/null
+++ b/core/src/test/scala/org/apache/predictionio/controller/MetricEvaluatorTest.scala
@@ -0,0 +1,52 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package org.apache.predictionio.controller
+
+import org.apache.predictionio.workflow.SharedSparkContext
+import org.apache.predictionio.workflow.WorkflowParams
+import org.scalatest.FunSuite
+
+object MetricEvaluatorSuite {
+  case class Metric0() extends SumMetric[EmptyParams, Int, Int, Int, Int] {
+    def calculate(q: Int, p: Int, a: Int): Int = q
+  }
+
+  object Evaluation0 extends Evaluation {}
+}
+
+class MetricEvaluatorDevSuite extends FunSuite with SharedSparkContext {
+  import org.apache.predictionio.controller.MetricEvaluatorSuite._
+
+  test("a") {
+    val metricEvaluator = MetricEvaluator(
+      Metric0(),
+      Seq(Metric0(), Metric0())
+    )
+ 
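+    // Two candidate EngineParams, each with one eval set; this is a smoke
+    // test that evaluateBase runs over both without throwing.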
+    val engineEvalDataSet = Seq(
+      (EngineParams(), Seq(
+        (EmptyParams(), sc.parallelize(Seq((1,0,0), (2,0,0)))))),
+      (EngineParams(), Seq(
+        (EmptyParams(), sc.parallelize(Seq((1,0,0), (2,0,0)))))))
+
+    val r = metricEvaluator.evaluateBase(
+      sc,
+      Evaluation0,
+      engineEvalDataSet,
+      WorkflowParams())
+
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/core/src/test/scala/org/apache/predictionio/controller/MetricTest.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/predictionio/controller/MetricTest.scala b/core/src/test/scala/org/apache/predictionio/controller/MetricTest.scala
new file mode 100644
index 0000000..67975b1
--- /dev/null
+++ b/core/src/test/scala/org/apache/predictionio/controller/MetricTest.scala
@@ -0,0 +1,143 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package org.apache.predictionio.controller
+
+import org.apache.predictionio.workflow.SharedSparkContext
+
+import grizzled.slf4j.Logger
+import org.scalatest.Matchers._
+import org.scalatest.FunSuite
+import org.scalatest.Inside
+
+object MetricDevSuite {
+  class QIntSumMetric extends SumMetric[EmptyParams, Int, Int, Int, Int] {
+    def calculate(q: Int, p: Int, a: Int): Int = q
+  }
+  
+  class QDoubleSumMetric extends SumMetric[EmptyParams, Int, Int, Int, Double] {
+    def calculate(q: Int, p: Int, a: Int): Double = q.toDouble
+  }
+  
+  class QAverageMetric extends AverageMetric[EmptyParams, Int, Int, Int] {
+    def calculate(q: Int, p: Int, a: Int): Double = q.toDouble
+  }
+  
+  class QOptionAverageMetric extends OptionAverageMetric[EmptyParams, Int, Int, Int] {
+    def calculate(q: Int, p: Int, a: Int): Option[Double] = {
+      if (q < 0) { None } else { Some(q.toDouble) }
+    }
+  }
+  
+  class QStdevMetric extends StdevMetric[EmptyParams, Int, Int, Int] {
+    def calculate(q: Int, p: Int, a: Int): Double = q.toDouble
+  }
+  
+  class QOptionStdevMetric extends OptionStdevMetric[EmptyParams, Int, Int, Int] {
+    def calculate(q: Int, p: Int, a: Int): Option[Double] = {
+      if (q < 0) { None } else { Some(q.toDouble) }
+    }
+  }
+  
+}
+
+class MetricDevSuite
+extends FunSuite with Inside with SharedSparkContext {
+  @transient lazy val logger = Logger[this.type] 
+  
+  test("Average Metric") {
+    val qpaSeq0 = Seq((1, 0, 0), (2, 0, 0), (3, 0, 0))
+    val qpaSeq1 = Seq((4, 0, 0), (5, 0, 0), (6, 0, 0))
+
+    val evalDataSet = Seq(
+      (EmptyParams(), sc.parallelize(qpaSeq0)),
+      (EmptyParams(), sc.parallelize(qpaSeq1)))
+  
+    val m = new MetricDevSuite.QAverageMetric()
+    val result = m.calculate(sc, evalDataSet)
+    
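+    // q values 1..6 across the two eval sets: average = (1+2+...+6)/6 = 21/6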
+    result shouldBe (21.0 / 6)
+  }
+  
+  test("Option Average Metric") {
+    val qpaSeq0 = Seq((1, 0, 0), (2, 0, 0), (3, 0, 0))
+    val qpaSeq1 = Seq((-4, 0, 0), (-5, 0, 0), (6, 0, 0))
+
+    val evalDataSet = Seq(
+      (EmptyParams(), sc.parallelize(qpaSeq0)),
+      (EmptyParams(), sc.parallelize(qpaSeq1)))
+  
+    val m = new MetricDevSuite.QOptionAverageMetric()
+    val result = m.calculate(sc, evalDataSet)
+    
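+    // Negative q values map to None and are excluded: (1+2+3+6)/4 = 12/4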
+    result shouldBe (12.0 / 4)
+  }
+  
+  test("Stdev Metric") {
+    val qpaSeq0 = Seq((1, 0, 0), (1, 0, 0), (1, 0, 0), (1, 0, 0))
+    val qpaSeq1 = Seq((5, 0, 0), (5, 0, 0), (5, 0, 0), (5, 0, 0))
+
+    val evalDataSet = Seq(
+      (EmptyParams(), sc.parallelize(qpaSeq0)),
+      (EmptyParams(), sc.parallelize(qpaSeq1)))
+  
+    val m = new MetricDevSuite.QStdevMetric()
+    val result = m.calculate(sc, evalDataSet)
+    
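+    // Four 1s and four 5s: mean = 3, population stdev = sqrt(32/8) = 2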
+    result shouldBe 2.0
+  }
+  
+  test("Option Stdev Metric") {
+    val qpaSeq0 = Seq((1, 0, 0), (1, 0, 0), (1, 0, 0), (1, 0, 0))
+    val qpaSeq1 = Seq((5, 0, 0), (5, 0, 0), (5, 0, 0), (5, 0, 0), (-5, 0, 0))
+
+    val evalDataSet = Seq(
+      (EmptyParams(), sc.parallelize(qpaSeq0)),
+      (EmptyParams(), sc.parallelize(qpaSeq1)))
+  
+    val m = new MetricDevSuite.QOptionStdevMetric()
+    val result = m.calculate(sc, evalDataSet)
+    
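+    // The negative q is filtered out, leaving the same values as above.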
+    result shouldBe 2.0
+  }
+
+  test("Sum Metric [Int]") {
+    val qpaSeq0 = Seq((1, 0, 0), (2, 0, 0), (3, 0, 0))
+    val qpaSeq1 = Seq((4, 0, 0), (5, 0, 0), (6, 0, 0))
+
+    val evalDataSet = Seq(
+      (EmptyParams(), sc.parallelize(qpaSeq0)),
+      (EmptyParams(), sc.parallelize(qpaSeq1)))
+  
+    val m = new MetricDevSuite.QIntSumMetric()
+    val result = m.calculate(sc, evalDataSet)
+    
+    result shouldBe 21
+  }
+
+  test("Sum Metric [Double]") {
+    val qpaSeq0 = Seq((1, 0, 0), (2, 0, 0), (3, 0, 0))
+    val qpaSeq1 = Seq((4, 0, 0), (5, 0, 0), (6, 0, 0))
+
+    val evalDataSet = Seq(
+      (EmptyParams(), sc.parallelize(qpaSeq0)),
+      (EmptyParams(), sc.parallelize(qpaSeq1)))
+  
+    val m = new MetricDevSuite.QDoubleSumMetric()
+    val result = m.calculate(sc, evalDataSet)
+    
+    result shouldBe 21.0
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/core/src/test/scala/org/apache/predictionio/controller/SampleEngine.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/predictionio/controller/SampleEngine.scala b/core/src/test/scala/org/apache/predictionio/controller/SampleEngine.scala
new file mode 100644
index 0000000..e238e86
--- /dev/null
+++ b/core/src/test/scala/org/apache/predictionio/controller/SampleEngine.scala
@@ -0,0 +1,472 @@
+package org.apache.predictionio.controller
+
+import org.apache.predictionio.controller.{Params => PIOParams}
+import org.apache.predictionio.core._
+
+import grizzled.slf4j.Logger
+import org.apache.predictionio.workflow.WorkflowParams
+import org.apache.spark.SparkContext
+import org.apache.spark.SparkContext._
+import org.apache.spark.rdd.RDD
+
+object Engine0 {
+  @transient lazy val logger = Logger[this.type] 
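+
+  // Fixture naming convention: P* components are parallel (RDD-based), L*
+  // components are local, and N* algorithms are P2L (parallel training,
+  // local model). Numeric suffixes just distinguish variants.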
+
+  case class TrainingData(id: Int, error: Boolean = false) extends SanityCheck {
+    def sanityCheck(): Unit = {
+      Predef.assert(!error, "TrainingData is marked as error")
+    }
+  }
+
+  case class EvalInfo(id: Int)
+  case class ProcessedData(id: Int, td: TrainingData)
+
+  case class Query(id: Int, ex: Int = 0, qx: Int = 0, supp: Boolean = false)
+  case class Actual(id: Int, ex: Int = 0, qx: Int = 0)
+  case class Prediction(
+    id: Int, q: Query, models: Option[Any] = None, 
+    ps: Seq[Prediction] = Seq[Prediction]())
+
+  class PDataSource0(id: Int = 0) 
+  extends PDataSource[TrainingData, EvalInfo, Query, Actual] {
+    def readTraining(sc: SparkContext): TrainingData = {
+      TrainingData(id)
+    }
+  }
+  
+  class PDataSource1(id: Int = 0, en: Int = 0, qn: Int = 0)
+  extends PDataSource[TrainingData, EvalInfo, Query, Actual] {
+    def readTraining(sc: SparkContext): TrainingData = TrainingData(id)
+    
+    override
+    def readEval(sc: SparkContext)
+    : Seq[(TrainingData, EvalInfo, RDD[(Query, Actual)])] = {
+      (0 until en).map { ex => {
+        val qaSeq: Seq[(Query, Actual)] = (0 until qn).map { qx => {
+          (Query(id, ex=ex, qx=qx), Actual(id, ex, qx))
+        }}
+        (TrainingData(id), EvalInfo(id), sc.parallelize(qaSeq))
+      }}
+    }
+  }
+
+  object PDataSource2 {
+    case class Params(id: Int, en: Int = 0, qn: Int = 0) extends PIOParams
+  }
+  
+  class PDataSource2(params: PDataSource2.Params)
+  extends PDataSource[TrainingData, EvalInfo, Query, Actual] {
+    val id = params.id
+    def readTraining(sc: SparkContext): TrainingData = TrainingData(id)
+    
+    override
+    def readEval(sc: SparkContext)
+    : Seq[(TrainingData, EvalInfo, RDD[(Query, Actual)])] = {
+      (0 until params.en).map { ex => {
+        val qaSeq: Seq[(Query, Actual)] = (0 until params.qn).map { qx => {
+          (Query(id, ex=ex, qx=qx), Actual(id, ex, qx))
+        }}
+        (TrainingData(id), EvalInfo(id), sc.parallelize(qaSeq))
+      }}
+    }
+  }
+  
+  class PDataSource3(id: Int = 0, error: Boolean = false) 
+  extends PDataSource[TrainingData, EvalInfo, Query, Actual] {
+    def readTraining(sc: SparkContext): TrainingData = {
+      TrainingData(id = id, error = error)
+    }
+  }
+  
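+  // Params is deliberately a plain class (not a case class), so it lacks
+  // structural equality; FastEvalEngineSuite uses it to check that results
+  // are not cached when params do not compare equal.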
+  object PDataSource4 {
+    class Params(val id: Int, val en: Int = 0, val qn: Int = 0) 
+      extends PIOParams
+  }
+  
+  class PDataSource4(params: PDataSource4.Params)
+  extends PDataSource[TrainingData, EvalInfo, Query, Actual] {
+    val id = params.id
+    def readTraining(sc: SparkContext): TrainingData = TrainingData(id)
+    
+    override
+    def readEval(sc: SparkContext)
+    : Seq[(TrainingData, EvalInfo, RDD[(Query, Actual)])] = {
+      (0 until params.en).map { ex => {
+        val qaSeq: Seq[(Query, Actual)] = (0 until params.qn).map { qx => {
+          (Query(id, ex=ex, qx=qx), Actual(id, ex, qx))
+        }}
+        (TrainingData(id), EvalInfo(id), sc.parallelize(qaSeq))
+      }}
+    }
+  }
+  
+  class LDataSource0(id: Int, en: Int = 0, qn: Int = 0) 
+    extends LDataSource[TrainingData, EvalInfo, Query, Actual] {
+    def readTraining(): TrainingData = TrainingData(id)
+   
+    override
+    def readEval()
+    : Seq[(TrainingData, EvalInfo, Seq[(Query, Actual)])] = {
+      (0 until en).map { ex => {
+        val qaSeq: Seq[(Query, Actual)] = (0 until qn).map { qx => {
+          (Query(id, ex=ex, qx=qx), Actual(id, ex, qx))
+        }}
+        (TrainingData(id), EvalInfo(id), qaSeq)
+      }}
+    }
+  }
+  
+  object LDataSource1 {
+    case class Params(id: Int, en: Int = 0, qn: Int = 0) extends PIOParams
+  }
+  
+  class LDataSource1(params: LDataSource1.Params)
+  extends LDataSource[TrainingData, EvalInfo, Query, Actual] {
+    val id = params.id
+    def readTraining(): TrainingData = TrainingData(id)
+    
+    override
+    def readEval(): Seq[(TrainingData, EvalInfo, Seq[(Query, Actual)])] = {
+      (0 until params.en).map { ex => {
+        val qaSeq: Seq[(Query, Actual)] = (0 until params.qn).map { qx => {
+          (Query(id, ex=ex, qx=qx), Actual(id, ex, qx))
+        }}
+        (TrainingData(id), EvalInfo(id), qaSeq)
+      }}
+    }
+  }
+  
+  class PPreparator0(id: Int = 0)
+  extends PPreparator[TrainingData, ProcessedData] {
+    def prepare(sc: SparkContext, td: TrainingData): ProcessedData = {
+      ProcessedData(id, td)
+    }
+  }
+
+  object PPreparator1 {
+    case class Params(id: Int = 0) extends PIOParams
+  }
+
+  class PPreparator1(params: PPreparator1.Params)
+  extends PPreparator[TrainingData, ProcessedData] {
+    def prepare(sc: SparkContext, td: TrainingData): ProcessedData = {
+      ProcessedData(params.id, td)
+    }
+  }
+
+  class LPreparator0(id: Int = 0) 
+  extends LPreparator[TrainingData, ProcessedData] {
+    def prepare(td: TrainingData): ProcessedData = {
+      ProcessedData(id, td)
+    }
+  }
+  
+  object LPreparator1 {
+    case class Params(id: Int = 0) extends PIOParams
+  }
+
+  class LPreparator1(params: LPreparator1.Params)
+  extends LPreparator[TrainingData, ProcessedData] {
+    def prepare(td: TrainingData): ProcessedData = {
+      ProcessedData(params.id, td)
+    }
+  }
+
+  object PAlgo0 {
+    case class Model(id: Int, pd: ProcessedData)
+  }
+
+  class PAlgo0(id: Int = 0)
+  extends PAlgorithm[ProcessedData, PAlgo0.Model, Query, Prediction] {
+    def train(sc: SparkContext, pd: ProcessedData)
+    : PAlgo0.Model = PAlgo0.Model(id, pd)
+
+    override
+    def batchPredict(m: PAlgo0.Model, qs: RDD[(Long, Query)])
+    : RDD[(Long, Prediction)] = {
+      qs.mapValues(q => Prediction(id, q, Some(m)))
+    }
+    
+    def predict(m: PAlgo0.Model, q: Query): Prediction = {
+      Prediction(id, q, Some(m))
+    }
+  }
+
+  object PAlgo1 {
+    case class Model(id: Int, pd: ProcessedData)
+  }
+
+  class PAlgo1(id: Int = 0)
+  extends PAlgorithm[ProcessedData, PAlgo1.Model, Query, Prediction] {
+    def train(sc: SparkContext, pd: ProcessedData)
+    : PAlgo1.Model = PAlgo1.Model(id, pd)
+
+    override
+    def batchPredict(m: PAlgo1.Model, qs: RDD[(Long, Query)])
+    : RDD[(Long, Prediction)] = {
+      qs.mapValues(q => Prediction(id, q, Some(m)))
+    }
+
+    def predict(m: PAlgo1.Model, q: Query): Prediction = {
+      Prediction(id, q, Some(m))
+    }
+  }
+  
+  object PAlgo2 {
+    case class Model(id: Int, pd: ProcessedData)
+    case class Params(id: Int) extends PIOParams
+  }
+
+  class PAlgo2(params: PAlgo2.Params)
+  extends PAlgorithm[ProcessedData, PAlgo2.Model, Query, Prediction] {
+    val id = params.id
+
+    def train(sc: SparkContext, pd: ProcessedData)
+    : PAlgo2.Model = PAlgo2.Model(id, pd)
+
+    override
+    def batchPredict(m: PAlgo2.Model, qs: RDD[(Long, Query)])
+    : RDD[(Long, Prediction)] = {
+      qs.mapValues(q => Prediction(id, q, Some(m)))
+    }
+
+    def predict(m: PAlgo2.Model, q: Query): Prediction = {
+      Prediction(id, q, Some(m))
+    }
+  }
+  
+  object PAlgo3 {
+    case class Model(id: Int, pd: ProcessedData)
+    extends LocalFileSystemPersistentModel[Params]
+    
+    object Model extends LocalFileSystemPersistentModelLoader[Params, Model]
+
+    case class Params(id: Int) extends PIOParams
+  }
+
+  class PAlgo3(params: PAlgo3.Params)
+  extends PAlgorithm[ProcessedData, PAlgo3.Model, Query, Prediction] {
+    val id = params.id
+
+    def train(sc: SparkContext, pd: ProcessedData)
+    : PAlgo3.Model = PAlgo3.Model(id, pd)
+
+    override
+    def batchPredict(m: PAlgo3.Model, qs: RDD[(Long, Query)])
+    : RDD[(Long, Prediction)] = {
+      qs.mapValues(q => Prediction(id, q, Some(m)))
+    }
+
+    def predict(m: PAlgo3.Model, q: Query): Prediction = {
+      Prediction(id, q, Some(m))
+    }
+  }
+  
+  object LAlgo0 {
+    case class Model(id: Int, pd: ProcessedData)
+  }
+
+  class LAlgo0(id: Int = 0) 
+  extends LAlgorithm[ProcessedData, LAlgo0.Model, Query, Prediction] {
+    def train(pd: ProcessedData): LAlgo0.Model = LAlgo0.Model(id, pd)
+
+    def predict(m: LAlgo0.Model, q: Query): Prediction = {
+      Prediction(id, q, Some(m))
+    }
+  }
+  
+  object LAlgo1 {
+    case class Model(id: Int, pd: ProcessedData)
+  }
+
+  class LAlgo1(id: Int = 0) 
+  extends LAlgorithm[ProcessedData, LAlgo1.Model, Query, Prediction] {
+    def train(pd: ProcessedData): LAlgo1.Model = LAlgo1.Model(id, pd)
+    
+    def predict(m: LAlgo1.Model, q: Query): Prediction = {
+      Prediction(id, q, Some(m))
+    }
+  }
+  
+  object LAlgo2 {
+    case class Params(id: Int) extends PIOParams
+
+    case class Model(id: Int, pd: ProcessedData)
+    extends LocalFileSystemPersistentModel[EmptyParams]
+    
+    object Model extends LocalFileSystemPersistentModelLoader[EmptyParams, Model]
+  }
+
+  class LAlgo2(params: LAlgo2.Params) 
+  extends LAlgorithm[ProcessedData, LAlgo2.Model, Query, Prediction] {
+    def train(pd: ProcessedData): LAlgo2.Model = LAlgo2.Model(params.id, pd)
+    
+    def predict(m: LAlgo2.Model, q: Query): Prediction = {
+      Prediction(params.id, q, Some(m))
+    }
+  }
+
+  object LAlgo3 {
+    case class Params(id: Int) extends PIOParams
+
+    case class Model(id: Int, pd: ProcessedData)
+  }
+
+  class LAlgo3(params: LAlgo3.Params) 
+  extends LAlgorithm[ProcessedData, LAlgo3.Model, Query, Prediction] {
+    def train(pd: ProcessedData): LAlgo3.Model = LAlgo3.Model(params.id, pd)
+    
+    def predict(m: LAlgo3.Model, q: Query): Prediction = {
+      Prediction(params.id, q, Some(m))
+    }
+  }
+
+  // N : P2L, so named because the letter N sits between P and L.
+  object NAlgo0 {
+    case class Model(id: Int, pd: ProcessedData)
+  }
+
+  class NAlgo0 (id: Int = 0)
+  extends P2LAlgorithm[ProcessedData, NAlgo0.Model, Query, Prediction] {
+    def train(sc: SparkContext, pd: ProcessedData)
+    : NAlgo0.Model = NAlgo0.Model(id, pd)
+  
+    def predict(m: NAlgo0.Model, q: Query): Prediction = {
+      Prediction(id, q, Some(m))
+    }
+  }
+
+  object NAlgo1 {
+    case class Model(id: Int, pd: ProcessedData)
+  }
+
+  class NAlgo1 (id: Int = 0)
+  extends P2LAlgorithm[ProcessedData, NAlgo1.Model, Query, Prediction] {
+    def train(sc: SparkContext, pd: ProcessedData)
+    : NAlgo1.Model = NAlgo1.Model(id, pd)
+   
+    def predict(m: NAlgo1.Model, q: Query): Prediction = {
+      Prediction(id, q, Some(m))
+    }
+  }
+  
+  object NAlgo2 {
+    case class Params(id: Int) extends PIOParams
+
+    case class Model(id: Int, pd: ProcessedData)
+    extends LocalFileSystemPersistentModel[EmptyParams]
+    
+    object Model extends LocalFileSystemPersistentModelLoader[EmptyParams, Model]
+  }
+
+  class NAlgo2(params: NAlgo2.Params) 
+  extends P2LAlgorithm[ProcessedData, NAlgo2.Model, Query, Prediction] {
+    def train(sc: SparkContext, pd: ProcessedData)
+    : NAlgo2.Model = NAlgo2.Model(params.id, pd)
+    
+    def predict(m: NAlgo2.Model, q: Query): Prediction = {
+      Prediction(params.id, q, Some(m))
+    }
+  }
+
+  object NAlgo3 {
+    case class Params(id: Int) extends PIOParams
+
+    case class Model(id: Int, pd: ProcessedData)
+  }
+
+  class NAlgo3(params: NAlgo3.Params) 
+  extends P2LAlgorithm[ProcessedData, NAlgo3.Model, Query, Prediction] {
+    def train(sc: SparkContext, pd: ProcessedData)
+    : NAlgo3.Model = NAlgo3.Model(params.id, pd)
+    
+    def predict(m: NAlgo3.Model, q: Query): Prediction = {
+      Prediction(params.id, q, Some(m))
+    }
+  }
+
+  class LServing0(id: Int = 0) extends LServing[Query, Prediction] {
+    def serve(q: Query, ps: Seq[Prediction]): Prediction = {
+      Prediction(id, q, ps=ps)
+    }
+  }
+
+  object LServing1 {
+    case class Params(id: Int) extends PIOParams
+  }
+  
+  class LServing1(params: LServing1.Params) extends LServing[Query, Prediction] {
+    def serve(q: Query, ps: Seq[Prediction]): Prediction = {
+      Prediction(params.id, q, ps=ps)
+    }
+  }
+  
+  class LServing2(id: Int) extends LServing[Query, Prediction] {
+    override
+    def supplement(q: Query): Query = q.copy(supp = true)
+
+    def serve(q: Query, ps: Seq[Prediction]): Prediction = {
+      Prediction(id, q, ps=ps)
+    }
+  }
+}
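
The serving layer above collapses one Prediction per deployed algorithm into a single response, and LServing2 additionally overrides supplement to tag the query before prediction. The call pattern, sketched with a query q and per-algorithm predictions p0 and p1 assumed in scope:

    val combined = new LServing0(id = 99).serve(q, Seq(p0, p1))
    // combined == Prediction(99, q, ps = Seq(p0, p1))
    val tagged = new LServing2(id = 99).supplement(q)  // q.copy(supp = true)
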
+
+object Engine1 {
+  case class EvalInfo(v: Double) extends Serializable
+  case class Query() extends Serializable
+  case class Prediction() extends Serializable
+  case class Actual() extends Serializable
+  case class DSP(v: Double) extends Params
+}
+
+class Engine1 
+extends BaseEngine[
+  Engine1.EvalInfo, Engine1.Query, Engine1.Prediction, Engine1.Actual] {
+
+  def train(
+    sc: SparkContext, 
+    engineParams: EngineParams,
+    engineInstanceId: String = "",
+    params: WorkflowParams = WorkflowParams()): Seq[Any] = Seq[Any]()
+
+  def eval(sc: SparkContext, engineParams: EngineParams, params: WorkflowParams)
+  : Seq[(Engine1.EvalInfo, 
+      RDD[(Engine1.Query, Engine1.Prediction, Engine1.Actual)])] = {
+    val dsp = engineParams.dataSourceParams._2.asInstanceOf[Engine1.DSP]
+    Seq(
+      (Engine1.EvalInfo(dsp.v),
+        sc.emptyRDD[(Engine1.Query, Engine1.Prediction, Engine1.Actual)]))
+  }
+}
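
Engine1.eval pulls its data source parameters out of the (name, params) pair inside EngineParams and echoes the configured value back as EvalInfo, giving the evaluation tests a known answer to assert on. A rough sketch, assuming sc is in scope and that EngineParams' constructor accepts a dataSourceParams pair, as its use above implies:

    val ep = new EngineParams(dataSourceParams = ("", Engine1.DSP(0.3)))
    val results = new Engine1().eval(sc, ep, WorkflowParams())
    // results.head._1 == Engine1.EvalInfo(0.3)
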
+
+
+class Metric0
+extends Metric[Engine1.EvalInfo, Engine1.Query, Engine1.Prediction,
+Engine1.Actual, Double] {
+  override def header: String = "Metric0"
+
+  def calculate(
+    sc: SparkContext, 
+    evalDataSet: Seq[(Engine1.EvalInfo, RDD[(Engine1.Query, Engine1.Prediction,
+    Engine1.Actual)])]): Double = {
+    evalDataSet.head._1.v
+  }
+}
+
+object Metric1 {
+  case class Result(c: Int, v: Double) extends Serializable
+}
+
+class Metric1
+extends Metric[Engine1.EvalInfo, Engine1.Query, Engine1.Prediction,
+Engine1.Actual, Metric1.Result]()(Ordering.by[Metric1.Result, Double](_.v)) {
+  override def header: String = "Metric1"
+
+  def calculate(
+    sc: SparkContext, 
+    evalDataSet: Seq[(Engine1.EvalInfo, RDD[(Engine1.Query, Engine1.Prediction,
+    Engine1.Actual)])]): Metric1.Result = {
+    Metric1.Result(0, evalDataSet.head._1.v)
+  }
+}
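
Metric results are ranked during evaluation through an Ordering. Metric0 returns a Double, which has one implicitly; Metric1 returns a case class, so it passes an explicit Ordering keyed on the v field. That ordering in isolation:

    val ord = Ordering.by[Metric1.Result, Double](_.v)
    ord.lt(Metric1.Result(0, 0.2), Metric1.Result(0, 0.8))  // true: compared by v only
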
+

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/core/src/test/scala/org/apache/predictionio/workflow/BaseTest.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/predictionio/workflow/BaseTest.scala b/core/src/test/scala/org/apache/predictionio/workflow/BaseTest.scala
new file mode 100644
index 0000000..df36620
--- /dev/null
+++ b/core/src/test/scala/org/apache/predictionio/workflow/BaseTest.scala
@@ -0,0 +1,75 @@
+//package org.apache.spark
+package org.apache.predictionio.workflow
+
+import _root_.io.netty.util.internal.logging.{Slf4JLoggerFactory, InternalLoggerFactory}
+import org.scalatest.BeforeAndAfterAll
+import org.scalatest.BeforeAndAfterEach
+import org.scalatest.Suite
+import org.apache.spark.SparkContext
+import org.apache.spark.SparkConf
+
+
+/** Manages a local `sc` {@link SparkContext} variable, correctly stopping it
+  * after each test. */
+trait LocalSparkContext 
+extends BeforeAndAfterEach with BeforeAndAfterAll { self: Suite =>
+
+  @transient var sc: SparkContext = _
+
+  override def beforeAll() {
+    InternalLoggerFactory.setDefaultFactory(new Slf4JLoggerFactory())
+    super.beforeAll()
+  }
+
+  override def afterEach() {
+    resetSparkContext()
+    super.afterEach()
+  }
+
+  def resetSparkContext() = {
+    LocalSparkContext.stop(sc)
+    sc = null
+  }
+
+}
+
+object LocalSparkContext {
+  def stop(sc: SparkContext) {
+    if (sc != null) {
+      sc.stop()
+    }
+    // To avoid Akka rebinding to the same port, since it doesn't unbind immediately on shutdown
+    System.clearProperty("spark.driver.port")
+  }
+
+  /** Runs `f` by passing in `sc` and ensures that `sc` is stopped. */
+  def withSpark[T](sc: SparkContext)(f: SparkContext => T) = {
+    try {
+      f(sc)
+    } finally {
+      stop(sc)
+    }
+  }
+
+}
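
withSpark is a loan pattern: the context is handed to the test body and stopped in the finally block regardless of outcome. A quick usage sketch (the local master string and app name here are illustrative):

    val total = LocalSparkContext.withSpark(new SparkContext("local", "withSpark-demo")) { sc =>
      sc.parallelize(1 to 10).sum()
    }
    // total == 55.0, and the context is stopped even if the body had thrown
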
+/** Shares a local `SparkContext` between all tests in a suite and closes it at the end */
+trait SharedSparkContext extends BeforeAndAfterAll { self: Suite =>
+
+  @transient private var _sc: SparkContext = _
+
+  def sc: SparkContext = _sc
+
+  var conf = new SparkConf(false)
+
+  override def beforeAll() {
+    _sc = new SparkContext("local[4]", "test", conf)
+    super.beforeAll()
+  }
+
+  override def afterAll() {
+    LocalSparkContext.stop(_sc)
+    _sc = null
+    super.afterAll()
+  }
+}
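
A hypothetical suite showing the intended mix-in usage; every test method shares the one context built in beforeAll:

    class WordCountSpec extends org.scalatest.FunSuite with SharedSparkContext {
      test("counts elements") {
        assert(sc.parallelize(Seq("a", "b", "c")).count() === 3)
      }
    }
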
+

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/core/src/test/scala/org/apache/predictionio/workflow/EngineWorkflowTest.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/predictionio/workflow/EngineWorkflowTest.scala b/core/src/test/scala/org/apache/predictionio/workflow/EngineWorkflowTest.scala
new file mode 100644
index 0000000..e69de29



[06/34] incubator-predictionio git commit: rename all except examples

Posted by do...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/data/src/test/scala/org/apache/predictionio/data/storage/PEventsSpec.scala
----------------------------------------------------------------------
diff --git a/data/src/test/scala/org/apache/predictionio/data/storage/PEventsSpec.scala b/data/src/test/scala/org/apache/predictionio/data/storage/PEventsSpec.scala
new file mode 100644
index 0000000..93cbe6e
--- /dev/null
+++ b/data/src/test/scala/org/apache/predictionio/data/storage/PEventsSpec.scala
@@ -0,0 +1,210 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package org.apache.predictionio.data.storage
+
+import org.specs2._
+import org.specs2.specification.Step
+
+import org.apache.spark.SparkContext
+import org.apache.spark.SparkContext._
+import org.apache.spark.rdd.RDD
+
+class PEventsSpec extends Specification with TestEvents {
+
+  System.clearProperty("spark.driver.port")
+  System.clearProperty("spark.hostPort")
+  val sc = new SparkContext("local[4]", "PEventAggregatorSpec test")
+
+  val appId = 1
+  val channelId = 6
+  val dbName = "test_pio_storage_events_" + hashCode
+
+  def hbLocal = Storage.getDataObject[LEvents](
+    StorageTestUtils.hbaseSourceName,
+    dbName
+  )
+
+  def hbPar = Storage.getDataObject[PEvents](
+    StorageTestUtils.hbaseSourceName,
+    dbName
+  )
+
+  def jdbcLocal = Storage.getDataObject[LEvents](
+    StorageTestUtils.jdbcSourceName,
+    dbName
+  )
+
+  def jdbcPar = Storage.getDataObject[PEvents](
+    StorageTestUtils.jdbcSourceName,
+    dbName
+  )
+
+  def stopSpark = {
+    sc.stop()
+  }
+
+  def is = s2"""
+
+  PredictionIO Storage PEvents Specification
+
+    PEvents can be implemented by:
+    - HBPEvents ${hbPEvents}
+    - JDBCPEvents ${jdbcPEvents}
+    - (stop Spark) ${Step(sc.stop())}
+
+  """
+
+  def hbPEvents = sequential ^ s2"""
+
+    HBPEvents should
+    - behave like any PEvents implementation ${events(hbLocal, hbPar)}
+    - (table cleanup) ${Step(StorageTestUtils.dropHBaseNamespace(dbName))}
+
+  """
+
+  def jdbcPEvents = sequential ^ s2"""
+
+    JDBCPEvents should
+    - behave like any PEvents implementation ${events(jdbcLocal, jdbcPar)}
+    - (table cleanup) ${Step(StorageTestUtils.dropJDBCTable(s"${dbName}_$appId"))}
+    - (table cleanup) ${Step(StorageTestUtils.dropJDBCTable(s"${dbName}_${appId}_$channelId"))}
+
+  """
+
+  def events(localEventClient: LEvents, parEventClient: PEvents) = sequential ^ s2"""
+
+    - (init test) ${initTest(localEventClient)}
+    - (insert test events) ${insertTestEvents(localEventClient)}
+    find in default ${find(parEventClient)}
+    find in channel ${findChannel(parEventClient)}
+    aggregate user properties in default ${aggregateUserProperties(parEventClient)}
+    aggregate user properties in channel ${aggregateUserPropertiesChannel(parEventClient)}
+    write to default ${write(parEventClient)}
+    write to channel ${writeChannel(parEventClient)}
+
+  """
+
+  /* setup */
+
+  // events from TestEvents trait
+  val listOfEvents = List(u1e5, u2e2, u1e3, u1e1, u2e3, u2e1, u1e4, u1e2, r1, r2)
+  val listOfEventsChannel = List(u3e1, u3e2, u3e3, r3, r4)
+
+  def initTest(localEventClient: LEvents) = {
+    localEventClient.init(appId)
+    localEventClient.init(appId, Some(channelId))
+  }
+
+  def insertTestEvents(localEventClient: LEvents) = {
+    listOfEvents.map( localEventClient.insert(_, appId) )
+    // insert to channel
+    listOfEventsChannel.map( localEventClient.insert(_, appId, Some(channelId)) )
+    success
+  }
+
+  /* following are tests */
+
+  def find(parEventClient: PEvents) = {
+    val resultRDD: RDD[Event] = parEventClient.find(
+      appId = appId
+    )(sc)
+
+    val results = resultRDD.collect.toList
+      .map {_.copy(eventId = None)} // ignore eventId
+
+    results must containTheSameElementsAs(listOfEvents)
+  }
+
+  def findChannel(parEventClient: PEvents) = {
+    val resultRDD: RDD[Event] = parEventClient.find(
+      appId = appId,
+      channelId = Some(channelId)
+    )(sc)
+
+    val results = resultRDD.collect.toList
+      .map {_.copy(eventId = None)} // ignore eventId
+
+    results must containTheSameElementsAs(listOfEventsChannel)
+  }
+
+  def aggregateUserProperties(parEventClient: PEvents) = {
+    val resultRDD: RDD[(String, PropertyMap)] = parEventClient.aggregateProperties(
+      appId = appId,
+      entityType = "user"
+    )(sc)
+    val result: Map[String, PropertyMap] = resultRDD.collectAsMap.toMap
+
+    val expected = Map(
+      "u1" -> PropertyMap(u1, u1BaseTime, u1LastTime),
+      "u2" -> PropertyMap(u2, u2BaseTime, u2LastTime)
+    )
+
+    result must beEqualTo(expected)
+  }
+
+  def aggregateUserPropertiesChannel(parEventClient: PEvents) = {
+    val resultRDD: RDD[(String, PropertyMap)] = parEventClient.aggregateProperties(
+      appId = appId,
+      channelId = Some(channelId),
+      entityType = "user"
+    )(sc)
+    val result: Map[String, PropertyMap] = resultRDD.collectAsMap.toMap
+
+    val expected = Map(
+      "u3" -> PropertyMap(u3, u3BaseTime, u3LastTime)
+    )
+
+    result must beEqualTo(expected)
+  }
+
+  def write(parEventClient: PEvents) = {
+    val written = List(r5, r6)
+    val writtenRDD = sc.parallelize(written)
+    parEventClient.write(writtenRDD, appId)(sc)
+
+    // read back
+    val resultRDD = parEventClient.find(
+      appId = appId
+    )(sc)
+
+    val results = resultRDD.collect.toList
+      .map { _.copy(eventId = None)} // ignore eventId
+
+    val expected = listOfEvents ++ written
+
+    results must containTheSameElementsAs(expected)
+  }
+
+  def writeChannel(parEventClient: PEvents) = {
+    val written = List(r1, r5, r6)
+    val writtenRDD = sc.parallelize(written)
+    parEventClient.write(writtenRDD, appId, Some(channelId))(sc)
+
+    // read back
+    val resultRDD = parEventClient.find(
+      appId = appId,
+      channelId = Some(channelId)
+    )(sc)
+
+    val results = resultRDD.collect.toList
+      .map { _.copy(eventId = None)} // ignore eventId
+
+    val expected = listOfEventsChannel ++ written
+
+    results must containTheSameElementsAs(expected)
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/data/src/test/scala/org/apache/predictionio/data/storage/StorageTestUtils.scala
----------------------------------------------------------------------
diff --git a/data/src/test/scala/org/apache/predictionio/data/storage/StorageTestUtils.scala b/data/src/test/scala/org/apache/predictionio/data/storage/StorageTestUtils.scala
new file mode 100644
index 0000000..6068f4c
--- /dev/null
+++ b/data/src/test/scala/org/apache/predictionio/data/storage/StorageTestUtils.scala
@@ -0,0 +1,42 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package org.apache.predictionio.data.storage
+
+import org.apache.predictionio.data.storage.hbase.HBLEvents
+import scalikejdbc._
+
+object StorageTestUtils {
+  val hbaseSourceName = "HBASE"
+  val jdbcSourceName = "PGSQL"
+
+  def dropHBaseNamespace(namespace: String): Unit = {
+    val eventDb = Storage.getDataObject[LEvents](hbaseSourceName, namespace)
+      .asInstanceOf[HBLEvents]
+    val admin = eventDb.client.admin
+    val tableNames = admin.listTableNamesByNamespace(namespace)
+    tableNames.foreach { name =>
+      admin.disableTable(name)
+      admin.deleteTable(name)
+    }
+
+    // Only empty namespaces (no tables) can be removed.
+    admin.deleteNamespace(namespace)
+  }
+
+  def dropJDBCTable(table: String): Unit = DB autoCommit { implicit s =>
+    SQL(s"drop table $table").execute().apply()
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/data/src/test/scala/org/apache/predictionio/data/storage/TestEvents.scala
----------------------------------------------------------------------
diff --git a/data/src/test/scala/org/apache/predictionio/data/storage/TestEvents.scala b/data/src/test/scala/org/apache/predictionio/data/storage/TestEvents.scala
new file mode 100644
index 0000000..f1c327b
--- /dev/null
+++ b/data/src/test/scala/org/apache/predictionio/data/storage/TestEvents.scala
@@ -0,0 +1,263 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package org.apache.predictionio.data.storage
+
+import org.joda.time.DateTime
+import org.joda.time.DateTimeZone
+
+trait TestEvents {
+
+  val u1BaseTime = new DateTime(654321)
+  val u2BaseTime = new DateTime(6543210)
+  val u3BaseTime = new DateTime(6543410)
+
+  // u1 events
+  val u1e1 = Event(
+    event = "$set",
+    entityType = "user",
+    entityId = "u1",
+    properties = DataMap(
+      """{
+        "a" : 1,
+        "b" : "value2",
+        "d" : [1, 2, 3],
+      }"""),
+    eventTime = u1BaseTime
+  )
+
+  val u1e2 = u1e1.copy(
+    event = "$set",
+    properties = DataMap("""{"a" : 2}"""),
+    eventTime = u1BaseTime.plusDays(1)
+  )
+
+  val u1e3 = u1e1.copy(
+    event = "$set",
+    properties = DataMap("""{"b" : "value4"}"""),
+    eventTime = u1BaseTime.plusDays(2)
+  )
+
+  val u1e4 = u1e1.copy(
+    event = "$unset",
+    properties = DataMap("""{"b" : null}"""),
+    eventTime = u1BaseTime.plusDays(3)
+  )
+
+  val u1e5 = u1e1.copy(
+    event = "$set",
+    properties = DataMap("""{"e" : "new"}"""),
+    eventTime = u1BaseTime.plusDays(4)
+  )
+
+  val u1LastTime = u1BaseTime.plusDays(4)
+  val u1 = """{"a": 2, "d": [1, 2, 3], "e": "new"}"""
+
+  // delete event for u1
+  val u1ed = u1e1.copy(
+    event = "$delete",
+    properties = DataMap(),
+    eventTime = u1BaseTime.plusDays(5)
+  )
+
+  // u2 events
+  val u2e1 = Event(
+    event = "$set",
+    entityType = "user",
+    entityId = "u2",
+    properties = DataMap(
+      """{
+        "a" : 21,
+        "b" : "value12",
+        "d" : [7, 5, 6],
+      }"""),
+    eventTime = u2BaseTime
+  )
+
+  val u2e2 = u2e1.copy(
+    event = "$unset",
+    properties = DataMap("""{"a" : null}"""),
+    eventTime = u2BaseTime.plusDays(1)
+  )
+
+  val u2e3 = u2e1.copy(
+    event = "$set",
+    properties = DataMap("""{"b" : "value9", "g": "new11"}"""),
+    eventTime = u2BaseTime.plusDays(2)
+  )
+
+  val u2LastTime = u2BaseTime.plusDays(2)
+  val u2 = """{"b": "value9", "d": [7, 5, 6], "g": "new11"}"""
+
+  // u3 events
+  val u3e1 = Event(
+    event = "$set",
+    entityType = "user",
+    entityId = "u3",
+    properties = DataMap(
+      """{
+        "a" : 22,
+        "b" : "value13",
+        "d" : [5, 6, 1],
+      }"""),
+    eventTime = u3BaseTime
+  )
+
+  val u3e2 = u3e1.copy(
+    event = "$unset",
+    properties = DataMap("""{"a" : null}"""),
+    eventTime = u3BaseTime.plusDays(1)
+  )
+
+  val u3e3 = u3e1.copy(
+    event = "$set",
+    properties = DataMap("""{"b" : "value10", "f": "new12", "d" : [1, 3, 2]}"""),
+    eventTime = u3BaseTime.plusDays(2)
+  )
+
+  val u3LastTime = u3BaseTime.plusDays(2)
+  val u3 = """{"b": "value10", "d": [1, 3, 2], "f": "new12"}"""
+
+  // some random events
+  val r1 = Event(
+    event = "my_event",
+    entityType = "my_entity_type",
+    entityId = "my_entity_id",
+    targetEntityType = Some("my_target_entity_type"),
+    targetEntityId = Some("my_target_entity_id"),
+    properties = DataMap(
+      """{
+        "prop1" : 1,
+        "prop2" : "value2",
+        "prop3" : [1, 2, 3],
+        "prop4" : true,
+        "prop5" : ["a", "b", "c"],
+        "prop6" : 4.56
+      }"""
+    ),
+    eventTime = DateTime.now,
+    prId = Some("my_prid")
+  )
+  val r2 = Event(
+    event = "my_event2",
+    entityType = "my_entity_type2",
+    entityId = "my_entity_id2"
+  )
+  val r3 = Event(
+    event = "my_event3",
+    entityType = "my_entity_type",
+    entityId = "my_entity_id",
+    targetEntityType = Some("my_target_entity_type"),
+    targetEntityId = Some("my_target_entity_id"),
+    properties = DataMap(
+      """{
+        "propA" : 1.2345,
+        "propB" : "valueB",
+      }"""
+    ),
+    prId = Some("my_prid")
+  )
+  val r4 = Event(
+    event = "my_event4",
+    entityType = "my_entity_type4",
+    entityId = "my_entity_id4",
+    targetEntityType = Some("my_target_entity_type4"),
+    targetEntityId = Some("my_target_entity_id4"),
+    properties = DataMap(
+      """{
+        "prop1" : 1,
+        "prop2" : "value2",
+        "prop3" : [1, 2, 3],
+        "prop4" : true,
+        "prop5" : ["a", "b", "c"],
+        "prop6" : 4.56
+      }"""),
+    eventTime = DateTime.now
+  )
+  val r5 = Event(
+    event = "my_event5",
+    entityType = "my_entity_type5",
+    entityId = "my_entity_id5",
+    targetEntityType = Some("my_target_entity_type5"),
+    targetEntityId = Some("my_target_entity_id5"),
+    properties = DataMap(
+      """{
+        "prop1" : 1,
+        "prop2" : "value2",
+        "prop3" : [1, 2, 3],
+        "prop4" : true,
+        "prop5" : ["a", "b", "c"],
+        "prop6" : 4.56
+      }"""
+    ),
+    eventTime = DateTime.now
+  )
+  val r6 = Event(
+    event = "my_event6",
+    entityType = "my_entity_type6",
+    entityId = "my_entity_id6",
+    targetEntityType = Some("my_target_entity_type6"),
+    targetEntityId = Some("my_target_entity_id6"),
+    properties = DataMap(
+      """{
+        "prop1" : 6,
+        "prop2" : "value2",
+        "prop3" : [6, 7, 8],
+        "prop4" : true,
+        "prop5" : ["a", "b", "c"],
+        "prop6" : 4.56
+      }"""
+    ),
+    eventTime = DateTime.now
+  )
+
+  // timezone
+  val tz1 = Event(
+    event = "my_event",
+    entityType = "my_entity_type",
+    entityId = "my_entity_id0",
+    targetEntityType = Some("my_target_entity_type"),
+    targetEntityId = Some("my_target_entity_id"),
+    properties = DataMap(
+      """{
+        "prop1" : 1,
+        "prop2" : "value2",
+        "prop3" : [1, 2, 3],
+        "prop4" : true,
+        "prop5" : ["a", "b", "c"],
+        "prop6" : 4.56
+      }"""
+    ),
+    eventTime = new DateTime(12345678, DateTimeZone.forID("-08:00")),
+    prId = Some("my_prid")
+  )
+
+  val tz2 = Event(
+    event = "my_event",
+    entityType = "my_entity_type",
+    entityId = "my_entity_id1",
+    eventTime = new DateTime(12345678, DateTimeZone.forID("+02:00")),
+    prId = Some("my_prid")
+  )
+
+  val tz3 = Event(
+    event = "my_event",
+    entityType = "my_entity_type",
+    entityId = "my_entity_id2",
+    eventTime = new DateTime(12345678, DateTimeZone.forID("+08:00")),
+    prId = Some("my_prid")
+  )
+
+}
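
The expected aggregates u1, u2 and u3 follow from replaying each entity's events in eventTime order: $set merges keys over the existing properties, $unset removes keys, and $delete clears the entity. The same semantics, sketched over plain Maps as a simplification of the real aggregator and traced for u1:

    val ops: Seq[(String, Map[String, Any])] = Seq(
      "$set"   -> Map("a" -> 1, "b" -> "value2", "d" -> List(1, 2, 3)),
      "$set"   -> Map("a" -> 2),
      "$set"   -> Map("b" -> "value4"),
      "$unset" -> Map("b" -> ""),                           // value is irrelevant to $unset
      "$set"   -> Map("e" -> "new")
    )
    val finalProps = ops.foldLeft(Map.empty[String, Any]) {
      case (_,   ("$delete", _)) => Map.empty[String, Any]  // wipe everything
      case (acc, ("$set",   m))  => acc ++ m                // upsert keys
      case (acc, ("$unset", m))  => acc -- m.keys           // drop keys
      case (acc, _)              => acc
    }
    // finalProps == Map("a" -> 2, "d" -> List(1, 2, 3), "e" -> "new"), i.e. val u1
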

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/data/src/test/scala/org/apache/predictionio/data/webhooks/ConnectorTestUtil.scala
----------------------------------------------------------------------
diff --git a/data/src/test/scala/org/apache/predictionio/data/webhooks/ConnectorTestUtil.scala b/data/src/test/scala/org/apache/predictionio/data/webhooks/ConnectorTestUtil.scala
new file mode 100644
index 0000000..0998c52
--- /dev/null
+++ b/data/src/test/scala/org/apache/predictionio/data/webhooks/ConnectorTestUtil.scala
@@ -0,0 +1,47 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package org.apache.predictionio.data.webhooks
+
+import org.specs2.execute.Result
+import org.specs2.mutable._
+
+import org.json4s.JObject
+import org.json4s.DefaultFormats
+import org.json4s.native.JsonMethods.parse
+import org.json4s.native.Serialization.write
+
+/** TestUtil for JsonConnector */
+trait ConnectorTestUtil extends Specification {
+
+  implicit val formats = DefaultFormats
+
+  def check(connector: JsonConnector, original: String, event: String): Result = {
+    val originalJson = parse(original).asInstanceOf[JObject]
+    val eventJson = parse(event).asInstanceOf[JObject]
+    // write and parse back to discard any JNothing field
+    val result = parse(write(connector.toEventJson(originalJson))).asInstanceOf[JObject]
+    result.obj must containTheSameElementsAs(eventJson.obj)
+  }
+
+  def check(connector: FormConnector, original: Map[String, String], event: String) = {
+
+    val eventJson = parse(event).asInstanceOf[JObject]
+    // write and parse back to discard any JNothing field
+    val result = parse(write(connector.toEventJson(original))).asInstanceOf[JObject]
+
+    result.obj must containTheSameElementsAs(eventJson.obj)
+  }
+}
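
The write-then-parse step in both check helpers normalizes the connector output: json4s omits JNothing fields when serializing, so optional fields that resolved to JNothing disappear before the comparison. The trick in isolation, as a small sketch:

    import org.json4s._
    import org.json4s.native.JsonMethods.parse
    import org.json4s.native.Serialization.write
    implicit val formats = DefaultFormats
    val withGap    = JObject("kept" -> JInt(1), "dropped" -> JNothing)
    val normalized = parse(write(withGap)).asInstanceOf[JObject]
    // normalized.obj == List(("kept", JInt(1))); the JNothing field is gone
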

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/data/src/test/scala/org/apache/predictionio/data/webhooks/exampleform/ExampleFormConnectorSpec.scala
----------------------------------------------------------------------
diff --git a/data/src/test/scala/org/apache/predictionio/data/webhooks/exampleform/ExampleFormConnectorSpec.scala b/data/src/test/scala/org/apache/predictionio/data/webhooks/exampleform/ExampleFormConnectorSpec.scala
new file mode 100644
index 0000000..d99e2ca
--- /dev/null
+++ b/data/src/test/scala/org/apache/predictionio/data/webhooks/exampleform/ExampleFormConnectorSpec.scala
@@ -0,0 +1,164 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package org.apache.predictionio.data.webhooks.exampleform
+
+import org.apache.predictionio.data.webhooks.ConnectorTestUtil
+
+import org.specs2.mutable._
+
+/** Test the ExampleFormConnector */
+class ExampleFormConnectorSpec extends Specification with ConnectorTestUtil {
+
+  "ExampleFormConnector" should {
+
+    "convert userAction to Event JSON" in {
+      // webhooks input
+      val userAction = Map(
+        "type" -> "userAction",
+        "userId" -> "as34smg4",
+        "event" -> "do_something",
+        "context[ip]" -> "24.5.68.47", // optional
+        "context[prop1]" -> "2.345", // optional
+        "context[prop2]" -> "value1", // optional
+        "anotherProperty1" -> "100",
+        "anotherProperty2"-> "optional1", // optional
+        "timestamp" -> "2015-01-02T00:30:12.984Z"
+      )
+
+      // expected converted Event JSON
+      val expected = """
+        {
+          "event": "do_something",
+          "entityType": "user",
+          "entityId": "as34smg4",
+          "properties": {
+            "context": {
+              "ip": "24.5.68.47",
+              "prop1": 2.345
+              "prop2": "value1"
+            },
+            "anotherProperty1": 100,
+            "anotherProperty2": "optional1"
+          }
+          "eventTime": "2015-01-02T00:30:12.984Z"
+        }
+      """
+
+      check(ExampleFormConnector, userAction, expected)
+    }
+
+    "convert userAction without optional fields to Event JSON" in {
+      // webhooks input
+      val userAction = Map(
+        "type" -> "userAction",
+        "userId" -> "as34smg4",
+        "event" -> "do_something",
+        "anotherProperty1" -> "100",
+        "timestamp" -> "2015-01-02T00:30:12.984Z"
+      )
+
+      // expected converted Event JSON
+      val expected = """
+        {
+          "event": "do_something",
+          "entityType": "user",
+          "entityId": "as34smg4",
+          "properties": {
+            "anotherProperty1": 100,
+          }
+          "eventTime": "2015-01-02T00:30:12.984Z"
+        }
+      """
+
+      check(ExampleFormConnector, userAction, expected)
+    }
+
+    "convert userActionItem to Event JSON" in {
+      // webhooks input
+      val userActionItem = Map(
+        "type" -> "userActionItem",
+        "userId" -> "as34smg4",
+        "event" -> "do_something_on",
+        "itemId" -> "kfjd312bc",
+        "context[ip]" -> "1.23.4.56",
+        "context[prop1]" -> "2.345",
+        "context[prop2]" -> "value1",
+        "anotherPropertyA" -> "4.567", // optional
+        "anotherPropertyB" -> "false", // optional
+        "timestamp" -> "2015-01-15T04:20:23.567Z"
+      )
+
+      // expected converted Event JSON
+      val expected = """
+        {
+          "event": "do_something_on",
+          "entityType": "user",
+          "entityId": "as34smg4",
+          "targetEntityType": "item",
+          "targetEntityId": "kfjd312bc"
+          "properties": {
+            "context": {
+              "ip": "1.23.4.56",
+              "prop1": 2.345
+              "prop2": "value1"
+            },
+            "anotherPropertyA": 4.567
+            "anotherPropertyB": false
+          }
+          "eventTime": "2015-01-15T04:20:23.567Z"
+        }
+      """
+
+      check(ExampleFormConnector, userActionItem, expected)
+    }
+
+    "convert userActionItem without optional fields to Event JSON" in {
+      // webhooks input
+      val userActionItem = Map(
+        "type" -> "userActionItem",
+        "userId" -> "as34smg4",
+        "event" -> "do_something_on",
+        "itemId" -> "kfjd312bc",
+        "context[ip]" -> "1.23.4.56",
+        "context[prop1]" -> "2.345",
+        "context[prop2]" -> "value1",
+        "timestamp" -> "2015-01-15T04:20:23.567Z"
+      )
+
+      // expected converted Event JSON
+      val expected = """
+        {
+          "event": "do_something_on",
+          "entityType": "user",
+          "entityId": "as34smg4",
+          "targetEntityType": "item",
+          "targetEntityId": "kfjd312bc"
+          "properties": {
+            "context": {
+              "ip": "1.23.4.56",
+              "prop1": 2.345
+              "prop2": "value1"
+            }
+          }
+          "eventTime": "2015-01-15T04:20:23.567Z"
+        }
+      """
+
+      check(ExampleFormConnector, userActionItem, expected)
+    }
+
+  }
+}
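
The form-encoded inputs above flatten nesting with bracket keys ("context[ip]" becomes an "ip" field inside a "context" object), and numeric-looking strings such as "2.345" and "100" come out as JSON numbers. A hypothetical helper (names assumed; not the connector's actual code) showing just the key-nesting step:

    def nest(flat: Map[String, String]): Map[String, Any] =
      flat.foldLeft(Map.empty[String, Any]) { case (acc, (key, value)) =>
        key.split("""[\[\]]""").filter(_.nonEmpty) match {
          case Array(outer, inner) =>
            val existing = acc.getOrElse(outer, Map.empty[String, String])
                              .asInstanceOf[Map[String, String]]
            acc.updated(outer, existing.updated(inner, value))
          case _ => acc.updated(key, value)
        }
      }
    // nest(Map("userId" -> "as34smg4", "context[ip]" -> "24.5.68.47"))
    //   == Map("userId" -> "as34smg4", "context" -> Map("ip" -> "24.5.68.47"))
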

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/data/src/test/scala/org/apache/predictionio/data/webhooks/examplejson/ExampleJsonConnectorSpec.scala
----------------------------------------------------------------------
diff --git a/data/src/test/scala/org/apache/predictionio/data/webhooks/examplejson/ExampleJsonConnectorSpec.scala b/data/src/test/scala/org/apache/predictionio/data/webhooks/examplejson/ExampleJsonConnectorSpec.scala
new file mode 100644
index 0000000..069d52e
--- /dev/null
+++ b/data/src/test/scala/org/apache/predictionio/data/webhooks/examplejson/ExampleJsonConnectorSpec.scala
@@ -0,0 +1,179 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package org.apache.predictionio.data.webhooks.examplejson
+
+import org.apache.predictionio.data.webhooks.ConnectorTestUtil
+
+import org.specs2.mutable._
+
+/** Test the ExampleJsonConnector */
+class ExampleJsonConnectorSpec extends Specification with ConnectorTestUtil {
+
+  "ExampleJsonConnector" should {
+
+    "convert userAction to Event JSON" in {
+      // webhooks input
+      val userAction = """
+        {
+          "type": "userAction"
+          "userId": "as34smg4",
+          "event": "do_something",
+          "context": {
+            "ip": "24.5.68.47",
+            "prop1": 2.345
+            "prop2": "value1"
+          },
+          "anotherProperty1": 100,
+          "anotherProperty2": "optional1",
+          "timestamp": "2015-01-02T00:30:12.984Z"
+        }
+      """
+
+      // expected converted Event JSON
+      val expected = """
+        {
+          "event": "do_something",
+          "entityType": "user",
+          "entityId": "as34smg4",
+          "properties": {
+            "context": {
+              "ip": "24.5.68.47",
+              "prop1": 2.345
+              "prop2": "value1"
+            },
+            "anotherProperty1": 100,
+            "anotherProperty2": "optional1"
+          }
+          "eventTime": "2015-01-02T00:30:12.984Z"
+        }
+      """
+
+      check(ExampleJsonConnector, userAction, expected)
+    }
+
+    "convert userAction without optional field to Event JSON" in {
+      // webhooks input
+      val userAction = """
+        {
+          "type": "userAction"
+          "userId": "as34smg4",
+          "event": "do_something",
+          "anotherProperty1": 100,
+          "timestamp": "2015-01-02T00:30:12.984Z"
+        }
+      """
+
+      // expected converted Event JSON
+      val expected = """
+        {
+          "event": "do_something",
+          "entityType": "user",
+          "entityId": "as34smg4",
+          "properties": {
+            "anotherProperty1": 100,
+          }
+          "eventTime": "2015-01-02T00:30:12.984Z"
+        }
+      """
+
+      check(ExampleJsonConnector, userAction, expected)
+    }
+
+    "convert userActionItem to Event JSON" in {
+      // webhooks input
+      val userActionItem = """
+        {
+          "type": "userActionItem"
+          "userId": "as34smg4",
+          "event": "do_something_on",
+          "itemId": "kfjd312bc",
+          "context": {
+            "ip": "1.23.4.56",
+            "prop1": 2.345
+            "prop2": "value1"
+          },
+          "anotherPropertyA": 4.567
+          "anotherPropertyB": false
+          "timestamp": "2015-01-15T04:20:23.567Z"
+      }
+      """
+
+      // expected converted Event JSON
+      val expected = """
+        {
+          "event": "do_something_on",
+          "entityType": "user",
+          "entityId": "as34smg4",
+          "targetEntityType": "item",
+          "targetEntityId": "kfjd312bc"
+          "properties": {
+            "context": {
+              "ip": "1.23.4.56",
+              "prop1": 2.345
+              "prop2": "value1"
+            },
+            "anotherPropertyA": 4.567
+            "anotherPropertyB": false
+          }
+          "eventTime": "2015-01-15T04:20:23.567Z"
+        }
+      """
+
+      check(ExampleJsonConnector, userActionItem, expected)
+    }
+
+    "convert userActionItem without optional fields to Event JSON" in {
+      // webhooks input
+      val userActionItem = """
+        {
+          "type": "userActionItem"
+          "userId": "as34smg4",
+          "event": "do_something_on",
+          "itemId": "kfjd312bc",
+          "context": {
+            "ip": "1.23.4.56",
+            "prop1": 2.345
+            "prop2": "value1"
+          }
+          "timestamp": "2015-01-15T04:20:23.567Z"
+      }
+      """
+
+      // expected converted Event JSON
+      val expected = """
+        {
+          "event": "do_something_on",
+          "entityType": "user",
+          "entityId": "as34smg4",
+          "targetEntityType": "item",
+          "targetEntityId": "kfjd312bc"
+          "properties": {
+            "context": {
+              "ip": "1.23.4.56",
+              "prop1": 2.345
+              "prop2": "value1"
+            }
+          }
+          "eventTime": "2015-01-15T04:20:23.567Z"
+        }
+      """
+
+      check(ExampleJsonConnector, userActionItem, expected)
+    }
+
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/data/src/test/scala/org/apache/predictionio/data/webhooks/mailchimp/MailChimpConnectorSpec.scala
----------------------------------------------------------------------
diff --git a/data/src/test/scala/org/apache/predictionio/data/webhooks/mailchimp/MailChimpConnectorSpec.scala b/data/src/test/scala/org/apache/predictionio/data/webhooks/mailchimp/MailChimpConnectorSpec.scala
new file mode 100644
index 0000000..854c9dd
--- /dev/null
+++ b/data/src/test/scala/org/apache/predictionio/data/webhooks/mailchimp/MailChimpConnectorSpec.scala
@@ -0,0 +1,254 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package org.apache.predictionio.data.webhooks.mailchimp
+
+import org.apache.predictionio.data.webhooks.ConnectorTestUtil
+
+import org.specs2.mutable._
+
+class MailChimpConnectorSpec extends Specification with ConnectorTestUtil {
+
+  // TODO: test other events
+  // TODO: test different optional fields
+
+  "MailChimpConnector" should {
+
+    "convert subscribe to event JSON" in {
+
+      val subscribe = Map(
+        "type" -> "subscribe",
+        "fired_at" -> "2009-03-26 21:35:57",
+        "data[id]" -> "8a25ff1d98",
+        "data[list_id]" -> "a6b5da1054",
+        "data[email]" -> "api@mailchimp.com",
+        "data[email_type]" -> "html",
+        "data[merges][EMAIL]" -> "api@mailchimp.com",
+        "data[merges][FNAME]" -> "MailChimp",
+        "data[merges][LNAME]" -> "API",
+        "data[merges][INTERESTS]" -> "Group1,Group2", //optional
+        "data[ip_opt]" -> "10.20.10.30",
+        "data[ip_signup]" -> "10.20.10.30"
+      )
+
+      val expected = """
+        {
+          "event" : "subscribe",
+          "entityType" : "user",
+          "entityId" : "8a25ff1d98",
+          "targetEntityType" : "list",
+          "targetEntityId" : "a6b5da1054",
+          "properties" : {
+            "email" : "api@mailchimp.com",
+            "email_type" : "html",
+            "merges" : {
+              "EMAIL" : "api@mailchimp.com",
+              "FNAME" : "MailChimp",
+              "LNAME" : "API"
+              "INTERESTS" : "Group1,Group2"
+            },
+            "ip_opt" : "10.20.10.30",
+            "ip_signup" : "10.20.10.30"
+          },
+          "eventTime" : "2009-03-26T21:35:57.000Z"
+        }
+      """
+
+      check(MailChimpConnector, subscribe, expected)
+    }
+
+    // check conversion of unsubscribe to event JSON
+    "convert unsubscribe to event JSON" in {
+
+      val unsubscribe = Map(
+        "type" -> "unsubscribe",
+        "fired_at" -> "2009-03-26 21:40:57",
+        "data[action]" -> "unsub",
+        "data[reason]" -> "manual",
+        "data[id]" -> "8a25ff1d98",
+        "data[list_id]" -> "a6b5da1054",
+        "data[email]" -> "api+unsub@mailchimp.com",
+        "data[email_type]" -> "html",
+        "data[merges][EMAIL]" -> "api+unsub@mailchimp.com",
+        "data[merges][FNAME]" -> "MailChimp",
+        "data[merges][LNAME]" -> "API",
+        "data[merges][INTERESTS]" -> "Group1,Group2", //optional 
+        "data[ip_opt]" -> "10.20.10.30",
+        "data[campaign_id]" -> "cb398d21d2"
+      )
+
+      val expected = """
+        {
+          "event" : "unsubscribe",
+          "entityType" : "user",
+          "entityId" : "8a25ff1d98",
+          "targetEntityType" : "list",
+          "targetEntityId" : "a6b5da1054",
+          "properties" : {
+            "action" : "unsub",
+            "reason" : "manual",
+            "email" : "api+unsub@mailchimp.com",
+            "email_type" : "html",
+            "merges" : {
+              "EMAIL" : "api+unsub@mailchimp.com",
+              "FNAME" : "MailChimp",
+              "LNAME" : "API"
+              "INTERESTS" : "Group1,Group2"
+            },
+            "ip_opt" : "10.20.10.30",
+            "campaign_id" : "cb398d21d2"
+          },
+          "eventTime" : "2009-03-26T21:40:57.000Z"
+        }
+      """
+
+      check(MailChimpConnector, unsubscribe, expected)
+    }
+
+    // check conversion of profile update to event JSON
+    "convert profile update to event JSON" in {
+
+      val profileUpdate = Map(
+        "type" -> "profile",
+        "fired_at" -> "2009-03-26 21:31:21",
+        "data[id]" -> "8a25ff1d98",
+        "data[list_id]" -> "a6b5da1054",
+        "data[email]" -> "api@mailchimp.com",
+        "data[email_type]" -> "html",
+        "data[merges][EMAIL]" -> "api@mailchimp.com",
+        "data[merges][FNAME]" -> "MailChimp",
+        "data[merges][LNAME]" -> "API",
+        "data[merges][INTERESTS]" -> "Group1,Group2", //optional
+        "data[ip_opt]" -> "10.20.10.30"
+      )
+
+      val expected = """
+        {
+          "event" : "profile",
+          "entityType" : "user",
+          "entityId" : "8a25ff1d98",
+          "targetEntityType" : "list",
+          "targetEntityId" : "a6b5da1054",
+          "properties" : {
+            "email" : "api@mailchimp.com",
+            "email_type" : "html",
+            "merges" : {
+              "EMAIL" : "api@mailchimp.com",
+              "FNAME" : "MailChimp",
+              "LNAME" : "API"
+              "INTERESTS" : "Group1,Group2"
+            },
+            "ip_opt" : "10.20.10.30"
+          },
+          "eventTime" : "2009-03-26T21:31:21.000Z"
+        }
+      """
+
+      check(MailChimpConnector, profileUpdate, expected)
+    }
+
+    // check conversion of email update to event JSON
+    "convert email update to event JSON" in {
+
+      val emailUpdate = Map(
+        "type" -> "upemail",
+        "fired_at" -> "2009-03-26 22:15:09",
+        "data[list_id]" -> "a6b5da1054",
+        "data[new_id]" -> "51da8c3259",
+        "data[new_email]" -> "api+new@mailchimp.com",
+        "data[old_email]" -> "api+old@mailchimp.com"
+      )
+
+      val expected = """
+        {
+          "event" : "upemail",
+          "entityType" : "user",
+          "entityId" : "51da8c3259",
+          "targetEntityType" : "list",
+          "targetEntityId" : "a6b5da1054",
+          "properties" : {
+            "new_email" : "api+new@mailchimp.com",
+            "old_email" : "api+old@mailchimp.com"
+          },
+          "eventTime" : "2009-03-26T22:15:09.000Z"
+        }
+      """
+
+      check(MailChimpConnector, emailUpdate, expected)
+    }
+
+    // check conversion of cleaned email to event JSON
+    "convert cleaned email to event JSON" in {
+
+      val cleanedEmail = Map(
+        "type" -> "cleaned",
+        "fired_at" -> "2009-03-26 22:01:00",
+        "data[list_id]" -> "a6b5da1054",
+        "data[campaign_id]" -> "4fjk2ma9xd",
+        "data[reason]" -> "hard",
+        "data[email]" -> "api+cleaned@mailchimp.com"
+      )
+
+      val expected = """
+        {
+          "event" : "cleaned",
+          "entityType" : "list",
+          "entityId" : "a6b5da1054",
+          "properties" : {
+            "campaignId" : "4fjk2ma9xd",
+            "reason" : "hard",
+            "email" : "api+cleaned@mailchimp.com"
+          },
+          "eventTime" : "2009-03-26T22:01:00.000Z"
+        }
+      """
+
+      check(MailChimpConnector, cleanedEmail, expected)
+    }
+
+    // check conversion of campaign sending status to event JSON
+    "convert campaign sending status to event JSON" in {
+
+      val campaign = Map(
+        "type" -> "campaign",
+        "fired_at" -> "2009-03-26 22:15:09",
+        "data[id]" -> "5aa2102003",
+        "data[subject]" -> "Test Campaign Subject",
+        "data[status]" -> "sent",
+        "data[reason]" -> "",
+        "data[list_id]" -> "a6b5da1054"
+      )
+
+      val expected = """
+        {
+          "event" : "campaign",
+          "entityType" : "campaign",
+          "entityId" : "5aa2102003",
+          "targetEntityType" : "list",
+          "targetEntityId" : "a6b5da1054",
+          "properties" : {
+            "subject" : "Test Campaign Subject",
+            "status" : "sent",
+            "reason" : ""
+          },
+          "eventTime" : "2009-03-26T22:15:09.000Z"
+        }
+      """
+
+      check(MailChimpConnector, campaign, expected)
+    }
+
+  }
+}
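
Every spec above expects MailChimp's space-separated fired_at timestamp to come out as an ISO-8601 UTC eventTime. A small joda-time sketch of that conversion (the pattern string is an assumption, not necessarily what the connector itself uses):

    import org.joda.time.format.DateTimeFormat
    val firedAtFormat = DateTimeFormat.forPattern("yyyy-MM-dd HH:mm:ss").withZoneUTC()
    val eventTime     = firedAtFormat.parseDateTime("2009-03-26 21:35:57")
    // eventTime.toString == "2009-03-26T21:35:57.000Z"
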

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/data/src/test/scala/org/apache/predictionio/data/webhooks/segmentio/SegmentIOConnectorSpec.scala
----------------------------------------------------------------------
diff --git a/data/src/test/scala/org/apache/predictionio/data/webhooks/segmentio/SegmentIOConnectorSpec.scala b/data/src/test/scala/org/apache/predictionio/data/webhooks/segmentio/SegmentIOConnectorSpec.scala
new file mode 100644
index 0000000..de92ecd
--- /dev/null
+++ b/data/src/test/scala/org/apache/predictionio/data/webhooks/segmentio/SegmentIOConnectorSpec.scala
@@ -0,0 +1,335 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package org.apache.predictionio.data.webhooks.segmentio
+
+import org.apache.predictionio.data.webhooks.ConnectorTestUtil
+
+import org.specs2.mutable._
+
+class SegmentIOConnectorSpec extends Specification with ConnectorTestUtil {
+
+  // TODO: test different optional fields
+
+  val commonFields =
+    s"""
+       |  "anonymous_id": "id",
+       |  "sent_at": "sendAt",
+       |  "version": "2",
+     """.stripMargin
+
+  "SegmentIOConnector" should {
+
+    "convert group with context to event JSON" in {
+      val context =
+        """
+          |  "context": {
+          |    "app": {
+          |      "name": "InitechGlobal",
+          |      "version": "545",
+          |      "build": "3.0.1.545"
+          |    },
+          |    "campaign": {
+          |      "name": "TPS Innovation Newsletter",
+          |      "source": "Newsletter",
+          |      "medium": "email",
+          |      "term": "tps reports",
+          |      "content": "image link"
+          |    },
+          |    "device": {
+          |      "id": "B5372DB0-C21E-11E4-8DFC-AA07A5B093DB",
+          |      "advertising_id": "7A3CBEA0-BDF5-11E4-8DFC-AA07A5B093DB",
+          |      "ad_tracking_enabled": true,
+          |      "manufacturer": "Apple",
+          |      "model": "iPhone7,2",
+          |      "name": "maguro",
+          |      "type": "ios",
+          |      "token": "ff15bc0c20c4aa6cd50854ff165fd265c838e5405bfeb9571066395b8c9da449"
+          |    },
+          |    "ip": "8.8.8.8",
+          |    "library": {
+          |      "name": "analytics-ios",
+          |      "version": "1.8.0"
+          |    },
+          |    "network": {
+          |      "bluetooth": false,
+          |      "carrier": "T-Mobile NL",
+          |      "cellular": true,
+          |      "wifi": false
+          |    },
+          |    "location": {
+          |      "city": "San Francisco",
+          |      "country": "United States",
+          |      "latitude": 40.2964197,
+          |      "longitude": -76.9411617,
+          |      "speed": 0
+          |    },
+          |    "os": {
+          |      "name": "iPhone OS",
+          |      "version": "8.1.3"
+          |    },
+          |    "referrer": {
+          |      "id": "ABCD582CDEFFFF01919",
+          |      "type": "dataxu"
+          |    },
+          |    "screen": {
+          |      "width": 320,
+          |      "height": 568,
+          |      "density": 2
+          |    },
+          |    "timezone": "Europe/Amsterdam",
+          |    "user_agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_5)"
+          |  }
+        """.stripMargin
+
+      val group =
+        s"""
+           |{ $commonFields
+            |  "type": "group",
+            |  "group_id": "groupId",
+            |  "user_id": "userIdValue",
+            |  "timestamp" : "2012-12-02T00:30:08.276Z",
+            |  "traits": {
+            |    "name": "groupName",
+            |    "employees": 329,
+            |  },
+            |  $context
+            |}
+        """.stripMargin
+
+      val expected =
+        s"""
+          |{
+          |  "event": "group",
+          |  "entityType": "user",
+          |  "entityId": "userIdValue",
+          |  "properties": {
+          |    $context,
+          |    "group_id": "groupId",
+          |    "traits": {
+          |      "name": "groupName",
+          |      "employees": 329
+          |    },
+          |  },
+          |  "eventTime" : "2012-12-02T00:30:08.276Z"
+          |}
+        """.stripMargin
+
+      check(SegmentIOConnector, group, expected)
+    }
+
+    "convert group to event JSON" in {
+      val group =
+        s"""
+          |{ $commonFields
+          |  "type": "group",
+          |  "group_id": "groupId",
+          |  "user_id": "userIdValue",
+          |  "timestamp" : "2012-12-02T00:30:08.276Z",
+          |  "traits": {
+          |    "name": "groupName",
+          |    "employees": 329,
+          |  }
+          |}
+        """.stripMargin
+
+      val expected =
+        """
+          |{
+          |  "event": "group",
+          |  "entityType": "user",
+          |  "entityId": "userIdValue",
+          |  "properties": {
+          |    "group_id": "groupId",
+          |    "traits": {
+          |      "name": "groupName",
+          |      "employees": 329
+          |    }
+          |  },
+          |  "eventTime" : "2012-12-02T00:30:08.276Z"
+          |}
+        """.stripMargin
+
+      check(SegmentIOConnector, group, expected)
+    }
+
+    "convert screen to event JSON" in {
+      val screen =
+        s"""
+          |{ $commonFields
+          |  "type": "screen",
+          |  "name": "screenName",
+          |  "user_id": "userIdValue",
+          |  "timestamp" : "2012-12-02T00:30:08.276Z",
+          |  "properties": {
+          |    "variation": "screenVariation"
+          |  }
+          |}
+        """.stripMargin
+
+      val expected =
+        """
+          |{
+          |  "event": "screen",
+          |  "entityType": "user",
+          |  "entityId": "userIdValue",
+          |  "properties": {
+          |    "properties": {
+          |      "variation": "screenVariation"
+          |    },
+          |    "name": "screenName"
+          |  },
+          |  "eventTime" : "2012-12-02T00:30:08.276Z"
+          |}
+        """.stripMargin
+
+      check(SegmentIOConnector, screen, expected)
+    }
+
+    "convert page to event JSON" in {
+      val page =
+       s"""
+          |{ $commonFields
+          |  "type": "page",
+          |  "name": "pageName",
+          |  "user_id": "userIdValue",
+          |  "timestamp" : "2012-12-02T00:30:08.276Z",
+          |  "properties": {
+          |    "title": "pageTitle",
+          |    "url": "pageUrl"
+          |  }
+          |}
+        """.stripMargin
+
+      val expected =
+        """
+          |{
+          |  "event": "page",
+          |  "entityType": "user",
+          |  "entityId": "userIdValue",
+          |  "properties": {
+          |    "properties": {
+          |      "title": "pageTitle",
+          |      "url": "pageUrl"
+          |    },
+          |    "name": "pageName"
+          |  },
+          |  "eventTime" : "2012-12-02T00:30:08.276Z"
+          |}
+        """.stripMargin
+
+      check(SegmentIOConnector, page, expected)
+    }
+
+    "convert alias to event JSON" in {
+      val alias =
+        s"""
+          |{ $commonFields
+          |  "type": "alias",
+          |  "previous_id": "previousIdValue",
+          |  "user_id": "userIdValue",
+          |  "timestamp" : "2012-12-02T00:30:08.276Z"
+          |}
+        """.stripMargin
+
+      val expected =
+        """
+          |{
+          |  "event": "alias",
+          |  "entityType": "user",
+          |  "entityId": "userIdValue",
+          |  "properties": {
+          |    "previous_id" : "previousIdValue"
+          |  },
+          |  "eventTime" : "2012-12-02T00:30:08.276Z"
+          |}
+        """.stripMargin
+
+      check(SegmentIOConnector, alias, expected)
+    }
+
+    "convert track to event JSON" in {
+      val track =
+       s"""
+          |{ $commonFields
+          |  "user_id": "some_user_id",
+          |  "type": "track",
+          |  "event": "Registered",
+          |  "timestamp" : "2012-12-02T00:30:08.276Z",
+          |  "properties": {
+          |    "plan": "Pro Annual",
+          |    "accountType" : "Facebook"
+          |  }
+          |}
+        """.stripMargin
+
+      val expected =
+        """
+          |{
+          |  "event": "track",
+          |  "entityType": "user",
+          |  "entityId": "some_user_id",
+          |  "properties": {
+          |    "event": "Registered",
+          |    "properties": {
+          |      "plan": "Pro Annual",
+          |      "accountType": "Facebook"
+          |    }
+          |  },
+          |  "eventTime" : "2012-12-02T00:30:08.276Z"
+          |}
+        """.stripMargin
+
+      check(SegmentIOConnector, track, expected)
+    }
+
+    "convert identify to event JSON" in {
+      val identify = s"""
+        { $commonFields
+          "type"      : "identify",
+          "user_id"    : "019mr8mf4r",
+          "traits"    : {
+              "email"            : "achilles@segment.com",
+              "name"             : "Achilles",
+              "subscription_plan" : "Premium",
+              "friendCount"      : 29
+          },
+          "timestamp" : "2012-12-02T00:30:08.276Z"
+        }
+      """
+
+      val expected = """
+        {
+          "event" : "identify",
+          "entityType": "user",
+          "entityId" : "019mr8mf4r",
+          "properties" : {
+            "traits" : {
+              "email"            : "achilles@segment.com",
+              "name"             : "Achilles",
+              "subscription_plan" : "Premium",
+              "friendCount"      : 29
+            }
+          },
+          "eventTime" : "2012-12-02T00:30:08.276Z"
+        }
+      """
+
+      check(SegmentIOConnector, identify, expected)
+    }
+
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/e2/src/main/scala/io/prediction/e2/engine/BinaryVectorizer.scala
----------------------------------------------------------------------
diff --git a/e2/src/main/scala/io/prediction/e2/engine/BinaryVectorizer.scala b/e2/src/main/scala/io/prediction/e2/engine/BinaryVectorizer.scala
deleted file mode 100644
index 6c0d5d3..0000000
--- a/e2/src/main/scala/io/prediction/e2/engine/BinaryVectorizer.scala
+++ /dev/null
@@ -1,61 +0,0 @@
-/** Copyright 2015 TappingStone, Inc.
-  *
-  * Licensed under the Apache License, Version 2.0 (the "License");
-  * you may not use this file except in compliance with the License.
-  * You may obtain a copy of the License at
-  *
-  *     http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-package io.prediction.e2.engine
-
-import org.apache.spark.rdd.RDD
-import org.apache.spark.SparkContext._
-import org.apache.spark.mllib.linalg.Vectors
-import org.apache.spark.mllib.linalg.Vector
-import scala.collection.immutable.HashMap
-import scala.collection.immutable.HashSet
-
-class BinaryVectorizer(propertyMap : HashMap[(String, String), Int])
-extends Serializable {
-
-  val properties: Array[(String, String)] = propertyMap.toArray.sortBy(_._2).map(_._1)
-  val numFeatures = propertyMap.size
-
-  override def toString: String = {
-    s"BinaryVectorizer($numFeatures): " + properties.map(e => s"(${e._1}, ${e._2})").mkString(",")
-  }
-
-  def toBinary(map :  Array[(String, String)]) : Vector = {
-    val mapArr : Seq[(Int, Double)] = map.flatMap(
-      e => propertyMap.get(e).map(idx => (idx, 1.0))
-    )
-
-    Vectors.sparse(numFeatures, mapArr)
-  }
-}
-
-
-object BinaryVectorizer {
-  def apply (input : RDD[HashMap[String, String]], properties : HashSet[String])
-  : BinaryVectorizer = {
-    new BinaryVectorizer(HashMap(
-      input.flatMap(identity)
-        .filter(e => properties.contains(e._1))
-        .distinct
-        .collect
-        .zipWithIndex : _*
-    ))
-  }
-
-  def apply(input: Seq[(String, String)]): BinaryVectorizer = {
-    val indexed: Seq[((String, String), Int)] = input.zipWithIndex
-    new BinaryVectorizer(HashMap(indexed:_*))
-  }
-}
-

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/e2/src/main/scala/io/prediction/e2/engine/CategoricalNaiveBayes.scala
----------------------------------------------------------------------
diff --git a/e2/src/main/scala/io/prediction/e2/engine/CategoricalNaiveBayes.scala b/e2/src/main/scala/io/prediction/e2/engine/CategoricalNaiveBayes.scala
deleted file mode 100644
index c598519..0000000
--- a/e2/src/main/scala/io/prediction/e2/engine/CategoricalNaiveBayes.scala
+++ /dev/null
@@ -1,176 +0,0 @@
-/** Copyright 2015 TappingStone, Inc.
-  *
-  * Licensed under the Apache License, Version 2.0 (the "License");
-  * you may not use this file except in compliance with the License.
-  * You may obtain a copy of the License at
-  *
-  *     http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-package io.prediction.e2.engine
-
-import org.apache.spark.SparkContext._
-import org.apache.spark.rdd.RDD
-
-/**
- * Class for training a naive Bayes model with categorical variables
- */
-object CategoricalNaiveBayes {
-  /**
-   * Train with data points and return the model
-   *
-   * @param points training data points
-   */
-  def train(points: RDD[LabeledPoint]): CategoricalNaiveBayesModel = {
-    val labelCountFeatureLikelihoods = points.map { p =>
-      (p.label, p.features)
-    }.combineByKey[(Long, Array[Map[String, Long]])](
-        createCombiner =
-          (features: Array[String]) => {
-            val featureCounts = features.map { feature =>
-              Map[String, Long]().withDefaultValue(0L).updated(feature, 1L)
-            }
-
-            (1L, featureCounts)
-          },
-        mergeValue =
-          (c: (Long, Array[Map[String, Long]]), features: Array[String]) => {
-            (c._1 + 1L, c._2.zip(features).map { case (m, feature) =>
-              m.updated(feature, m(feature) + 1L)
-            })
-          },
-        mergeCombiners =
-          (
-            c1: (Long, Array[Map[String, Long]]),
-            c2: (Long, Array[Map[String, Long]])) => {
-            val labelCount1 = c1._1
-            val labelCount2 = c2._1
-            val featureCounts1 = c1._2
-            val featureCounts2 = c2._2
-
-            (labelCount1 + labelCount2,
-              featureCounts1.zip(featureCounts2).map { case (m1, m2) =>
-                m1 ++ m2.map { case (k, v) => k -> (v + m1.getOrElse(k, 0L))}
-              })
-          }
-      ).mapValues { case (labelCount, featureCounts) =>
-      val featureLikelihoods = featureCounts.map { featureCount =>
-        // mapValues does not return a serializable map
-        featureCount.mapValues(count => math.log(count.toDouble / labelCount))
-          .map(identity)
-      }
-
-      (labelCount, featureLikelihoods)
-    }.collect().toMap
-
-    val noOfPoints = labelCountFeatureLikelihoods.map(_._2._1).sum
-    val priors =
-      labelCountFeatureLikelihoods.mapValues { countFeatureLikelihoods =>
-        math.log(countFeatureLikelihoods._1 / noOfPoints.toDouble)
-      }
-    val likelihoods = labelCountFeatureLikelihoods.mapValues(_._2)
-
-    CategoricalNaiveBayesModel(priors, likelihoods)
-  }
-}
-
-/**
- * Model for naive Bayes classifiers with categorical variables.
- *
- * @param priors log prior probabilities
- * @param likelihoods log likelihood probabilities
- */
-case class CategoricalNaiveBayesModel(
-  priors: Map[String, Double],
-  likelihoods: Map[String, Array[Map[String, Double]]]) extends Serializable {
-
-  val featureCount = likelihoods.head._2.size
-
-  /**
-   * Calculate the log score of having the given features and label
-   *
-   * @param point label and features
-   * @param defaultLikelihood a function that calculates the likelihood when a
-   *                          feature value is not present. The input to the
-   *                          function is the other feature value likelihoods.
-   * @return log score when label is present. None otherwise.
-   */
-  def logScore(
-    point: LabeledPoint,
-    defaultLikelihood: (Seq[Double]) => Double = ls => Double.NegativeInfinity
-    ): Option[Double] = {
-    val label = point.label
-    val features = point.features
-
-    if (!priors.contains(label)) {
-      None
-    } else {
-      Some(logScoreInternal(label, features, defaultLikelihood))
-    }
-  }
-
-  private def logScoreInternal(
-    label: String,
-    features: Array[String],
-    defaultLikelihood: (Seq[Double]) => Double = ls => Double.NegativeInfinity
-    ): Double = {
-
-    val prior = priors(label)
-    val likelihood = likelihoods(label)
-
-    val likelihoodScores = features.zip(likelihood).map {
-      case (feature, featureLikelihoods) =>
-        featureLikelihoods.getOrElse(
-          feature,
-          defaultLikelihood(featureLikelihoods.values.toSeq)
-        )
-    }
-
-    prior + likelihoodScores.sum
-  }
-
-  /**
-   * Return the label that yields the highest score
-   *
-   * @param features features for classification
-   *
-   */
-  def predict(features: Array[String]): String = {
-    priors.keySet.map { label =>
-      (label, logScoreInternal(label, features))
-    }.toSeq
-      .sortBy(_._2)(Ordering.Double.reverse)
-      .take(1)
-      .head
-      ._1
-  }
-}
-
-/**
- * Class that represents the features and labels of a data point.
- *
- * @param label Label of this data point
- * @param features Features of this data point
- */
-case class LabeledPoint(label: String, features: Array[String]) {
-  override def toString: String = {
-    val featuresString = features.mkString("[", ",", "]")
-
-    s"($label, $featuresString)"
-  }
-
-  override def equals(other: Any): Boolean = other match {
-    case that: LabeledPoint => that.toString == this.toString
-    case _ => false
-  }
-
-  override def hashCode(): Int = {
-    this.toString.hashCode
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/e2/src/main/scala/io/prediction/e2/engine/MarkovChain.scala
----------------------------------------------------------------------
diff --git a/e2/src/main/scala/io/prediction/e2/engine/MarkovChain.scala b/e2/src/main/scala/io/prediction/e2/engine/MarkovChain.scala
deleted file mode 100644
index 4c992f5..0000000
--- a/e2/src/main/scala/io/prediction/e2/engine/MarkovChain.scala
+++ /dev/null
@@ -1,89 +0,0 @@
-/** Copyright 2015 TappingStone, Inc.
-  *
-  * Licensed under the Apache License, Version 2.0 (the "License");
-  * you may not use this file except in compliance with the License.
-  * You may obtain a copy of the License at
-  *
-  *     http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-package io.prediction.e2.engine
-
-import org.apache.spark.SparkContext._
-import org.apache.spark.mllib.linalg.distributed.CoordinateMatrix
-import org.apache.spark.mllib.linalg.{SparseVector, Vectors}
-import org.apache.spark.rdd.RDD
-
-/**
- * Class for training a Markov Chain model
- */
-object MarkovChain {
-  /**
-   * Train a Markov Chain model
-   *
-   * @param matrix Tally of all state transitions
-   * @param topN Use the top-N tally for each state
-   */
-  def train(matrix: CoordinateMatrix, topN: Int): MarkovChainModel = {
-    val noOfStates = matrix.numCols().toInt
-    val transitionVectors = matrix.entries
-      .keyBy(_.i.toInt)
-      .groupByKey()
-      .mapValues { rowEntries =>
-      val total = rowEntries.map(_.value).sum
-      val sortedTopN = rowEntries.toSeq
-        .sortBy(_.value)(Ordering.Double.reverse)
-        .take(topN)
-        .map(me => (me.j.toInt, me.value / total))
-        .sortBy(_._1)
-
-      new SparseVector(
-        noOfStates,
-        sortedTopN.map(_._1).toArray,
-        sortedTopN.map(_._2).toArray)
-    }
-
-    new MarkovChainModel(
-      transitionVectors,
-      topN)
-  }
-}
-
-/**
- * Markov Chain model
- *
- * @param transitionVectors transition vectors
- * @param n top N used to construct the model
- */
-case class MarkovChainModel(
-  transitionVectors: RDD[(Int, SparseVector)],
-  n: Int) {
-
-  /**
-   * Calculate the probabilities of the next state
-   *
-   * @param currentState probabilities of the current state
-   */
-  def predict(currentState: Seq[Double]): Seq[Double] = {
-    // multiply the input with transition matrix row by row
-    val nextStateVectors = transitionVectors.map { case (rowIndex, vector) =>
-        val values = vector.indices.map { index =>
-          vector(index) * currentState(rowIndex)
-        }
-
-        Vectors.sparse(currentState.size, vector.indices, values)
-    }.collect()
-
-    // sum up to get the total probabilities
-    (0 until currentState.size).map { index =>
-      nextStateVectors.map { vector =>
-        vector(index)
-      }.sum
-    }
-  }
-}

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/e2/src/main/scala/io/prediction/e2/evaluation/CrossValidation.scala
----------------------------------------------------------------------
diff --git a/e2/src/main/scala/io/prediction/e2/evaluation/CrossValidation.scala b/e2/src/main/scala/io/prediction/e2/evaluation/CrossValidation.scala
deleted file mode 100644
index 8b482bd..0000000
--- a/e2/src/main/scala/io/prediction/e2/evaluation/CrossValidation.scala
+++ /dev/null
@@ -1,64 +0,0 @@
-/** Copyright 2015 TappingStone, Inc.
-  *
-  * Licensed under the Apache License, Version 2.0 (the "License");
-  * you may not use this file except in compliance with the License.
-  * You may obtain a copy of the License at
-  *
-  *     http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-package io.prediction.e2.evaluation
-
-import scala.reflect.ClassTag
-import org.apache.spark.rdd.RDD
-
-/** Common helper functions */
-object CommonHelperFunctions {
-
-  /** Split a data set into evalK folds for crossvalidation.
-    * Apply to data sets supplied to evaluation.
-    *
-    * @tparam D Data point class.
-    * @tparam TD Training data class.
-    * @tparam EI Evaluation Info class.
-    * @tparam Q Input query class.
-    * @tparam A Actual value class.
-    */
-
-  def splitData[D: ClassTag, TD, EI, Q, A](
-
-     evalK: Int,
-     dataset: RDD[D],
-     evaluatorInfo: EI,
-     trainingDataCreator: RDD[D] => TD,
-     queryCreator: D => Q,
-     actualCreator: D => A): Seq[(TD, EI, RDD[(Q, A)])] = {
-
-    val indexedPoints = dataset.zipWithIndex
-
-    def selectPoint(foldIdx: Int, pt: D, idx: Long, k: Int, isTraining: Boolean): Option[D] = {
-      if ((idx % k == foldIdx) ^ isTraining) Some(pt)
-      else None
-    }
-
-    (0 until evalK).map { foldIdx =>
-      val trainingPoints = indexedPoints.flatMap { case(pt, idx) =>
-        selectPoint(foldIdx, pt, idx, evalK, true)
-      }
-      val testingPoints = indexedPoints.flatMap { case(pt, idx) =>
-        selectPoint(foldIdx, pt, idx, evalK, false)
-      }
-
-      (
-        trainingDataCreator(trainingPoints),
-        evaluatorInfo,
-        testingPoints.map { d => (queryCreator(d), actualCreator(d)) }
-      )
-    }
-  }
-}

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/e2/src/main/scala/io/prediction/e2/package.scala
----------------------------------------------------------------------
diff --git a/e2/src/main/scala/io/prediction/e2/package.scala b/e2/src/main/scala/io/prediction/e2/package.scala
deleted file mode 100644
index 9f5491a..0000000
--- a/e2/src/main/scala/io/prediction/e2/package.scala
+++ /dev/null
@@ -1,22 +0,0 @@
-/** Copyright 2015 TappingStone, Inc.
-  *
-  * Licensed under the Apache License, Version 2.0 (the "License");
-  * you may not use this file except in compliance with the License.
-  * You may obtain a copy of the License at
-  *
-  *     http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-package io.prediction.e2
-
-/** Collection of engine libraries that have no dependency on PredictionIO */
-package object engine {}
-
-/** Collection of evaluation libraries that have no dependency on PredictionIO */
-package object evaluation {}

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/e2/src/main/scala/io/prediction/package.scala
----------------------------------------------------------------------
diff --git a/e2/src/main/scala/io/prediction/package.scala b/e2/src/main/scala/io/prediction/package.scala
deleted file mode 100644
index 9628b5d..0000000
--- a/e2/src/main/scala/io/prediction/package.scala
+++ /dev/null
@@ -1,21 +0,0 @@
-/** Copyright 2015 TappingStone, Inc.
-  *
-  * Licensed under the Apache License, Version 2.0 (the "License");
-  * you may not use this file except in compliance with the License.
-  * You may obtain a copy of the License at
-  *
-  *     http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-package io.prediction
-
-/** Independent library of code that is useful for engine development and
-  * evaluation
-  */
-package object e2 {}

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/e2/src/main/scala/org/apache/predictionio/e2/engine/BinaryVectorizer.scala
----------------------------------------------------------------------
diff --git a/e2/src/main/scala/org/apache/predictionio/e2/engine/BinaryVectorizer.scala b/e2/src/main/scala/org/apache/predictionio/e2/engine/BinaryVectorizer.scala
new file mode 100644
index 0000000..d831718
--- /dev/null
+++ b/e2/src/main/scala/org/apache/predictionio/e2/engine/BinaryVectorizer.scala
@@ -0,0 +1,61 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+package org.apache.predictionio.e2.engine
+
+import org.apache.spark.rdd.RDD
+import org.apache.spark.SparkContext._
+import org.apache.spark.mllib.linalg.Vectors
+import org.apache.spark.mllib.linalg.Vector
+import scala.collection.immutable.HashMap
+import scala.collection.immutable.HashSet
+
+class BinaryVectorizer(propertyMap : HashMap[(String, String), Int])
+extends Serializable {
+
+  val properties: Array[(String, String)] = propertyMap.toArray.sortBy(_._2).map(_._1)
+  val numFeatures = propertyMap.size
+
+  override def toString: String = {
+    s"BinaryVectorizer($numFeatures): " + properties.map(e => s"(${e._1}, ${e._2})").mkString(",")
+  }
+
+  def toBinary(map :  Array[(String, String)]) : Vector = {
+    val mapArr : Seq[(Int, Double)] = map.flatMap(
+      e => propertyMap.get(e).map(idx => (idx, 1.0))
+    )
+
+    Vectors.sparse(numFeatures, mapArr)
+  }
+}
+
+
+object BinaryVectorizer {
+  def apply (input : RDD[HashMap[String, String]], properties : HashSet[String])
+  : BinaryVectorizer = {
+    new BinaryVectorizer(HashMap(
+      input.flatMap(identity)
+        .filter(e => properties.contains(e._1))
+        .distinct
+        .collect
+        .zipWithIndex : _*
+    ))
+  }
+
+  def apply(input: Seq[(String, String)]): BinaryVectorizer = {
+    val indexed: Seq[((String, String), Int)] = input.zipWithIndex
+    new BinaryVectorizer(HashMap(indexed:_*))
+  }
+}
+
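
A small usage sketch (assumed data, not from the repo) showing how the `Seq` factory assigns indices and how `toBinary` encodes known pairs:

```scala
import org.apache.predictionio.e2.engine.BinaryVectorizer

// Each distinct (property, value) pair gets the index of its position here.
val vectorizer = BinaryVectorizer(Seq(
  ("gender", "male"),   // index 0
  ("gender", "female"), // index 1
  ("plan", "premium")   // index 2
))

// Pairs found in the property map set their index to 1.0; unknown pairs drop out.
val v = vectorizer.toBinary(Array(("gender", "female"), ("plan", "premium")))
// v == Vectors.sparse(3, Seq((1, 1.0), (2, 1.0)))
```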

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/e2/src/main/scala/org/apache/predictionio/e2/engine/CategoricalNaiveBayes.scala
----------------------------------------------------------------------
diff --git a/e2/src/main/scala/org/apache/predictionio/e2/engine/CategoricalNaiveBayes.scala b/e2/src/main/scala/org/apache/predictionio/e2/engine/CategoricalNaiveBayes.scala
new file mode 100644
index 0000000..7944bbc
--- /dev/null
+++ b/e2/src/main/scala/org/apache/predictionio/e2/engine/CategoricalNaiveBayes.scala
@@ -0,0 +1,176 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+package org.apache.predictionio.e2.engine
+
+import org.apache.spark.SparkContext._
+import org.apache.spark.rdd.RDD
+
+/**
+ * Class for training a naive Bayes model with categorical variables
+ */
+object CategoricalNaiveBayes {
+  /**
+   * Train with data points and return the model
+   *
+   * @param points training data points
+   */
+  def train(points: RDD[LabeledPoint]): CategoricalNaiveBayesModel = {
+    val labelCountFeatureLikelihoods = points.map { p =>
+      (p.label, p.features)
+    }.combineByKey[(Long, Array[Map[String, Long]])](
+        createCombiner =
+          (features: Array[String]) => {
+            val featureCounts = features.map { feature =>
+              Map[String, Long]().withDefaultValue(0L).updated(feature, 1L)
+            }
+
+            (1L, featureCounts)
+          },
+        mergeValue =
+          (c: (Long, Array[Map[String, Long]]), features: Array[String]) => {
+            (c._1 + 1L, c._2.zip(features).map { case (m, feature) =>
+              m.updated(feature, m(feature) + 1L)
+            })
+          },
+        mergeCombiners =
+          (
+            c1: (Long, Array[Map[String, Long]]),
+            c2: (Long, Array[Map[String, Long]])) => {
+            val labelCount1 = c1._1
+            val labelCount2 = c2._1
+            val featureCounts1 = c1._2
+            val featureCounts2 = c2._2
+
+            (labelCount1 + labelCount2,
+              featureCounts1.zip(featureCounts2).map { case (m1, m2) =>
+                m1 ++ m2.map { case (k, v) => k -> (v + m1.getOrElse(k, 0L))}
+              })
+          }
+      ).mapValues { case (labelCount, featureCounts) =>
+      val featureLikelihoods = featureCounts.map { featureCount =>
+        // mapValues does not return a serializable map
+        featureCount.mapValues(count => math.log(count.toDouble / labelCount))
+          .map(identity)
+      }
+
+      (labelCount, featureLikelihoods)
+    }.collect().toMap
+
+    val noOfPoints = labelCountFeatureLikelihoods.map(_._2._1).sum
+    val priors =
+      labelCountFeatureLikelihoods.mapValues { countFeatureLikelihoods =>
+        math.log(countFeatureLikelihoods._1 / noOfPoints.toDouble)
+      }
+    val likelihoods = labelCountFeatureLikelihoods.mapValues(_._2)
+
+    CategoricalNaiveBayesModel(priors, likelihoods)
+  }
+}
+
+/**
+ * Model for naive Bayes classifiers with categorical variables.
+ *
+ * @param priors log prior probabilities
+ * @param likelihoods log likelihood probabilities
+ */
+case class CategoricalNaiveBayesModel(
+  priors: Map[String, Double],
+  likelihoods: Map[String, Array[Map[String, Double]]]) extends Serializable {
+
+  val featureCount = likelihoods.head._2.size
+
+  /**
+   * Calculate the log score of having the given features and label
+   *
+   * @param point label and features
+   * @param defaultLikelihood a function that calculates the likelihood when a
+   *                          feature value is not present. The input to the
+   *                          function is the other feature value likelihoods.
+   * @return log score when label is present. None otherwise.
+   */
+  def logScore(
+    point: LabeledPoint,
+    defaultLikelihood: (Seq[Double]) => Double = ls => Double.NegativeInfinity
+    ): Option[Double] = {
+    val label = point.label
+    val features = point.features
+
+    if (!priors.contains(label)) {
+      None
+    } else {
+      Some(logScoreInternal(label, features, defaultLikelihood))
+    }
+  }
+
+  private def logScoreInternal(
+    label: String,
+    features: Array[String],
+    defaultLikelihood: (Seq[Double]) => Double = ls => Double.NegativeInfinity
+    ): Double = {
+
+    val prior = priors(label)
+    val likelihood = likelihoods(label)
+
+    val likelihoodScores = features.zip(likelihood).map {
+      case (feature, featureLikelihoods) =>
+        featureLikelihoods.getOrElse(
+          feature,
+          defaultLikelihood(featureLikelihoods.values.toSeq)
+        )
+    }
+
+    prior + likelihoodScores.sum
+  }
+
+  /**
+   * Return the label that yields the highest score
+   *
+   * @param features features for classification
+   *
+   */
+  def predict(features: Array[String]): String = {
+    priors.keySet.map { label =>
+      (label, logScoreInternal(label, features))
+    }.toSeq
+      .sortBy(_._2)(Ordering.Double.reverse)
+      .take(1)
+      .head
+      ._1
+  }
+}
+
+/**
+ * Class that represents the features and labels of a data point.
+ *
+ * @param label Label of this data point
+ * @param features Features of this data point
+ */
+case class LabeledPoint(label: String, features: Array[String]) {
+  override def toString: String = {
+    val featuresString = features.mkString("[", ",", "]")
+
+    s"($label, $featuresString)"
+  }
+
+  override def equals(other: Any): Boolean = other match {
+    case that: LabeledPoint => that.toString == this.toString
+    case _ => false
+  }
+
+  override def hashCode(): Int = {
+    this.toString.hashCode
+  }
+
+}
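
A minimal usage sketch (assumed data, not from the repo). Features are positional, so every `LabeledPoint` must supply the same number of feature slots; with the toy data below, a query whose first feature is "free" can only be scored under "spam", because "ham" never saw that value and falls back to the negative-infinity default likelihood:

```scala
import org.apache.predictionio.e2.engine.{CategoricalNaiveBayes, LabeledPoint}
import org.apache.spark.SparkContext

def example(sc: SparkContext): String = {
  val points = sc.parallelize(Seq(
    LabeledPoint("spam", Array("free", "offer")),
    LabeledPoint("spam", Array("free", "viagra")),
    LabeledPoint("ham",  Array("meeting", "schedule"))
  ))
  val model = CategoricalNaiveBayes.train(points)
  model.predict(Array("free", "offer")) // returns "spam"
}
```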

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/e2/src/main/scala/org/apache/predictionio/e2/engine/MarkovChain.scala
----------------------------------------------------------------------
diff --git a/e2/src/main/scala/org/apache/predictionio/e2/engine/MarkovChain.scala b/e2/src/main/scala/org/apache/predictionio/e2/engine/MarkovChain.scala
new file mode 100644
index 0000000..41a070d
--- /dev/null
+++ b/e2/src/main/scala/org/apache/predictionio/e2/engine/MarkovChain.scala
@@ -0,0 +1,89 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+package org.apache.predictionio.e2.engine
+
+import org.apache.spark.SparkContext._
+import org.apache.spark.mllib.linalg.distributed.CoordinateMatrix
+import org.apache.spark.mllib.linalg.{SparseVector, Vectors}
+import org.apache.spark.rdd.RDD
+
+/**
+ * Class for training a Markov Chain model
+ */
+object MarkovChain {
+  /**
+   * Train a Markov Chain model
+   *
+   * @param matrix Tally of all state transitions
+   * @param topN Use the top-N tally for each state
+   */
+  def train(matrix: CoordinateMatrix, topN: Int): MarkovChainModel = {
+    val noOfStates = matrix.numCols().toInt
+    val transitionVectors = matrix.entries
+      .keyBy(_.i.toInt)
+      .groupByKey()
+      .mapValues { rowEntries =>
+      val total = rowEntries.map(_.value).sum
+      val sortedTopN = rowEntries.toSeq
+        .sortBy(_.value)(Ordering.Double.reverse)
+        .take(topN)
+        .map(me => (me.j.toInt, me.value / total))
+        .sortBy(_._1)
+
+      new SparseVector(
+        noOfStates,
+        sortedTopN.map(_._1).toArray,
+        sortedTopN.map(_._2).toArray)
+    }
+
+    new MarkovChainModel(
+      transitionVectors,
+      topN)
+  }
+}
+
+/**
+ * Markov Chain model
+ *
+ * @param transitionVectors transition vectors
+ * @param n top N used to construct the model
+ */
+case class MarkovChainModel(
+  transitionVectors: RDD[(Int, SparseVector)],
+  n: Int) {
+
+  /**
+   * Calculate the probabilities of the next state
+   *
+   * @param currentState probabilities of the current state
+   */
+  def predict(currentState: Seq[Double]): Seq[Double] = {
+    // multiply the input with transition matrix row by row
+    val nextStateVectors = transitionVectors.map { case (rowIndex, vector) =>
+        val values = vector.indices.map { index =>
+          vector(index) * currentState(rowIndex)
+        }
+
+        Vectors.sparse(currentState.size, vector.indices, values)
+    }.collect()
+
+    // sum up to get the total probabilities
+    (0 until currentState.size).map { index =>
+      nextStateVectors.map { vector =>
+        vector(index)
+      }.sum
+    }
+  }
+}
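
A usage sketch with assumed tallies: each entry (i, j, c) records c observed transitions from state i to state j, `train` row-normalizes the top-N entries per state, and `predict` propagates a state distribution one step forward:

```scala
import org.apache.predictionio.e2.engine.MarkovChain
import org.apache.spark.SparkContext
import org.apache.spark.mllib.linalg.distributed.{CoordinateMatrix, MatrixEntry}

def example(sc: SparkContext): Seq[Double] = {
  val tallies = sc.parallelize(Seq(
    MatrixEntry(0, 1, 6.0), MatrixEntry(0, 2, 2.0),
    MatrixEntry(1, 0, 4.0), MatrixEntry(1, 2, 4.0),
    MatrixEntry(2, 2, 1.0)
  ))
  val model = MarkovChain.train(new CoordinateMatrix(tallies, 3, 3), topN = 2)
  // Starting surely in state 0: P(next = 1) = 6/8, P(next = 2) = 2/8
  model.predict(Seq(1.0, 0.0, 0.0)) // Seq(0.0, 0.75, 0.25)
}
```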

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/e2/src/main/scala/org/apache/predictionio/e2/evaluation/CrossValidation.scala
----------------------------------------------------------------------
diff --git a/e2/src/main/scala/org/apache/predictionio/e2/evaluation/CrossValidation.scala b/e2/src/main/scala/org/apache/predictionio/e2/evaluation/CrossValidation.scala
new file mode 100644
index 0000000..d2e1d6a
--- /dev/null
+++ b/e2/src/main/scala/org/apache/predictionio/e2/evaluation/CrossValidation.scala
@@ -0,0 +1,64 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+package org.apache.predictionio.e2.evaluation
+
+import scala.reflect.ClassTag
+import org.apache.spark.rdd.RDD
+
+/** Common helper functions */
+object CommonHelperFunctions {
+
+  /** Split a data set into evalK folds for crossvalidation.
+    * Apply to data sets supplied to evaluation.
+    *
+    * @tparam D Data point class.
+    * @tparam TD Training data class.
+    * @tparam EI Evaluation Info class.
+    * @tparam Q Input query class.
+    * @tparam A Actual value class.
+    */
+
+  def splitData[D: ClassTag, TD, EI, Q, A](
+
+     evalK: Int,
+     dataset: RDD[D],
+     evaluatorInfo: EI,
+     trainingDataCreator: RDD[D] => TD,
+     queryCreator: D => Q,
+     actualCreator: D => A): Seq[(TD, EI, RDD[(Q, A)])] = {
+
+    val indexedPoints = dataset.zipWithIndex
+
+    def selectPoint(foldIdx: Int, pt: D, idx: Long, k: Int, isTraining: Boolean): Option[D] = {
+      if ((idx % k == foldIdx) ^ isTraining) Some(pt)
+      else None
+    }
+
+    (0 until evalK).map { foldIdx =>
+      val trainingPoints = indexedPoints.flatMap { case(pt, idx) =>
+        selectPoint(foldIdx, pt, idx, evalK, true)
+      }
+      val testingPoints = indexedPoints.flatMap { case(pt, idx) =>
+        selectPoint(foldIdx, pt, idx, evalK, false)
+      }
+
+      (
+        trainingDataCreator(trainingPoints),
+        evaluatorInfo,
+        testingPoints.map { d => (queryCreator(d), actualCreator(d)) }
+      )
+    }
+  }
+}
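
A sketch of calling `splitData` with assumed types: each fold yields a training set built by `trainingDataCreator`, the pass-through evaluator info, and an RDD of (query, actual) pairs for scoring:

```scala
import org.apache.predictionio.e2.evaluation.CommonHelperFunctions
import org.apache.spark.SparkContext

case class Rating(user: Int, item: Int, score: Double)

def folds(sc: SparkContext, ratings: Seq[Rating]) = {
  CommonHelperFunctions.splitData[Rating, Seq[Rating], Unit, (Int, Int), Double](
    evalK = 3,                                        // number of folds
    dataset = sc.parallelize(ratings),
    evaluatorInfo = (),                               // no extra info needed here
    trainingDataCreator = rdd => rdd.collect().toSeq, // TD: in-memory training set
    queryCreator = r => (r.user, r.item),             // Q: what to predict
    actualCreator = r => r.score                      // A: ground truth
  )
}
```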

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/e2/src/main/scala/org/apache/predictionio/e2/package.scala
----------------------------------------------------------------------
diff --git a/e2/src/main/scala/org/apache/predictionio/e2/package.scala b/e2/src/main/scala/org/apache/predictionio/e2/package.scala
new file mode 100644
index 0000000..c16e521
--- /dev/null
+++ b/e2/src/main/scala/org/apache/predictionio/e2/package.scala
@@ -0,0 +1,22 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package org.apache.predictionio.e2
+
+/** Collection of engine libraries that have no dependency on PredictionIO */
+package object engine {}
+
+/** Collection of evaluation libraries that have no dependency on PredictionIO */
+package object evaluation {}

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/e2/src/main/scala/org/apache/predictionio/package.scala
----------------------------------------------------------------------
diff --git a/e2/src/main/scala/org/apache/predictionio/package.scala b/e2/src/main/scala/org/apache/predictionio/package.scala
new file mode 100644
index 0000000..b480779
--- /dev/null
+++ b/e2/src/main/scala/org/apache/predictionio/package.scala
@@ -0,0 +1,21 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package org.apache.predictionio
+
+/** Independent library of code that is useful for engine development and
+  * evaluation
+  */
+package object e2 {}

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/e2/src/test/scala/io/prediction/e2/engine/BinaryVectorizerTest.scala
----------------------------------------------------------------------
diff --git a/e2/src/test/scala/io/prediction/e2/engine/BinaryVectorizerTest.scala b/e2/src/test/scala/io/prediction/e2/engine/BinaryVectorizerTest.scala
deleted file mode 100644
index 5e6bc16..0000000
--- a/e2/src/test/scala/io/prediction/e2/engine/BinaryVectorizerTest.scala
+++ /dev/null
@@ -1,56 +0,0 @@
-/** Copyright 2015 TappingStone, Inc.
-  *
-  * Licensed under the Apache License, Version 2.0 (the "License");
-  * you may not use this file except in compliance with the License.
-  * You may obtain a copy of the License at
-  *
-  *     http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-package io.prediction.e2.engine
-
-import io.prediction.e2.fixture.BinaryVectorizerFixture
-import io.prediction.e2.fixture.SharedSparkContext
-import org.apache.spark.mllib.linalg.Vectors
-import org.apache.spark.rdd.RDD
-import org.scalatest.FlatSpec
-import org.scalatest.Matchers
-import scala.collection.immutable.HashMap
-
-
-import scala.language.reflectiveCalls
-
-class BinaryVectorizerTest extends FlatSpec with Matchers with SharedSparkContext
-with BinaryVectorizerFixture{
-
-  "toBinary" should "produce the following summed values:" in {
-    val testCase = BinaryVectorizer(sc.parallelize(base.maps), base.properties)
-    val vectorTwoA = testCase.toBinary(testArrays.twoA)
-    val vectorTwoB = testCase.toBinary(testArrays.twoB)
-
-
-    // Make sure vectors produced are the same size.
-    vectorTwoA.size should be (vectorTwoB.size)
-
-    // Test case for checking a food value not listed in base.maps.
-    testCase.toBinary(testArrays.one).toArray.sum should be (1.0)
-
-    // Test cases for making sure indices are preserved.
-    val sumOne = vecSum(vectorTwoA, vectorTwoB)
-
-    exactly (1, sumOne) should be (2.0)
-    exactly (2, sumOne) should be (0.0)
-    exactly (2, sumOne) should be (1.0)
-
-    val sumTwo = vecSum(Vectors.dense(sumOne), testCase.toBinary(testArrays.twoC))
-
-    exactly (3, sumTwo) should be (1.0)
-  }
-
-}


[33/34] incubator-predictionio git commit: also change docs

Posted by do...@apache.org.
also change docs


Project: http://git-wip-us.apache.org/repos/asf/incubator-predictionio/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-predictionio/commit/ed9d62d7
Tree: http://git-wip-us.apache.org/repos/asf/incubator-predictionio/tree/ed9d62d7
Diff: http://git-wip-us.apache.org/repos/asf/incubator-predictionio/diff/ed9d62d7

Branch: refs/heads/develop
Commit: ed9d62d7894979412b11c258a28d5ca28f357e88
Parents: 64280b1
Author: Xusen Yin <yi...@gmail.com>
Authored: Thu Jul 7 23:50:02 2016 -0700
Committer: Xusen Yin <yi...@gmail.com>
Committed: Thu Jul 7 23:50:02 2016 -0700

----------------------------------------------------------------------
 docs/javadoc/javadoc-overview.html              |  8 +++----
 .../manual/obsolete/cookbook/evaluation.html.md |  4 ++--
 .../cookbook/filteringunavailable.html.md       |  6 ++---
 docs/manual/obsolete/engines/index.html.md      |  4 ++--
 .../obsolete/engines/itemrank/index.html.md     | 10 ++++----
 .../obsolete/engines/itemrec/index.html.md      |  2 +-
 .../obsolete/engines/itemsim/index.html.md      |  2 +-
 .../enginebuilders/local-helloworld.html.md     |  6 ++---
 .../stepbystep/combiningalgorithms.html.md      | 10 ++++----
 .../stepbystep/dataalgorithm.html.md            | 12 +++++-----
 .../stepbystep/evaluation.html.md               | 12 +++++-----
 .../stepbystep/testcomponents.html.md           |  4 ++--
 .../tutorials/engines/quickstart.html.md        | 22 +++++++++---------
 .../tutorials/recommendation/movielens.html.md  |  6 ++---
 .../tutorials/recommendation/yelp.html.md       | 10 ++++----
 .../source/community/contribute-webhook.html.md | 24 ++++++++++----------
 docs/manual/source/customize/dase.html.md.erb   | 14 ++++++------
 .../source/customize/troubleshooting.html.md    | 14 ++++++------
 .../source/datacollection/eventapi.html.md      |  2 +-
 .../datacollection/eventmodel.html.md.erb       | 10 ++++----
 .../source/demo/textclassification.html.md      |  6 ++---
 .../dimensionalityreduction.html.md             |  6 ++---
 .../shared/install/_dependent_services.html.erb |  4 ++--
 docs/manual/source/resources/faq.html.md        |  4 ++--
 .../source/resources/intellij.html.md.erb       |  6 ++---
 docs/manual/source/resources/upgrade.html.md    | 20 ++++++++--------
 docs/manual/source/sdk/java.html.md.erb         |  2 +-
 docs/manual/source/sdk/php.html.md.erb          |  2 +-
 .../source/system/anotherdatastore.html.md      |  4 ++--
 .../classification/quickstart.html.md.erb       |  6 ++---
 .../quickstart.html.md.erb                      |  6 ++---
 .../quickstart.html.md.erb                      |  6 ++---
 .../quickstart.html.md.erb                      |  6 ++---
 .../leadscoring/quickstart.html.md.erb          |  6 ++---
 .../productranking/quickstart.html.md.erb       |  6 ++---
 .../recommendation/batch-evaluator.html.md      | 24 ++++++++++----------
 .../recommendation/customize-data-prep.html.md  |  2 +-
 .../recommendation/customize-serving.html.md    |  4 ++--
 .../recommendation/quickstart.html.md.erb       |  6 ++---
 .../multi-events-multi-algos.html.md.erb        |  2 +-
 .../similarproduct/quickstart.html.md.erb       |  6 ++---
 .../templates/vanilla/quickstart.html.md.erb    |  2 +-
 docs/scaladoc/rootdoc.txt                       |  4 ++--
 43 files changed, 161 insertions(+), 161 deletions(-)
----------------------------------------------------------------------
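
The change is a mechanical rename: every `io.prediction` package reference in the docs becomes `org.apache.predictionio`. For engine code quoted in the docs this is just an import swap; a minimal illustration (not taken from any one file):

```scala
// before the rename
import io.prediction.controller.PersistentModel

// after the rename
import org.apache.predictionio.controller.PersistentModel
```

Note that only package references should be renamed; literal strings such as the Composer package name `predictionio/predictionio`, and slash-separated paths like `{@docRoot}/org/apache/predictionio/...` or `src/main/java/org/apache/predictionio/`, keep their original separators.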


http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/ed9d62d7/docs/javadoc/javadoc-overview.html
----------------------------------------------------------------------
diff --git a/docs/javadoc/javadoc-overview.html b/docs/javadoc/javadoc-overview.html
index d93a61c..8561f1e 100644
--- a/docs/javadoc/javadoc-overview.html
+++ b/docs/javadoc/javadoc-overview.html
@@ -5,13 +5,13 @@
   <p>
     If you are building a prediction engine, the most interesting package would
     be <a
-    href="{@docRoot}/io/prediction/controller/java/package-summary.html">io.prediction.controller.java</a>
+    href="{@docRoot}/org/apache/predictionio/controller/java/package-summary.html">org.apache.predictionio.controller.java</a>
     and <a
-    href="{@docRoot}/io/prediction/data/store/java/package-summary.html">io.prediction.data.store.java</a>
+    href="{@docRoot}/org/apache/predictionio/data/store/java/package-summary.html">org.apache.predictionio.data.store.java</a>.
     You may also want to look at <a
-    href="{@docRoot}/io/prediction/controller/package-summary.html">io.prediction.controller</a>,
+    href="{@docRoot}/org/apache/predictionio/controller/package-summary.html">org.apache.predictionio.controller</a>,
     as some functionality, such as custom model persistence {@link
-    io.prediction.controller.PersistentModel}, are provided directly by that
+    org.apache.predictionio.controller.PersistentModel}, is provided directly by that
     package.
   </p>
 </body>

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/ed9d62d7/docs/manual/obsolete/cookbook/evaluation.html.md
----------------------------------------------------------------------
diff --git a/docs/manual/obsolete/cookbook/evaluation.html.md b/docs/manual/obsolete/cookbook/evaluation.html.md
index 683a449..66b4914 100644
--- a/docs/manual/obsolete/cookbook/evaluation.html.md
+++ b/docs/manual/obsolete/cookbook/evaluation.html.md
@@ -300,7 +300,7 @@ up and running.
 
 ```
 $ cd $PIO_HOME/examples/scala-local-movielens-evaluation
-$ $PIO_HOME/bin/pio run io.prediction.examples.mlc.Evaluation1 -- \
+$ $PIO_HOME/bin/pio run org.apache.predictionio.examples.mlc.Evaluation1 -- \
   --master spark://`hostname`:7077
 ...
 2014-10-07 19:09:47,136 INFO  workflow.CoreWorkflow$ - Saved engine instance with ID: nfUVwwgMQOqgpb5QEUdAGg
@@ -331,7 +331,7 @@ the evaluation into 12 periods, each period lasting 1 week. `Evaluation2` performs
 this task. We can run it with the following command:
 
 ```
-$ $PIO_HOME/bin/pio run io.prediction.examples.mlc.Evaluation2 -- \
+$ $PIO_HOME/bin/pio run org.apache.predictionio.examples.mlc.Evaluation2 -- \
   --master spark://`hostname`:7077
 ```
 

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/ed9d62d7/docs/manual/obsolete/cookbook/filteringunavailable.html.md
----------------------------------------------------------------------
diff --git a/docs/manual/obsolete/cookbook/filteringunavailable.html.md b/docs/manual/obsolete/cookbook/filteringunavailable.html.md
index 1ee0c6c..765d7c3 100644
--- a/docs/manual/obsolete/cookbook/filteringunavailable.html.md
+++ b/docs/manual/obsolete/cookbook/filteringunavailable.html.md
@@ -21,7 +21,7 @@ When a Query comes in, it is passed to the Algorithm components for making Predi
 Predictions into one, and returns it.
 
 The ItemRec Engine's components can be found in its static factory class
-`io.prediction.engines.itemrec.ItemRecEngine`. It looks like the following:
+`org.apache.predictionio.engines.itemrec.ItemRecEngine`. It looks like the following:
 
 ```scala
 object ItemRecEngine extends IEngineFactory {
@@ -75,7 +75,7 @@ You can create a new engine project in any directory.
 $ $PIO_HOME/bin/pio new scala-local-movielens-filtering
 # Copy ItemRec Engine default settings to the same directory
 $ $PIO_HOME/bin/pio instance --directory-name scala-local-movielens-filtering \
-    io.prediction.engines.itemrec
+    org.apache.predictionio.engines.itemrec
 $ cd scala-local-movielens-filtering
 ```
 
@@ -145,7 +145,7 @@ This project depends on the builtin engines, hence in `build.sbt` under project
 ```scala
 libraryDependencies ++= Seq(
   ...
-  "io.prediction"    %% "engines"       % "0.8.2" % "provided",
+  "org.apache.predictionio"    %% "engines"       % "0.8.2" % "provided",
   ...
 ```
 

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/ed9d62d7/docs/manual/obsolete/engines/index.html.md
----------------------------------------------------------------------
diff --git a/docs/manual/obsolete/engines/index.html.md b/docs/manual/obsolete/engines/index.html.md
index a055ad7..0cebfb7 100644
--- a/docs/manual/obsolete/engines/index.html.md
+++ b/docs/manual/obsolete/engines/index.html.md
@@ -18,8 +18,8 @@ based on the default Item Recommendation Engine. The new project will contain
 configuration for your engine instance.
 
 ```
-$ $PIO_HOME/bin/pio instance io.prediction.engines.itemrec
-$ cd io.prediction.engines.itemrec
+$ $PIO_HOME/bin/pio instance org.apache.predictionio.engines.itemrec
+$ cd org.apache.predictionio.engines.itemrec
 $ $PIO_HOME/bin/pio register
 ```
 where `$PIO_HOME` is your installation path of PredictionIO.

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/ed9d62d7/docs/manual/obsolete/engines/itemrank/index.html.md
----------------------------------------------------------------------
diff --git a/docs/manual/obsolete/engines/itemrank/index.html.md b/docs/manual/obsolete/engines/itemrank/index.html.md
index 96d872e..118751a 100644
--- a/docs/manual/obsolete/engines/itemrank/index.html.md
+++ b/docs/manual/obsolete/engines/itemrank/index.html.md
@@ -88,7 +88,7 @@ event_client.set_user('id_1',
   <div data-tab="Java SDK" data-lang="java">
 ```java
 import com.google.common.collect.ImmutableMap;
-import io.prediction.EventClient;
+import org.apache.predictionio.EventClient;
 import org.joda.time.DateTime;
 
 EventClient eventClient = new EventClient(1);
@@ -155,7 +155,7 @@ event_client.set_item('id_3',
 ```java
 import com.google.common.collect.ImmutableList;
 import com.google.common.collect.ImmutableMap;
-import io.prediction.EventClient;
+import org.apache.predictionio.EventClient;
 import java.util.Map;
 import org.joda.time.DateTime;
 
@@ -223,7 +223,7 @@ event_client.record_user_action_on_item('view', 'id_1', 'id_3',
   <div data-tab="Java SDK" data-lang="java">
 ```java
 import com.google.common.collect.ImmutableMap;
-import io.prediction.EventClient;
+import org.apache.predictionio.EventClient;
 import org.joda.time.DateTime;
 
 eventClient.userActionItem("view", "id_1", "id_3", ImmutableMap.<String, Object>of(),
@@ -288,7 +288,7 @@ event_client.record_user_action_on_item('view', 'id_1', 'id_3',
   <div data-tab="Java SDK" data-lang="java">
 ```java
 import com.google.common.collect.ImmutableMap;
-import io.prediction.EventClient;
+import org.apache.predictionio.EventClient;
 import java.util.Map;
 import org.joda.time.DateTime;
 
@@ -442,7 +442,7 @@ puts predictions
 ```java
 import com.google.common.collect.ImmutableList;
 import com.google.common.collect.ImmutableMap;
-import io.prediction.EngineClient;
+import org.apache.predictionio.EngineClient;
 import org.joda.time.DateTime;
 
 EngineClient engineClient = new EngineClient(apiURL);

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/ed9d62d7/docs/manual/obsolete/engines/itemrec/index.html.md
----------------------------------------------------------------------
diff --git a/docs/manual/obsolete/engines/itemrec/index.html.md b/docs/manual/obsolete/engines/itemrec/index.html.md
index e30e548..3de6500 100644
--- a/docs/manual/obsolete/engines/itemrec/index.html.md
+++ b/docs/manual/obsolete/engines/itemrec/index.html.md
@@ -106,7 +106,7 @@ puts predictions
   <div data-tab="Java SDK" data-lang="java">
 ```java
 import com.google.common.collect.ImmutableMap;
-import io.prediction.EngineClient;
+import org.apache.predictionio.EngineClient;
 
 EngineClient engineClient = new EngineClient("http://localhost:9993");
 engineClient.sendQuery(ImmutableMap.<String, Object>of(

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/ed9d62d7/docs/manual/obsolete/engines/itemsim/index.html.md
----------------------------------------------------------------------
diff --git a/docs/manual/obsolete/engines/itemsim/index.html.md b/docs/manual/obsolete/engines/itemsim/index.html.md
index 73595c0..ba41c48 100644
--- a/docs/manual/obsolete/engines/itemsim/index.html.md
+++ b/docs/manual/obsolete/engines/itemsim/index.html.md
@@ -104,7 +104,7 @@ puts predictions
 ```java
 import com.google.common.collect.ImmutableList;
 import com.google.common.collect.ImmutableMap;
-import io.prediction.EngineClient;
+import org.apache.predictionio.EngineClient;
 
 EngineClient engineClient = new EngineClient("http://localhost:9997");
 engineClient.sendQuery(ImmutableMap.<String, Object>of(

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/ed9d62d7/docs/manual/obsolete/tutorials/enginebuilders/local-helloworld.html.md
----------------------------------------------------------------------
diff --git a/docs/manual/obsolete/tutorials/enginebuilders/local-helloworld.html.md b/docs/manual/obsolete/tutorials/enginebuilders/local-helloworld.html.md
index afddd83..75a1f8e 100644
--- a/docs/manual/obsolete/tutorials/enginebuilders/local-helloworld.html.md
+++ b/docs/manual/obsolete/tutorials/enginebuilders/local-helloworld.html.md
@@ -250,7 +250,7 @@ Create a new file <code>src/main/java/MyDataSource.java</code>:
 ```java
 package myorg;
 
-import io.prediction.controller.java.*;
+import org.apache.predictionio.controller.java.*;
 
 import java.util.List;
 import java.util.ArrayList;
@@ -329,7 +329,7 @@ Create a new file <code>src/main/java/MyAlgorithm.java</code>:
 ```java
 package myorg;
 
-import io.prediction.controller.java.*;
+import org.apache.predictionio.controller.java.*;
 
 import java.util.Map;
 import java.util.HashMap;
@@ -402,7 +402,7 @@ Create a new file <code>src/main/java/MyEngineFactory.java</code>:
 ```java
 package myorg;
 
-import io.prediction.controller.java.*;
+import org.apache.predictionio.controller.java.*;
 
 public class MyEngineFactory implements IJavaEngineFactory {
   public JavaSimpleEngine<MyTrainingData, EmptyDataParams, MyQuery, MyPredictedResult,

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/ed9d62d7/docs/manual/obsolete/tutorials/enginebuilders/stepbystep/combiningalgorithms.html.md
----------------------------------------------------------------------
diff --git a/docs/manual/obsolete/tutorials/enginebuilders/stepbystep/combiningalgorithms.html.md b/docs/manual/obsolete/tutorials/enginebuilders/stepbystep/combiningalgorithms.html.md
index d24fdd9..4a09a3a 100644
--- a/docs/manual/obsolete/tutorials/enginebuilders/stepbystep/combiningalgorithms.html.md
+++ b/docs/manual/obsolete/tutorials/enginebuilders/stepbystep/combiningalgorithms.html.md
@@ -74,7 +74,7 @@ This part is similar to earlier tutorials.
 
 ```
 $ cd $PIO_HOME/examples/java-local-tutorial
-$ ../../bin/pio run io.prediction.examples.java.recommendations.tutorial4.Runner4a -- -- data/ml-100k/
+$ ../../bin/pio run org.apache.predictionio.examples.java.recommendations.tutorial4.Runner4a -- -- data/ml-100k/
 ```
 where `$PIO_HOME` is the root directory of the PredictionIO code tree.
 
@@ -128,7 +128,7 @@ And you can test it out with
 
 ```bash
 $ cd $PIO_HOME/examples/java-local-tutorial
-$ ../../bin/pio run io.prediction.examples.java.recommendations.tutorial4.Runner4b -- -- data/ml-100k/
+$ ../../bin/pio run org.apache.predictionio.examples.java.recommendations.tutorial4.Runner4b -- -- data/ml-100k/
 ```
 
 You should see
@@ -210,7 +210,7 @@ engine](https://github.com/PredictionIO/PredictionIO/tree/master/examples/java-l
 
 ```bash
 $ cd $PIO_HOME/examples/java-local-tutorial
-$ ../../bin/pio run io.prediction.examples.java.recommendations.tutorial4.Runner4c -- -- data/ml-100k/
+$ ../../bin/pio run org.apache.predictionio.examples.java.recommendations.tutorial4.Runner4c -- -- data/ml-100k/
 ```
 
 ## Deployment
@@ -374,10 +374,10 @@ for the engine, and register it with PredictionIO. Here's the content:
 
 ```json
 {
-  "id": "io.prediction.examples.java.recommendations.tutorial4.EngineFactory",
+  "id": "org.apache.predictionio.examples.java.recommendations.tutorial4.EngineFactory",
   "version": "0.8.2",
   "name": "FeatureBased Recommendations Engine",
-  "engineFactory": "io.prediction.examples.java.recommendations.tutorial4.EngineFactory"
+  "engineFactory": "org.apache.predictionio.examples.java.recommendations.tutorial4.EngineFactory"
 }
 ```
 

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/ed9d62d7/docs/manual/obsolete/tutorials/enginebuilders/stepbystep/dataalgorithm.html.md
----------------------------------------------------------------------
diff --git a/docs/manual/obsolete/tutorials/enginebuilders/stepbystep/dataalgorithm.html.md b/docs/manual/obsolete/tutorials/enginebuilders/stepbystep/dataalgorithm.html.md
index 4b65912..1e109ed 100644
--- a/docs/manual/obsolete/tutorials/enginebuilders/stepbystep/dataalgorithm.html.md
+++ b/docs/manual/obsolete/tutorials/enginebuilders/stepbystep/dataalgorithm.html.md
@@ -78,7 +78,7 @@ the path of the file containing the rating data.
 Note that each controller component (*DataSource, Preparator, Algorithm, Serving
 and Metrics*) is restricted to having an empty constructor or a constructor which
 takes exactly one argument which must implement the
-`io.prediction.controller.java.JavaParams` interface.
+`org.apache.predictionio.controller.java.JavaParams` interface.
 
 We can define the DataSource parameter class as follows (in
 `DataSourceParams.java`):
@@ -94,7 +94,7 @@ public class DataSourceParams implements JavaParams {
 ```
 
 The *DataSource* component must extend
-`io.prediction.controller.java.LJavaDataSource`:
+`org.apache.predictionio.controller.java.LJavaDataSource`:
 
 ```java
 public abstract class LJavaDataSource<DSP extends Params,DP,TD,Q,A>
@@ -185,7 +185,7 @@ public class Model implements Serializable {
 ```
 
 The *Algorithm* component must extend
-`io.prediction.controller.java.LJavaAlgorithm`.
+`org.apache.predictionio.controller.java.LJavaAlgorithm`.
 
 ```java
 public abstract class LJavaAlgorithm<AP extends Params,PD,M,Q,P>
@@ -238,7 +238,7 @@ PredictionIO framework requires an *Engine Factory* which returns an *Engine*
 with the controller components defined.
 
 The *Engine Factory* must implement the
-`io.prediction.controller.java.IJavaEngineFactory` interface and implement the
+`org.apache.predictionio.controller.java.IJavaEngineFactory` interface and implement the
 `apply()` method (as shown in `EngineFactory.java`):
 
 ```java
@@ -286,10 +286,10 @@ An engine manifest `engine.json` is needed to describe the Engine:
 
 ```json
 {
-  "id": "io.prediction.examples.java.recommendations.tutorial1.EngineFactory",
+  "id": "org.apache.predictionio.examples.java.recommendations.tutorial1.EngineFactory",
   "version": "0.8.2",
   "name": "Simple Recommendations Engine",
-  "engineFactory": "io.prediction.examples.java.recommendations.tutorial1.EngineFactory"
+  "engineFactory": "org.apache.predictionio.examples.java.recommendations.tutorial1.EngineFactory"
 }
 ```
 

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/ed9d62d7/docs/manual/obsolete/tutorials/enginebuilders/stepbystep/evaluation.html.md
----------------------------------------------------------------------
diff --git a/docs/manual/obsolete/tutorials/enginebuilders/stepbystep/evaluation.html.md b/docs/manual/obsolete/tutorials/enginebuilders/stepbystep/evaluation.html.md
index ea5a860..3a4612b 100644
--- a/docs/manual/obsolete/tutorials/enginebuilders/stepbystep/evaluation.html.md
+++ b/docs/manual/obsolete/tutorials/enginebuilders/stepbystep/evaluation.html.md
@@ -16,7 +16,7 @@ modify `DataSource.java` to do a random split of the rating data to generate
 the *Test Set*. For demonstration purposes, the modified `DataSource.java` is put
 under directory `tutorial3/`.
 
-Recall that `io.prediction.controller.java.LJavaDataSource` takes the
+Recall that `org.apache.predictionio.controller.java.LJavaDataSource` takes the
 following type parameters:
 
 ```java
@@ -68,7 +68,7 @@ result by the *Evaluator*.
 
 We will implement a Root Mean Square Error (RMSE) evaluator. You can find the
 implementation in `Evaluator.java`. The *Evaluator* extends
-`io.prediction.controller.java.JavaEvaluator`, which requires the following type
+`org.apache.predictionio.controller.java.JavaEvaluator`, which requires the following type
 parameters:
 
 ```java
@@ -167,7 +167,7 @@ Execute the following command:
 
 ```
 $ cd $PIO_HOME/examples/java-local-tutorial
-$ ../../bin/pio run io.prediction.examples.java.recommendations.tutorial3.Runner3 -- -- data/test/ratings.csv
+$ ../../bin/pio run org.apache.predictionio.examples.java.recommendations.tutorial3.Runner3 -- -- data/test/ratings.csv
 ```
 where `$PIO_HOME` is the root directory of the PredictionIO code tree.
 
@@ -206,16 +206,16 @@ $ ./fetch.sh
 Re-run `Runner3` with the `ml-100k` data set:
 
 ```
-$ ../../bin/pio run io.prediction.examples.java.recommendations.tutorial3.Runner3 -- -- `pwd`/data/ml-100k/u.data
+$ ../../bin/pio run org.apache.predictionio.examples.java.recommendations.tutorial3.Runner3 -- -- `pwd`/data/ml-100k/u.data
 ```
 
 You should see the following output when it finishes running.
 
 ```
 2014-09-30 17:06:34,033 INFO  spark.SparkContext - Job finished: collect at Workflow.scala:597, took 0.103821 s
-2014-09-30 17:06:34,033 INFO  workflow.CoreWorkflow$ - DataSourceParams: io.prediction.examples.java.recommendations.tutorial1.DataSourceParams@3b9f69ce
+2014-09-30 17:06:34,033 INFO  workflow.CoreWorkflow$ - DataSourceParams: org.apache.predictionio.examples.java.recommendations.tutorial1.DataSourceParams@3b9f69ce
 2014-09-30 17:06:34,033 INFO  workflow.CoreWorkflow$ - PreparatorParams: Empty
-2014-09-30 17:06:34,034 INFO  workflow.CoreWorkflow$ - Algo: 0 Name: MyRecommendationAlgo Params: io.prediction.examples.java.recommendations.tutorial1.AlgoParams@76171b1
+2014-09-30 17:06:34,034 INFO  workflow.CoreWorkflow$ - Algo: 0 Name: MyRecommendationAlgo Params: org.apache.predictionio.examples.java.recommendations.tutorial1.AlgoParams@76171b1
 2014-09-30 17:06:34,034 INFO  workflow.CoreWorkflow$ - ServingParams: Empty
 2014-09-30 17:06:34,035 INFO  workflow.CoreWorkflow$ - EvaluatorParams: Empty
 2014-09-30 17:06:34,035 INFO  workflow.CoreWorkflow$ - [(null,1.052046904037191), (null,1.042766938101085), (null,1.0490312745374106)]

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/ed9d62d7/docs/manual/obsolete/tutorials/enginebuilders/stepbystep/testcomponents.html.md
----------------------------------------------------------------------
diff --git a/docs/manual/obsolete/tutorials/enginebuilders/stepbystep/testcomponents.html.md b/docs/manual/obsolete/tutorials/enginebuilders/stepbystep/testcomponents.html.md
index 4a921fd..faea9eb 100644
--- a/docs/manual/obsolete/tutorials/enginebuilders/stepbystep/testcomponents.html.md
+++ b/docs/manual/obsolete/tutorials/enginebuilders/stepbystep/testcomponents.html.md
@@ -64,7 +64,7 @@ compile and package the JARs):
 
 ```
 $ cd $PIO_HOME/examples/java-local-tutorial
-$ ../../bin/pio run io.prediction.examples.java.recommendations.tutorial2.Runner1 -- -- data/test/ratings.csv
+$ ../../bin/pio run org.apache.predictionio.examples.java.recommendations.tutorial2.Runner1 -- -- data/test/ratings.csv
 ```
 where `$PIO_HOME` is the root directory of the PredictionIO code tree. The two
 `--` are to separate parameters passed to `pio run` (the `Runner1` class in this
@@ -118,7 +118,7 @@ Execute the following command to run:
 
 ```
 $ cd $PIO_HOME/examples/java-local-tutorial
-$ ../../bin/pio run io.prediction.examples.java.recommendations.tutorial2.Runner2 -- -- data/test/ratings.csv
+$ ../../bin/pio run org.apache.predictionio.examples.java.recommendations.tutorial2.Runner2 -- -- data/test/ratings.csv
 ```
 
 You should see the *Model* generated by the Algorithm at the end of the console

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/ed9d62d7/docs/manual/obsolete/tutorials/engines/quickstart.html.md
----------------------------------------------------------------------
diff --git a/docs/manual/obsolete/tutorials/engines/quickstart.html.md b/docs/manual/obsolete/tutorials/engines/quickstart.html.md
index a3c587d..f2747d1 100644
--- a/docs/manual/obsolete/tutorials/engines/quickstart.html.md
+++ b/docs/manual/obsolete/tutorials/engines/quickstart.html.md
@@ -41,12 +41,12 @@ specific programming language:
 <div class="tabs">
   <div data-tab="PHP SDK" data-lang="php">
 <p>To use the PredictionIO PHP SDK, we are going to install it with Composer:</p>
 <p>1. Create a file called ``composer.json`` in your project directory, which adds predictionio/predictionio as a dependency. It should look like this:</p>
 
 ```json
 {
     "require": {
-        "predictionio/predictionio": "~0.8.0"
+        "predictionorg.apache.predictionioio": "~0.8.0"
     }
 }
 ```
@@ -85,7 +85,7 @@ section of your project's <code>pom.xml</code> file:
 ```bash
 <dependencies>
   <dependency>
-    <groupId>io.prediction</groupId>
+    <groupId>org.apache.predictionio</groupId>
     <artifactId>client</artifactId>
     <version>0.8.0</version>
   </dependency>
@@ -248,16 +248,16 @@ $ ruby import.rb
   </div>
   <div data-tab="Java SDK" data-lang="java">
 <p><em>QuickstartImport.java</em> is located under
-PredictionIO-Java-SDK/examples/quickstart_import/src/main/java/io/prediction/samples/.
+PredictionIO-Java-SDK/examples/quickstart_import/src/main/java/org/apache/predictionio/samples/.
 Replace <code>your_app_id</code> with your app id (integer).</p>
 
 ```java
-package io.prediction.samples;
+package org.apache.predictionio.samples;
 
 import com.google.common.collect.ImmutableList;
 import com.google.common.collect.ImmutableMap;
 
-import io.prediction.EventClient;
+import org.apache.predictionio.EventClient;
 
 import java.io.IOException;
 import java.util.Map;
@@ -327,8 +327,8 @@ To deploy an engine instance for *quickstartapp*, first create an engine
 instance project:
 
 ```bash
-$ $PIO_HOME/bin/pio instance io.prediction.engines.itemrank
-$ cd io.prediction.engines.itemrank
+$ $PIO_HOME/bin/pio instance org.apache.predictionio.engines.itemrank
+$ cd org.apache.predictionio.engines.itemrank
 $ $PIO_HOME/bin/pio register
 ```
 
@@ -455,14 +455,14 @@ $ ruby show.rb
   </div>
   <div data-tab="Java SDK" data-lang="java">
 <p><em>QuickstartShow.java</em> is located under
-PredictionIO-Java-SDK/examples/quickstart_show/src/main/java/io/prediction/samples/.</p>
+PredictionIO-Java-SDK/examples/quickstart_show/src/main/java/org/apache/predictionio/samples/.</p>
 
 ```java
-package io.prediction.samples;
+package org.apache.predictionio.samples;
 
 import com.google.common.collect.ImmutableList;
 
-import io.prediction.EngineClient;
+import org.apache.predictionio.EngineClient;
 
 import java.io.IOException;
 import java.util.HashMap;

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/ed9d62d7/docs/manual/obsolete/tutorials/recommendation/movielens.html.md
----------------------------------------------------------------------
diff --git a/docs/manual/obsolete/tutorials/recommendation/movielens.html.md b/docs/manual/obsolete/tutorials/recommendation/movielens.html.md
index b321df0..ac94757 100644
--- a/docs/manual/obsolete/tutorials/recommendation/movielens.html.md
+++ b/docs/manual/obsolete/tutorials/recommendation/movielens.html.md
@@ -68,12 +68,12 @@ Create an engine instance project based on the default Item Recommendation
 Engine.
 
 ```
-$ $PIO_HOME/bin/pio instance io.prediction.engines.itemrec
-$ cd io.prediction.engines.itemrec
+$ $PIO_HOME/bin/pio instance org.apache.predictionio.engines.itemrec
+$ cd org.apache.predictionio.engines.itemrec
 $ $PIO_HOME/bin/pio register
 ```
 where `$PIO_HOME` is your installation path of PredictionIO.
-Under the directory `io.prediction.engines.itemrec`, you will see a
+Under the directory `org.apache.predictionio.engines.itemrec`, you will see a
 self-contained set of configuration files for an instance of the Item Recommendation
 Engine.
 

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/ed9d62d7/docs/manual/obsolete/tutorials/recommendation/yelp.html.md
----------------------------------------------------------------------
diff --git a/docs/manual/obsolete/tutorials/recommendation/yelp.html.md b/docs/manual/obsolete/tutorials/recommendation/yelp.html.md
index 7abee97..4c69547 100644
--- a/docs/manual/obsolete/tutorials/recommendation/yelp.html.md
+++ b/docs/manual/obsolete/tutorials/recommendation/yelp.html.md
@@ -106,8 +106,8 @@ An engine represents a type of prediction. For our purposes we will be using the
 [item recommendation engine](/engines/itemrec/).
 
 ```
-$ $PIO_HOME/bin/pio instance io.prediction.engines.itemrec
-$ cd io.prediction.engines.itemrec
+$ $PIO_HOME/bin/pio instance org.apache.predictionio.engines.itemrec
+$ cd org.apache.predictionio.engines.itemrec
 $ $PIO_HOME/bin/pio register
 ```
 
@@ -143,7 +143,7 @@ Before you can deploy your engine you need to train it.
 First you need to **change into the engine instance folder:**
 
 ```
-$ cd $PIO_HOME/io.prediction.engines.itemrec
+$ cd $PIO_HOME/org.apache.predictionio.engines.itemrec
 ```
 
 Train the engine with the imported data:
@@ -165,7 +165,7 @@ Now it is time to launch the engine.
 First you need to **change into the engine instance folder:**
 
 ```
-$ cd $PIO_HOME/io.prediction.engines.itemrec
+$ cd $PIO_HOME/org.apache.predictionio.engines.itemrec
 ```
 
 Then you can deploy with:
@@ -218,7 +218,7 @@ For our example we could train and deploy every 6 hours with the following:
 ```
 $ crontab -e
 
-0 */6 * * *     cd $PIO_HOME/io.prediction.engines.itemrec; $PIO_HOME/bin/pio train; $PIO_HOME/bin/pio deploy
+0 */6 * * *     cd $PIO_HOME/org.apache.predictionio.engines.itemrec; $PIO_HOME/bin/pio train; $PIO_HOME/bin/pio deploy
 ```
 
 It is not necessary to undeploy; the deploy command will do that automatically.

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/ed9d62d7/docs/manual/source/community/contribute-webhook.html.md
----------------------------------------------------------------------
diff --git a/docs/manual/source/community/contribute-webhook.html.md b/docs/manual/source/community/contribute-webhook.html.md
index 5919cc4..e2612be 100644
--- a/docs/manual/source/community/contribute-webhook.html.md
+++ b/docs/manual/source/community/contribute-webhook.html.md
@@ -11,10 +11,10 @@ Currently we support two types of connectors: `JsonConnector` and `FormConnector
 **JsonConnector**:
 
 ```scala
-package io.prediction.data.webhooks
+package org.apache.predictionio.data.webhooks
 
 /** Connector for Webhooks connection */
-private[prediction] trait JsonConnector {
+private[predictionio] trait JsonConnector {
 
   /** Convert from original JObject to Event JObject
    * @param data original JObject received through webhooks
@@ -38,11 +38,11 @@ Note that you may collect Webhooks data into default channel (without the `chann
 **FormConnector**:
 
 ```scala
-package io.prediction.data.webhooks
+package org.apache.predictionio.data.webhooks
 
 /** Connector for Webhooks connection with Form submission data format
   */
-private[prediction] trait FormConnector {
+private[predictionio] trait FormConnector {
 
   /** Convert from original Form submission data to Event JObject
     * @param data Map of key-value pairs in String type received through webhooks
@@ -93,7 +93,7 @@ Because the data sent by this third-party "ExampleJson" site is in JSON format,
 
 
 ```scala
-private[prediction] object ExampleJsonConnector extends JsonConnector {
+private[predictionio] object ExampleJsonConnector extends JsonConnector {
 
   implicit val json4sFormats: Formats = DefaultFormats
 
@@ -161,22 +161,22 @@ private[prediction] object ExampleJsonConnector extends JsonConnector {
 }
 ```
 
-You can find the complete example in [the GitHub repo](https://github.com/PredictionIO/PredictionIO/blob/develop/data/src/main/scala/io/prediction/data/webhooks/examplejson/ExampleJsonConnector.scala) and how to write [tests for the connector](https://github.com/PredictionIO/PredictionIO/blob/develop/data/src/test/scala/io/prediction/data/webhooks/examplejson/ExampleJsonConnectorSpec.scala).
+You can find the complete example in [the GitHub repo](https://github.com/PredictionIO/PredictionIO/blob/develop/data/src/main/scala/org/apache/predictionio/data/webhooks/examplejson/ExampleJsonConnector.scala) and how to write [tests for the connector](https://github.com/PredictionIO/PredictionIO/blob/develop/data/src/test/scala/org/apache/predictionio/data/webhooks/examplejson/ExampleJsonConnectorSpec.scala).
 
 
 Please put the connector code in a separate directory for each site. For example, code for the segmentio connector should be in
 
 ```
-data/src/main/scala/io/prediction/data/webhooks/segmentio/
+data/src/main/scala/org/apache/predictionio/data/webhooks/segmentio/
 ```
 
 and tests should be in
 
 ```
-data/src/test/scala/io/prediction/data/webhooks/segmentio/
+data/src/test/scala/org/apache/predictionio/data/webhooks/segmentio/
 ```
 
-**For form-submission data**, you can find the comple example [the GitHub repo](https://github.com/PredictionIO/PredictionIO/blob/develop/data/src/main/scala/io/prediction/data/webhooks/exampleform/ExampleFormConnector.scala) and how to write [tests for the connector](https://github.com/PredictionIO/PredictionIO/blob/develop/data/src/test/scala/io/prediction/data/webhooks/exampleform/ExampleFormConnectorSpec.scala).
+**For form-submission data**, you can find the complete example in [the GitHub repo](https://github.com/PredictionIO/PredictionIO/blob/develop/data/src/main/scala/org/apache/predictionio/data/webhooks/exampleform/ExampleFormConnector.scala) and how to write [tests for the connector](https://github.com/PredictionIO/PredictionIO/blob/develop/data/src/test/scala/org/apache/predictionio/data/webhooks/exampleform/ExampleFormConnectorSpec.scala).
 
 
 ## 2. Integrate the Connector into Event Server
@@ -184,13 +184,13 @@ data/src/test/scala/io/prediction/data/webhooks/segmentio/
 Once we have the connector implemented, we can add this to the EventServer so we can collect real-time data.
 
 Add the connector to [`WebhooksConnectors` object](
-https://github.com/PredictionIO/PredictionIO/blob/develop/data/src/main/scala/io/prediction/data/api/WebhooksConnectors.scala):
+https://github.com/PredictionIO/PredictionIO/blob/develop/data/src/main/scala/org/apache/predictionio/data/api/WebhooksConnectors.scala):
 
 ```scala
 
-import io.prediction.data.webhooks.examplejson.ExampleJsonConnector // ADDED
+import org.apache.predictionio.data.webhooks.examplejson.ExampleJsonConnector // ADDED
 
-private[prediction] object WebhooksConnectors {
+private[predictionio] object WebhooksConnectors {
 
   // Map of Connector Name to Connector
   val json: Map[String, JsonConnector] = Map(

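Editorial note: to make the trait's contract concrete, a minimal hand-written connector in the spirit of `ExampleJsonConnector` might look like the sketch below. It assumes the trait's single method is `toEventJson(data: JObject): JObject`, as in the linked example; the package suffix, object name, and payload field names are hypothetical.

```scala
package org.apache.predictionio.data.webhooks.minimalsite

import org.apache.predictionio.data.webhooks.JsonConnector
import org.json4s._
import org.json4s.JsonDSL._

private[predictionio] object MinimalJsonConnector extends JsonConnector {

  implicit val json4sFormats: Formats = DefaultFormats

  // Map a hypothetical payload {"who": ..., "did": ..., "at": ...}
  // onto the Event Server's expected JSON fields.
  def toEventJson(data: JObject): JObject =
    ("event" -> (data \ "did").extract[String]) ~
      ("entityType" -> "user") ~
      ("entityId" -> (data \ "who").extract[String]) ~
      ("eventTime" -> (data \ "at").extract[String])
}
```
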
http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/ed9d62d7/docs/manual/source/customize/dase.html.md.erb
----------------------------------------------------------------------
diff --git a/docs/manual/source/customize/dase.html.md.erb b/docs/manual/source/customize/dase.html.md.erb
index 78a0194..85ed22c 100644
--- a/docs/manual/source/customize/dase.html.md.erb
+++ b/docs/manual/source/customize/dase.html.md.erb
@@ -10,7 +10,7 @@ DataSource reads and selects useful data from the Event Store (data store of the
 
 ## readTraining()
 
-You need to implment readTraining() of [PDataSource](https://docs.prediction.io/api/current/#io.prediction.controller.PDataSource), where you can use the [PEventStore Engine API](https://docs.prediction.io/api/current/#io.prediction.data.store.PEventStore$) to read the events and create the TrainingData based on the events.
+You need to implement readTraining() of [PDataSource](https://docs.prediction.io/api/current/#org.apache.predictionio.controller.PDataSource), where you can use the [PEventStore Engine API](https://docs.prediction.io/api/current/#org.apache.predictionio.data.store.PEventStore$) to read the events and create the TrainingData based on the events.
 
 The following code example reads user "view" and "buy" item events, filters specific types of events for further processing, and returns TrainingData accordingly.
 
@@ -49,7 +49,7 @@ class DataSource(val dsp: DataSourceParams)
 
 Please see [Event Server Overview](https://docs.prediction.io/datacollection/) to understand [EventAPI](https://docs.prediction.io/datacollection/eventapi/) and [event modeling](https://docs.prediction.io/datacollection/eventmodel/).
 
-With [PEventStore Engine API](https://docs.prediction.io/api/current/#io.prediction.data.store.PEventStore$), you can easily read different events in DataSource and get the information you need.
+With [PEventStore Engine API](https://docs.prediction.io/api/current/#org.apache.predictionio.data.store.PEventStore$), you can easily read different events in DataSource and get the information you need.
 
 For example, let's say you have events like the following:
 
@@ -146,7 +146,7 @@ A few example usages of Preparator:
 
 ## prepare()
 
-You need to implement the `prepare()` method of [PPrepartor](https://docs.prediction.io/api/current/#io.prediction.controller.PPreparator) to perform such tasks.
+You need to implement the `prepare()` method of [PPreparator](https://docs.prediction.io/api/current/#org.apache.predictionio.controller.PPreparator) to perform such tasks.
 
 Example:
 
@@ -167,8 +167,8 @@ predict() is responsible for using this model to make prediction. It is called w
 
 PredictionIO supports two types of algorithms:
 
-- **[P2LAlgorithm](https://docs.prediction.io/api/current/#io.prediction.controller.P2LAlgorithm)**: trains a Model which does not contain RDD
-- **[PAlgorithm](https://docs.prediction.io/api/current/#io.prediction.controller.PAlgorithm)**: trains a Model which contains RDD
+- **[P2LAlgorithm](https://docs.prediction.io/api/current/#org.apache.predictionio.controller.P2LAlgorithm)**: trains a Model which does not contain RDD
+- **[PAlgorithm](https://docs.prediction.io/api/current/#org.apache.predictionio.controller.PAlgorithm)**: trains a Model which contains RDD
 
 ## P2LAlgorithm
 
@@ -194,7 +194,7 @@ Example:
 
 ## Using LEventStore Engine API in predict()
 
-You may use [LEventStore.findByEntity()](https://docs.prediction.io/api/current/#io.prediction.data.store.LEventStore$) to retrieve events of a specific entity. For example, retrieve recent events of the user specified in the query) and use these recent events to make prediction in real time.
+You may use [LEventStore.findByEntity()](https://docs.prediction.io/api/current/#org.apache.predictionio.data.store.LEventStore$) to retrieve events of a specific entity. For example, retrieve recent events of the user specified in the query, and use these recent events to make predictions in real time.
 
 
 For example, the following code reads the recent 10 view events of `query.user`:
@@ -234,7 +234,7 @@ Example:
 
 ## serve()
 
-You need to implement the serve() method of the class [LServing](https://docs.prediction.io/api/current/#io.prediction.controller.LServing). The serve() method processes predicted result. It is also responsible for combining multiple predicted results into one if you have more than one predictive model.
+You need to implement the serve() method of the class [LServing](https://docs.prediction.io/api/current/#org.apache.predictionio.controller.LServing). The serve() method processes the predicted result. It is also responsible for combining multiple predicted results into one if you have more than one predictive model.
 
 Example:
 

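Editorial note: since several hunks above touch `PEventStore`, one concrete call shape may help. A condensed sketch of the kind of query `readTraining()` makes, assuming the 0.9.x `PEventStore.find()` named parameters (the helper name, app name, and event names are illustrative):

```scala
import org.apache.predictionio.data.storage.Event
import org.apache.predictionio.data.store.PEventStore
import org.apache.spark.SparkContext
import org.apache.spark.rdd.RDD

// Select only "view" and "buy" events performed by users on items,
// leaving everything else in the Event Store untouched.
def viewAndBuyEvents(appName: String, sc: SparkContext): RDD[Event] =
  PEventStore.find(
    appName = appName,
    entityType = Some("user"),
    eventNames = Some(List("view", "buy")),
    targetEntityType = Some(Some("item")))(sc)
```
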
http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/ed9d62d7/docs/manual/source/customize/troubleshooting.html.md
----------------------------------------------------------------------
diff --git a/docs/manual/source/customize/troubleshooting.html.md b/docs/manual/source/customize/troubleshooting.html.md
index 201a8de..d17b605 100644
--- a/docs/manual/source/customize/troubleshooting.html.md
+++ b/docs/manual/source/customize/troubleshooting.html.md
@@ -28,7 +28,7 @@ training process stops after the TrainingData is printed.
 [INFO] [CoreWorkflow$] TrainingData:
 [INFO] [CoreWorkflow$] ratings: [1501] (List(Rating(3,0,4.0), Rating(3,1,4.0))...)
 ...
-[INFO] [CoreWorkflow$] Training interrupted by io.prediction.workflow.StopAfterReadInterruption.
+[INFO] [CoreWorkflow$] Training interrupted by org.apache.predictionio.workflow.StopAfterReadInterruption.
 ```
 
 Similarly, you can stop the training after the Preparator phase by using
@@ -55,7 +55,7 @@ For example, to print TrainingData to console and check if the `ratings` is empt
 do the following:
 
 ```scala
-import io.prediction.controller.SanityCheck // ADDED
+import org.apache.predictionio.controller.SanityCheck // ADDED
 
 class TrainingData(
   val ratings: RDD[Rating]
@@ -89,8 +89,8 @@ If your data is empty, you should see the following error thrown by the
 Exception in thread "main" java.lang.IllegalArgumentException: requirement failed: ratings cannot be empty!
 	at scala.Predef$.require(Predef.scala:233)
 	at org.template.recommendation.TrainingData.sanityCheck(DataSource.scala:73)
-	at io.prediction.workflow.CoreWorkflow$$anonfun$runTypelessContext$7.apply(Workflow.scala:474)
-	at io.prediction.workflow.CoreWorkflow$$anonfun$runTypelessContext$7.apply(Workflow.scala:465)
+	at org.apache.predictionio.workflow.CoreWorkflow$$anonfun$runTypelessContext$7.apply(Workflow.scala:474)
+	at org.apache.predictionio.workflow.CoreWorkflow$$anonfun$runTypelessContext$7.apply(Workflow.scala:465)
 	at scala.collection.immutable.Map$Map1.foreach(Map.scala:109)
   ...
 ```
@@ -107,7 +107,7 @@ You should see that the checking is skipped, as in the following output:
 [INFO] [CoreWorkflow$] Data sanity checking is off.
 [INFO] [CoreWorkflow$] Data Source
 ...
-[INFO] [CoreWorkflow$] Training interrupted by io.prediction.workflow.StopAfterReadInterruption.
+[INFO] [CoreWorkflow$] Training interrupted by org.apache.predictionio.workflow.StopAfterReadInterruption.
 ```
 
 ## Engine Status Page
@@ -132,7 +132,7 @@ Note that the Spark context is available as variable `sc` inside the shell.
 For example, to get the events of `MyApp1` using the PEventStore API inside pio-shell and collect them into an array `c`, run the following in the shell:
 
 ```
-> import io.prediction.data.store.PEventStore
+> import org.apache.predictionio.data.store.PEventStore
 > val eventsRDD = PEventStore.find(appName="MyApp1")(sc)
 > val c = eventsRDD.collect()
 ```
@@ -142,5 +142,5 @@ Then you should see the following returned in the shell:
 ```
 ...
 15/05/18 14:24:42 INFO DAGScheduler: Job 0 finished: collect at <console>:24, took 1.850779 s
-c: Array[io.prediction.data.storage.Event] = Array(Event(id=Some(AaQUUBsFZxteRpDV_7fDGQAAAU1ZfRW1tX9LSWdZSb0),event=$set,eType=item,eId=i42,tType=None,tId=None,p=DataMap(Map(categories -> JArray(List(JString(c2), JString(c1), JString(c6), JString(c3))))),t=2015-05-15T21:31:19.349Z,tags=List(),pKey=None,ct=2015-05-15T21:31:19.354Z), Event(id=Some(DjvP3Dnci9F4CWmiqoLabQAAAU1ZfROaqdRYO-pZ_no),event=$set,eType=user,eId=u9,tType=None,tId=None,p=DataMap(Map()),t=2015-05-15T21:31:18.810Z,tags=List(),pKey=None,ct=2015-05-15T21:31:18.817Z), Event(id=Some(DjvP3Dnci9F4CWmiqoLabQAAAU1ZfRq7tsanlemwmZQ),event=view,eType=user,eId=u9,tType=Some(item),tId=Some(i25),p=DataMap(Map()),t=2015-05-15T21:31:20.635Z,tags=List(),pKey=None,ct=2015-05-15T21:31:20.639Z), Event(id=Some(DjvP3Dnci9F4CWmiqoLabQAAAU1ZfR...
+c: Array[org.apache.predictionio.data.storage.Event] = Array(Event(id=Some(AaQUUBsFZxteRpDV_7fDGQAAAU1ZfRW1tX9LSWdZSb0),event=$set,eType=item,eId=i42,tType=None,tId=None,p=DataMap(Map(categories -> JArray(List(JString(c2), JString(c1), JString(c6), JString(c3))))),t=2015-05-15T21:31:19.349Z,tags=List(),pKey=None,ct=2015-05-15T21:31:19.354Z), Event(id=Some(DjvP3Dnci9F4CWmiqoLabQAAAU1ZfROaqdRYO-pZ_no),event=$set,eType=user,eId=u9,tType=None,tId=None,p=DataMap(Map()),t=2015-05-15T21:31:18.810Z,tags=List(),pKey=None,ct=2015-05-15T21:31:18.817Z), Event(id=Some(DjvP3Dnci9F4CWmiqoLabQAAAU1ZfRq7tsanlemwmZQ),event=view,eType=user,eId=u9,tType=Some(item),tId=Some(i25),p=DataMap(Map()),t=2015-05-15T21:31:20.635Z,tags=List(),pKey=None,ct=2015-05-15T21:31:20.639Z), Event(id=Some(DjvP3Dnci9F4CWmiqoLabQAAAU1ZfR...
 ```

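Editorial note: pieced together, the checked `TrainingData` from the snippet above looks like the following sketch. The `Rating` fields mirror the recommendation template's (user, item, rating), and the `require` message matches the stack trace shown earlier:

```scala
import org.apache.predictionio.controller.SanityCheck
import org.apache.spark.rdd.RDD

case class Rating(user: Int, item: Int, rating: Double)

class TrainingData(
  val ratings: RDD[Rating]
) extends Serializable with SanityCheck {
  // Fails fast during training when the DataSource selected no events,
  // raising the IllegalArgumentException shown above.
  def sanityCheck(): Unit = {
    require(!ratings.take(1).isEmpty, "ratings cannot be empty!")
  }
}
```
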
http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/ed9d62d7/docs/manual/source/datacollection/eventapi.html.md
----------------------------------------------------------------------
diff --git a/docs/manual/source/datacollection/eventapi.html.md b/docs/manual/source/datacollection/eventapi.html.md
index 9fb3716..12c93f2 100644
--- a/docs/manual/source/datacollection/eventapi.html.md
+++ b/docs/manual/source/datacollection/eventapi.html.md
@@ -9,7 +9,7 @@ PredictionIO's SDKs.
 
 INFO: All PredictionIO-compliant engines support accessing the Event Store (i.e. the
 data store of Event Server) through [PredictionIO's Storage
-API](http://docs.prediction.io/api/current/index.html#io.prediction.data.storage.package).
+API](http://docs.prediction.io/api/current/index.html#org.apache.predictionio.data.storage.package).
 
 ## Launching the Event Server
 

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/ed9d62d7/docs/manual/source/datacollection/eventmodel.html.md.erb
----------------------------------------------------------------------
diff --git a/docs/manual/source/datacollection/eventmodel.html.md.erb b/docs/manual/source/datacollection/eventmodel.html.md.erb
index bd875dd..91b3f65 100644
--- a/docs/manual/source/datacollection/eventmodel.html.md.erb
+++ b/docs/manual/source/datacollection/eventmodel.html.md.erb
@@ -87,7 +87,7 @@ For example, setting entity `user-1`'s properties `birthday` and `address`:
 }
 ```
 
-**Note** that the properties values of the entity will be aggregated based on these special events and the eventTime. The state of the entity is different depending on the time you are looking at the data. In engine's DataSource, you can use [PEventStore.aggregateProperties() API](https://docs.prediction.io/api/current/#io.prediction.data.store.PEventStore$) to retrieve the state of entity's properties (based on time).
+**Note** that the property values of the entity will be aggregated based on these special events and the eventTime. The state of the entity is different depending on the time you are looking at the data. In the engine's DataSource, you can use [PEventStore.aggregateProperties() API](https://docs.prediction.io/api/current/#org.apache.predictionio.data.store.PEventStore$) to retrieve the state of the entity's properties (based on time).
 
 NOTE: Although it doesn't hurt to import duplicated special events for an entity (exactly the same properties) into the event server (it just means that the entity changes to the same state as before, and the duplicated event provides no new information about the user), it could waste storage space.
 
@@ -223,7 +223,7 @@ NOTE: Note that you should quote the entire URL by using single or double quotes
 
 You should see all events being created for user-2.
 
-Now, let's retrieve the user-2's properties using the [PEventStore API](https://docs.prediction.io/api/current/#io.prediction.data.store.PEventStore$).
+Now, let's retrieve the user-2's properties using the [PEventStore API](https://docs.prediction.io/api/current/#org.apache.predictionio.data.store.PEventStore$).
 
 First, start `pio-shell` by running:
 
@@ -246,14 +246,14 @@ Run the following code in PIO shell (Replace `"MyTestApp"` with your app name):
 
 ```scala
 scala> val appName="MyTestApp"
-scala> import io.prediction.data.store.PEventStore
+scala> import org.apache.predictionio.data.store.PEventStore
 scala> PEventStore.aggregateProperties(appName=appName, entityType="user")(sc).collect()
 ```
 
 This command uses PEventStore to aggregate the user properties as a Map of user ID to PropertyMap. `collect()` will return the data as an array. You should see the following output at the end, which indicates there is a user ID 2 with empty properties, because that is the state of user 2 with all imported events taken into account.
 
 ```
-res0: Array[(String, io.prediction.data.storage.PropertyMap)] =
+res0: Array[(String, org.apache.predictionio.data.storage.PropertyMap)] =
 Array((2,PropertyMap(Map(), 2014-09-09T16:17:42.937-08:00, 2014-09-13T16:17:42.143-08:00)))
 ```
 
@@ -269,7 +269,7 @@ scala> PEventStore.aggregateProperties(appName=appName, entityType="user", until
 You should see the following output, and the aggregated properties match what we expected as described earlier (right before event 3): user-2 has properties of a = 3, b = 5 and c = 6.
 
 ```
-res2: Array[(String, io.prediction.data.storage.PropertyMap)] =
+res2: Array[(String, org.apache.predictionio.data.storage.PropertyMap)] =
 Array((2,PropertyMap(Map(b -> JInt(5), a -> JInt(3), c -> JInt(6)), 2014-09-09T16:17:42.937-08:00, 2014-09-10T13:12:04.937-08:00))
 ```
 
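Editorial note: for completeness, the time-bounded aggregation used above can be written out in full. A sketch, assuming `sc` is the shell's SparkContext and using an illustrative cut-off timestamp falling just before event 3:

```scala
import org.apache.predictionio.data.store.PEventStore
import org.joda.time.DateTime

// Aggregate user properties as of a cut-off time; events at or after
// untilTime are ignored, reproducing the "right before event 3" state.
val props = PEventStore.aggregateProperties(
  appName = "MyTestApp",
  entityType = "user",
  untilTime = Some(new DateTime("2014-09-11T00:00:00.000-08:00")))(sc).collect()
```
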

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/ed9d62d7/docs/manual/source/demo/textclassification.html.md
----------------------------------------------------------------------
diff --git a/docs/manual/source/demo/textclassification.html.md b/docs/manual/source/demo/textclassification.html.md
index 7b819e3..b76f9da 100644
--- a/docs/manual/source/demo/textclassification.html.md
+++ b/docs/manual/source/demo/textclassification.html.md
@@ -470,7 +470,7 @@ The last and final object implemented in this class simply creates a Map with ke
 The algorithm components in this engine, `NBAlgorithm` and `LRAlgorithm`, actually follow a very general form. Firstly, a parameter class must again be initialized to feed in the corresponding Algorithm model parameters. For example, NBAlgorithm incorporates NBAlgorithmParams which holds the appropriate additive smoothing parameter lambda for the Naive Bayes model.
 
 
-The main class of interest in this component is the class that extends [P2LAlgorithm](https://docs.prediction.io/api/current/#io.prediction.controller.P2LAlgorithm). This class must implement a method named train which will output your predictive model (as a concrete object, this will be implemented via a Scala  class). It must also implement a predict method that transforms a query to an appropriate feature vector, and uses this to predict with the fitted model. The vectorization function is implemented by a PreparedData object, and the categorization (prediction) is handled by an instance of the NBModel implementation. Again, this demonstrates the facility with which different models can be incorporated into PredictionIO's DASE architecture.
+The main class of interest in this component is the class that extends [P2LAlgorithm](https://docs.prediction.io/api/current/#org.apache.predictionio.controller.P2LAlgorithm). This class must implement a method named train which will output your predictive model (as a concrete object, this will be implemented via a Scala class). It must also implement a predict method that transforms a query to an appropriate feature vector, and uses this to predict with the fitted model. The vectorization function is implemented by a PreparedData object, and the categorization (prediction) is handled by an instance of the NBModel implementation. Again, this demonstrates the facility with which different models can be incorporated into PredictionIO's DASE architecture.
 
 The model class itself will be discussed in the following section; for now, turn your attention to the TextManipulationEngine object defined in the script `Engine.scala`. You can see here that the engine is initialized by specifying the DataSource, Preparator, and Serving classes, as well as a Map of algorithm names to Algorithm classes. This tells the engine which algorithms to run. In practice, you can have as many statistical learning models as you'd like; you simply have to implement a new algorithm component to do this. However, this general design form will persist, and the main meat of the work should be in the implementation of your model class.
 
@@ -580,7 +580,7 @@ To use the alternative multinomial logistic regression algorithm change your `en
 
 ## Serving: Delivering the Final Prediction
 
-The serving component is the final stage in the engine, and in a sense, the most important. This is the final stage in which you combine the results obtained from the different models you choose to run. The Serving class extends the [LServing](https://docs.prediction.io/api/current/#io.prediction.controller.LServing) class which must implement a method called serve. This takes a query and an associated sequence of predicted results, which contains the predicted results from the different algorithms that are implemented in your engine, and combines the results to yield a final prediction.  It is this final prediction that you will receive after sending a query.
+The serving component is the final stage in the engine, and in a sense, the most important. This is the final stage in which you combine the results obtained from the different models you choose to run. The Serving class extends the [LServing](https://docs.prediction.io/api/current/#org.apache.predictionio.controller.LServing) class which must implement a method called serve. This takes a query and an associated sequence of predicted results, which contains the predicted results from the different algorithms that are implemented in your engine, and combines the results to yield a final prediction.  It is this final prediction that you will receive after sending a query.
 
 For example, you could choose to slightly modify the implementation to return class probabilities coming from a mixture of model estimates, or any other technique you could conceive for combining your results. The default engine setting has this set to yield the label from the model predicting with greater confidence.
 
@@ -590,7 +590,7 @@ For example, you could choose to slightly modify the implementation to return cl
 
  A predictive model needs to be evaluated to see how it will generalize to future observations. PredictionIO uses cross-validation to estimate the model performance metrics needed to assess your particular choice of model. The script `Evaluation.scala` available with the engine template exemplifies what a usual evaluator setup will look like. First, you must define an appropriate metric. In the engine template, since the topic is text classification, the default metric implemented is category accuracy.
 
- Second you must define an evaluation object (i.e. extends the class [Evaluation](https://docs.prediction.io/api/current/#io.prediction.controller.Evaluation)).
+ Second, you must define an evaluation object (i.e. one that extends the class [Evaluation](https://docs.prediction.io/api/current/#org.apache.predictionio.controller.Evaluation)).
Here, you must specify the actual engine and metric components that are to be used for the evaluation. In the engine template, the specified engine is the TextManipulationEngine object, and the metric is Accuracy. Lastly, you must specify the parameter values that you want to test in the cross-validation. You can see this in the following block of code:
 
 ```scala

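Editorial note: the `P2LAlgorithm` shape described above reduces to a small skeleton. A sketch with illustrative stand-in types (not the template's actual `NBAlgorithm`), showing the `train`/`predict` pair the class must implement:

```scala
import org.apache.predictionio.controller.{P2LAlgorithm, Params}
import org.apache.spark.SparkContext

// Illustrative stand-ins for the template's real types.
case class MyAlgorithmParams(lambda: Double) extends Params
case class Query(text: String)
case class PredictedResult(category: String, confidence: Double)
class PreparedData extends Serializable
class MyModel(val fallbackCategory: String) extends Serializable

class MyAlgorithm(ap: MyAlgorithmParams)
  extends P2LAlgorithm[PreparedData, MyModel, Query, PredictedResult] {

  // Fit a model from the prepared (vectorized) training data.
  def train(sc: SparkContext, pd: PreparedData): MyModel =
    new MyModel("sports")

  // Vectorize the query and score it with the fitted model.
  def predict(model: MyModel, query: Query): PredictedResult =
    PredictedResult(model.fallbackCategory, confidence = 1.0)
}
```
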
http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/ed9d62d7/docs/manual/source/machinelearning/dimensionalityreduction.html.md
----------------------------------------------------------------------
diff --git a/docs/manual/source/machinelearning/dimensionalityreduction.html.md b/docs/manual/source/machinelearning/dimensionalityreduction.html.md
index 931b063..90a85dd 100644
--- a/docs/manual/source/machinelearning/dimensionalityreduction.html.md
+++ b/docs/manual/source/machinelearning/dimensionalityreduction.html.md
@@ -276,7 +276,7 @@ Remember that the Data Preparator is the engine component that takes care of the
 To make sure there is no confusion, replace the import statements in the `Preparator.scala` script with the following:
 
 ```scala
-import io.prediction.controller.{Params, PPreparator}
+import org.apache.predictionio.controller.{Params, PPreparator}
 import org.apache.spark.SparkContext
 import org.apache.spark.mllib.feature.{StandardScaler, StandardScalerModel}
 import org.apache.spark.mllib.linalg.distributed.RowMatrix
@@ -362,8 +362,8 @@ The following code is taken from the [text classification engine template](http:
 ```scala
 package FeatureReduction
 
-import io.prediction.controller.Params
-import io.prediction.controller.P2LAlgorithm
+import org.apache.predictionio.controller.Params
+import org.apache.predictionio.controller.P2LAlgorithm
 import org.apache.spark.SparkContext
 import org.apache.spark.ml.classification.LogisticRegression
 import org.apache.spark.sql.DataFrame

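Editorial note: given the imports above, the Preparator's scaling step might be sketched as follows (the data classes are illustrative stand-ins for the guide's own):

```scala
import org.apache.predictionio.controller.PPreparator
import org.apache.spark.SparkContext
import org.apache.spark.mllib.feature.StandardScaler
import org.apache.spark.mllib.linalg.Vector
import org.apache.spark.rdd.RDD

class TrainingData(val features: RDD[Vector]) extends Serializable
class PreparedData(val scaled: RDD[Vector]) extends Serializable

class Preparator extends PPreparator[TrainingData, PreparedData] {
  def prepare(sc: SparkContext, td: TrainingData): PreparedData = {
    // Standardize every feature to zero mean and unit variance
    // before feeding the matrix to PCA.
    val scaler = new StandardScaler(withMean = true, withStd = true)
      .fit(td.features)
    new PreparedData(td.features.map(scaler.transform))
  }
}
```
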
http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/ed9d62d7/docs/manual/source/partials/shared/install/_dependent_services.html.erb
----------------------------------------------------------------------
diff --git a/docs/manual/source/partials/shared/install/_dependent_services.html.erb b/docs/manual/source/partials/shared/install/_dependent_services.html.erb
index 073baad..8aa39da 100644
--- a/docs/manual/source/partials/shared/install/_dependent_services.html.erb
+++ b/docs/manual/source/partials/shared/install/_dependent_services.html.erb
@@ -22,7 +22,7 @@ You may use `jps` to verify that you have everything started:
 ```
 $ jps -l
 15344 org.apache.hadoop.hbase.master.HMaster
-15409 io.prediction.tools.console.Console
+15409 org.apache.predictionio.tools.console.Console
 15256 org.elasticsearch.bootstrap.Elasticsearch
 15469 sun.tools.jps.Jps
 $
@@ -30,7 +30,7 @@ $
 
 A running setup will have these up and running:
 
-- io.prediction.tools.console.Console
+- org.apache.predictionio.tools.console.Console
 - org.apache.hadoop.hbase.master.HMaster
 - org.elasticsearch.bootstrap.Elasticsearch
 

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/ed9d62d7/docs/manual/source/resources/faq.html.md
----------------------------------------------------------------------
diff --git a/docs/manual/source/resources/faq.html.md b/docs/manual/source/resources/faq.html.md
index 11779d0..e404d8b 100644
--- a/docs/manual/source/resources/faq.html.md
+++ b/docs/manual/source/resources/faq.html.md
@@ -160,11 +160,11 @@ $ pio deploy -- --driver-memory 8G
 
 ## Building PredictionIO
 
-### Q: How to resolve "Error: Could not find or load main class io.prediction.tools.Console" after ./make_distribution.sh?
+### Q: How to resolve "Error: Could not find or load main class org.apache.predictionio.tools.Console" after ./make_distribution.sh?
 
 ```
 $ bin/pio app
-Error: Could not find or load main class io.prediction.tools.Console
+Error: Could not find or load main class org.apache.predictionio.tools.Console
 ```
 
 When PredictionIO bumps a version, it creates another JAR file with the new

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/ed9d62d7/docs/manual/source/resources/intellij.html.md.erb
----------------------------------------------------------------------
diff --git a/docs/manual/source/resources/intellij.html.md.erb b/docs/manual/source/resources/intellij.html.md.erb
index 6c87443..ab5d8bc 100644
--- a/docs/manual/source/resources/intellij.html.md.erb
+++ b/docs/manual/source/resources/intellij.html.md.erb
@@ -108,7 +108,7 @@ Configurations...*. Click on the **+** button and select *Application*. Name it
 `pio train` and put in the following.
 
 ```
-Main class: io.prediction.workflow.CreateWorkflow
+Main class: org.apache.predictionio.workflow.CreateWorkflow
 VM options: -Dspark.master=local -Dlog4j.configuration=file:/**replace_with_your_PredictionIO_path**/conf/log4j.properties
 Program arguments: --engine-id dummy --engine-version dummy --engine-variant engine.json
 ```
@@ -156,7 +156,7 @@ For `pio deploy`, simply duplicate the previous configuration and replace with
 the following.
 
 ```
-Main class: io.prediction.workflow.CreateServer
+Main class: org.apache.predictionio.workflow.CreateServer
 Program Arguments: --engineInstanceId **replace_with_the_id_from_pio_train**
 ```
 
@@ -199,4 +199,4 @@ Now, before opening the template with Intellij, run the following command in the
 $ pio build
 ```
 
-This should update the pioVersion key in SBT to the version of PredictionIO you have installed, so that Intellij loads the correct JARS via its Auto-Import feature. Now, you can go ahead and open the file `build.sbt` with Intellij IDEA. You are now ready to [customize](/customize/) your new engine template.
\ No newline at end of file
+This should update the pioVersion key in SBT to the version of PredictionIO you have installed, so that IntelliJ loads the correct JARs via its Auto-Import feature. Now, you can go ahead and open the file `build.sbt` with IntelliJ IDEA. You are now ready to [customize](/customize/) your new engine template.

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/ed9d62d7/docs/manual/source/resources/upgrade.html.md
----------------------------------------------------------------------
diff --git a/docs/manual/source/resources/upgrade.html.md b/docs/manual/source/resources/upgrade.html.md
index 53c5c4d..a26cedc 100644
--- a/docs/manual/source/resources/upgrade.html.md
+++ b/docs/manual/source/resources/upgrade.html.md
@@ -45,13 +45,13 @@ NOTE: The following changes are not required for using 0.9.2 but it's recommende
 - remove this line of code:
 
     ```scala
-    import io.prediction.data.storage.Storage
+    import org.apache.predictionio.data.storage.Storage
     ```
 
     and replace it by
 
     ```scala
-    import io.prediction.data.store.PEventStore
+    import org.apache.predictionio.data.store.PEventStore
     ```
 
 - Change `appId: Int` to `appName: String` in DataSourceParams
@@ -94,9 +94,9 @@ NOTE: The following changes are not required for using 0.9.2 but it's recommende
 
 If Storage.getLEvents() is also used in Algorithm (such as the ALSAlgorithm of the E-Commerce Recommendation template), you also need to do the following:
 
-NOTE: If `io.prediction.data.storage.Storage` is not used at all (such as Recommendation, Similar Product, Classification, Lead Scoring, Product Ranking template), there is no need to change Algorithm and can go to the later **engine.json** section.
+NOTE: If `org.apache.predictionio.data.storage.Storage` is not used at all (such as in the Recommendation, Similar Product, Classification, Lead Scoring, and Product Ranking templates), there is no need to change the Algorithm, and you can skip to the later **engine.json** section.
 
-- remove `import io.prediction.data.storage.Storage` and replace it by `import io.prediction.data.store.LEventStore`
+- remove `import org.apache.predictionio.data.storage.Storage` and replace it by `import org.apache.predictionio.data.store.LEventStore`
 - change `appId` to `appName` in the XXXAlgorithmParams class.
 - remove this line of code: `@transient lazy val lEventsDb = Storage.getLEvents()`
 - locate where the removed `lEventsDb` is used to look up events, and change that call to `LEventStore.findByEntity()`:
@@ -230,22 +230,22 @@ Follow instructions below to modify existing engine templates to be compatible w
     import org.apache.spark.SparkContext
     ```
 
-2. Modify the file `build.sbt` in your template directory to use `pioVersion.value` as the version of io.prediction.core dependency:
+2. Modify the file `build.sbt` in your template directory to use `pioVersion.value` as the version of the org.apache.predictionio `core` dependency:
 
     Under your template's root directory, you should see a file `build.sbt` which has the following content:
 
     ```
     libraryDependencies ++= Seq(
-      "io.prediction"    %% "core"          % "0.8.6" % "provided",
+      "org.apache.predictionio"    %% "core"          % "0.8.6" % "provided",
       "org.apache.spark" %% "spark-core"    % "1.2.0" % "provided",
       "org.apache.spark" %% "spark-mllib"   % "1.2.0" % "provided")
     ```
 
-    Change the version of `"io.prediction" && "core"` to `pioVersion.value`:
+    Change the version of `"org.apache.predictionio" %% "core"` to `pioVersion.value`:
 
     ```
     libraryDependencies ++= Seq(
-      "io.prediction"    %% "core"          % pioVersion.value % "provided",
+      "org.apache.predictionio"    %% "core"          % pioVersion.value % "provided",
       "org.apache.spark" %% "spark-core"    % "1.2.0" % "provided",
       "org.apache.spark" %% "spark-mllib"   % "1.2.0" % "provided")
     ```
@@ -253,7 +253,7 @@ Follow instructions below to modify existing engine templates to be compatible w
 3. Create a new file `pio-build.sbt` in the template's **project/** directory with the following content:
 
     ```
-    addSbtPlugin("io.prediction" % "pio-build" % "0.9.0")
+    addSbtPlugin("org.apache.predictionio" % "pio-build" % "0.9.0")
     ```
 
     Then, you should see the following two files in the **project/** directory:
@@ -404,5 +404,5 @@ Replace by the returned app ID: ( is the original app ID used in 0.8.0/0.8.2.)
 $ set -a
 $ source conf/pio-env.sh
 $ set +a
-$ sbt/sbt "data/run-main io.prediction.data.storage.hbase.upgrade.Upgrade <from app ID>" "<to app ID>"
+$ sbt/sbt "data/run-main org.apache.predictionio.data.storage.hbase.upgrade.Upgrade <from app ID>" "<to app ID>"
 ```

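Editorial note: as a reference for the `Storage.getLEvents()` replacement above, a sketch of an `LEventStore.findByEntity()` call as it would appear inside `predict()`. The app name, user id, and timeout are illustrative, and the named parameters are assumed to follow the 0.9.x API:

```scala
import org.apache.predictionio.data.store.LEventStore
import scala.concurrent.duration._

// Fetch the 10 most recent "view" events of one user, with a short
// timeout so a slow lookup cannot stall the prediction request.
val recentViews = LEventStore.findByEntity(
  appName = "MyApp1",
  entityType = "user",
  entityId = "u9",
  eventNames = Some(Seq("view")),
  limit = Some(10),
  latest = true,
  timeout = 200.millis).toList
```
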
http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/ed9d62d7/docs/manual/source/sdk/java.html.md.erb
----------------------------------------------------------------------
diff --git a/docs/manual/source/sdk/java.html.md.erb b/docs/manual/source/sdk/java.html.md.erb
index 314579c..8b8762e 100644
--- a/docs/manual/source/sdk/java.html.md.erb
+++ b/docs/manual/source/sdk/java.html.md.erb
@@ -8,7 +8,7 @@ title: Java & Android SDK
 
 ## Central Repository
 
-[Browse](http://search.maven.org/#search%7Cga%7C1%7Cio.prediction)
+[Browse](http://search.maven.org/#search%7Cga%7C1%7Corg.apache.predictionio)
 
 ## GitHub
 

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/ed9d62d7/docs/manual/source/sdk/php.html.md.erb
----------------------------------------------------------------------
diff --git a/docs/manual/source/sdk/php.html.md.erb b/docs/manual/source/sdk/php.html.md.erb
index 5606c9b..a16f44d 100644
--- a/docs/manual/source/sdk/php.html.md.erb
+++ b/docs/manual/source/sdk/php.html.md.erb
@@ -8,7 +8,7 @@ title: PHP SDK
 
 ## Packagist
 
 [Browse](https://packagist.org/packages/predictionio/predictionio)
 
 ## GitHub
 

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/ed9d62d7/docs/manual/source/system/anotherdatastore.html.md
----------------------------------------------------------------------
diff --git a/docs/manual/source/system/anotherdatastore.html.md b/docs/manual/source/system/anotherdatastore.html.md
index 563430b..28fde14 100644
--- a/docs/manual/source/system/anotherdatastore.html.md
+++ b/docs/manual/source/system/anotherdatastore.html.md
@@ -280,8 +280,8 @@ supported.
 
 It is quite straightforward to implement support for other backends. A good
 starting point is to reference the JDBC implementation inside the
-[io.prediction.data.storage.jdbc
-package](https://github.com/PredictionIO/PredictionIO/tree/develop/data/src/main/scala/io/prediction/data/storage/jdbc).
+[org.apache.predictionio.data.storage.jdbc
+package](https://github.com/PredictionIO/PredictionIO/tree/develop/data/src/main/scala/org/apache/predictionio/data/storage/jdbc).
 
 Contributions of different backend implementations are highly encouraged. To
 start contributing, please refer to [this guide](/community/contribute-code/).

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/ed9d62d7/docs/manual/source/templates/classification/quickstart.html.md.erb
----------------------------------------------------------------------
diff --git a/docs/manual/source/templates/classification/quickstart.html.md.erb b/docs/manual/source/templates/classification/quickstart.html.md.erb
index 327c1b3..12a01aa 100644
--- a/docs/manual/source/templates/classification/quickstart.html.md.erb
+++ b/docs/manual/source/templates/classification/quickstart.html.md.erb
@@ -150,8 +150,8 @@ client.create_event(
   <div data-tab="Java SDK" data-lang="java">
 ```java
 import com.google.common.collect.ImmutableMap;
-import io.prediction.Event;
-import io.prediction.EventClient;
+import org.apache.predictionio.Event;
+import org.apache.predictionio.EventClient;
 
 EventClient client = new EventClient(<ACCESS KEY>, <URL OF EVENTSERVER>);
 
@@ -429,7 +429,7 @@ import com.google.common.collect.ImmutableList;
 import com.google.common.collect.ImmutableMap;
 import com.google.gson.JsonObject;
 
-import io.prediction.EngineClient;
+import org.apache.predictionio.EngineClient;
 
 EngineClient engineClient = new EngineClient(<ENGINE DEPLOY URL>);
 

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/ed9d62d7/docs/manual/source/templates/complementarypurchase/quickstart.html.md.erb
----------------------------------------------------------------------
diff --git a/docs/manual/source/templates/complementarypurchase/quickstart.html.md.erb b/docs/manual/source/templates/complementarypurchase/quickstart.html.md.erb
index 651ec7b..25b6e2b 100644
--- a/docs/manual/source/templates/complementarypurchase/quickstart.html.md.erb
+++ b/docs/manual/source/templates/complementarypurchase/quickstart.html.md.erb
@@ -159,8 +159,8 @@ client.create_event(
   </div>
   <div data-tab="Java SDK" data-lang="java">
 ```java
-import io.prediction.Event;
-import io.prediction.EventClient;
+import org.apache.predictionio.Event;
+import org.apache.predictionio.EventClient;
 
 import com.google.common.collect.ImmutableList;
 
@@ -297,7 +297,7 @@ import com.google.common.collect.ImmutableMap;
 import com.google.common.collect.ImmutableList;
 import com.google.gson.JsonObject;
 
-import io.prediction.EngineClient;
+import org.apache.predictionio.EngineClient;
 
 // create client object
 EngineClient engineClient = new EngineClient("http://localhost:8000");

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/ed9d62d7/docs/manual/source/templates/ecommercerecommendation/quickstart.html.md.erb
----------------------------------------------------------------------
diff --git a/docs/manual/source/templates/ecommercerecommendation/quickstart.html.md.erb b/docs/manual/source/templates/ecommercerecommendation/quickstart.html.md.erb
index fc31650..ea2c425 100644
--- a/docs/manual/source/templates/ecommercerecommendation/quickstart.html.md.erb
+++ b/docs/manual/source/templates/ecommercerecommendation/quickstart.html.md.erb
@@ -147,8 +147,8 @@ client.create_event(
   </div>
   <div data-tab="Java SDK" data-lang="java">
 ```java
-import io.prediction.Event;
-import io.prediction.EventClient;
+import org.apache.predictionio.Event;
+import org.apache.predictionio.EventClient;
 
 import com.google.common.collect.ImmutableList;
 
@@ -495,7 +495,7 @@ import com.google.common.collect.ImmutableMap;
 import com.google.common.collect.ImmutableList;
 import com.google.gson.JsonObject;
 
-import io.prediction.EngineClient;
+import org.apache.predictionio.EngineClient;
 
 // create client object
 EngineClient engineClient = new EngineClient("http://localhost:8000");

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/ed9d62d7/docs/manual/source/templates/javaecommercerecommendation/quickstart.html.md.erb
----------------------------------------------------------------------
diff --git a/docs/manual/source/templates/javaecommercerecommendation/quickstart.html.md.erb b/docs/manual/source/templates/javaecommercerecommendation/quickstart.html.md.erb
index fe8354b..5dc2061 100644
--- a/docs/manual/source/templates/javaecommercerecommendation/quickstart.html.md.erb
+++ b/docs/manual/source/templates/javaecommercerecommendation/quickstart.html.md.erb
@@ -148,8 +148,8 @@ client.create_event(
   </div>
   <div data-tab="Java SDK" data-lang="java">
 ```java
-import io.prediction.Event;
-import io.prediction.EventClient;
+import org.apache.predictionio.Event;
+import org.apache.predictionio.EventClient;
 
 import com.google.common.collect.ImmutableList;
 
@@ -495,7 +495,7 @@ import com.google.common.collect.ImmutableMap;
 import com.google.common.collect.ImmutableList;
 import com.google.gson.JsonObject;
 
-import io.prediction.EngineClient;
+import org.apache.predictionio.EngineClient;
 
 // create client object
 EngineClient engineClient = new EngineClient("http://localhost:8000");

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/ed9d62d7/docs/manual/source/templates/leadscoring/quickstart.html.md.erb
----------------------------------------------------------------------
diff --git a/docs/manual/source/templates/leadscoring/quickstart.html.md.erb b/docs/manual/source/templates/leadscoring/quickstart.html.md.erb
index 081111c..5d5723c 100644
--- a/docs/manual/source/templates/leadscoring/quickstart.html.md.erb
+++ b/docs/manual/source/templates/leadscoring/quickstart.html.md.erb
@@ -152,8 +152,8 @@ client.create_event(
   </div>
   <div data-tab="Java SDK" data-lang="java">
 ```java
-import io.prediction.Event;
-import io.prediction.EventClient;
+import org.apache.predictionio.Event;
+import org.apache.predictionio.EventClient;
 
 import com.google.common.collect.ImmutableList;
 
@@ -372,7 +372,7 @@ import com.google.common.collect.ImmutableMap;
 import com.google.common.collect.ImmutableList;
 import com.google.gson.JsonObject;
 
-import io.prediction.EngineClient;
+import org.apache.predictionio.EngineClient;
 
 // create client object
 EngineClient engineClient = new EngineClient("http://localhost:8000");

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/ed9d62d7/docs/manual/source/templates/productranking/quickstart.html.md.erb
----------------------------------------------------------------------
diff --git a/docs/manual/source/templates/productranking/quickstart.html.md.erb b/docs/manual/source/templates/productranking/quickstart.html.md.erb
index 759fd02..555390a 100644
--- a/docs/manual/source/templates/productranking/quickstart.html.md.erb
+++ b/docs/manual/source/templates/productranking/quickstart.html.md.erb
@@ -116,8 +116,8 @@ client.create_event(
   </div>
   <div data-tab="Java SDK" data-lang="java">
 ```java
-import io.prediction.Event;
-import io.prediction.EventClient;
+import org.apache.predictionio.Event;
+import org.apache.predictionio.EventClient;
 
 import com.google.common.collect.ImmutableList;
 
@@ -372,7 +372,7 @@ import com.google.common.collect.ImmutableMap;
 import com.google.common.collect.ImmutableList;
 import com.google.gson.JsonObject;
 
-import io.prediction.EngineClient;
+import org.apache.predictionio.EngineClient;
 
 // create client object
 EngineClient engineClient = new EngineClient("http://localhost:8000");

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/ed9d62d7/docs/manual/source/templates/recommendation/batch-evaluator.html.md
----------------------------------------------------------------------
diff --git a/docs/manual/source/templates/recommendation/batch-evaluator.html.md b/docs/manual/source/templates/recommendation/batch-evaluator.html.md
index b574036..3d66645 100644
--- a/docs/manual/source/templates/recommendation/batch-evaluator.html.md
+++ b/docs/manual/source/templates/recommendation/batch-evaluator.html.md
@@ -60,15 +60,15 @@ Note that output directory is specified by the variable `outputDir`.
 ```scala
 package org.template.recommendation
 
-import io.prediction.controller.EmptyEvaluationInfo
-import io.prediction.controller.Engine
-import io.prediction.controller.EngineParams
-import io.prediction.controller.EngineParamsGenerator
-import io.prediction.controller.Evaluation
-import io.prediction.controller.Params
-import io.prediction.core.BaseEvaluator
-import io.prediction.core.BaseEvaluatorResult
-import io.prediction.workflow.WorkflowParams
+import org.apache.predictionio.controller.EmptyEvaluationInfo
+import org.apache.predictionio.controller.Engine
+import org.apache.predictionio.controller.EngineParams
+import org.apache.predictionio.controller.EngineParamsGenerator
+import org.apache.predictionio.controller.Evaluation
+import org.apache.predictionio.controller.Params
+import org.apache.predictionio.core.BaseEvaluator
+import org.apache.predictionio.core.BaseEvaluatorResult
+import org.apache.predictionio.workflow.WorkflowParams
 
 import org.apache.spark.SparkContext
 import org.apache.spark.rdd.RDD
@@ -153,9 +153,9 @@ Modify the appName parameter in `DataSourceParams` to match your app name.
 ```scala
 package org.template.recommendation
 
-import io.prediction.controller.EngineParamsGenerator
-import io.prediction.controller.EngineParams
-import io.prediction.controller.Evaluation
+import org.apache.predictionio.controller.EngineParamsGenerator
+import org.apache.predictionio.controller.EngineParams
+import org.apache.predictionio.controller.Evaluation
 
 object BatchEvaluation extends Evaluation {
   // Define Engine and Evaluator used in Evaluation

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/ed9d62d7/docs/manual/source/templates/recommendation/customize-data-prep.html.md
----------------------------------------------------------------------
diff --git a/docs/manual/source/templates/recommendation/customize-data-prep.html.md b/docs/manual/source/templates/recommendation/customize-data-prep.html.md
index d4b0119..dc22572 100644
--- a/docs/manual/source/templates/recommendation/customize-data-prep.html.md
+++ b/docs/manual/source/templates/recommendation/customize-data-prep.html.md
@@ -138,7 +138,7 @@ Modify `src/main/scala/Preparator.scala` again in the *MyRecommendation*
 directory to:
 
 ```scala
-import io.prediction.controller.Params // ADDED
+import org.apache.predictionio.controller.Params // ADDED
 
  // ADDED CustomPreparatorParams case class
 case class CustomPreparatorParams(

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/ed9d62d7/docs/manual/source/templates/recommendation/customize-serving.html.md
----------------------------------------------------------------------
diff --git a/docs/manual/source/templates/recommendation/customize-serving.html.md b/docs/manual/source/templates/recommendation/customize-serving.html.md
index c7d2afd..e49960f 100644
--- a/docs/manual/source/templates/recommendation/customize-serving.html.md
+++ b/docs/manual/source/templates/recommendation/customize-serving.html.md
@@ -171,7 +171,7 @@ directory to:
 ```scala
 import scala.io.Source
 
-import io.prediction.controller.Params  // ADDED
+import org.apache.predictionio.controller.Params  // ADDED
 
 // ADDED ServingParams to specify the blacklisting file location.
 case class ServingParams(filepath: String) extends Params
@@ -219,4 +219,4 @@ $ pio deploy
 
 You can change the `filepath` value without re-building the code next time.
 
-#### [Next: Filter Recommended Items by Blacklist in Query](blacklist-items.html)
\ No newline at end of file
+#### [Next: Filter Recommended Items by Blacklist in Query](blacklist-items.html)

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/ed9d62d7/docs/manual/source/templates/recommendation/quickstart.html.md.erb
----------------------------------------------------------------------
diff --git a/docs/manual/source/templates/recommendation/quickstart.html.md.erb b/docs/manual/source/templates/recommendation/quickstart.html.md.erb
index 46d15b9..f84e9d1 100644
--- a/docs/manual/source/templates/recommendation/quickstart.html.md.erb
+++ b/docs/manual/source/templates/recommendation/quickstart.html.md.erb
@@ -137,8 +137,8 @@ client.create_event(
   </div>
   <div data-tab="Java SDK" data-lang="java">
 ```java
-import io.prediction.Event;
-import io.prediction.EventClient;
+import org.apache.predictionio.Event;
+import org.apache.predictionio.EventClient;
 
 EventClient client = new EventClient(<ACCESS KEY>, <URL OF EVENTSERVER>);
 
@@ -323,7 +323,7 @@ puts response
 import com.google.common.collect.ImmutableMap;
 import com.google.gson.JsonObject;
 
-import io.prediction.EngineClient;
+import org.apache.predictionio.EngineClient;
 
 // create client object
 EngineClient engineClient = new EngineClient(<ENGINE DEPLOY URL>);

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/ed9d62d7/docs/manual/source/templates/similarproduct/multi-events-multi-algos.html.md.erb
----------------------------------------------------------------------
diff --git a/docs/manual/source/templates/similarproduct/multi-events-multi-algos.html.md.erb b/docs/manual/source/templates/similarproduct/multi-events-multi-algos.html.md.erb
index b30dc1b..d4fb789 100644
--- a/docs/manual/source/templates/similarproduct/multi-events-multi-algos.html.md.erb
+++ b/docs/manual/source/templates/similarproduct/multi-events-multi-algos.html.md.erb
@@ -162,7 +162,7 @@ It is shown in the code below:
 ```scala
 package org.template.similarproduct
 
-import io.prediction.data.storage.BiMap
+import org.apache.predictionio.data.storage.BiMap
 
 import org.apache.spark.SparkContext
 import org.apache.spark.SparkContext._

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/ed9d62d7/docs/manual/source/templates/similarproduct/quickstart.html.md.erb
----------------------------------------------------------------------
diff --git a/docs/manual/source/templates/similarproduct/quickstart.html.md.erb b/docs/manual/source/templates/similarproduct/quickstart.html.md.erb
index b55f511..8f8c6d1 100644
--- a/docs/manual/source/templates/similarproduct/quickstart.html.md.erb
+++ b/docs/manual/source/templates/similarproduct/quickstart.html.md.erb
@@ -139,8 +139,8 @@ client.create_event(
   </div>
   <div data-tab="Java SDK" data-lang="java">
 ```java
-import io.prediction.Event;
-import io.prediction.EventClient;
+import org.apache.predictionio.Event;
+import org.apache.predictionio.EventClient;
 
 import com.google.common.collect.ImmutableList;
 
@@ -401,7 +401,7 @@ import com.google.common.collect.ImmutableMap;
 import com.google.common.collect.ImmutableList;
 import com.google.gson.JsonObject;
 
-import io.prediction.EngineClient;
+import org.apache.predictionio.EngineClient;
 
 // create client object
 EngineClient engineClient = new EngineClient("http://localhost:8000");

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/ed9d62d7/docs/manual/source/templates/vanilla/quickstart.html.md.erb
----------------------------------------------------------------------
diff --git a/docs/manual/source/templates/vanilla/quickstart.html.md.erb b/docs/manual/source/templates/vanilla/quickstart.html.md.erb
index 4c0f200..6e24038 100644
--- a/docs/manual/source/templates/vanilla/quickstart.html.md.erb
+++ b/docs/manual/source/templates/vanilla/quickstart.html.md.erb
@@ -100,7 +100,7 @@ puts response
 import com.google.common.collect.ImmutableMap;
 import com.google.gson.JsonObject;
 
-import io.prediction.EngineClient;
+import org.apache.predictionio.EngineClient;
 
 // create client object
 EngineClient engineClient = new EngineClient(<ENGINE DEPLOY URL>);

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/ed9d62d7/docs/scaladoc/rootdoc.txt
----------------------------------------------------------------------
diff --git a/docs/scaladoc/rootdoc.txt b/docs/scaladoc/rootdoc.txt
index c546369..00db0ed 100644
--- a/docs/scaladoc/rootdoc.txt
+++ b/docs/scaladoc/rootdoc.txt
@@ -2,8 +2,8 @@ This is the API documentation of PredictionIO.
 
 == Package Structure ==
 
- - [[io.prediction.controller]] - The common starting point. Building blocks of a prediction engine.
- - [[io.prediction.data.store]] - Event Store API.
+ - [[org.apache.predictionio.controller]] - The common starting point. Building blocks of a prediction engine.
+ - [[org.apache.predictionio.data.store]] - Event Store API.
 
 == Experimental Features ==
 


[08/34] incubator-predictionio git commit: rename all except examples

Posted by do...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/data/src/main/scala/org/apache/predictionio/data/view/LBatchView.scala
----------------------------------------------------------------------
diff --git a/data/src/main/scala/org/apache/predictionio/data/view/LBatchView.scala b/data/src/main/scala/org/apache/predictionio/data/view/LBatchView.scala
new file mode 100644
index 0000000..5bd7478
--- /dev/null
+++ b/data/src/main/scala/org/apache/predictionio/data/view/LBatchView.scala
@@ -0,0 +1,200 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package org.apache.predictionio.data.view
+
+import org.apache.predictionio.data.storage.Event
+import org.apache.predictionio.data.storage.EventValidation
+import org.apache.predictionio.data.storage.DataMap
+import org.apache.predictionio.data.storage.Storage
+
+import org.joda.time.DateTime
+import scala.language.implicitConversions
+
+import scala.concurrent.ExecutionContext.Implicits.global // TODO
+
+@deprecated("Use LEvents or LEventStore instead.", "0.9.2")
+object ViewPredicates {
+  def getStartTimePredicate(startTimeOpt: Option[DateTime])
+  : (Event => Boolean) = {
+    startTimeOpt.map(getStartTimePredicate).getOrElse(_ => true)
+  }
+
+  def getStartTimePredicate(startTime: DateTime): (Event => Boolean) = {
+    // inclusive lower bound: keep events at or after startTime
+    e => !e.eventTime.isBefore(startTime)
+  }
+
+  def getUntilTimePredicate(untilTimeOpt: Option[DateTime])
+  : (Event => Boolean) = {
+    untilTimeOpt.map(getUntilTimePredicate).getOrElse(_ => true)
+  }
+
+  def getUntilTimePredicate(untilTime: DateTime): (Event => Boolean) = {
+    _.eventTime.isBefore(untilTime)
+  }
+
+  def getEntityTypePredicate(entityTypeOpt: Option[String]): (Event => Boolean)
+  = {
+    entityTypeOpt.map(getEntityTypePredicate).getOrElse(_ => true)
+  }
+
+  def getEntityTypePredicate(entityType: String): (Event => Boolean) = {
+    (_.entityType == entityType)
+  }
+
+  def getEventPredicate(eventOpt: Option[String]): (Event => Boolean)
+  = {
+    eventOpt.map(getEventPredicate).getOrElse(_ => true)
+  }
+
+  def getEventPredicate(event: String): (Event => Boolean) = {
+    (_.event == event)
+  }
+}
+
+@deprecated("Use LEvents instead.", "0.9.2")
+object ViewAggregators {
+  def getDataMapAggregator(): ((Option[DataMap], Event) => Option[DataMap]) = {
+    (p, e) => {
+      e.event match {
+        case "$set" => {
+          if (p == None) {
+            Some(e.properties)
+          } else {
+            p.map(_ ++ e.properties)
+          }
+        }
+        case "$unset" => {
+          if (p == None) {
+            None
+          } else {
+            p.map(_ -- e.properties.keySet)
+          }
+        }
+        case "$delete" => None
+        case _ => p // do nothing for others
+      }
+    }
+  }
+}
+
+@deprecated("Use LEvents instead.", "0.9.2")
+object EventSeq {
+  // Need to
+  // >>> import scala.language.implicitConversions
+  // to enable implicit conversion. Only import in the code where this is
+  // necessary to avoid confusion.
+  implicit def eventSeqToList(es: EventSeq): List[Event] = es.events
+  implicit def listToEventSeq(l: List[Event]): EventSeq = new EventSeq(l)
+}
+
+
+@deprecated("Use LEvents instead.", "0.9.2")
+class EventSeq(val events: List[Event]) {
+  def filter(
+    eventOpt: Option[String] = None,
+    entityTypeOpt: Option[String] = None,
+    startTimeOpt: Option[DateTime] = None,
+    untilTimeOpt: Option[DateTime] = None): EventSeq = {
+
+    events
+    .filter(ViewPredicates.getEventPredicate(eventOpt))
+    .filter(ViewPredicates.getStartTimePredicate(startTimeOpt))
+    .filter(ViewPredicates.getUntilTimePredicate(untilTimeOpt))
+    .filter(ViewPredicates.getEntityTypePredicate(entityTypeOpt))
+  }
+
+  def filter(p: (Event => Boolean)): EventSeq = events.filter(p)
+
+  def aggregateByEntityOrdered[T](init: T, op: (T, Event) => T)
+  : Map[String, T] = {
+    events
+    .groupBy( _.entityId )
+    .mapValues( _.sortBy(_.eventTime.getMillis).foldLeft[T](init)(op))
+    .toMap
+  }
+
+
+}
+
+
+@deprecated("Use LEventStore instead.", "0.9.2")
+class LBatchView(
+  val appId: Int,
+  val startTime: Option[DateTime],
+  val untilTime: Option[DateTime]) {
+
+  @transient lazy val eventsDb = Storage.getLEvents()
+
+  @transient lazy val _events = eventsDb.find(
+    appId = appId,
+    startTime = startTime,
+    untilTime = untilTime).toList
+
+  @transient lazy val events: EventSeq = new EventSeq(_events)
+
+  /* Aggregate event data
+   *
+   * @param entityType only aggregate event with entityType
+   * @param startTimeOpt if specified, only aggregate event after (inclusive)
+   * startTimeOpt
+   * @param untilTimeOpt if specified, only aggregate event until (exclusive)
+   * endTimeOpt
+   */
+  def aggregateProperties(
+      entityType: String,
+      startTimeOpt: Option[DateTime] = None,
+      untilTimeOpt: Option[DateTime] = None
+      ): Map[String, DataMap] = {
+
+    events
+    .filter(
+      entityTypeOpt = Some(entityType),
+      startTimeOpt = startTimeOpt,
+      untilTimeOpt = untilTimeOpt)
+    .filter(e => EventValidation.isSpecialEvents(e.event))
+    .aggregateByEntityOrdered(
+      init = None,
+      op = ViewAggregators.getDataMapAggregator())
+    .filter{ case (k, v) => (v != None) }
+    .mapValues(_.get)
+
+  }
+
+  /*
+  def aggregateByEntityOrdered[T](
+    predicate: Event => Boolean,
+    init: T,
+    op: (T, Event) => T): Map[String, T] = {
+
+    _events
+      .filter( predicate(_) )
+      .groupBy( _.entityId )
+      .mapValues( _.sortBy(_.eventTime.getMillis).foldLeft[T](init)(op))
+      .toMap
+
+  }
+  */
+
+  /*
+  def groupByEntityOrdered[T](
+    predicate: Event => Boolean,
+    map: Event => T): Map[String, Seq[T]] = {
+
+    _events
+      .filter( predicate(_) )
+      .groupBy( _.entityId )
+      .mapValues( _.sortBy(_.eventTime.getMillis).map(map(_)) )
+      .toMap
+  }
+  */
+}
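
As a point of reference, the deprecated API above can be driven as in the
following sketch (a configured event store backend is assumed, and the app ID
and "user" entity type are illustrative, not part of this commit):

```scala
import org.apache.predictionio.data.storage.DataMap
import org.apache.predictionio.data.view.LBatchView

// Illustrative values: app ID 1 and entity type "user" are assumptions.
val view = new LBatchView(appId = 1, startTime = None, untilTime = None)

// Folds each entity's $set/$unset/$delete events, ordered by event time,
// into its latest property map; fully deleted entities are dropped.
val props: Map[String, DataMap] = view.aggregateProperties(entityType = "user")
props.foreach { case (entityId, dm) => println(s"$entityId: $dm") }
```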

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/data/src/main/scala/org/apache/predictionio/data/view/PBatchView.scala
----------------------------------------------------------------------
diff --git a/data/src/main/scala/org/apache/predictionio/data/view/PBatchView.scala b/data/src/main/scala/org/apache/predictionio/data/view/PBatchView.scala
new file mode 100644
index 0000000..6c75402
--- /dev/null
+++ b/data/src/main/scala/org/apache/predictionio/data/view/PBatchView.scala
@@ -0,0 +1,209 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package org.apache.predictionio.data.view
+
+import org.apache.predictionio.data.storage.hbase.HBPEvents
+import org.apache.predictionio.data.storage.Event
+import org.apache.predictionio.data.storage.EventValidation
+import org.apache.predictionio.data.storage.DataMap
+import org.apache.predictionio.data.storage.Storage
+
+import org.joda.time.DateTime
+
+import org.json4s.JValue
+
+import org.apache.spark.SparkContext
+import org.apache.spark.SparkContext._
+import org.apache.spark.rdd.RDD
+
+
+// each JValue data associated with the time it is set
+private[predictionio] case class PropTime(val d: JValue, val t: Long) extends Serializable
+
+private[predictionio] case class SetProp (
+  val fields: Map[String, PropTime],
+  // last set time. Note: fields could be empty with valid set time
+  val t: Long) extends Serializable {
+
+  def ++ (that: SetProp): SetProp = {
+    val commonKeys = fields.keySet.intersect(that.fields.keySet)
+
+    val common: Map[String, PropTime] = commonKeys.map { k =>
+      val thisData = this.fields(k)
+      val thatData = that.fields(k)
+      // only keep the value with latest time
+      val v = if (thisData.t > thatData.t) thisData else thatData
+      (k, v)
+    }.toMap
+
+    val combinedFields = common ++
+      (this.fields -- commonKeys) ++ (that.fields -- commonKeys)
+
+    // keep the latest set time
+    val combinedT = if (this.t > that.t) this.t else that.t
+
+    SetProp(
+      fields = combinedFields,
+      t = combinedT
+    )
+  }
+}
+
+private[predictionio] case class UnsetProp (fields: Map[String, Long]) extends Serializable {
+  def ++ (that: UnsetProp): UnsetProp = {
+    val commonKeys = fields.keySet.intersect(that.fields.keySet)
+
+    val common: Map[String, Long] = commonKeys.map { k =>
+      val thisData = this.fields(k)
+      val thatData = that.fields(k)
+      // only keep the value with latest time
+      val v = if (thisData > thatData) thisData else thatData
+      (k, v)
+    }.toMap
+
+    val combinedFields = common ++
+      (this.fields -- commonKeys) ++ (that.fields -- commonKeys)
+
+    UnsetProp(
+      fields = combinedFields
+    )
+  }
+}
+
+private[predictionio] case class DeleteEntity (t: Long) extends Serializable {
+  def ++ (that: DeleteEntity): DeleteEntity = {
+    if (this.t > that.t) this else that
+  }
+}
+
+private[predictionio] case class EventOp (
+  val setProp: Option[SetProp] = None,
+  val unsetProp: Option[UnsetProp] = None,
+  val deleteEntity: Option[DeleteEntity] = None
+) extends Serializable {
+
+  def ++ (that: EventOp): EventOp = {
+    EventOp(
+      setProp = (setProp ++ that.setProp).reduceOption(_ ++ _),
+      unsetProp = (unsetProp ++ that.unsetProp).reduceOption(_ ++ _),
+      deleteEntity = (deleteEntity ++ that.deleteEntity).reduceOption(_ ++ _)
+    )
+  }
+
+  def toDataMap(): Option[DataMap] = {
+    setProp.flatMap { set =>
+
+      val unsetKeys: Set[String] = unsetProp.map( unset =>
+        unset.fields.filter{ case (k, v) => (v >= set.fields(k).t) }.keySet
+      ).getOrElse(Set())
+
+      val combinedFields = deleteEntity.map { delete =>
+        if (delete.t >= set.t) {
+          None
+        } else {
+          val deleteKeys: Set[String] = set.fields
+            .filter { case (k, PropTime(kv, t)) =>
+              (delete.t >= t)
+            }.keySet
+          Some(set.fields -- unsetKeys -- deleteKeys)
+        }
+      }.getOrElse{
+        Some(set.fields -- unsetKeys)
+      }
+
+      // Note: mapValues() doesn't return concrete Map and causes
+      // NotSerializableException issue. Use map(identity) to work around this.
+      // see https://issues.scala-lang.org/browse/SI-7005
+      combinedFields.map(f => DataMap(f.mapValues(_.d).map(identity)))
+    }
+  }
+
+}
+
+private[predictionio] object EventOp {
+  def apply(e: Event): EventOp = {
+    val t = e.eventTime.getMillis
+    e.event match {
+      case "$set" => {
+        val fields = e.properties.fields.mapValues(jv =>
+          PropTime(jv, t)
+        ).map(identity)
+
+        EventOp(
+          setProp = Some(SetProp(fields = fields, t = t))
+        )
+      }
+      case "$unset" => {
+        val fields = e.properties.fields.mapValues(jv => t).map(identity)
+        EventOp(
+          unsetProp = Some(UnsetProp(fields = fields))
+        )
+      }
+      case "$delete" => {
+        EventOp(
+          deleteEntity = Some(DeleteEntity(t))
+        )
+      }
+      case _ => {
+        EventOp()
+      }
+    }
+  }
+}
+
+@deprecated("Use PEvents or PEventStore instead.", "0.9.2")
+class PBatchView(
+  val appId: Int,
+  val startTime: Option[DateTime],
+  val untilTime: Option[DateTime],
+  val sc: SparkContext) {
+
+  // NOTE: parallel Events DB interface
+  @transient lazy val eventsDb = Storage.getPEvents()
+
+  @transient lazy val _events: RDD[Event] =
+    eventsDb.getByAppIdAndTimeAndEntity(
+      appId = appId,
+      startTime = startTime,
+      untilTime = untilTime,
+      entityType = None,
+      entityId = None)(sc)
+
+  // TODO: change to use EventSeq?
+  @transient lazy val events: RDD[Event] = _events
+
+  def aggregateProperties(
+    entityType: String,
+    startTimeOpt: Option[DateTime] = None,
+    untilTimeOpt: Option[DateTime] = None
+  ): RDD[(String, DataMap)] = {
+
+    _events
+      .filter( e => ((e.entityType == entityType) &&
+        (EventValidation.isSpecialEvents(e.event))) )
+      .map( e => (e.entityId, EventOp(e) ))
+      .aggregateByKey[EventOp](EventOp())(
+        // within same partition
+        seqOp = { case (u, v) => u ++ v },
+        // across partition
+        combOp = { case (accu, u) => accu ++ u }
+      )
+      .mapValues(_.toDataMap)
+      .filter{ case (k, v) => v.isDefined }
+      .map{ case (k, v) => (k, v.get) }
+  }
+
+}
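
The parallel view follows the same aggregation contract on top of Spark; a
minimal sketch (the SparkContext setup and app ID are assumptions):

```scala
import org.apache.predictionio.data.storage.DataMap
import org.apache.predictionio.data.view.PBatchView
import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

// Illustrative Spark setup; any configured SparkContext works here.
val sc = new SparkContext(new SparkConf().setMaster("local").setAppName("pbv"))
val view = new PBatchView(appId = 1, startTime = None, untilTime = None, sc = sc)

// EventOp values merge per entity via ++; entities whose latest special
// event is a $delete fold to None and are filtered out.
val props: RDD[(String, DataMap)] = view.aggregateProperties(entityType = "user")
props.collect().foreach(println)
```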

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/data/src/main/scala/org/apache/predictionio/data/view/QuickTest.scala
----------------------------------------------------------------------
diff --git a/data/src/main/scala/org/apache/predictionio/data/view/QuickTest.scala b/data/src/main/scala/org/apache/predictionio/data/view/QuickTest.scala
new file mode 100644
index 0000000..eba3276
--- /dev/null
+++ b/data/src/main/scala/org/apache/predictionio/data/view/QuickTest.scala
@@ -0,0 +1,94 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package org.apache.predictionio.data.view
+
+import org.apache.predictionio.data.storage.Event
+import org.apache.predictionio.data.storage.LEvents
+import org.apache.predictionio.data.storage.EventValidation
+import org.apache.predictionio.data.storage.DataMap
+import org.apache.predictionio.data.storage.Storage
+
+import scala.concurrent.ExecutionContext.Implicits.global // TODO
+
+import grizzled.slf4j.Logger
+import org.joda.time.DateTime
+
+import scala.language.implicitConversions
+
+class TestHBLEvents() {
+  @transient lazy val eventsDb = Storage.getLEvents()
+
+  def run(): Unit = {
+    val r = eventsDb.find(
+      appId = 1,
+      startTime = None,
+      untilTime = None,
+      entityType = Some("pio_user"),
+      entityId = Some("3")).toList
+    println(r)
+  }
+}
+
+class TestSource(val appId: Int) {
+  @transient lazy val logger = Logger[this.type]
+  @transient lazy val batchView = new LBatchView(appId,
+    None, None)
+
+  def run(): Unit = {
+    println(batchView.events)
+  }
+}
+
+object QuickTest {
+
+  def main(args: Array[String]) {
+    val t = new TestHBLEvents()
+    t.run()
+
+    // val ts = new TestSource(args(0).toInt)
+    // ts.run()
+  }
+}
+
+object TestEventTime {
+  @transient lazy val batchView = new LBatchView(9, None, None)
+
+  // implicit def back2list(es: EventSeq) = es.events
+
+  def main(args: Array[String]) {
+    val e = batchView.events.filter(
+      eventOpt = Some("rate"),
+      startTimeOpt = Some(new DateTime(1998, 1, 1, 0, 0))
+      // untilTimeOpt = Some(new DateTime(1997, 1, 1, 0, 0))
+    )
+      // untilTimeOpt = Some(new DateTime(2000, 1, 1, 0, 0)))
+
+    e.foreach { println }
+    println()
+    println()
+    println()
+    val u = batchView.aggregateProperties("pio_item")
+    u.foreach { println }
+    println()
+    println()
+    println()
+
+    // val l: Seq[Event] = e
+    val l = e.map { _.entityId }
+    l.foreach { println }
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/data/src/main/scala/org/apache/predictionio/data/webhooks/ConnectorException.scala
----------------------------------------------------------------------
diff --git a/data/src/main/scala/org/apache/predictionio/data/webhooks/ConnectorException.scala b/data/src/main/scala/org/apache/predictionio/data/webhooks/ConnectorException.scala
new file mode 100644
index 0000000..ee47a9c
--- /dev/null
+++ b/data/src/main/scala/org/apache/predictionio/data/webhooks/ConnectorException.scala
@@ -0,0 +1,31 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package org.apache.predictionio.data.webhooks
+
+/** Webhooks Connector Exception
+  *
+  * @param message the detail message
+  * @param cause the cause
+  */
+private[predictionio] class ConnectorException(message: String, cause: Throwable)
+  extends Exception(message, cause) {
+
+  /** Webhooks Connector Exception with the cause set to null
+    *
+    * @param message the detail message
+    */
+  def this(message: String) = this(message, null)
+}
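
Connectors raise this exception when an incoming payload is malformed; a
minimal sketch of the pattern (the requireField helper is hypothetical):

```scala
import org.apache.predictionio.data.webhooks.ConnectorException

// Hypothetical helper: fail fast when a required payload field is missing.
def requireField(data: Map[String, String], name: String): String =
  data.getOrElse(name,
    throw new ConnectorException(s"The field '$name' is required."))
```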

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/data/src/main/scala/org/apache/predictionio/data/webhooks/ConnectorUtil.scala
----------------------------------------------------------------------
diff --git a/data/src/main/scala/org/apache/predictionio/data/webhooks/ConnectorUtil.scala b/data/src/main/scala/org/apache/predictionio/data/webhooks/ConnectorUtil.scala
new file mode 100644
index 0000000..40feb98
--- /dev/null
+++ b/data/src/main/scala/org/apache/predictionio/data/webhooks/ConnectorUtil.scala
@@ -0,0 +1,46 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package org.apache.predictionio.data.webhooks
+
+import org.apache.predictionio.data.storage.Event
+import org.apache.predictionio.data.storage.EventJson4sSupport
+
+import org.json4s.Formats
+import org.json4s.DefaultFormats
+import org.json4s.JObject
+import org.json4s.native.Serialization.read
+import org.json4s.native.Serialization.write
+
+
+private[predictionio] object ConnectorUtil {
+
+  implicit val eventJson4sFormats: Formats = DefaultFormats +
+    new EventJson4sSupport.APISerializer
+
+  // Intentionally use EventJson4sSupport.APISerializer to convert
+  // from JSON to Event objects. Connectors may not create Event
+  // objects directly, so that Event object formation stays consistent
+  // by enforcing the JSON format.
+
+  def toEvent(connector: JsonConnector, data: JObject): Event = {
+    read[Event](write(connector.toEventJson(data)))
+  }
+
+  def toEvent(connector: FormConnector, data: Map[String, String]): Event = {
+    read[Event](write(connector.toEventJson(data)))
+  }
+
+}
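
A sketch of the round trip, using the ExampleFormConnector added later in this
commit (the payload values are illustrative):

```scala
import org.apache.predictionio.data.storage.Event
import org.apache.predictionio.data.webhooks.ConnectorUtil
import org.apache.predictionio.data.webhooks.exampleform.ExampleFormConnector

// The connector maps the form data to Event API JSON, and ConnectorUtil
// deserializes that JSON into a storage Event via APISerializer.
val event: Event = ConnectorUtil.toEvent(ExampleFormConnector, Map(
  "type" -> "userAction",
  "userId" -> "as34smg4",
  "event" -> "do_something",
  "anotherProperty1" -> "100",
  "timestamp" -> "2015-01-02T00:30:12.984Z"))
```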

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/data/src/main/scala/org/apache/predictionio/data/webhooks/FormConnector.scala
----------------------------------------------------------------------
diff --git a/data/src/main/scala/org/apache/predictionio/data/webhooks/FormConnector.scala b/data/src/main/scala/org/apache/predictionio/data/webhooks/FormConnector.scala
new file mode 100644
index 0000000..dd04a21
--- /dev/null
+++ b/data/src/main/scala/org/apache/predictionio/data/webhooks/FormConnector.scala
@@ -0,0 +1,32 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package org.apache.predictionio.data.webhooks
+
+import org.json4s.JObject
+
+/** Connector for webhook connections with form submission data format
+  */
+private[predictionio] trait FormConnector {
+
+  // TODO: support conversion to multiple events?
+
+  /** Convert from original Form submission data to Event JObject
+    * @param data Map of key-value pairs in String type received through webhooks
+    * @return Event JObject
+   */
+  def toEventJson(data: Map[String, String]): JObject
+
+}
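
A minimal illustrative implementation of the trait (the form field names are
assumptions, not any real service's format):

```scala
import org.apache.predictionio.data.webhooks.FormConnector
import org.json4s.JObject
import org.json4s.JsonDSL._

object MyFormConnector extends FormConnector {
  // Map a flat form payload straight onto the Event API JSON shape.
  override def toEventJson(data: Map[String, String]): JObject =
    ("event" -> data("event")) ~
    ("entityType" -> "user") ~
    ("entityId" -> data("userId")) ~
    ("eventTime" -> data("timestamp"))
}
```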

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/data/src/main/scala/org/apache/predictionio/data/webhooks/JsonConnector.scala
----------------------------------------------------------------------
diff --git a/data/src/main/scala/org/apache/predictionio/data/webhooks/JsonConnector.scala b/data/src/main/scala/org/apache/predictionio/data/webhooks/JsonConnector.scala
new file mode 100644
index 0000000..eda8059
--- /dev/null
+++ b/data/src/main/scala/org/apache/predictionio/data/webhooks/JsonConnector.scala
@@ -0,0 +1,31 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package org.apache.predictionio.data.webhooks
+
+import org.json4s.JObject
+
+/** Connector for webhook connections */
+private[predictionio] trait JsonConnector {
+
+  // TODO: support conversion to multiple events?
+
+  /** Convert from original JObject to Event JObject
+    * @param data original JObject received through webhooks
+    * @return Event JObject
+   */
+  def toEventJson(data: JObject): JObject
+
+}
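
Analogously, a minimal illustrative implementation for JSON payloads (the
input field names are assumptions):

```scala
import org.json4s._
import org.json4s.JsonDSL._
import org.apache.predictionio.data.webhooks.JsonConnector

object MyJsonConnector extends JsonConnector {
  // Pass selected fields of the incoming JSON through to the Event API shape.
  override def toEventJson(data: JObject): JObject =
    ("event" -> (data \ "event")) ~
    ("entityType" -> "user") ~
    ("entityId" -> (data \ "userId")) ~
    ("eventTime" -> (data \ "timestamp"))
}
```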

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/data/src/main/scala/org/apache/predictionio/data/webhooks/exampleform/ExampleFormConnector.scala
----------------------------------------------------------------------
diff --git a/data/src/main/scala/org/apache/predictionio/data/webhooks/exampleform/ExampleFormConnector.scala b/data/src/main/scala/org/apache/predictionio/data/webhooks/exampleform/ExampleFormConnector.scala
new file mode 100644
index 0000000..adf8791
--- /dev/null
+++ b/data/src/main/scala/org/apache/predictionio/data/webhooks/exampleform/ExampleFormConnector.scala
@@ -0,0 +1,123 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package org.apache.predictionio.data.webhooks.exampleform
+
+import org.apache.predictionio.data.webhooks.FormConnector
+import org.apache.predictionio.data.webhooks.ConnectorException
+
+import org.json4s.JObject
+
+
+/** Example FormConnector with the following types of webhook form data inputs:
+  *
+  * UserAction
+  *
+  *   "type"="userAction"
+  *   "userId"="as34smg4",
+  *   "event"="do_something",
+  *   "context[ip]"="24.5.68.47", // optional
+  *   "context[prop1]"="2.345", // optional
+  *   "context[prop2]"="value1" // optional
+  *   "anotherProperty1"="100",
+  *   "anotherProperty2"="optional1", // optional
+  *   "timestamp"="2015-01-02T00:30:12.984Z"
+  *
+  * UserActionItem
+  *
+  *   "type"="userActionItem"
+  *   "userId"="as34smg4",
+  *   "event"="do_something_on",
+  *   "itemId"="kfjd312bc",
+  *   "context[ip]"="1.23.4.56",
+  *   "context[prop1]"="2.345",
+  *   "context[prop2]"="value1",
+  *   "anotherPropertyA"="4.567", // optional
+  *   "anotherPropertyB"="false", // optional
+  *   "timestamp"="2015-01-15T04:20:23.567Z"
+  *
+  */
+private[predictionio] object ExampleFormConnector extends FormConnector {
+
+  override
+  def toEventJson(data: Map[String, String]): JObject = {
+    val json = try {
+      data.get("type") match {
+        case Some("userAction") => userActionToEventJson(data)
+        case Some("userActionItem") => userActionItemToEventJson(data)
+        case Some(x) => throw new ConnectorException(
+          s"Cannot convert unknown type ${x} to event JSON")
+        case None => throw new ConnectorException(
+          s"The field 'type' is required.")
+      }
+    } catch {
+      case e: ConnectorException => throw e
+      case e: Exception => throw new ConnectorException(
+        s"Cannot convert ${data} to event JSON. ${e.getMessage()}", e)
+    }
+    json
+  }
+
+  def userActionToEventJson(data: Map[String, String]): JObject = {
+    import org.json4s.JsonDSL._
+
+    // two level optional data
+    val context = if (data.exists(_._1.startsWith("context["))) {
+      Some(
+        ("ip" -> data.get("context[ip]")) ~
+        ("prop1" -> data.get("context[prop1]").map(_.toDouble)) ~
+        ("prop2" -> data.get("context[prop2]"))
+      )
+    } else {
+      None
+    }
+
+    val json =
+      ("event" -> data("event")) ~
+      ("entityType" -> "user") ~
+      ("entityId" -> data("userId")) ~
+      ("eventTime" -> data("timestamp")) ~
+      ("properties" -> (
+        ("context" -> context) ~
+        ("anotherProperty1" -> data("anotherProperty1").toInt) ~
+        ("anotherProperty2" -> data.get("anotherProperty2"))
+      ))
+    json
+  }
+
+
+  def userActionItemToEventJson(data: Map[String, String]): JObject = {
+    import org.json4s.JsonDSL._
+
+    val json =
+      ("event" -> data("event")) ~
+      ("entityType" -> "user") ~
+      ("entityId" -> data("userId")) ~
+      ("targetEntityType" -> "item") ~
+      ("targetEntityId" -> data("itemId")) ~
+      ("eventTime" -> data("timestamp")) ~
+      ("properties" -> (
+        ("context" -> (
+          ("ip" -> data("context[ip]")) ~
+          ("prop1" -> data("context[prop1]").toDouble) ~
+          ("prop2" -> data("context[prop2]"))
+        )) ~
+        ("anotherPropertyA" -> data.get("anotherPropertyA").map(_.toDouble)) ~
+        ("anotherPropertyB" -> data.get("anotherPropertyB").map(_.toBoolean))
+      ))
+    json
+  }
+
+}
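
A sketch of feeding this connector a "userAction" submission built from the
sample fields documented above (values are illustrative; optional keys can
simply be left out):

```scala
import org.apache.predictionio.data.webhooks.exampleform.ExampleFormConnector

val eventJson = ExampleFormConnector.toEventJson(Map(
  "type" -> "userAction",
  "userId" -> "as34smg4",
  "event" -> "do_something",
  "context[ip]" -> "24.5.68.47",  // any "context[...]" key enables the block
  "context[prop1]" -> "2.345",
  "anotherProperty1" -> "100",
  "timestamp" -> "2015-01-02T00:30:12.984Z"))
// eventJson now carries event/entityType/entityId/eventTime/properties.
```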

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/data/src/main/scala/org/apache/predictionio/data/webhooks/examplejson/ExampleJsonConnector.scala
----------------------------------------------------------------------
diff --git a/data/src/main/scala/org/apache/predictionio/data/webhooks/examplejson/ExampleJsonConnector.scala b/data/src/main/scala/org/apache/predictionio/data/webhooks/examplejson/ExampleJsonConnector.scala
new file mode 100644
index 0000000..2129134
--- /dev/null
+++ b/data/src/main/scala/org/apache/predictionio/data/webhooks/examplejson/ExampleJsonConnector.scala
@@ -0,0 +1,153 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package org.apache.predictionio.data.webhooks.examplejson
+
+import org.apache.predictionio.data.webhooks.JsonConnector
+import org.apache.predictionio.data.webhooks.ConnectorException
+
+import org.json4s.Formats
+import org.json4s.DefaultFormats
+import org.json4s.JObject
+
+/** Example JsonConnector with the following types of webhook JSON input:
+  *
+  * UserAction
+  *
+  * {
+  *   "type": "userAction",
+  *   "userId": "as34smg4",
+  *   "event": "do_something",
+  *   "context": {
+  *     "ip": "24.5.68.47",
+  *     "prop1": 2.345,
+  *     "prop2": "value1"
+  *   },
+  *   "anotherProperty1": 100,
+  *   "anotherProperty2": "optional1",
+  *   "timestamp": "2015-01-02T00:30:12.984Z"
+  * }
+  *
+  * UserActionItem
+  *
+  * {
+  *   "type": "userActionItem",
+  *   "userId": "as34smg4",
+  *   "event": "do_something_on",
+  *   "itemId": "kfjd312bc",
+  *   "context": {
+  *     "ip": "1.23.4.56",
+  *     "prop1": 2.345,
+  *     "prop2": "value1"
+  *   },
+  *   "anotherPropertyA": 4.567,
+  *   "anotherPropertyB": false,
+  *   "timestamp": "2015-01-15T04:20:23.567Z"
+  * }
+  */
+private[predictionio] object ExampleJsonConnector extends JsonConnector {
+
+  implicit val json4sFormats: Formats = DefaultFormats
+
+  override def toEventJson(data: JObject): JObject = {
+    val common = try {
+      data.extract[Common]
+    } catch {
+      case e: Exception => throw new ConnectorException(
+        s"Cannot extract Common field from ${data}. ${e.getMessage()}", e)
+    }
+
+    val json = try {
+      common.`type` match {
+        case "userAction" =>
+          toEventJson(common = common, userAction = data.extract[UserAction])
+        case "userActionItem" =>
+          toEventJson(common = common, userActionItem = data.extract[UserActionItem])
+        case x: String =>
+          throw new ConnectorException(
+            s"Cannot convert unknown type '${x}' to Event JSON.")
+      }
+    } catch {
+      case e: ConnectorException => throw e
+      case e: Exception => throw new ConnectorException(
+        s"Cannot convert ${data} to eventJson. ${e.getMessage()}", e)
+    }
+
+    json
+  }
+
+  def toEventJson(common: Common, userAction: UserAction): JObject = {
+    import org.json4s.JsonDSL._
+
+    // map to EventAPI JSON
+    val json =
+      ("event" -> userAction.event) ~
+        ("entityType" -> "user") ~
+        ("entityId" -> userAction.userId) ~
+        ("eventTime" -> userAction.timestamp) ~
+        ("properties" -> (
+          ("context" -> userAction.context) ~
+            ("anotherProperty1" -> userAction.anotherProperty1) ~
+            ("anotherProperty2" -> userAction.anotherProperty2)
+          ))
+    json
+  }
+
+  def toEventJson(common: Common, userActionItem: UserActionItem): JObject = {
+    import org.json4s.JsonDSL._
+
+    // map to EventAPI JSON
+    val json =
+      ("event" -> userActionItem.event) ~
+        ("entityType" -> "user") ~
+        ("entityId" -> userActionItem.userId) ~
+        ("targetEntityType" -> "item") ~
+        ("targetEntityId" -> userActionItem.itemId) ~
+        ("eventTime" -> userActionItem.timestamp) ~
+        ("properties" -> (
+          ("context" -> userActionItem.context) ~
+            ("anotherPropertyA" -> userActionItem.anotherPropertyA) ~
+            ("anotherPropertyB" -> userActionItem.anotherPropertyB)
+          ))
+    json
+  }
+
+  // Common required fields
+  case class Common(
+    `type`: String
+  )
+
+  // User Actions fields
+  case class UserAction (
+    userId: String,
+    event: String,
+    context: Option[JObject],
+    anotherProperty1: Int,
+    anotherProperty2: Option[String],
+    timestamp: String
+  )
+
+  // UserActionItem fields
+  case class UserActionItem (
+    userId: String,
+    event: String,
+    itemId: String,
+    context: JObject,
+    anotherPropertyA: Option[Double],
+    anotherPropertyB: Option[Boolean],
+    timestamp: String
+  )
+
+}
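
The JSON variant can be exercised the same way; a sketch with a minimal
"userAction" payload (optional fields omitted, values illustrative):

```scala
import org.json4s.JObject
import org.json4s.native.JsonMethods.parse
import org.apache.predictionio.data.webhooks.examplejson.ExampleJsonConnector

val payload = parse("""{
  "type": "userAction",
  "userId": "as34smg4",
  "event": "do_something",
  "anotherProperty1": 100,
  "timestamp": "2015-01-02T00:30:12.984Z"
}""").asInstanceOf[JObject]

val eventJson: JObject = ExampleJsonConnector.toEventJson(payload)
```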

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/data/src/main/scala/org/apache/predictionio/data/webhooks/mailchimp/MailChimpConnector.scala
----------------------------------------------------------------------
diff --git a/data/src/main/scala/org/apache/predictionio/data/webhooks/mailchimp/MailChimpConnector.scala b/data/src/main/scala/org/apache/predictionio/data/webhooks/mailchimp/MailChimpConnector.scala
new file mode 100644
index 0000000..abf8a7f
--- /dev/null
+++ b/data/src/main/scala/org/apache/predictionio/data/webhooks/mailchimp/MailChimpConnector.scala
@@ -0,0 +1,305 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+
+package org.apache.predictionio.data.webhooks.mailchimp
+
+import org.apache.predictionio.data.webhooks.FormConnector
+import org.apache.predictionio.data.webhooks.ConnectorException
+import org.apache.predictionio.data.storage.EventValidation
+import org.apache.predictionio.data.Utils
+
+import org.json4s.JObject
+
+import org.joda.time.DateTime
+import org.joda.time.format.DateTimeFormat
+
+private[predictionio] object MailChimpConnector extends FormConnector {
+
+  override
+  def toEventJson(data: Map[String, String]): JObject = {
+
+    val json = data.get("type") match {
+      case Some("subscribe") => subscribeToEventJson(data)
+      // UNSUBSCRIBE
+      case Some("unsubscribe") => unsubscribeToEventJson(data)
+      // PROFILE UPDATES
+      case Some("profile") => profileToEventJson(data)
+      // EMAIL UPDATE
+      case Some("upemail") => upemailToEventJson(data)
+      // CLEANED EMAILS
+      case Some("cleaned") => cleanedToEventJson(data)
+      // CAMPAIGN SENDING STATUS
+      case Some("campaign") => campaignToEventJson(data)
+      // invalid type
+      case Some(x) => throw new ConnectorException(
+        s"Cannot convert unknown MailChimp data type ${x} to event JSON")
+      case None => throw new ConnectorException(
+        s"The field 'type' is required for MailChimp data.")
+    }
+    json
+  }
+
+
+  val mailChimpDateTimeFormat = DateTimeFormat.forPattern("yyyy-MM-dd HH:mm:ss")
+    .withZone(EventValidation.defaultTimeZone)
+
+  def parseMailChimpDateTime(s: String): DateTime = {
+    mailChimpDateTimeFormat.parseDateTime(s)
+  }
+
+  def subscribeToEventJson(data: Map[String, String]): JObject = {
+
+    import org.json4s.JsonDSL._
+
+    /*
+    "type": "subscribe",
+    "fired_at": "2009-03-26 21:35:57",
+    "data[id]": "8a25ff1d98",
+    "data[list_id]": "a6b5da1054",
+    "data[email]": "api@mailchimp.com",
+    "data[email_type]": "html",
+    "data[merges][EMAIL]": "api@mailchimp.com",
+    "data[merges][FNAME]": "MailChimp",
+    "data[merges][LNAME]": "API",
+    "data[merges][INTERESTS]": "Group1,Group2",
+    "data[ip_opt]": "10.20.10.30",
+    "data[ip_signup]": "10.20.10.30"
+    */
+
+    // convert to ISO8601 format
+    val eventTime = Utils.dateTimeToString(parseMailChimpDateTime(data("fired_at")))
+
+    // TODO: handle optional fields
+    val json =
+      ("event" -> "subscribe") ~
+      ("entityType" -> "user") ~
+      ("entityId" -> data("data[id]")) ~
+      ("targetEntityType" -> "list") ~
+      ("targetEntityId" -> data("data[list_id]")) ~
+      ("eventTime" -> eventTime) ~
+      ("properties" -> (
+        ("email" -> data("data[email]")) ~
+        ("email_type" -> data("data[email_type]")) ~
+        ("merges" -> (
+          ("EMAIL" -> data("data[merges][EMAIL]")) ~
+          ("FNAME" -> data("data[merges][FNAME]")) ~
+          ("LNAME" -> data("data[merges][LNAME]")) ~
+          ("INTERESTS" -> data.get("data[merges][INTERESTS]"))
+        )) ~
+        ("ip_opt" -> data("data[ip_opt]")) ~
+        ("ip_signup" -> data("data[ip_signup]"))
+      ))
+
+    json
+
+  }
+
+  def unsubscribeToEventJson(data: Map[String, String]): JObject = {
+
+    import org.json4s.JsonDSL._
+
+    /*
+    "action" will either be "unsub" or "delete".
+    The reason will be "manual" unless caused by a spam complaint - then it will be "abuse"
+
+    "type": "unsubscribe",
+    "fired_at": "2009-03-26 21:40:57",
+    "data[action]": "unsub",
+    "data[reason]": "manual",
+    "data[id]": "8a25ff1d98",
+    "data[list_id]": "a6b5da1054",
+    "data[email]": "api+unsub@mailchimp.com",
+    "data[email_type]": "html",
+    "data[merges][EMAIL]": "api+unsub@mailchimp.com",
+    "data[merges][FNAME]": "MailChimp",
+    "data[merges][LNAME]": "API",
+    "data[merges][INTERESTS]": "Group1,Group2",
+    "data[ip_opt]": "10.20.10.30",
+    "data[campaign_id]": "cb398d21d2",
+    */
+
+    // convert to ISO8601 format
+    val eventTime = Utils.dateTimeToString(parseMailChimpDateTime(data("fired_at")))
+
+    val json =
+      ("event" -> "unsubscribe") ~
+      ("entityType" -> "user") ~
+      ("entityId" -> data("data[id]")) ~
+      ("targetEntityType" -> "list") ~
+      ("targetEntityId" -> data("data[list_id]")) ~
+      ("eventTime" -> eventTime) ~
+      ("properties" -> (
+        ("action" -> data("data[action]")) ~
+        ("reason" -> data("data[reason]")) ~
+        ("email" -> data("data[email]")) ~
+        ("email_type" -> data("data[email_type]")) ~
+        ("merges" -> (
+          ("EMAIL" -> data("data[merges][EMAIL]")) ~
+          ("FNAME" -> data("data[merges][FNAME]")) ~
+          ("LNAME" -> data("data[merges][LNAME]")) ~
+          ("INTERESTS" -> data.get("data[merges][INTERESTS]"))
+        )) ~
+        ("ip_opt" -> data("data[ip_opt]")) ~
+        ("campaign_id" -> data("data[campaign_id]"))
+      ))
+
+    json
+
+  }
+
+  def profileToEventJson(data: Map[String, String]): JObject = {
+
+    import org.json4s.JsonDSL._
+
+    /*
+    "type": "profile",
+    "fired_at": "2009-03-26 21:31:21",
+    "data[id]": "8a25ff1d98",
+    "data[list_id]": "a6b5da1054",
+    "data[email]": "api@mailchimp.com",
+    "data[email_type]": "html",
+    "data[merges][EMAIL]": "api@mailchimp.com",
+    "data[merges][FNAME]": "MailChimp",
+    "data[merges][LNAME]": "API",
+    "data[merges][INTERESTS]": "Group1,Group2", // OPTIONAL
+    "data[ip_opt]": "10.20.10.30"
+    */
+
+    // convert to ISO8601 format
+    val eventTime = Utils.dateTimeToString(parseMailChimpDateTime(data("fired_at")))
+
+    val json =
+      ("event" -> "profile") ~
+      ("entityType" -> "user") ~
+      ("entityId" -> data("data[id]")) ~
+      ("targetEntityType" -> "list") ~
+      ("targetEntityId" -> data("data[list_id]")) ~
+      ("eventTime" -> eventTime) ~
+      ("properties" -> (
+        ("email" -> data("data[email]")) ~
+        ("email_type" -> data("data[email_type]")) ~
+        ("merges" -> (
+          ("EMAIL" -> data("data[merges][EMAIL]")) ~
+          ("FNAME" -> data("data[merges][FNAME]")) ~
+          ("LNAME" -> data("data[merges][LNAME]")) ~
+          ("INTERESTS" -> data.get("data[merges][INTERESTS]"))
+        )) ~
+        ("ip_opt" -> data("data[ip_opt]"))
+      ))
+
+    json
+
+  }
+
+  def upemailToEventJson(data: Map[String, String]): JObject = {
+
+    import org.json4s.JsonDSL._
+
+    /*
+    "type": "upemail",
+    "fired_at": "2009-03-26 22:15:09",
+    "data[list_id]": "a6b5da1054",
+    "data[new_id]": "51da8c3259",
+    "data[new_email]": "api+new@mailchimp.com",
+    "data[old_email]": "api+old@mailchimp.com"
+    */
+
+    // convert to ISO8601 format
+    val eventTime = Utils.dateTimeToString(parseMailChimpDateTime(data("fired_at")))
+
+    val json =
+      ("event" -> "upemail") ~
+      ("entityType" -> "user") ~
+      ("entityId" -> data("data[new_id]")) ~
+      ("targetEntityType" -> "list") ~
+      ("targetEntityId" -> data("data[list_id]")) ~
+      ("eventTime" -> eventTime) ~
+      ("properties" -> (
+        ("new_email" -> data("data[new_email]")) ~
+        ("old_email" -> data("data[old_email]"))
+      ))
+
+    json
+
+  }
+
+  def cleanedToEventJson(data: Map[String, String]): JObject = {
+
+    import org.json4s.JsonDSL._
+
+    /*
+    Reason will be one of "hard" (for hard bounces) or "abuse"
+    "type": "cleaned",
+    "fired_at": "2009-03-26 22:01:00",
+    "data[list_id]": "a6b5da1054",
+    "data[campaign_id]": "4fjk2ma9xd",
+    "data[reason]": "hard",
+    "data[email]": "api+cleaned@mailchimp.com"
+    */
+
+    // convert to ISO8601 format
+    val eventTime = Utils.dateTimeToString(parseMailChimpDateTime(data("fired_at")))
+
+    val json =
+      ("event" -> "cleaned") ~
+      ("entityType" -> "list") ~
+      ("entityId" -> data("data[list_id]")) ~
+      ("eventTime" -> eventTime) ~
+      ("properties" -> (
+        ("campaignId" -> data("data[campaign_id]")) ~
+        ("reason" -> data("data[reason]")) ~
+        ("email" -> data("data[email]"))
+      ))
+
+    json
+
+  }
+
+  def campaignToEventJson(data: Map[String, String]): JObject = {
+
+    import org.json4s.JsonDSL._
+
+    /*
+    "type": "campaign",
+    "fired_at": "2009-03-26 21:31:21",
+    "data[id]": "5aa2102003",
+    "data[subject]": "Test Campaign Subject",
+    "data[status]": "sent",
+    "data[reason]": "",
+    "data[list_id]": "a6b5da1054"
+    */
+
+    // convert to ISO8601 format
+    val eventTime = Utils.dateTimeToString(parseMailChimpDateTime(data("fired_at")))
+
+    val json =
+      ("event" -> "campaign") ~
+      ("entityType" -> "campaign") ~
+      ("entityId" -> data("data[id]")) ~
+      ("targetEntityType" -> "list") ~
+      ("targetEntityId" -> data("data[list_id]")) ~
+      ("eventTime" -> eventTime) ~
+      ("properties" -> (
+        ("subject" -> data("data[subject]")) ~
+        ("status" -> data("data[status]")) ~
+        ("reason" -> data("data[reason]"))
+      ))
+
+    json
+
+  }
+
+}
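
A sketch with a trimmed "subscribe" payload, using the sample values from the
comment above (INTERESTS is the only optional merge field):

```scala
import org.apache.predictionio.data.webhooks.mailchimp.MailChimpConnector

val eventJson = MailChimpConnector.toEventJson(Map(
  "type" -> "subscribe",
  "fired_at" -> "2009-03-26 21:35:57",  // re-emitted as ISO8601 eventTime
  "data[id]" -> "8a25ff1d98",
  "data[list_id]" -> "a6b5da1054",
  "data[email]" -> "api@mailchimp.com",
  "data[email_type]" -> "html",
  "data[merges][EMAIL]" -> "api@mailchimp.com",
  "data[merges][FNAME]" -> "MailChimp",
  "data[merges][LNAME]" -> "API",
  "data[ip_opt]" -> "10.20.10.30",
  "data[ip_signup]" -> "10.20.10.30"))
```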

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/data/src/main/scala/org/apache/predictionio/data/webhooks/segmentio/SegmentIOConnector.scala
----------------------------------------------------------------------
diff --git a/data/src/main/scala/org/apache/predictionio/data/webhooks/segmentio/SegmentIOConnector.scala b/data/src/main/scala/org/apache/predictionio/data/webhooks/segmentio/SegmentIOConnector.scala
new file mode 100644
index 0000000..b7548b0
--- /dev/null
+++ b/data/src/main/scala/org/apache/predictionio/data/webhooks/segmentio/SegmentIOConnector.scala
@@ -0,0 +1,306 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package org.apache.predictionio.data.webhooks.segmentio
+
+import org.apache.predictionio.data.webhooks.{ConnectorException, JsonConnector}
+import org.json4s._
+
+private[predictionio] object SegmentIOConnector extends JsonConnector {
+
+  // private lazy val supportedAPI = Vector("2", "2.0", "2.0.0")
+
+  implicit val json4sFormats: Formats = DefaultFormats
+
+  override
+  def toEventJson(data: JObject): JObject = {
+    try {
+      val version: String = data.values("version").toString
+/*
+      if (!supportedAPI.contains(version)) {
+        throw new ConnectorException(
+          s"Supported segment.io API versions: [2]. got [$version]"
+        )
+      }
+*/
+    } catch { case _: Throwable ⇒
+      throw new ConnectorException(s"Failed to get segment.io API version.")
+    }
+
+    val common = try {
+      data.extract[Common]
+    } catch {
+      case e: Throwable ⇒ throw new ConnectorException(
+        s"Cannot extract Common field from $data. ${e.getMessage}", e
+      )
+    }
+
+    try {
+      common.`type` match {
+        case "identify" ⇒
+          toEventJson(
+            common = common,
+            identify = data.extract[Events.Identify]
+          )
+
+        case "track" ⇒
+          toEventJson(
+            common = common,
+            track = data.extract[Events.Track]
+          )
+
+        case "alias" ⇒
+          toEventJson(
+            common = common,
+            alias = data.extract[Events.Alias]
+          )
+
+        case "page" ⇒
+          toEventJson(
+            common = common,
+            page = data.extract[Events.Page]
+          )
+
+        case "screen" ⇒
+          toEventJson(
+            common = common,
+            screen = data.extract[Events.Screen]
+          )
+
+        case "group" ⇒
+          toEventJson(
+            common = common,
+            group = data.extract[Events.Group]
+          )
+
+        case _ ⇒
+          throw new ConnectorException(
+            s"Cannot convert unknown type ${common.`type`} to event JSON."
+          )
+      }
+    } catch {
+      case e: ConnectorException => throw e
+      case e: Exception =>
+        throw new ConnectorException(
+          s"Cannot convert $data to event JSON. ${e.getMessage}", e
+        )
+    }
+  }
+
+  def toEventJson(common: Common, identify: Events.Identify): JObject = {
+    import org.json4s.JsonDSL._
+    val eventProperties = "traits" → identify.traits
+    toJson(common, eventProperties)
+  }
+
+  def toEventJson(common: Common, track: Events.Track): JObject = {
+    import org.json4s.JsonDSL._
+    val eventProperties =
+      ("properties" → track.properties) ~
+      ("event" → track.event)
+    toJson(common, eventProperties)
+  }
+
+  def toEventJson(common: Common, alias: Events.Alias): JObject = {
+    import org.json4s.JsonDSL._
+    toJson(common, "previous_id" → alias.previous_id)
+  }
+
+  def toEventJson(common: Common, screen: Events.Screen): JObject = {
+    import org.json4s.JsonDSL._
+    val eventProperties =
+      ("name" → screen.name) ~
+      ("properties" → screen.properties)
+    toJson(common, eventProperties)
+  }
+
+  def toEventJson(common: Common, page: Events.Page): JObject = {
+    import org.json4s.JsonDSL._
+    val eventProperties =
+      ("name" → page.name) ~
+      ("properties" → page.properties)
+    toJson(common, eventProperties)
+  }
+
+  def toEventJson(common: Common, group: Events.Group): JObject = {
+    import org.json4s.JsonDSL._
+    val eventProperties =
+      ("group_id" → group.group_id) ~
+      ("traits" → group.traits)
+    toJson(common, eventProperties)
+  }
+
+  private def toJson(common: Common, props: JObject): JsonAST.JObject = {
+    val commonFields = commonToJson(common)
+    JObject(("properties" → properties(common, props)) :: commonFields.obj)
+  }
+
+  private def properties(common: Common, eventProps: JObject): JObject = {
+    import org.json4s.JsonDSL._
+    common.context map { context ⇒
+      try {
+        ("context" → Extraction.decompose(context)) ~ eventProps
+      } catch {
+        case e: Throwable ⇒
+          throw new ConnectorException(
+            s"Cannot convert $context to event JSON. ${e.getMessage}", e
+          )
+      }
+    } getOrElse eventProps
+  }
+
+  private def commonToJson(common: Common): JObject =
+    commonToJson(common, common.`type`)
+
+  private def commonToJson(common: Common, typ: String): JObject = {
+    import org.json4s.JsonDSL._
+      common.user_id.orElse(common.anonymous_id) match {
+        case Some(userId) ⇒
+          ("event" → typ) ~
+            ("entityType" → "user") ~
+            ("entityId" → userId) ~
+            ("eventTime" → common.timestamp)
+
+        case None ⇒
+          throw new ConnectorException(
+            "there was no `userId` or `anonymousId` in the common fields."
+          )
+      }
+  }
+}
+
+object Events {
+
+  private[predictionio] case class Track(
+    event: String,
+    properties: Option[JObject] = None
+  )
+
+  private[predictionio] case class Alias(previous_id: String, user_id: String)
+
+  private[predictionio] case class Group(
+    group_id: String,
+    traits: Option[JObject] = None
+  )
+
+  private[predictionio] case class Screen(
+    name: Option[String] = None,
+    properties: Option[JObject] = None
+  )
+
+  private[predictionio] case class Page(
+    name: Option[String] = None,
+    properties: Option[JObject] = None
+  )
+
+  private[predictionio] case class Identify(
+    user_id: String,
+    traits: Option[JObject]
+  )
+
+}
+
+object Common {
+
+  private[predictionio] case class Integrations(
+    All: Boolean = false,
+    Mixpanel: Boolean = false,
+    Marketo: Boolean = false,
+    Salesforse: Boolean = false
+  )
+
+  private[predictionio] case class Context(
+    ip: String,
+    library: Library,
+    user_agent: String,
+    app: Option[App] = None,
+    campaign: Option[Campaign] = None,
+    device: Option[Device] = None,
+    network: Option[Network] = None,
+    location: Option[Location] = None,
+    os: Option[OS] = None,
+    referrer: Option[Referrer] = None,
+    screen: Option[Screen] = None,
+    timezone: Option[String] = None
+  )
+
+  private[predictionio] case class Screen(width: Int, height: Int, density: Int)
+
+  private[predictionio] case class Referrer(id: String, `type`: String)
+
+  private[predictionio] case class OS(name: String, version: String)
+
+  private[predictionio] case class Location(
+    city: Option[String] = None,
+    country: Option[String] = None,
+    latitude: Option[Double] = None,
+    longitude: Option[Double] = None,
+    speed: Option[Int] = None
+  )
+
+  case class Page(
+    path: String,
+    referrer: String,
+    search: String,
+    title: String,
+    url: String
+  )
+
+  private[predictionio] case class Network(
+    bluetooth: Option[Boolean] = None,
+    carrier: Option[String] = None,
+    cellular: Option[Boolean] = None,
+    wifi: Option[Boolean] = None
+  )
+
+  private[prediction] case class Library(name: String, version: String)
+
+  private[prediction] case class Device(
+    id: Option[String] = None,
+    advertising_id: Option[String] = None,
+    ad_tracking_enabled: Option[Boolean] = None,
+    manufacturer: Option[String] = None,
+    model: Option[String] = None,
+    name: Option[String] = None,
+    `type`: Option[String] = None,
+    token: Option[String] = None
+  )
+
+  private[prediction] case class Campaign(
+    name: Option[String] = None,
+    source: Option[String] = None,
+    medium: Option[String] = None,
+    term: Option[String] = None,
+    content: Option[String] = None
+  )
+
+  private[prediction] case class App(
+    name: Option[String] = None,
+    version: Option[String] = None,
+    build: Option[String] = None
+  )
+
+}
+
+private[prediction] case class Common(
+  `type`: String,
+  sent_at: String,
+  timestamp: String,
+  version: String,
+  anonymous_id: Option[String] = None,
+  user_id: Option[String] = None,
+  context: Option[Common.Context] = None,
+  integrations: Option[Common.Integrations] = None
+)

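For orientation, the connector above turns each Segment call into PredictionIO
event JSON: commonToJson supplies "event", "entityType", "entityId" and
"eventTime", and the call-specific fields go under "properties". A minimal
sketch of that shape for a "group" call, built with the same json4s DSL the
connector uses (all field values here are hypothetical):

  import org.json4s.JsonDSL._
  import org.json4s.native.JsonMethods.{compact, render}

  object GroupEventSketch {
    def main(args: Array[String]): Unit = {
      // Mirrors toEventJson(common, group): common fields first, then the
      // group-specific fields nested under "properties".
      val event =
        ("event" -> "group") ~
        ("entityType" -> "user") ~
        ("entityId" -> "97980cfea0067") ~
        ("eventTime" -> "2015-02-23T22:28:55.387Z") ~
        ("properties" -> (
          ("group_id" -> "g1") ~
          ("traits" -> (("plan" -> "premium") ~ ("logins" -> 5)))
        ))
      println(compact(render(event)))
    }
  }
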
http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/data/src/test/scala/io/prediction/data/api/EventServiceSpec.scala
----------------------------------------------------------------------
diff --git a/data/src/test/scala/io/prediction/data/api/EventServiceSpec.scala b/data/src/test/scala/io/prediction/data/api/EventServiceSpec.scala
deleted file mode 100644
index 9f7a74e..0000000
--- a/data/src/test/scala/io/prediction/data/api/EventServiceSpec.scala
+++ /dev/null
@@ -1,68 +0,0 @@
-/** Copyright 2015 TappingStone, Inc.
-  *
-  * Licensed under the Apache License, Version 2.0 (the "License");
-  * you may not use this file except in compliance with the License.
-  * You may obtain a copy of the License at
-  *
-  *     http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-package io.prediction.data.api
-
-import io.prediction.data.storage.Storage
-
-import akka.testkit.TestProbe
-import akka.actor.ActorSystem
-import akka.actor.Props
-
-import spray.http.HttpEntity
-import spray.http.HttpResponse
-import spray.http.ContentTypes
-import spray.httpx.RequestBuilding.Get
-
-import org.specs2.mutable.Specification
-
-class EventServiceSpec extends Specification {
-
-  val system = ActorSystem("EventServiceSpecSystem")
-
-  val eventClient = Storage.getLEvents()
-  val accessKeysClient = Storage.getMetaDataAccessKeys()
-  val channelsClient = Storage.getMetaDataChannels()
-  
-  val eventServiceActor = system.actorOf(
-    Props(
-      new EventServiceActor(
-        eventClient,
-        accessKeysClient,
-        channelsClient,
-        EventServerConfig()
-      )
-    )
-  )
-
-  "GET / request" should {
-    "properly produce OK HttpResponses" in {
-      val probe = TestProbe()(system)
-      probe.send(eventServiceActor, Get("/"))
-      probe.expectMsg(
-        HttpResponse(
-          200,
-          HttpEntity(
-            contentType = ContentTypes.`application/json`,
-            string = """{"status":"alive"}"""
-          )
-        )
-      )
-      success
-    }
-  }
-
-  step(system.shutdown())
-}

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/data/src/test/scala/io/prediction/data/api/SegmentIOAuthSpec.scala
----------------------------------------------------------------------
diff --git a/data/src/test/scala/io/prediction/data/api/SegmentIOAuthSpec.scala b/data/src/test/scala/io/prediction/data/api/SegmentIOAuthSpec.scala
deleted file mode 100644
index bae0f0b..0000000
--- a/data/src/test/scala/io/prediction/data/api/SegmentIOAuthSpec.scala
+++ /dev/null
@@ -1,175 +0,0 @@
-package io.prediction.data.api
-
-import akka.actor.{ActorSystem, Props}
-import akka.testkit.TestProbe
-import io.prediction.data.storage._
-import org.joda.time.DateTime
-import org.specs2.mutable.Specification
-import spray.http.HttpHeaders.RawHeader
-import spray.http.{ContentTypes, HttpEntity, HttpResponse}
-import spray.httpx.RequestBuilding._
-import sun.misc.BASE64Encoder
-
-import scala.concurrent.{Future, ExecutionContext}
-
-class SegmentIOAuthSpec extends Specification {
-
-  val system = ActorSystem("EventServiceSpecSystem")
-  sequential
-  isolated
-  val eventClient = new LEvents {
-    override def init(appId: Int, channelId: Option[Int]): Boolean = true
-
-    override def futureInsert(event: Event, appId: Int, channelId: Option[Int])
-        (implicit ec: ExecutionContext): Future[String] =
-      Future successful "event_id"
-
-    override def futureFind(
-      appId: Int, channelId: Option[Int], startTime: Option[DateTime],
-      untilTime: Option[DateTime], entityType: Option[String],
-      entityId: Option[String], eventNames: Option[Seq[String]],
-      targetEntityType: Option[Option[String]],
-      targetEntityId: Option[Option[String]], limit: Option[Int],
-      reversed: Option[Boolean])
-        (implicit ec: ExecutionContext): Future[Iterator[Event]] =
-      Future successful List.empty[Event].iterator
-
-    override def futureGet(eventId: String, appId: Int, channelId: Option[Int])
-        (implicit ec: ExecutionContext): Future[Option[Event]] =
-      Future successful None
-
-    override def remove(appId: Int, channelId: Option[Int]): Boolean = true
-
-    override def futureDelete(eventId: String, appId: Int, channelId: Option[Int])
-        (implicit ec: ExecutionContext): Future[Boolean] =
-      Future successful true
-
-    override def close(): Unit = {}
-  }
-  val appId = 0
-  val accessKeysClient = new AccessKeys {
-    override def insert(k: AccessKey): Option[String] = null
-    override def getByAppid(appid: Int): Seq[AccessKey] = null
-    override def update(k: AccessKey): Unit = {}
-    override def delete(k: String): Unit = {}
-    override def getAll(): Seq[AccessKey] = null
-
-    override def get(k: String): Option[AccessKey] =
-      k match {
-        case "abc" \u21d2 Some(AccessKey(k, appId, Seq.empty))
-        case _ \u21d2 None
-      }
-  }
-
-  val channelsClient = Storage.getMetaDataChannels()
-  val eventServiceActor = system.actorOf(
-    Props(
-      new EventServiceActor(
-        eventClient,
-        accessKeysClient,
-        channelsClient,
-        EventServerConfig()
-      )
-    )
-  )
-
-  val base64Encoder = new BASE64Encoder
-
-  "Event Service" should {
-
-    "reject with CredentialsRejected with invalid credentials" in {
-      val accessKey = "abc123:"
-      val probe = TestProbe()(system)
-      probe.send(
-        eventServiceActor,
-        Post("/webhooks/segmentio.json")
-          .withHeaders(
-            List(
-              RawHeader("Authorization", s"Basic $accessKey")
-            )
-          )
-      )
-      probe.expectMsg(
-        HttpResponse(
-          401,
-          HttpEntity(
-            contentType = ContentTypes.`application/json`,
-            string = """{"message":"Invalid accessKey."}"""
-          )
-        )
-      )
-      success
-    }
-
-    "reject with CredentialsMissed without credentials" in {
-      val probe = TestProbe()(system)
-      probe.send(
-        eventServiceActor,
-        Post("/webhooks/segmentio.json")
-      )
-      probe.expectMsg(
-        HttpResponse(
-          401,
-          HttpEntity(
-            contentType = ContentTypes.`application/json`,
-            string = """{"message":"Missing accessKey."}"""
-          )
-        )
-      )
-      success
-    }
-
-    "process SegmentIO identity request properly" in {
-      val jsonReq =
-        """
-          |{
-          |  "anonymous_id": "507f191e810c19729de860ea",
-          |  "channel": "browser",
-          |  "context": {
-          |    "ip": "8.8.8.8",
-          |    "userAgent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_5)"
-          |  },
-          |  "message_id": "022bb90c-bbac-11e4-8dfc-aa07a5b093db",
-          |  "timestamp": "2015-02-23T22:28:55.387Z",
-          |  "sent_at": "2015-02-23T22:28:55.111Z",
-          |  "traits": {
-          |    "name": "Peter Gibbons",
-          |    "email": "peter@initech.com",
-          |    "plan": "premium",
-          |    "logins": 5
-          |  },
-          |  "type": "identify",
-          |  "user_id": "97980cfea0067",
-          |  "version": "2"
-          |}
-        """.stripMargin
-
-      val accessKey = "abc:"
-      val accessKeyEncoded = base64Encoder.encodeBuffer(accessKey.getBytes)
-      val probe = TestProbe()(system)
-      probe.send(
-        eventServiceActor,
-        Post(
-          "/webhooks/segmentio.json",
-          HttpEntity(ContentTypes.`application/json`, jsonReq.getBytes)
-        ).withHeaders(
-            List(
-              RawHeader("Authorization", s"Basic $accessKeyEncoded")
-            )
-          )
-      )
-      probe.expectMsg(
-        HttpResponse(
-          201,
-          HttpEntity(
-            contentType = ContentTypes.`application/json`,
-            string = """{"eventId":"event_id"}"""
-          )
-        )
-      )
-      success
-    }
-  }
-
-  step(system.shutdown())
-}

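A side note on the encoder: the spec above builds the Basic auth header with
the JDK-internal sun.misc.BASE64Encoder. A sketch of the same header using the
standard java.util.Base64 (Java 8+), which also avoids encodeBuffer's trailing
newline:

  import java.nio.charset.StandardCharsets
  import java.util.Base64

  object AuthHeaderSketch {
    def main(args: Array[String]): Unit = {
      val accessKey = "abc:" // access key followed by an empty password
      val encoded = Base64.getEncoder.encodeToString(
        accessKey.getBytes(StandardCharsets.UTF_8))
      // The webhook request then carries this header:
      println(s"Authorization: Basic $encoded")
    }
  }
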
http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/data/src/test/scala/io/prediction/data/storage/BiMapSpec.scala
----------------------------------------------------------------------
diff --git a/data/src/test/scala/io/prediction/data/storage/BiMapSpec.scala b/data/src/test/scala/io/prediction/data/storage/BiMapSpec.scala
deleted file mode 100644
index e6d28b3..0000000
--- a/data/src/test/scala/io/prediction/data/storage/BiMapSpec.scala
+++ /dev/null
@@ -1,196 +0,0 @@
-/** Copyright 2015 TappingStone, Inc.
-  *
-  * Licensed under the Apache License, Version 2.0 (the "License");
-  * you may not use this file except in compliance with the License.
-  * You may obtain a copy of the License at
-  *
-  *     http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-package io.prediction.data.storage
-
-import org.specs2.mutable._
-
-import org.apache.spark.SparkContext
-import org.apache.spark.SparkContext._
-import org.apache.spark.SparkConf
-import org.apache.spark.rdd.RDD
-
-class BiMapSpec extends Specification {
-
-  System.clearProperty("spark.driver.port")
-  System.clearProperty("spark.hostPort")
-  val sc = new SparkContext("local[4]", "BiMapSpec test")
-
-  "BiMap created with map" should {
-
-    val keys = Seq(1, 4, 6)
-    val orgValues = Seq(2, 5, 7)
-    val org = keys.zip(orgValues).toMap
-    val bi = BiMap(org)
-
-    "return correct values for each key of original map" in {
-      val biValues = keys.map(k => bi(k))
-
-      biValues must beEqualTo(orgValues)
-    }
-
-    "get return Option[V]" in {
-      val checkKeys = keys ++ Seq(12345)
-      val biValues = checkKeys.map(k => bi.get(k))
-      val expected = orgValues.map(Some(_)) ++ Seq(None)
-
-      biValues must beEqualTo(expected)
-    }
-
-    "getOrElse return value for each key of original map" in {
-      val biValues = keys.map(k => bi.getOrElse(k, -1))
-
-      biValues must beEqualTo(orgValues)
-    }
-
-    "getOrElse return default values for invalid key" in {
-      val keys = Seq(999, -1, -2)
-      val defaults = Seq(1234, 5678, 987)
-      val biValues = keys.zip(defaults).map{ case (k,d) => bi.getOrElse(k, d) }
-
-      biValues must beEqualTo(defaults)
-    }
-
-    "contains() returns true/false correctly" in {
-      val checkKeys = keys ++ Seq(12345)
-      val biValues = checkKeys.map(k => bi.contains(k))
-      val expected = orgValues.map(_ => true) ++ Seq(false)
-
-      biValues must beEqualTo(expected)
-    }
-
-    "same size as original map" in {
-      (bi.size) must beEqualTo(org.size)
-    }
-
-    "take(2) returns BiMap of size 2" in {
-      bi.take(2).size must beEqualTo(2)
-    }
-
-    "toMap contain same element as original map" in {
-      (bi.toMap) must beEqualTo(org)
-    }
-
-    "toSeq contain same element as original map" in {
-      (bi.toSeq) must containTheSameElementsAs(org.toSeq)
-    }
-
-    "inverse and return correct keys for each values of original map" in {
-      val biKeys = orgValues.map(v => bi.inverse(v))
-      biKeys must beEqualTo(keys)
-    }
-
-    "inverse with same size" in {
-      bi.inverse.size must beEqualTo(org.size)
-    }
-
-    "inverse's inverse reference back to the same original object" in {
-      // NOTE: reference equality
-      bi.inverse.inverse == bi
-    }
-  }
-
-  "BiMap created with duplicated values in map" should {
-    val dup = Map(1 -> 2, 4 -> 7, 6 -> 7)
-    "return IllegalArgumentException" in {
-      BiMap(dup) must throwA[IllegalArgumentException]
-    }
-  }
-
-  "BiMap.stringLong and stringInt" should {
-
-    "create BiMap from set of string" in {
-      val keys = Set("a", "b", "foo", "bar")
-      val values: Seq[Long] = Seq(0, 1, 2, 3)
-
-      val bi = BiMap.stringLong(keys)
-      val biValues = keys.map(k => bi(k))
-
-      val biInt = BiMap.stringInt(keys)
-      val valuesInt: Seq[Int] = values.map(_.toInt)
-      val biIntValues = keys.map(k => biInt(k))
-
-      biValues must containTheSameElementsAs(values) and
-        (biIntValues must containTheSameElementsAs(valuesInt))
-    }
-
-    "create BiMap from Array of unique string" in {
-      val keys = Array("a", "b", "foo", "bar")
-      val values: Seq[Long] = Seq(0, 1, 2, 3)
-
-      val bi = BiMap.stringLong(keys)
-      val biValues = keys.toSeq.map(k => bi(k))
-
-      val biInt = BiMap.stringInt(keys)
-      val valuesInt: Seq[Int] = values.map(_.toInt)
-      val biIntValues = keys.toSeq.map(k => biInt(k))
-
-      biValues must containTheSameElementsAs(values) and
-        (biIntValues must containTheSameElementsAs(valuesInt))
-    }
-
-    "not guarantee sequential index for Array with duplicated string" in {
-      val keys = Array("a", "b", "foo", "bar", "a", "b", "x")
-      val dupValues: Seq[Long] = Seq(0, 1, 2, 3, 4, 5, 6)
-      val values = keys.zip(dupValues).toMap.values.toSeq
-
-      val bi = BiMap.stringLong(keys)
-      val biValues = keys.toSet[String].map(k => bi(k))
-
-      val biInt = BiMap.stringInt(keys)
-      val valuesInt: Seq[Int] = values.map(_.toInt)
-      val biIntValues = keys.toSet[String].map(k => biInt(k))
-
-      biValues must containTheSameElementsAs(values) and
-        (biIntValues must containTheSameElementsAs(valuesInt))
-    }
-
-    "create BiMap from RDD[String]" in {
-
-      val keys = Seq("a", "b", "foo", "bar")
-      val values: Seq[Long] = Seq(0, 1, 2, 3)
-      val rdd = sc.parallelize(keys)
-
-      val bi = BiMap.stringLong(rdd)
-      val biValues = keys.map(k => bi(k))
-
-      val biInt = BiMap.stringInt(rdd)
-      val valuesInt: Seq[Int] = values.map(_.toInt)
-      val biIntValues = keys.map(k => biInt(k))
-
-      biValues must containTheSameElementsAs(values) and
-        (biIntValues must containTheSameElementsAs(valuesInt))
-    }
-
-    "create BiMap from RDD[String] with duplicated string" in {
-
-      val keys = Seq("a", "b", "foo", "bar", "a", "b", "x")
-      val values: Seq[Long] = Seq(0, 1, 2, 3, 4)
-      val rdd = sc.parallelize(keys)
-
-      val bi = BiMap.stringLong(rdd)
-      val biValues = keys.distinct.map(k => bi(k))
-
-      val biInt = BiMap.stringInt(rdd)
-      val valuesInt: Seq[Int] = values.map(_.toInt)
-      val biIntValues = keys.distinct.map(k => biInt(k))
-
-      biValues must containTheSameElementsAs(values) and
-        (biIntValues must containTheSameElementsAs(valuesInt))
-    }
-  }
-
-  step(sc.stop())
-}

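To make the expectations above concrete, here is a minimal sketch of the
bidirectional-map semantics the spec exercises (the real BiMap also supports
construction from arrays and RDDs and keeps reference equality for
inverse.inverse; this sketch does not):

  // A map whose values are unique, so it can be inverted losslessly.
  class SimpleBiMap[K, V](private val forward: Map[K, V]) extends Serializable {
    // require throws IllegalArgumentException, as the duplicate-value test expects
    require(forward.values.toSet.size == forward.size,
      "Cannot create a BiMap from a map with duplicated values")

    def apply(k: K): V = forward(k)
    def get(k: K): Option[V] = forward.get(k)
    def getOrElse(k: K, default: => V): V = forward.getOrElse(k, default)
    def contains(k: K): Boolean = forward.contains(k)
    def size: Int = forward.size
    def toMap: Map[K, V] = forward
    def inverse: SimpleBiMap[V, K] = new SimpleBiMap(forward.map(_.swap))
  }

  object SimpleBiMap {
    def main(args: Array[String]): Unit = {
      val bi = new SimpleBiMap(Map(1 -> 2, 4 -> 5, 6 -> 7))
      assert(bi(4) == 5 && bi.inverse(5) == 4 && !bi.contains(12345))
    }
  }
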
http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/data/src/test/scala/io/prediction/data/storage/DataMapSpec.scala
----------------------------------------------------------------------
diff --git a/data/src/test/scala/io/prediction/data/storage/DataMapSpec.scala b/data/src/test/scala/io/prediction/data/storage/DataMapSpec.scala
deleted file mode 100644
index 97e9b09..0000000
--- a/data/src/test/scala/io/prediction/data/storage/DataMapSpec.scala
+++ /dev/null
@@ -1,243 +0,0 @@
-/** Copyright 2015 TappingStone, Inc.
-  *
-  * Licensed under the Apache License, Version 2.0 (the "License");
-  * you may not use this file except in compliance with the License.
-  * You may obtain a copy of the License at
-  *
-  *     http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-package io.prediction.data.storage
-
-import org.specs2.mutable._
-
-class DataMapSpec extends Specification {
-
-  "DataMap" should {
-
-    val properties = DataMap("""
-      {
-        "prop1" : 1,
-        "prop2" : "value2",
-        "prop3" : [1, 2, 3],
-        "prop4" : true,
-        "prop5" : ["a", "b", "c", "c"],
-        "prop6" : 4.56
-      }
-      """)
-
-    "get Int data" in {
-      properties.get[Int]("prop1") must beEqualTo(1)
-      properties.getOpt[Int]("prop1") must beEqualTo(Some(1))
-    }
-
-    "get String data" in {
-      properties.get[String]("prop2") must beEqualTo("value2")
-      properties.getOpt[String]("prop2") must beEqualTo(Some("value2"))
-    }
-
-    "get List of Int data" in {
-      properties.get[List[Int]]("prop3") must beEqualTo(List(1,2,3))
-      properties.getOpt[List[Int]]("prop3") must beEqualTo(Some(List(1,2,3)))
-    }
-
-    "get Boolean data" in {
-      properties.get[Boolean]("prop4") must beEqualTo(true)
-      properties.getOpt[Boolean]("prop4") must beEqualTo(Some(true))
-    }
-
-    "get List of String data" in {
-      properties.get[List[String]]("prop5") must beEqualTo(List("a", "b", "c", "c"))
-      properties.getOpt[List[String]]("prop5") must beEqualTo(Some(List("a", "b", "c", "c")))
-    }
-
-    "get Set of String data" in {
-      properties.get[Set[String]]("prop5") must beEqualTo(Set("a", "b", "c"))
-      properties.getOpt[Set[String]]("prop5") must beEqualTo(Some(Set("a", "b", "c")))
-    }
-
-    "get Double data" in {
-      properties.get[Double]("prop6") must beEqualTo(4.56)
-      properties.getOpt[Double]("prop6") must beEqualTo(Some(4.56))
-    }
-
-    "get empty optional Int data" in {
-      properties.getOpt[Int]("prop9999") must beEqualTo(None)
-    }
-
-  }
-
-  "DataMap with multi-level data" should {
-    val properties = DataMap("""
-      {
-        "context": {
-          "ip": "1.23.4.56",
-          "prop1": 2.345
-          "prop2": "value1",
-          "prop4": [1, 2, 3]
-        },
-        "anotherPropertyA": 4.567,
-        "anotherPropertyB": false
-      }
-      """)
-
-    "get case class data" in {
-      val expected = DataMapSpec.Context(
-        ip = "1.23.4.56",
-        prop1 = Some(2.345),
-        prop2 = Some("value1"),
-        prop3 = None,
-        prop4 = List(1,2,3)
-      )
-
-      properties.get[DataMapSpec.Context]("context") must beEqualTo(expected)
-    }
-
-    "get empty optional case class data" in {
-      properties.getOpt[DataMapSpec.Context]("context999") must beEqualTo(None)
-    }
-
-    "get double data" in {
-      properties.get[Double]("anotherPropertyA") must beEqualTo(4.567)
-    }
-
-    "get boolean data" in {
-      properties.get[Boolean]("anotherPropertyB") must beEqualTo(false)
-    }
-  }
-
-  "DataMap extract" should {
-
-    "extract to case class object" in {
-      val properties = DataMap("""
-        {
-          "prop1" : 1,
-          "prop2" : "value2",
-          "prop3" : [1, 2, 3],
-          "prop4" : true,
-          "prop5" : ["a", "b", "c", "c"],
-          "prop6" : 4.56
-        }
-        """)
-
-      val result = properties.extract[DataMapSpec.BasicProperty]
-      val expected = DataMapSpec.BasicProperty(
-        prop1 = 1,
-        prop2 = "value2",
-        prop3 = List(1,2,3),
-        prop4 = true,
-        prop5 = List("a", "b", "c", "c"),
-        prop6 = 4.56
-      )
-
-      result must beEqualTo(expected)
-    }
-
-    "extract with optional fields" in {
-      val propertiesEmpty = DataMap("""{}""")
-      val propertiesSome = DataMap("""
-        {
-          "prop1" : 1,
-          "prop5" : ["a", "b", "c", "c"],
-          "prop6" : 4.56
-        }
-        """)
-
-      val resultEmpty = propertiesEmpty.extract[DataMapSpec.OptionProperty]
-      val expectedEmpty = DataMapSpec.OptionProperty(
-        prop1 = None,
-        prop2 = None,
-        prop3 = None,
-        prop4 = None,
-        prop5 = None,
-        prop6 = None
-      )
-
-      val resultSome = propertiesSome.extract[DataMapSpec.OptionProperty]
-      val expectedSome = DataMapSpec.OptionProperty(
-        prop1 = Some(1),
-        prop2 = None,
-        prop3 = None,
-        prop4 = None,
-        prop5 = Some(List("a", "b", "c", "c")),
-        prop6 = Some(4.56)
-      )
-
-      resultEmpty must beEqualTo(expectedEmpty)
-      resultSome must beEqualTo(expectedSome)
-    }
-
-    "extract to multi-level object" in {
-      val properties = DataMap("""
-        {
-          "context": {
-            "ip": "1.23.4.56",
-            "prop1": 2.345
-            "prop2": "value1",
-            "prop4": [1, 2, 3]
-          },
-          "anotherPropertyA": 4.567,
-          "anotherPropertyB": false
-        }
-        """)
-
-      val result = properties.extract[DataMapSpec.MultiLevelProperty]
-      val expected = DataMapSpec.MultiLevelProperty(
-        context = DataMapSpec.Context(
-          ip = "1.23.4.56",
-          prop1 = Some(2.345),
-          prop2 = Some("value1"),
-          prop3 = None,
-          prop4 = List(1,2,3)
-        ),
-        anotherPropertyA = 4.567,
-        anotherPropertyB = false
-      )
-
-      result must beEqualTo(expected)
-    }
-
-  }
-}
-
-object DataMapSpec {
-
-  // define this case class inside object to avoid case class name conflict with other tests
-  case class Context(
-    ip: String,
-    prop1: Option[Double],
-    prop2: Option[String],
-    prop3: Option[Int],
-    prop4: List[Int]
-  )
-
-  case class BasicProperty(
-    prop1: Int,
-    prop2: String,
-    prop3: List[Int],
-    prop4: Boolean,
-    prop5: List[String],
-    prop6: Double
-  )
-
-  case class OptionProperty(
-    prop1: Option[Int],
-    prop2: Option[String],
-    prop3: Option[List[Int]],
-    prop4: Option[Boolean],
-    prop5: Option[List[String]],
-    prop6: Option[Double]
-  )
-
-  case class MultiLevelProperty(
-    context: Context,
-    anotherPropertyA: Double,
-    anotherPropertyB: Boolean
-  )
-}

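DataMap.get, getOpt and extract above behave like thin wrappers over json4s
extraction; a standalone sketch of the equivalent raw json4s calls (the wrapper
assumption is inferred from the expectations in this spec):

  import org.json4s._
  import org.json4s.native.JsonMethods.parse

  object ExtractionSketch {
    implicit val formats: Formats = DefaultFormats

    case class Basic(prop1: Int, prop2: String)

    def main(args: Array[String]): Unit = {
      val jv = parse("""{"prop1": 1, "prop2": "value2"}""")
      // Roughly what properties.get[Int]("prop1") resolves to:
      val prop1 = (jv \ "prop1").extract[Int]
      // Roughly what properties.extract[BasicProperty] resolves to:
      val basic = jv.extract[Basic]
      assert(prop1 == 1 && basic == Basic(1, "value2"))
    }
  }
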
http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/data/src/test/scala/io/prediction/data/storage/LEventAggregatorSpec.scala
----------------------------------------------------------------------
diff --git a/data/src/test/scala/io/prediction/data/storage/LEventAggregatorSpec.scala b/data/src/test/scala/io/prediction/data/storage/LEventAggregatorSpec.scala
deleted file mode 100644
index 77a66d5..0000000
--- a/data/src/test/scala/io/prediction/data/storage/LEventAggregatorSpec.scala
+++ /dev/null
@@ -1,103 +0,0 @@
-/** Copyright 2015 TappingStone, Inc.
-  *
-  * Licensed under the Apache License, Version 2.0 (the "License");
-  * you may not use this file except in compliance with the License.
-  * You may obtain a copy of the License at
-  *
-  *     http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-package io.prediction.data.storage
-
-import org.specs2.mutable._
-
-import org.json4s.JObject
-import org.json4s.native.JsonMethods.parse
-
-import org.joda.time.DateTime
-
-class LEventAggregatorSpec extends Specification with TestEvents {
-
-  "LEventAggregator.aggregateProperties()" should {
-
-    "aggregate two entities' properties as DataMap correctly" in {
-      val events = Vector(u1e5, u2e2, u1e3, u1e1, u2e3, u2e1, u1e4, u1e2)
-      val result: Map[String, DataMap] =
-        LEventAggregator.aggregateProperties(events.toIterator)
-
-      val expected = Map(
-        "u1" -> DataMap(u1),
-        "u2" -> DataMap(u2)
-      )
-
-      result must beEqualTo(expected)
-    }
-
-    "aggregate two entities' properties as PropertyMap correctly" in {
-      val events = Vector(u1e5, u2e2, u1e3, u1e1, u2e3, u2e1, u1e4, u1e2)
-      val result: Map[String, PropertyMap] =
-        LEventAggregator.aggregateProperties(events.toIterator)
-
-      val expected = Map(
-        "u1" -> PropertyMap(u1, u1BaseTime, u1LastTime),
-        "u2" -> PropertyMap(u2, u2BaseTime, u2LastTime)
-      )
-
-      result must beEqualTo(expected)
-    }
-
-
-    "aggregate deleted entity correctly" in {
-      val events = Vector(u1e5, u2e2, u1e3, u1ed, u1e1, u2e3, u2e1, u1e4, u1e2)
-
-      val result = LEventAggregator.aggregateProperties(events.toIterator)
-      val expected = Map(
-        "u2" -> PropertyMap(u2, u2BaseTime, u2LastTime)
-      )
-
-      result must beEqualTo(expected)
-    }
-  }
-
-
-  "LEventAggregator.aggregatePropertiesSingle()" should {
-
-    "aggregate single entity properties as DataMap correctly" in {
-        val events = Vector(u1e5, u1e3, u1e1, u1e4, u1e2)
-        val eventsIt = events.toIterator
-
-        val result: Option[DataMap] = LEventAggregator
-          .aggregatePropertiesSingle(eventsIt)
-        val expected = DataMap(u1)
-
-        result must beEqualTo(Some(expected))
-    }
-
-    "aggregate single entity properties as PropertyMap correctly" in {
-        val events = Vector(u1e5, u1e3, u1e1, u1e4, u1e2)
-        val eventsIt = events.toIterator
-
-        val result: Option[PropertyMap] = LEventAggregator
-          .aggregatePropertiesSingle(eventsIt)
-        val expected = PropertyMap(u1, u1BaseTime, u1LastTime)
-
-        result must beEqualTo(Some(expected))
-    }
-
-    "aggregate deleted entity correctly" in {
-      // put the delete event in the middle
-      val events = Vector(u1e4, u1e2, u1ed, u1e3, u1e1, u1e5)
-      val eventsIt = events.toIterator
-
-      val result = LEventAggregator.aggregatePropertiesSingle(eventsIt)
-
-      result must beEqualTo(None)
-    }
-  }
-}

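The aggregation under test folds the special events ($set, $unset, $delete) in
time order into an entity's final properties. A simplified sketch of that fold
over plain string maps (the real aggregator works on DataMap and also tracks
first/last timestamps for PropertyMap):

  object AggregateSketch {
    // None models a deleted (or never-created) entity; a later $set re-creates it.
    def aggregate(events: Seq[(String, Map[String, String])])
      : Option[Map[String, String]] =
      events.foldLeft(Option.empty[Map[String, String]]) {
        case (acc, ("$set", props))   => Some(acc.getOrElse(Map.empty) ++ props)
        case (acc, ("$unset", props)) => acc.map(_ -- props.keys)
        case (_,   ("$delete", _))    => None
        case (acc, _)                 => acc // other events leave properties alone
      }

    def main(args: Array[String]): Unit = {
      val out = aggregate(Seq(
        "$set"   -> Map("a" -> "1", "b" -> "2"),
        "$unset" -> Map("b" -> ""),
        "$set"   -> Map("c" -> "3")))
      assert(out == Some(Map("a" -> "1", "c" -> "3")))
    }
  }
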
http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/data/src/test/scala/io/prediction/data/storage/LEventsSpec.scala
----------------------------------------------------------------------
diff --git a/data/src/test/scala/io/prediction/data/storage/LEventsSpec.scala b/data/src/test/scala/io/prediction/data/storage/LEventsSpec.scala
deleted file mode 100644
index 5b38cdb..0000000
--- a/data/src/test/scala/io/prediction/data/storage/LEventsSpec.scala
+++ /dev/null
@@ -1,245 +0,0 @@
-/** Copyright 2015 TappingStone, Inc.
-  *
-  * Licensed under the Apache License, Version 2.0 (the "License");
-  * you may not use this file except in compliance with the License.
-  * You may obtain a copy of the License at
-  *
-  *     http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-package io.prediction.data.storage
-
-import org.specs2._
-import org.specs2.specification.Step
-
-class LEventsSpec extends Specification with TestEvents {
-  def is = s2"""
-
-  PredictionIO Storage LEvents Specification
-
-    Events can be implemented by:
-    - HBLEvents ${hbEvents}
-    - JDBCLEvents ${jdbcLEvents}
-
-  """
-
-  def hbEvents = sequential ^ s2"""
-
-    HBLEvents should
-    - behave like any LEvents implementation ${events(hbDO)}
-    - (table cleanup) ${Step(StorageTestUtils.dropHBaseNamespace(dbName))}
-
-  """
-
-  def jdbcLEvents = sequential ^ s2"""
-
-    JDBCLEvents should
-    - behave like any LEvents implementation ${events(jdbcDO)}
-
-  """
-
-  val appId = 1
-
-  def events(eventClient: LEvents) = sequential ^ s2"""
-
-    init default ${initDefault(eventClient)}
-    insert 3 test events and get back by event ID ${insertAndGetEvents(eventClient)}
-    insert 3 test events with timezone and get back by event ID ${insertAndGetTimezone(eventClient)}
-    insert and delete by ID ${insertAndDelete(eventClient)}
-    insert test user events ${insertTestUserEvents(eventClient)}
-    find user events ${findUserEvents(eventClient)}
-    aggregate user properties ${aggregateUserProperties(eventClient)}
-    aggregate one user properties ${aggregateOneUserProperties(eventClient)}
-    aggregate non-existent user properties ${aggregateNonExistentUserProperties(eventClient)}
-    init channel ${initChannel(eventClient)}
-    insert 2 events to channel ${insertChannel(eventClient)}
-    insert 1 event to channel and delete by ID  ${insertAndDeleteChannel(eventClient)}
-    find events from channel ${findChannel(eventClient)}
-    remove default ${removeDefault(eventClient)}
-    remove channel ${removeChannel(eventClient)}
-
-  """
-
-  val dbName = "test_pio_storage_events_" + hashCode
-  def hbDO = Storage.getDataObject[LEvents](
-    StorageTestUtils.hbaseSourceName,
-    dbName
-  )
-
-  def jdbcDO = Storage.getDataObject[LEvents](StorageTestUtils.jdbcSourceName, dbName)
-
-  def initDefault(eventClient: LEvents) = {
-    eventClient.init(appId)
-  }
-
-  def insertAndGetEvents(eventClient: LEvents) = {
-
-    // events from TestEvents trait
-    val listOfEvents = List(r1,r2,r3)
-
-    val insertResp = listOfEvents.map { eventClient.insert(_, appId) }
-
-    val insertedEventId: List[String] = insertResp
-
-    val insertedEvent: List[Option[Event]] = listOfEvents.zip(insertedEventId)
-      .map { case (e, id) => Some(e.copy(eventId = Some(id))) }
-
-    val getResp = insertedEventId.map { id => eventClient.get(id, appId) }
-
-    val getEvents = getResp
-
-    insertedEvent must containTheSameElementsAs(getEvents)
-  }
-
-  def insertAndGetTimezone(eventClient: LEvents) = {
-    val listOfEvents = List(tz1, tz2, tz3)
-
-    val insertResp = listOfEvents.map { eventClient.insert(_, appId) }
-
-    val insertedEventId: List[String] = insertResp
-
-    val insertedEvent: List[Option[Event]] = listOfEvents.zip(insertedEventId)
-      .map { case (e, id) => Some(e.copy(eventId = Some(id))) }
-
-    val getResp = insertedEventId.map { id => eventClient.get(id, appId) }
-
-    val getEvents = getResp
-
-    insertedEvent must containTheSameElementsAs(getEvents)
-  }
-
-  def insertAndDelete(eventClient: LEvents) = {
-    val eventId = eventClient.insert(r2, appId)
-
-    val resultBefore = eventClient.get(eventId, appId)
-
-    val expectedBefore = r2.copy(eventId = Some(eventId))
-
-    val deleteStatus = eventClient.delete(eventId, appId)
-
-    val resultAfter = eventClient.get(eventId, appId)
-
-    (resultBefore must beEqualTo(Some(expectedBefore))) and
-    (deleteStatus must beEqualTo(true)) and
-    (resultAfter must beEqualTo(None))
-  }
-
-  def insertTestUserEvents(eventClient: LEvents) = {
-    // events from TestEvents trait
-    val listOfEvents = Vector(u1e5, u2e2, u1e3, u1e1, u2e3, u2e1, u1e4, u1e2)
-
-    listOfEvents.map{ eventClient.insert(_, appId) }
-
-    success
-  }
-
-  def findUserEvents(eventClient: LEvents) = {
-
-    val results: List[Event] = eventClient.find(
-      appId = appId,
-      entityType = Some("user"))
-      .toList
-      .map(e => e.copy(eventId = None)) // ignore eventID
-
-    // same events in insertTestUserEvents
-    val expected = List(u1e5, u2e2, u1e3, u1e1, u2e3, u2e1, u1e4, u1e2)
-
-    results must containTheSameElementsAs(expected)
-  }
-
-  def aggregateUserProperties(eventClient: LEvents) = {
-
-    val result: Map[String, PropertyMap] = eventClient.aggregateProperties(
-      appId = appId,
-      entityType = "user")
-
-    val expected = Map(
-      "u1" -> PropertyMap(u1, u1BaseTime, u1LastTime),
-      "u2" -> PropertyMap(u2, u2BaseTime, u2LastTime)
-    )
-
-    result must beEqualTo(expected)
-  }
-
-  def aggregateOneUserProperties(eventClient: LEvents) = {
-    val result: Option[PropertyMap] = eventClient.aggregatePropertiesOfEntity(
-      appId = appId,
-      entityType = "user",
-      entityId = "u1")
-
-    val expected = Some(PropertyMap(u1, u1BaseTime, u1LastTime))
-
-    result must beEqualTo(expected)
-  }
-
-  def aggregateNonExistentUserProperties(eventClient: LEvents) = {
-    val result: Option[PropertyMap] = eventClient.aggregatePropertiesOfEntity(
-      appId = appId,
-      entityType = "user",
-      entityId = "u999999")
-
-    result must beEqualTo(None)
-  }
-
-  val channelId = 12
-
-  def initChannel(eventClient: LEvents) = {
-    eventClient.init(appId, Some(channelId))
-  }
-
-  def insertChannel(eventClient: LEvents) = {
-
-    // events from TestEvents trait
-    val listOfEvents = List(r4,r5)
-
-    listOfEvents.map( eventClient.insert(_, appId, Some(channelId)) )
-
-    success
-  }
-
-  def insertAndDeleteChannel(eventClient: LEvents) = {
-
-    val eventId = eventClient.insert(r2, appId, Some(channelId))
-
-    val resultBefore = eventClient.get(eventId, appId, Some(channelId))
-
-    val expectedBefore = r2.copy(eventId = Some(eventId))
-
-    val deleteStatus = eventClient.delete(eventId, appId, Some(channelId))
-
-    val resultAfter = eventClient.get(eventId, appId, Some(channelId))
-
-    (resultBefore must beEqualTo(Some(expectedBefore))) and
-    (deleteStatus must beEqualTo(true)) and
-    (resultAfter must beEqualTo(None))
-  }
-
-  def findChannel(eventClient: LEvents) = {
-
-    val results: List[Event] = eventClient.find(
-      appId = appId,
-      channelId = Some(channelId)
-    )
-    .toList
-    .map(e => e.copy(eventId = None)) // ignore eventId
-
-    // same events in insertChannel
-    val expected = List(r4, r5)
-
-    results must containTheSameElementsAs(expected)
-  }
-
-  def removeDefault(eventClient: LEvents) = {
-    eventClient.remove(appId)
-  }
-
-  def removeChannel(eventClient: LEvents) = {
-    eventClient.remove(appId, Some(channelId))
-  }
-}

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/data/src/test/scala/io/prediction/data/storage/PEventAggregatorSpec.scala
----------------------------------------------------------------------
diff --git a/data/src/test/scala/io/prediction/data/storage/PEventAggregatorSpec.scala b/data/src/test/scala/io/prediction/data/storage/PEventAggregatorSpec.scala
deleted file mode 100644
index b00ec7c..0000000
--- a/data/src/test/scala/io/prediction/data/storage/PEventAggregatorSpec.scala
+++ /dev/null
@@ -1,72 +0,0 @@
-/** Copyright 2015 TappingStone, Inc.
-  *
-  * Licensed under the Apache License, Version 2.0 (the "License");
-  * you may not use this file except in compliance with the License.
-  * You may obtain a copy of the License at
-  *
-  *     http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-package io.prediction.data.storage
-
-import org.specs2.mutable._
-
-import org.apache.spark.SparkContext
-import org.apache.spark.SparkContext._
-import org.apache.spark.rdd.RDD
-
-
-class PEventAggregatorSpec extends Specification with TestEvents {
-
-  System.clearProperty("spark.driver.port")
-  System.clearProperty("spark.hostPort")
-  val sc = new SparkContext("local[4]", "PEventAggregatorSpec test")
-
-  "PEventAggregator" should {
-
-    "aggregate two entities' properties as DataMap/PropertyMap correctly" in {
-      val events = sc.parallelize(Seq(
-        u1e5, u2e2, u1e3, u1e1, u2e3, u2e1, u1e4, u1e2))
-
-      val users = PEventAggregator.aggregateProperties(events)
-
-      val userMap = users.collectAsMap.toMap
-      val expectedDM = Map(
-        "u1" -> DataMap(u1),
-        "u2" -> DataMap(u2)
-      )
-
-      val expectedPM = Map(
-        "u1" -> PropertyMap(u1, u1BaseTime, u1LastTime),
-        "u2" -> PropertyMap(u2, u2BaseTime, u2LastTime)
-      )
-
-      userMap must beEqualTo(expectedDM)
-      userMap must beEqualTo(expectedPM)
-    }
-
-    "aggregate deleted entity correctly" in {
-      // put the delete event in the middle
-      val events = sc.parallelize(Seq(
-        u1e5, u2e2, u1e3, u1ed, u1e1, u2e3, u2e1, u1e4, u1e2))
-
-      val users = PEventAggregator.aggregateProperties(events)
-
-      val userMap = users.collectAsMap.toMap
-      val expectedPM = Map(
-        "u2" -> PropertyMap(u2, u2BaseTime, u2LastTime)
-      )
-
-      userMap must beEqualTo(expectedPM)
-    }
-
-  }
-
-  step(sc.stop())
-}



[22/34] incubator-predictionio git commit: rename all except examples

Posted by do...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/core/src/main/scala/org/apache/predictionio/controller/Engine.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/predictionio/controller/Engine.scala b/core/src/main/scala/org/apache/predictionio/controller/Engine.scala
new file mode 100644
index 0000000..c875a9f
--- /dev/null
+++ b/core/src/main/scala/org/apache/predictionio/controller/Engine.scala
@@ -0,0 +1,829 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package org.apache.predictionio.controller
+
+import grizzled.slf4j.Logger
+import org.apache.predictionio.core.BaseAlgorithm
+import org.apache.predictionio.core.BaseDataSource
+import org.apache.predictionio.core.BaseEngine
+import org.apache.predictionio.core.BasePreparator
+import org.apache.predictionio.core.BaseServing
+import org.apache.predictionio.core.Doer
+import org.apache.predictionio.data.storage.EngineInstance
+import org.apache.predictionio.data.storage.StorageClientException
+import org.apache.predictionio.workflow.CreateWorkflow
+import org.apache.predictionio.workflow.EngineLanguage
+import org.apache.predictionio.workflow.JsonExtractorOption.JsonExtractorOption
+import org.apache.predictionio.workflow.NameParamsSerializer
+import org.apache.predictionio.workflow.PersistentModelManifest
+import org.apache.predictionio.workflow.SparkWorkflowUtils
+import org.apache.predictionio.workflow.StopAfterPrepareInterruption
+import org.apache.predictionio.workflow.StopAfterReadInterruption
+import org.apache.predictionio.workflow.WorkflowParams
+import org.apache.predictionio.workflow.WorkflowUtils
+import org.apache.spark.SparkContext
+import org.apache.spark.SparkContext._
+import org.apache.spark.rdd.RDD
+import org.json4s._
+import org.json4s.native.JsonMethods._
+import org.json4s.native.Serialization.read
+
+import scala.collection.JavaConversions
+import scala.language.implicitConversions
+
+/** This class chains up the entire data process. PredictionIO uses this
+  * information to create workflows and deployments. In Scala, you should
+  * implement an object that extends the [[EngineFactory]] trait similar to the
+  * following example.
+  *
+  * {{{
+  * object ItemRankEngine extends EngineFactory {
+  *   def apply() = {
+  *     new Engine(
+  *       classOf[ItemRankDataSource],
+  *       classOf[ItemRankPreparator],
+  *       Map(
+  *         "knn" -> classOf[KNNAlgorithm],
+  *         "rand" -> classOf[RandomAlgorithm],
+  *         "mahoutItemBased" -> classOf[MahoutItemBasedAlgorithm]),
+  *       classOf[ItemRankServing])
+  *   }
+  * }
+  * }}}
+  *
+  * @see [[EngineFactory]]
+  * @tparam TD Training data class.
+  * @tparam EI Evaluation info class.
+  * @tparam PD Prepared data class.
+  * @tparam Q Input query class.
+  * @tparam P Output prediction class.
+  * @tparam A Actual value class.
+  * @param dataSourceClassMap Map of data source names to class.
+  * @param preparatorClassMap Map of preparator names to class.
+  * @param algorithmClassMap Map of algorithm names to classes.
+  * @param servingClassMap Map of serving names to class.
+  * @group Engine
+  */
+class Engine[TD, EI, PD, Q, P, A](
+    val dataSourceClassMap: Map[String,
+      Class[_ <: BaseDataSource[TD, EI, Q, A]]],
+    val preparatorClassMap: Map[String, Class[_ <: BasePreparator[TD, PD]]],
+    val algorithmClassMap: Map[String, Class[_ <: BaseAlgorithm[PD, _, Q, P]]],
+    val servingClassMap: Map[String, Class[_ <: BaseServing[Q, P]]])
+  extends BaseEngine[EI, Q, P, A] {
+
+  private[predictionio]
+  implicit lazy val formats = Utils.json4sDefaultFormats +
+    new NameParamsSerializer
+
+  @transient lazy protected val logger = Logger[this.type]
+
+  /** This auxiliary constructor is provided for backward compatibility.
+    *
+    * @param dataSourceClass Data source class.
+    * @param preparatorClass Preparator class.
+    * @param algorithmClassMap Map of algorithm names to classes.
+    * @param servingClass Serving class.
+    */
+  def this(
+    dataSourceClass: Class[_ <: BaseDataSource[TD, EI, Q, A]],
+    preparatorClass: Class[_ <: BasePreparator[TD, PD]],
+    algorithmClassMap: Map[String, Class[_ <: BaseAlgorithm[PD, _, Q, P]]],
+    servingClass: Class[_ <: BaseServing[Q, P]]) = this(
+      Map("" -> dataSourceClass),
+      Map("" -> preparatorClass),
+      algorithmClassMap,
+      Map("" -> servingClass)
+    )
+
+  /** Java-friendly constructor
+    *
+    * @param dataSourceClass Data source class.
+    * @param preparatorClass Preparator class.
+    * @param algorithmClassMap Map of algorithm names to classes.
+    * @param servingClass Serving class.
+    */
+  def this(dataSourceClass: Class[_ <: BaseDataSource[TD, EI, Q, A]],
+    preparatorClass: Class[_ <: BasePreparator[TD, PD]],
+    algorithmClassMap: _root_.java.util.Map[String, Class[_ <: BaseAlgorithm[PD, _, Q, P]]],
+    servingClass: Class[_ <: BaseServing[Q, P]]) = this(
+    Map("" -> dataSourceClass),
+    Map("" -> preparatorClass),
+    JavaConversions.mapAsScalaMap(algorithmClassMap).toMap,
+    Map("" -> servingClass)
+  )
+
+  /** Returns a new Engine instance, mimicking a case class's copy method behavior.
+    */
+  def copy(
+    dataSourceClassMap: Map[String, Class[_ <: BaseDataSource[TD, EI, Q, A]]]
+      = dataSourceClassMap,
+    preparatorClassMap: Map[String, Class[_ <: BasePreparator[TD, PD]]]
+      = preparatorClassMap,
+    algorithmClassMap: Map[String, Class[_ <: BaseAlgorithm[PD, _, Q, P]]]
+      = algorithmClassMap,
+    servingClassMap: Map[String, Class[_ <: BaseServing[Q, P]]]
+      = servingClassMap): Engine[TD, EI, PD, Q, P, A] = {
+    new Engine(
+      dataSourceClassMap,
+      preparatorClassMap,
+      algorithmClassMap,
+      servingClassMap)
+  }
+
+  /** Trains this engine and returns a list of models.
+    *
+    * @param sc An instance of SparkContext.
+    * @param engineParams An instance of [[EngineParams]] for running a single training.
+    * @param engineInstanceId ID of the engine instance being trained.
+    * @param params An instance of [[WorkflowParams]] that controls the workflow.
+    * @return A list of models.
+    */
+  def train(
+      sc: SparkContext,
+      engineParams: EngineParams,
+      engineInstanceId: String,
+      params: WorkflowParams): Seq[Any] = {
+    val (dataSourceName, dataSourceParams) = engineParams.dataSourceParams
+    val dataSource = Doer(dataSourceClassMap(dataSourceName), dataSourceParams)
+
+    val (preparatorName, preparatorParams) = engineParams.preparatorParams
+    val preparator = Doer(preparatorClassMap(preparatorName), preparatorParams)
+
+    val algoParamsList = engineParams.algorithmParamsList
+    require(
+      algoParamsList.size > 0,
+      "EngineParams.algorithmParamsList must have at least 1 element.")
+
+    val algorithms = algoParamsList.map { case (algoName, algoParams) =>
+      Doer(algorithmClassMap(algoName), algoParams)
+    }
+
+    val models = Engine.train(
+      sc, dataSource, preparator, algorithms, params)
+
+    val algoCount = algorithms.size
+    val algoTuples: Seq[(String, Params, BaseAlgorithm[_, _, _, _], Any)] =
+    (0 until algoCount).map { ax => {
+      val (name, params) = algoParamsList(ax)
+      (name, params, algorithms(ax), models(ax))
+    }}
+
+    makeSerializableModels(
+      sc,
+      engineInstanceId = engineInstanceId,
+      algoTuples = algoTuples)
+  }
+
+  /** Algorithm models can be persisted before deploy. However, it is also
+    * possible that models are not persisted. This method retrains non-persisted
+    * models and returns a list of models that can be used directly in deploy.
+    */
+  private[predictionio]
+  def prepareDeploy(
+    sc: SparkContext,
+    engineParams: EngineParams,
+    engineInstanceId: String,
+    persistedModels: Seq[Any],
+    params: WorkflowParams): Seq[Any] = {
+
+    val algoParamsList = engineParams.algorithmParamsList
+    val algorithms = algoParamsList.map { case (algoName, algoParams) =>
+      Doer(algorithmClassMap(algoName), algoParams)
+    }
+
+    val models = if (persistedModels.exists(m => m.isInstanceOf[Unit.type])) {
+      // If any of persistedModels is Unit, we need to re-train the model.
+      logger.info("Some persisted models are Unit, need to re-train.")
+      val (dataSourceName, dataSourceParams) = engineParams.dataSourceParams
+      val dataSource = Doer(dataSourceClassMap(dataSourceName), dataSourceParams)
+
+      val (preparatorName, preparatorParams) = engineParams.preparatorParams
+      val preparator = Doer(preparatorClassMap(preparatorName), preparatorParams)
+
+      val td = dataSource.readTrainingBase(sc)
+      val pd = preparator.prepareBase(sc, td)
+
+      val models = algorithms.zip(persistedModels).map { case (algo, m) =>
+        m match {
+          case Unit => algo.trainBase(sc, pd)
+          case _ => m
+        }
+      }
+      models
+    } else {
+      logger.info("Using persisted model")
+      persistedModels
+    }
+
+    models
+    .zip(algorithms)
+    .zip(algoParamsList)
+    .zipWithIndex
+    .map {
+      case (((model, algo), (algoName, algoParams)), ax) => {
+        model match {
+          case modelManifest: PersistentModelManifest => {
+            logger.info("Custom-persisted model detected for algorithm " +
+              algo.getClass.getName)
+            SparkWorkflowUtils.getPersistentModel(
+              modelManifest,
+              Seq(engineInstanceId, ax, algoName).mkString("-"),
+              algoParams,
+              Some(sc),
+              getClass.getClassLoader)
+          }
+          case m => {
+            try {
+              logger.info(
+                s"Loaded model ${m.getClass.getName} for algorithm " +
+                s"${algo.getClass.getName}")
+              sc.stop
+              m
+            } catch {
+              case e: NullPointerException =>
+                logger.warn(
+                  s"Null model detected for algorithm ${algo.getClass.getName}")
+                m
+            }
+          }
+        }  // model match
+      }
+    }
+  }
+
+  /** Extracts models for the persistence layer.
+    *
+    * PredictionIO persists models for future use. It allows custom
+    * implementation for persisting models. You need to implement the
+    * [[org.apache.predictionio.controller.PersistentModel]] interface. This method
+    * traverses all models in the workflow. If the model is a
+    * [[org.apache.predictionio.controller.PersistentModel]], it calls the save method
+    * for custom persistence logic.
+    *
+    * For models that do not support custom logic, PredictionIO serializes the
+    * whole model if the corresponding algorithm is local. On the other hand, if
+    * the model is parallel (i.e. the model is associated with a number of huge
+    * RDDs), this method returns Unit, in which case PredictionIO will retrain
+    * the whole model from scratch the next time it is used.
+    */
+  private def makeSerializableModels(
+    sc: SparkContext,
+    engineInstanceId: String,
+    // AlgoName, Algo, Model
+    algoTuples: Seq[(String, Params, BaseAlgorithm[_, _, _, _], Any)]
+  ): Seq[Any] = {
+
+    logger.info(s"engineInstanceId=$engineInstanceId")
+
+    algoTuples
+    .zipWithIndex
+    .map { case ((name, params, algo, model), ax) =>
+      algo.makePersistentModel(
+        sc = sc,
+        modelId = Seq(engineInstanceId, ax, name).mkString("-"),
+        algoParams = params,
+        bm = model)
+    }
+  }
+
+  /** This is implemented such that [[org.apache.predictionio.controller.Evaluation]] can
+    * use this method to generate inputs for [[org.apache.predictionio.controller.Metric]].
+    *
+    * @param sc An instance of SparkContext.
+    * @param engineParams An instance of [[EngineParams]] for running a single evaluation.
+    * @param params An instance of [[WorkflowParams]] that controls the workflow.
+    * @return A list of evaluation information and RDD of query, predicted
+    *         result, and actual result tuples.
+    */
+  def eval(
+    sc: SparkContext,
+    engineParams: EngineParams,
+    params: WorkflowParams)
+  : Seq[(EI, RDD[(Q, P, A)])] = {
+    val (dataSourceName, dataSourceParams) = engineParams.dataSourceParams
+    val dataSource = Doer(dataSourceClassMap(dataSourceName), dataSourceParams)
+
+    val (preparatorName, preparatorParams) = engineParams.preparatorParams
+    val preparator = Doer(preparatorClassMap(preparatorName), preparatorParams)
+
+    val algoParamsList = engineParams.algorithmParamsList
+    require(
+      algoParamsList.size > 0,
+      "EngineParams.algorithmParamsList must have at least 1 element.")
+
+    val algorithms = algoParamsList.map { case (algoName, algoParams) => {
+      try {
+        Doer(algorithmClassMap(algoName), algoParams)
+      } catch {
+        case e: NoSuchElementException => {
+          if (algoName == "") {
+            logger.error("Empty algorithm name supplied but it could not " +
+              "match with any algorithm in the engine's definition. " +
+              "Existing algorithm name(s) are: " +
+              s"${algorithmClassMap.keys.mkString(", ")}. Aborting.")
+          } else {
+            logger.error(s"$algoName cannot be found in the engine's " +
+              "definition. Existing algorithm name(s) are: " +
+              s"${algorithmClassMap.keys.mkString(", ")}. Aborting.")
+          }
+          sys.exit(1)
+        }
+      }
+    }}
+
+    val (servingName, servingParams) = engineParams.servingParams
+    val serving = Doer(servingClassMap(servingName), servingParams)
+
+    Engine.eval(sc, dataSource, preparator, algorithms, serving)
+  }
+
+  override def jValueToEngineParams(
+    variantJson: JValue,
+    jsonExtractor: JsonExtractorOption): EngineParams = {
+
+    val engineLanguage = EngineLanguage.Scala
+    // Extract EngineParams
+    logger.info(s"Extracting datasource params...")
+    val dataSourceParams: (String, Params) =
+      WorkflowUtils.getParamsFromJsonByFieldAndClass(
+        variantJson,
+        "datasource",
+        dataSourceClassMap,
+        engineLanguage,
+        jsonExtractor)
+    logger.info(s"Datasource params: $dataSourceParams")
+
+    logger.info(s"Extracting preparator params...")
+    val preparatorParams: (String, Params) =
+      WorkflowUtils.getParamsFromJsonByFieldAndClass(
+        variantJson,
+        "preparator",
+        preparatorClassMap,
+        engineLanguage,
+        jsonExtractor)
+    logger.info(s"Preparator params: $preparatorParams")
+
+    val algorithmsParams: Seq[(String, Params)] =
+      variantJson findField {
+        case JField("algorithms", _) => true
+        case _ => false
+      } map { jv =>
+        val algorithmsParamsJson = jv._2
+        algorithmsParamsJson match {
+          case JArray(s) => s.map { algorithmParamsJValue =>
+            val eap = algorithmParamsJValue.extract[CreateWorkflow.AlgorithmParams]
+            (
+              eap.name,
+              WorkflowUtils.extractParams(
+                engineLanguage,
+                compact(render(eap.params)),
+                algorithmClassMap(eap.name),
+                jsonExtractor)
+            )
+          }
+          case _ => Nil
+        }
+      } getOrElse Seq(("", EmptyParams()))
+
+    logger.info(s"Extracting serving params...")
+    val servingParams: (String, Params) =
+      WorkflowUtils.getParamsFromJsonByFieldAndClass(
+        variantJson,
+        "serving",
+        servingClassMap,
+        engineLanguage,
+        jsonExtractor)
+    logger.info(s"Serving params: $servingParams")
+
+    new EngineParams(
+      dataSourceParams = dataSourceParams,
+      preparatorParams = preparatorParams,
+      algorithmParamsList = algorithmsParams,
+      servingParams = servingParams)
+  }
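+
+  // A sketch of the variant JSON that jValueToEngineParams reads, matching the
+  // field names extracted above (component names and params are hypothetical;
+  // unnamed components are registered under ""):
+  //   {
+  //     "datasource": { "params": { "appId": 1 } },
+  //     "preparator": { "params": {} },
+  //     "algorithms": [ { "name": "algo", "params": { "rank": 10 } } ],
+  //     "serving": { "params": {} }
+  //   }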
+
+  private[predictionio] def engineInstanceToEngineParams(
+    engineInstance: EngineInstance,
+    jsonExtractor: JsonExtractorOption): EngineParams = {
+
+    implicit val formats = DefaultFormats
+    val engineLanguage = EngineLanguage.Scala
+
+    val dataSourceParamsWithName: (String, Params) = {
+      val (name, params) =
+        read[(String, JValue)](engineInstance.dataSourceParams)
+      if (!dataSourceClassMap.contains(name)) {
+        logger.error(s"Unable to find datasource class with name '$name'" +
+          " defined in Engine.")
+        sys.exit(1)
+      }
+      val extractedParams = WorkflowUtils.extractParams(
+        engineLanguage,
+        compact(render(params)),
+        dataSourceClassMap(name),
+        jsonExtractor)
+      (name, extractedParams)
+    }
+
+    val preparatorParamsWithName: (String, Params) = {
+      val (name, params) =
+        read[(String, JValue)](engineInstance.preparatorParams)
+      if (!preparatorClassMap.contains(name)) {
+        logger.error(s"Unable to find preparator class with name '$name'" +
+          " defined in Engine.")
+        sys.exit(1)
+      }
+      val extractedParams = WorkflowUtils.extractParams(
+        engineLanguage,
+        compact(render(params)),
+        preparatorClassMap(name),
+        jsonExtractor)
+      (name, extractedParams)
+    }
+
+    val algorithmsParamsWithNames =
+      read[Seq[(String, JValue)]](engineInstance.algorithmsParams).map {
+        case (algoName, params) =>
+          val extractedParams = WorkflowUtils.extractParams(
+            engineLanguage,
+            compact(render(params)),
+            algorithmClassMap(algoName),
+            jsonExtractor)
+          (algoName, extractedParams)
+      }
+
+    val servingParamsWithName: (String, Params) = {
+      val (name, params) = read[(String, JValue)](engineInstance.servingParams)
+      if (!servingClassMap.contains(name)) {
+        logger.error(s"Unable to find serving class with name '$name'" +
+          " defined in Engine.")
+        sys.exit(1)
+      }
+      val extractedParams = WorkflowUtils.extractParams(
+        engineLanguage,
+        compact(render(params)),
+        servingClassMap(name),
+        jsonExtractor)
+      (name, extractedParams)
+    }
+
+    new EngineParams(
+      dataSourceParams = dataSourceParamsWithName,
+      preparatorParams = preparatorParamsWithName,
+      algorithmParamsList = algorithmsParamsWithNames,
+      servingParams = servingParamsWithName)
+  }
+}
+
+/** This object contains concrete implementation for some methods of the
+  * [[Engine]] class.
+  *
+  * @group Engine
+  */
+object Engine {
+  private type EX = Int
+  private type AX = Int
+  private type QX = Long
+
+  @transient lazy private val logger = Logger[this.type]
+
+  /** Helper class to accept either a single data source, or a map of data
+    * sources, with a companion object providing implicit conversions, so
+    * using this class directly is not necessary.
+    *
+    * @tparam TD Training data class
+    * @tparam EI Evaluation information class
+    * @tparam Q Input query class
+    * @tparam A Actual result class
+    */
+  class DataSourceMap[TD, EI, Q, A](
+    val m: Map[String, Class[_ <: BaseDataSource[TD, EI, Q, A]]]) {
+    def this(c: Class[_ <: BaseDataSource[TD, EI, Q, A]]) = this(Map("" -> c))
+  }
+
+  /** Companion object providing implicit conversions, so using this directly
+    * is not necessary.
+    */
+  object DataSourceMap {
+    implicit def cToMap[TD, EI, Q, A](
+      c: Class[_ <: BaseDataSource[TD, EI, Q, A]]):
+      DataSourceMap[TD, EI, Q, A] = new DataSourceMap(c)
+    implicit def mToMap[TD, EI, Q, A](
+      m: Map[String, Class[_ <: BaseDataSource[TD, EI, Q, A]]]):
+      DataSourceMap[TD, EI, Q, A] = new DataSourceMap(m)
+  }
+
+  /** Helper class to accept either a single preparator, or a map of
+    * preparators, with a companion object providing implicit conversions, so
+    * using this class directly is not necessary.
+    *
+    * @tparam TD Training data class
+    * @tparam PD Prepared data class
+    */
+  class PreparatorMap[TD, PD](
+    val m: Map[String, Class[_ <: BasePreparator[TD, PD]]]) {
+    def this(c: Class[_ <: BasePreparator[TD, PD]]) = this(Map("" -> c))
+  }
+
+  /** Companion object providing implicit conversions, so using this directly
+    * is not necessary.
+    */
+  object PreparatorMap {
+    implicit def cToMap[TD, PD](
+      c: Class[_ <: BasePreparator[TD, PD]]):
+      PreparatorMap[TD, PD] = new PreparatorMap(c)
+    implicit def mToMap[TD, PD](
+      m: Map[String, Class[_ <: BasePreparator[TD, PD]]]):
+      PreparatorMap[TD, PD] = new PreparatorMap(m)
+  }
+
+  /** Helper class to accept either a single serving, or a map of serving, with
+    * a companion object providing implicit conversions, so using this class
+    * directly is not necessary.
+    *
+    * @tparam Q Input query class
+    * @tparam P Predicted result class
+    */
+  class ServingMap[Q, P](
+    val m: Map[String, Class[_ <: BaseServing[Q, P]]]) {
+    def this(c: Class[_ <: BaseServing[Q, P]]) = this(Map("" -> c))
+  }
+
+  /** Companion object providing implicit conversions, so using this directly
+    * is not necessary.
+    */
+  object ServingMap {
+    implicit def cToMap[Q, P](
+      c: Class[_ <: BaseServing[Q, P]]): ServingMap[Q, P] =
+        new ServingMap(c)
+    implicit def mToMap[Q, P](
+      m: Map[String, Class[_ <: BaseServing[Q, P]]]): ServingMap[Q, P] =
+        new ServingMap(m)
+  }
+
+  /** Convenient method for returning an instance of [[Engine]].
+    *
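+    * A minimal sketch using the implicit conversions, assuming hypothetical
+    * SmallDataSource, LargeDataSource, MyPreparator, MyAlgorithm, and
+    * MyServing controller classes:
+    *
+    * {{{
+    * val engine = Engine(
+    *   Map("small" -> classOf[SmallDataSource],
+    *       "large" -> classOf[LargeDataSource]),
+    *   classOf[MyPreparator],
+    *   Map("my-algo" -> classOf[MyAlgorithm]),
+    *   classOf[MyServing])
+    * }}}
+    *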
+    * @param dataSourceMap Accepts either an instance of Class of the data
+    *                      source, or a Map of data source classes (implicitly
+    *                      converted to [[DataSourceMap]]).
+    * @param preparatorMap Accepts either an instance of Class of the
+    *                      preparator, or a Map of preparator classes
+    *                      (implicitly converted to [[PreparatorMap]]).
+    * @param algorithmClassMap Accepts a Map of algorithm classes.
+    * @param servingMap Accepts either an instance of Class of the serving, or
+    *                   a Map of serving classes (implicitly converted to
+    *                   [[ServingMap]]).
+    * @tparam TD Training data class
+    * @tparam EI Evaluation information class
+    * @tparam PD Prepared data class
+    * @tparam Q Input query class
+    * @tparam P Predicted result class
+    * @tparam A Actual result class
+    * @return An instance of [[Engine]]
+    */
+  def apply[TD, EI, PD, Q, P, A](
+    dataSourceMap: DataSourceMap[TD, EI, Q, A],
+    preparatorMap: PreparatorMap[TD, PD],
+    algorithmClassMap: Map[String, Class[_ <: BaseAlgorithm[PD, _, Q, P]]],
+    servingMap: ServingMap[Q, P]): Engine[TD, EI, PD, Q, P, A] = new Engine(
+      dataSourceMap.m,
+      preparatorMap.m,
+      algorithmClassMap,
+      servingMap.m
+    )
+
+  /** Provides concrete implementation of training for [[Engine]].
+    *
+    * @param sc An instance of SparkContext
+    * @param dataSource An instance of data source
+    * @param preparator An instance of preparator
+    * @param algorithmList A list of algorithm instances
+    * @param params An instance of [[WorkflowParams]] that controls the training
+    *               process.
+    * @tparam TD Training data class
+    * @tparam PD Prepared data class
+    * @tparam Q Input query class
+    * @return A list of trained models
+    */
+  def train[TD, PD, Q](
+      sc: SparkContext,
+      dataSource: BaseDataSource[TD, _, Q, _],
+      preparator: BasePreparator[TD, PD],
+      algorithmList: Seq[BaseAlgorithm[PD, _, Q, _]],
+      params: WorkflowParams
+    ): Seq[Any] = {
+    logger.info("EngineWorkflow.train")
+    logger.info(s"DataSource: $dataSource")
+    logger.info(s"Preparator: $preparator")
+    logger.info(s"AlgorithmList: $algorithmList")
+
+    if (params.skipSanityCheck) {
+      logger.info("Data sanity check is off.")
+    } else {
+      logger.info("Data sanity check is on.")
+    }
+
+    val td = try {
+      dataSource.readTrainingBase(sc)
+    } catch {
+      case e: StorageClientException =>
+        logger.error(s"Error occured reading from data source. (Reason: " +
+          e.getMessage + ") Please see the log for debugging details.", e)
+        sys.exit(1)
+    }
+
+    if (!params.skipSanityCheck) {
+      td match {
+        case sanityCheckable: SanityCheck => {
+          logger.info(s"${td.getClass.getName} supports data sanity" +
+            " check. Performing check.")
+          sanityCheckable.sanityCheck()
+        }
+        case _ => {
+          logger.info(s"${td.getClass.getName} does not support" +
+            " data sanity check. Skipping check.")
+        }
+      }
+    }
+
+    if (params.stopAfterRead) {
+      logger.info("Stopping here because --stop-after-read is set.")
+      throw StopAfterReadInterruption()
+    }
+
+    val pd = preparator.prepareBase(sc, td)
+
+    if (!params.skipSanityCheck) {
+      pd match {
+        case sanityCheckable: SanityCheck => {
+          logger.info(s"${pd.getClass.getName} supports data sanity" +
+            " check. Performing check.")
+          sanityCheckable.sanityCheck()
+        }
+        case _ => {
+          logger.info(s"${pd.getClass.getName} does not support" +
+            " data sanity check. Skipping check.")
+        }
+      }
+    }
+
+    if (params.stopAfterPrepare) {
+      logger.info("Stopping here because --stop-after-prepare is set.")
+      throw StopAfterPrepareInterruption()
+    }
+
+    val models: Seq[Any] = algorithmList.map(_.trainBase(sc, pd))
+
+    if (!params.skipSanityCheck) {
+      models.foreach { model => {
+        model match {
+          case sanityCheckable: SanityCheck => {
+            logger.info(s"${model.getClass.getName} supports data sanity" +
+              " check. Performing check.")
+            sanityCheckable.sanityCheck()
+          }
+          case _ => {
+            logger.info(s"${model.getClass.getName} does not support" +
+              " data sanity check. Skipping check.")
+          }
+        }
+      }}
+    }
+
+    logger.info("EngineWorkflow.train completed")
+    models
+  }
+
+  /** Provides concrete implementation of evaluation for [[Engine]].
+    *
+    * @param sc An instance of SparkContext
+    * @param dataSource An instance of data source
+    * @param preparator An instance of preparator
+    * @param algorithmList A list of algorithm instances
+    * @param serving An instance of serving
+    * @tparam TD Training data class
+    * @tparam PD Prepared data class
+    * @tparam Q Input query class
+    * @tparam P Predicted result class
+    * @tparam A Actual result class
+    * @tparam EI Evaluation information class
+    * @return A list of tuples, each containing evaluation information and an
+    *         RDD of (query, predicted result, actual result) tuples.
+    */
+  def eval[TD, PD, Q, P, A, EI](
+      sc: SparkContext,
+      dataSource: BaseDataSource[TD, EI, Q, A],
+      preparator: BasePreparator[TD, PD],
+      algorithmList: Seq[BaseAlgorithm[PD, _, Q, P]],
+      serving: BaseServing[Q, P]): Seq[(EI, RDD[(Q, P, A)])] = {
+    logger.info(s"DataSource: $dataSource")
+    logger.info(s"Preparator: $preparator")
+    logger.info(s"AlgorithmList: $algorithmList")
+    logger.info(s"Serving: $serving")
+
+    val algoMap: Map[AX, BaseAlgorithm[PD, _, Q, P]] = algorithmList
+      .zipWithIndex
+      .map(_.swap)
+      .toMap
+    val algoCount = algoMap.size
+
+    val evalTupleMap: Map[EX, (TD, EI, RDD[(Q, A)])] = dataSource
+      .readEvalBase(sc)
+      .zipWithIndex
+      .map(_.swap)
+      .toMap
+
+    val evalCount = evalTupleMap.size
+
+    val evalTrainMap: Map[EX, TD] = evalTupleMap.mapValues(_._1)
+    val evalInfoMap: Map[EX, EI] = evalTupleMap.mapValues(_._2)
+    val evalQAsMap: Map[EX, RDD[(QX, (Q, A))]] = evalTupleMap
+      .mapValues(_._3)
+      .mapValues{ _.zipWithUniqueId().map(_.swap) }
+
+    val preparedMap: Map[EX, PD] = evalTrainMap.mapValues { td => {
+      preparator.prepareBase(sc, td)
+    }}
+
+    val algoModelsMap: Map[EX, Map[AX, Any]] = preparedMap.mapValues { pd => {
+      algoMap.mapValues(_.trainBase(sc, pd))
+    }}
+
+    val suppQAsMap: Map[EX, RDD[(QX, (Q, A))]] = evalQAsMap.mapValues { qas =>
+      qas.map { case (qx, (q, a)) => (qx, (serving.supplementBase(q), a)) }
+    }
+
+    val algoPredictsMap: Map[EX, RDD[(QX, Seq[P])]] = (0 until evalCount)
+    .map { ex => {
+      val modelMap: Map[AX, Any] = algoModelsMap(ex)
+
+      val qs: RDD[(QX, Q)] = suppQAsMap(ex).mapValues(_._1)
+
+      val algoPredicts: Seq[RDD[(QX, (AX, P))]] = (0 until algoCount)
+      .map { ax => {
+        val algo = algoMap(ax)
+        val model = modelMap(ax)
+        val rawPredicts: RDD[(QX, P)] = algo.batchPredictBase(sc, model, qs)
+        val predicts: RDD[(QX, (AX, P))] = rawPredicts.map { case (qx, p) => {
+          (qx, (ax, p))
+        }}
+        predicts
+      }}
+
+      val unionAlgoPredicts: RDD[(QX, Seq[P])] = sc.union(algoPredicts)
+      .groupByKey()
+      .mapValues { ps => {
+        assert (ps.size == algoCount, "Must have same length as algoCount")
+        // TODO. Check size == algoCount
+        ps.toSeq.sortBy(_._1).map(_._2)
+      }}
+
+      (ex, unionAlgoPredicts)
+    }}
+    .toMap
+
+    val servingQPAMap: Map[EX, RDD[(Q, P, A)]] = algoPredictsMap
+    .map { case (ex, psMap) => {
+      // The query passed to serving.serve is the original one, not
+      // supplemented.
+      val qasMap: RDD[(QX, (Q, A))] = evalQAsMap(ex)
+      val qpsaMap: RDD[(QX, Q, Seq[P], A)] = psMap.join(qasMap)
+      .map { case (qx, t) => (qx, t._2._1, t._1, t._2._2) }
+
+      val qpaMap: RDD[(Q, P, A)] = qpsaMap.map {
+        case (qx, q, ps, a) => (q, serving.serveBase(q, ps), a)
+      }
+      (ex, qpaMap)
+    }}
+
+    (0 until evalCount).map { ex => {
+      (evalInfoMap(ex), servingQPAMap(ex))
+    }}
+    .toSeq
+  }
+}
+
+/** Mix in this trait for queries that contain prId (PredictedResultId).
+  * This is useful when your engine expects queries to also be associated with
+  * prId keys when the feedback loop is enabled.
+  *
+  * @group Helper
+  */
+@deprecated("To be removed in future releases.", "0.9.2")
+trait WithPrId {
+  val prId: String = ""
+}

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/core/src/main/scala/org/apache/predictionio/controller/EngineFactory.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/predictionio/controller/EngineFactory.scala b/core/src/main/scala/org/apache/predictionio/controller/EngineFactory.scala
new file mode 100644
index 0000000..e9db35b
--- /dev/null
+++ b/core/src/main/scala/org/apache/predictionio/controller/EngineFactory.scala
@@ -0,0 +1,41 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package org.apache.predictionio.controller
+
+import org.apache.predictionio.core.BaseEngine
+
+import scala.language.implicitConversions
+
+/** If you intend to let PredictionIO create the workflow and deploy the
+  * serving layer automatically, you will need to implement an object that
+  * extends this class and returns an [[Engine]].
+  *
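+  * A minimal sketch, assuming hypothetical MyDataSource, MyPreparator,
+  * MyAlgorithm, and MyServing controller classes defined elsewhere:
+  *
+  * {{{
+  * object MyEngineFactory extends EngineFactory {
+  *   def apply() = new Engine(
+  *     classOf[MyDataSource],
+  *     classOf[MyPreparator],
+  *     Map("my-algo" -> classOf[MyAlgorithm]),
+  *     classOf[MyServing])
+  * }
+  * }}}
+  *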
+  * @group Engine
+  */
+abstract class EngineFactory {
+  /** Creates an instance of an [[Engine]]. */
+  def apply(): BaseEngine[_, _, _, _]
+
+  /** Override this method to programmatically return engine parameters. */
+  def engineParams(key: String): EngineParams = EngineParams()
+}
+
+/** DEPRECATED. Use [[EngineFactory]] instead.
+  *
+  * @group Engine
+  */
+@deprecated("Use EngineFactory instead.", "0.9.2")
+trait IEngineFactory extends EngineFactory

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/core/src/main/scala/org/apache/predictionio/controller/EngineParams.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/predictionio/controller/EngineParams.scala b/core/src/main/scala/org/apache/predictionio/controller/EngineParams.scala
new file mode 100644
index 0000000..b419255
--- /dev/null
+++ b/core/src/main/scala/org/apache/predictionio/controller/EngineParams.scala
@@ -0,0 +1,149 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package org.apache.predictionio.controller
+
+import org.apache.predictionio.core.BaseDataSource
+import org.apache.predictionio.core.BaseAlgorithm
+
+import scala.collection.JavaConversions
+import scala.language.implicitConversions
+
+/** This class serves as a logical grouping of all of an engine's required parameters.
+  *
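+  * A minimal sketch, assuming hypothetical MyDataSourceParams and
+  * MyAlgorithmParams case classes that extend Params:
+  *
+  * {{{
+  * val engineParams = new EngineParams(
+  *   dataSourceParams = ("", MyDataSourceParams(appId = 1)),
+  *   algorithmParamsList = Seq(("my-algo", MyAlgorithmParams(rank = 10))))
+  * }}}
+  *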
+  * @param dataSourceParams Data Source name-parameters tuple.
+  * @param preparatorParams Preparator name-parameters tuple.
+  * @param algorithmParamsList List of algorithm name-parameter pairs.
+  * @param servingParams Serving name-parameters tuple.
+  * @group Engine
+  */
+class EngineParams(
+    val dataSourceParams: (String, Params) = ("", EmptyParams()),
+    val preparatorParams: (String, Params) = ("", EmptyParams()),
+    val algorithmParamsList: Seq[(String, Params)] = Seq(),
+    val servingParams: (String, Params) = ("", EmptyParams()))
+  extends Serializable {
+
+  /** Java-friendly constructor
+    *
+    * @param dataSourceName Data Source name
+    * @param dataSourceParams Data Source parameters
+    * @param preparatorName Preparator name
+    * @param preparatorParams Preparator parameters
+    * @param algorithmParamsList Map of algorithm name-parameters
+    * @param servingName Serving name
+    * @param servingParams Serving parameters
+    */
+  def this(
+    dataSourceName: String,
+    dataSourceParams: Params,
+    preparatorName: String,
+    preparatorParams: Params,
+    algorithmParamsList: _root_.java.util.Map[String, _ <: Params],
+    servingName: String,
+    servingParams: Params) = {
+
+    // To work around a json4s limitation, the parameter names cannot be changed
+    this(
+      (dataSourceName, dataSourceParams),
+      (preparatorName, preparatorParams),
+      JavaConversions.mapAsScalaMap(algorithmParamsList).toSeq,
+      (servingName, servingParams)
+    )
+  }
+
+  // A case class style copy method.
+  def copy(
+    dataSourceParams: (String, Params) = dataSourceParams,
+    preparatorParams: (String, Params) = preparatorParams,
+    algorithmParamsList: Seq[(String, Params)] = algorithmParamsList,
+    servingParams: (String, Params) = servingParams): EngineParams = {
+
+    new EngineParams(
+      dataSourceParams,
+      preparatorParams,
+      algorithmParamsList,
+      servingParams)
+  }
+}
+
+/** Companion object for creating [[EngineParams]] instances.
+  *
+  * @group Engine
+  */
+object EngineParams {
+  /** Create EngineParams.
+    *
+    * @param dataSourceName Data Source name
+    * @param dataSourceParams Data Source parameters
+    * @param preparatorName Preparator name
+    * @param preparatorParams Preparator parameters
+    * @param algorithmParamsList List of algorithm name-parameter pairs.
+    * @param servingName Serving name
+    * @param servingParams Serving parameters
+    */
+  def apply(
+    dataSourceName: String = "",
+    dataSourceParams: Params = EmptyParams(),
+    preparatorName: String = "",
+    preparatorParams: Params = EmptyParams(),
+    algorithmParamsList: Seq[(String, Params)] = Seq(),
+    servingName: String = "",
+    servingParams: Params = EmptyParams()): EngineParams = {
+      new EngineParams(
+        dataSourceParams = (dataSourceName, dataSourceParams),
+        preparatorParams = (preparatorName, preparatorParams),
+        algorithmParamsList = algorithmParamsList,
+        servingParams = (servingName, servingParams)
+      )
+    }
+}
+
+/** SimpleEngine has only one algorithm, and uses default preparator and serving
+  * layer. The current default preparator is `IdentityPreparator` and the
+  * default serving layer is `LFirstServing`.
+  *
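+  * A minimal sketch, assuming hypothetical MyDataSource and MyAlgorithm
+  * classes:
+  *
+  * {{{
+  * object MySimpleEngineFactory extends EngineFactory {
+  *   def apply() = new SimpleEngine(classOf[MyDataSource], classOf[MyAlgorithm])
+  * }
+  * }}}
+  *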
+  * @tparam TD Training data class.
+  * @tparam EI Evaluation info class.
+  * @tparam Q Input query class.
+  * @tparam P Output prediction class.
+  * @tparam A Actual value class.
+  * @param dataSourceClass Data source class.
+  * @param algorithmClass Algorithm class.
+  * @group Engine
+  */
+class SimpleEngine[TD, EI, Q, P, A](
+    dataSourceClass: Class[_ <: BaseDataSource[TD, EI, Q, A]],
+    algorithmClass: Class[_ <: BaseAlgorithm[TD, _, Q, P]])
+  extends Engine(
+    dataSourceClass,
+    IdentityPreparator(dataSourceClass),
+    Map("" -> algorithmClass),
+    LFirstServing(algorithmClass))
+
+/** This shorthand class supplies parameters for the `SimpleEngine` class.
+  *
+  * @param dataSourceParams Data source parameters.
+  * @param algorithmParams List of algorithm name-parameter pairs.
+  * @group Engine
+  */
+class SimpleEngineParams(
+    dataSourceParams: Params = EmptyParams(),
+    algorithmParams: Params = EmptyParams())
+  extends EngineParams(
+    dataSourceParams = ("", dataSourceParams),
+    algorithmParamsList = Seq(("", algorithmParams)))
+
+

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/core/src/main/scala/org/apache/predictionio/controller/EngineParamsGenerator.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/predictionio/controller/EngineParamsGenerator.scala b/core/src/main/scala/org/apache/predictionio/controller/EngineParamsGenerator.scala
new file mode 100644
index 0000000..2e26b83
--- /dev/null
+++ b/core/src/main/scala/org/apache/predictionio/controller/EngineParamsGenerator.scala
@@ -0,0 +1,43 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package org.apache.predictionio.controller
+
+import scala.language.implicitConversions
+
+/** Defines an engine parameters generator.
+  *
+  * Implementations of this trait can be supplied to "pio eval" as the second
+  * command line argument.
+  *
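+  * A minimal sketch, assuming hypothetical MyDataSourceParams and
+  * MyAlgorithmParams case classes:
+  *
+  * {{{
+  * object MyEngineParamsList extends EngineParamsGenerator {
+  *   private[this] val baseEP = EngineParams(
+  *     dataSourceParams = MyDataSourceParams(appId = 1))
+  *   engineParamsList = Seq(
+  *     baseEP.copy(algorithmParamsList = Seq(("my-algo", MyAlgorithmParams(10)))),
+  *     baseEP.copy(algorithmParamsList = Seq(("my-algo", MyAlgorithmParams(20)))))
+  * }
+  * }}}
+  *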
+  * @group Evaluation
+  */
+trait EngineParamsGenerator {
+  protected[this] var epList: Seq[EngineParams] = _
+  protected[this] var epListSet: Boolean = false
+
+  /** Returns the list of [[EngineParams]] of this [[EngineParamsGenerator]]. */
+  def engineParamsList: Seq[EngineParams] = {
+    assert(epListSet, "EngineParamsList not set")
+    epList
+  }
+
+  /** Sets the list of [[EngineParams]] of this [[EngineParamsGenerator]]. */
+  def engineParamsList_=(l: Seq[EngineParams]) {
+    assert(!epListSet, "EngineParamsList can bet set at most once")
+    epList = Seq(l:_*)
+    epListSet = true
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/core/src/main/scala/org/apache/predictionio/controller/Evaluation.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/predictionio/controller/Evaluation.scala b/core/src/main/scala/org/apache/predictionio/controller/Evaluation.scala
new file mode 100644
index 0000000..c720c4f
--- /dev/null
+++ b/core/src/main/scala/org/apache/predictionio/controller/Evaluation.scala
@@ -0,0 +1,122 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package org.apache.predictionio.controller
+
+import org.apache.predictionio.core.BaseEngine
+import org.apache.predictionio.core.BaseEvaluator
+import org.apache.predictionio.core.BaseEvaluatorResult
+
+import scala.language.implicitConversions
+
+/** Defines an evaluation that contains an engine and a metric.
+  *
+  * Implementations of this trait can be supplied to "pio eval" as the first
+  * argument.
+  *
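+  * A minimal sketch, assuming hypothetical MyEngine (an [[EngineFactory]])
+  * and MyMetric (a [[Metric]] implementation) defined elsewhere:
+  *
+  * {{{
+  * object MyEvaluation extends Evaluation {
+  *   engineMetric = (MyEngine(), new MyMetric())
+  * }
+  * }}}
+  *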
+  * @group Evaluation
+  */
+trait Evaluation extends Deployment {
+  protected[this] var _evaluatorSet: Boolean = false
+  protected[this] var _evaluator: BaseEvaluator[_, _, _, _, _ <: BaseEvaluatorResult] = _
+
+  private[predictionio]
+  def evaluator: BaseEvaluator[_, _, _, _, _ <: BaseEvaluatorResult] = {
+    assert(_evaluatorSet, "Evaluator not set")
+    _evaluator
+  }
+
+  /** Gets the tuple of the [[Engine]] and the implementation of
+    * [[org.apache.predictionio.core.BaseEvaluator]]
+    */
+  def engineEvaluator
+  : (BaseEngine[_, _, _, _], BaseEvaluator[_, _, _, _, _]) = {
+    assert(_evaluatorSet, "Evaluator not set")
+    (engine, _evaluator)
+  }
+
+  /** Sets both an [[Engine]] and an implementation of
+    * [[org.apache.predictionio.core.BaseEvaluator]] for this [[Evaluation]]
+    *
+    * @param engineEvaluator A tuple of an [[Engine]] and an implementation of
+    *                        [[org.apache.predictionio.core.BaseEvaluator]]
+    * @tparam EI Evaluation information class
+    * @tparam Q Query class
+    * @tparam P Predicted result class
+    * @tparam A Actual result class
+    * @tparam R Metric result class
+    */
+  def engineEvaluator_=[EI, Q, P, A, R <: BaseEvaluatorResult](
+    engineEvaluator: (
+      BaseEngine[EI, Q, P, A],
+      BaseEvaluator[EI, Q, P, A, R])) {
+    assert(!_evaluatorSet, "Evaluator can be set at most once")
+    engine = engineEvaluator._1
+    _evaluator = engineEvaluator._2
+    _evaluatorSet = true
+  }
+
+  /** Returns both the [[Engine]] and the implementation of [[Metric]] for this
+    * [[Evaluation]]
+    */
+  def engineMetric: (BaseEngine[_, _, _, _], Metric[_, _, _, _, _]) = {
+    throw new NotImplementedError("This method is to keep the compiler happy")
+  }
+
+  /** Sets both an [[Engine]] and an implementation of [[Metric]] for this
+    * [[Evaluation]]
+    *
+    * @param engineMetric A tuple of [[Engine]] and an implementation of
+    *                     [[Metric]]
+    * @tparam EI Evaluation information class
+    * @tparam Q Query class
+    * @tparam P Predicted result class
+    * @tparam A Actual result class
+    */
+  def engineMetric_=[EI, Q, P, A](
+    engineMetric: (BaseEngine[EI, Q, P, A], Metric[EI, Q, P, A, _])) {
+    engineEvaluator = (
+      engineMetric._1,
+      MetricEvaluator(
+        metric = engineMetric._2,
+        otherMetrics = Seq[Metric[EI, Q, P, A, _]](),
+        outputPath = "best.json"))
+  }
+
+  private[predictionio]
+  def engineMetrics: (BaseEngine[_, _, _, _], Metric[_, _, _, _, _]) = {
+    throw new NotImplementedError("This method is to keep the compiler happy")
+  }
+
+  /** Sets an [[Engine]], an implementation of [[Metric]], and sequence of
+    * implementations of [[Metric]] for this [[Evaluation]]
+    *
+    * @param engineMetrics A tuple of [[Engine]], an implementation of
+    *                      [[Metric]] and sequence of implementations of [[Metric]]
+    * @tparam EI Evaluation information class
+    * @tparam Q Query class
+    * @tparam P Predicted result class
+    * @tparam A Actual result class
+    */
+  def engineMetrics_=[EI, Q, P, A](
+    engineMetrics: (
+      BaseEngine[EI, Q, P, A],
+      Metric[EI, Q, P, A, _],
+      Seq[Metric[EI, Q, P, A, _]])) {
+    engineEvaluator = (
+      engineMetrics._1,
+      MetricEvaluator(engineMetrics._2, engineMetrics._3))
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/core/src/main/scala/org/apache/predictionio/controller/FastEvalEngine.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/predictionio/controller/FastEvalEngine.scala b/core/src/main/scala/org/apache/predictionio/controller/FastEvalEngine.scala
new file mode 100644
index 0000000..868d818
--- /dev/null
+++ b/core/src/main/scala/org/apache/predictionio/controller/FastEvalEngine.scala
@@ -0,0 +1,343 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package org.apache.predictionio.controller
+
+import org.apache.predictionio.core.BaseDataSource
+import org.apache.predictionio.core.BasePreparator
+import org.apache.predictionio.core.BaseAlgorithm
+import org.apache.predictionio.core.BaseServing
+import org.apache.predictionio.core.Doer
+import org.apache.predictionio.annotation.Experimental
+
+import grizzled.slf4j.Logger
+import org.apache.predictionio.workflow.WorkflowParams
+import org.apache.spark.SparkContext
+import org.apache.spark.SparkContext._
+import org.apache.spark.rdd.RDD
+
+import scala.language.implicitConversions
+
+import _root_.java.util.NoSuchElementException
+
+import scala.collection.mutable.{ HashMap => MutableHashMap }
+
+/** :: Experimental ::
+  * Workflow based on [[FastEvalEngine]]
+  *
+  * @group Evaluation
+  */
+@Experimental
+object FastEvalEngineWorkflow {
+  @transient lazy val logger = Logger[this.type]
+
+  type EX = Int
+  type AX = Int
+  type QX = Long
+
+  case class DataSourcePrefix(dataSourceParams: (String, Params)) {
+    def this(pp: PreparatorPrefix) = this(pp.dataSourceParams)
+    def this(ap: AlgorithmsPrefix) = this(ap.dataSourceParams)
+    def this(sp: ServingPrefix) = this(sp.dataSourceParams)
+  }
+
+  case class PreparatorPrefix(
+    dataSourceParams: (String, Params),
+    preparatorParams: (String, Params)) {
+    def this(ap: AlgorithmsPrefix) = {
+      this(ap.dataSourceParams, ap.preparatorParams)
+    }
+  }
+
+  case class AlgorithmsPrefix(
+    dataSourceParams: (String, Params),
+    preparatorParams: (String, Params),
+    algorithmParamsList: Seq[(String, Params)]) {
+    def this(sp: ServingPrefix) = {
+      this(sp.dataSourceParams, sp.preparatorParams, sp.algorithmParamsList)
+    }
+  }
+
+  case class ServingPrefix(
+    dataSourceParams: (String, Params),
+    preparatorParams: (String, Params),
+    algorithmParamsList: Seq[(String, Params)],
+    servingParams: (String, Params)) {
+    def this(ep: EngineParams) = this(
+      ep.dataSourceParams,
+      ep.preparatorParams,
+      ep.algorithmParamsList,
+      ep.servingParams)
+  }
+
+  def getDataSourceResult[TD, EI, PD, Q, P, A](
+    workflow: FastEvalEngineWorkflow[TD, EI, PD, Q, P, A],
+    prefix: DataSourcePrefix)
+  : Map[EX, (TD, EI, RDD[(QX, (Q, A))])] = {
+    val cache = workflow.dataSourceCache
+
+    if (!cache.contains(prefix)) {
+      val dataSource = Doer(
+        workflow.engine.dataSourceClassMap(prefix.dataSourceParams._1),
+        prefix.dataSourceParams._2)
+
+      val result = dataSource
+      .readEvalBase(workflow.sc)
+      .map { case (td, ei, qaRDD) => {
+        (td, ei, qaRDD.zipWithUniqueId().map(_.swap))
+      }}
+      .zipWithIndex
+      .map(_.swap)
+      .toMap
+
+      cache += Tuple2(prefix, result)
+    }
+    cache(prefix)
+  }
+
+  def getPreparatorResult[TD, EI, PD, Q, P, A](
+    workflow: FastEvalEngineWorkflow[TD, EI, PD, Q, P, A],
+    prefix: PreparatorPrefix): Map[EX, PD] = {
+    val cache = workflow.preparatorCache
+
+    if (!cache.contains(prefix)) {
+      val preparator = Doer(
+        workflow.engine.preparatorClassMap(prefix.preparatorParams._1),
+        prefix.preparatorParams._2)
+
+      val result = getDataSourceResult(
+        workflow = workflow,
+        prefix = new DataSourcePrefix(prefix))
+      .mapValues { case (td, _, _) => preparator.prepareBase(workflow.sc, td) }
+
+      cache += Tuple2(prefix, result)
+    }
+    cache(prefix)
+  }
+
+  def computeAlgorithmsResult[TD, EI, PD, Q, P, A](
+    workflow: FastEvalEngineWorkflow[TD, EI, PD, Q, P, A],
+    prefix: AlgorithmsPrefix): Map[EX, RDD[(QX, Seq[P])]] = {
+
+    val algoMap: Map[AX, BaseAlgorithm[PD, _, Q, P]] = prefix.algorithmParamsList
+      .map { case (algoName, algoParams) => {
+        try {
+          Doer(workflow.engine.algorithmClassMap(algoName), algoParams)
+        } catch {
+          case e: NoSuchElementException => {
+            val algorithmClassMap = workflow.engine.algorithmClassMap
+            if (algoName == "") {
+              logger.error("Empty algorithm name supplied but it could not " +
+                "match with any algorithm in the engine's definition. " +
+                "Existing algorithm name(s) are: " +
+                s"${algorithmClassMap.keys.mkString(", ")}. Aborting.")
+            } else {
+              logger.error(s"${algoName} cannot be found in the engine's " +
+                "definition. Existing algorithm name(s) are: " +
+                s"${algorithmClassMap.keys.mkString(", ")}. Aborting.")
+            }
+            sys.exit(1)
+          }
+        }
+      }}
+      .zipWithIndex
+      .map(_.swap)
+      .toMap
+
+    val algoCount = algoMap.size
+
+    // Model Train
+    val algoModelsMap: Map[EX, Map[AX, Any]] = getPreparatorResult(
+      workflow,
+      new PreparatorPrefix(prefix))
+    .mapValues {
+      pd => algoMap.mapValues(_.trainBase(workflow.sc, pd))
+    }
+
+    // Predict
+    val dataSourceResult =
+      FastEvalEngineWorkflow.getDataSourceResult(
+        workflow = workflow,
+        prefix = new DataSourcePrefix(prefix))
+
+    val algoResult: Map[EX, RDD[(QX, Seq[P])]] = dataSourceResult
+    .par
+    .map { case (ex, (td, ei, iqaRDD)) => {
+      val modelsMap: Map[AX, Any] = algoModelsMap(ex)
+      val qs: RDD[(QX, Q)] = iqaRDD.mapValues(_._1)
+
+      val algoPredicts: Seq[RDD[(QX, (AX, P))]] = (0 until algoCount)
+      .map { ax => {
+        val algo = algoMap(ax)
+        val model = modelsMap(ax)
+        val rawPredicts: RDD[(QX, P)] = algo.batchPredictBase(
+          workflow.sc,
+          model,
+          qs)
+
+        val predicts: RDD[(QX, (AX, P))] = rawPredicts.map {
+          case (qx, p) => (qx, (ax, p))
+        }
+        predicts
+      }}
+
+      val unionAlgoPredicts: RDD[(QX, Seq[P])] = workflow.sc
+      .union(algoPredicts)
+      .groupByKey
+      .mapValues { ps => {
+        assert (ps.size == algoCount, "Must have same length as algoCount")
+        // TODO. Check size == algoCount
+        ps.toSeq.sortBy(_._1).map(_._2)
+      }}
+      (ex, unionAlgoPredicts)
+    }}
+    .seq
+    .toMap
+
+    algoResult
+  }
+
+  def getAlgorithmsResult[TD, EI, PD, Q, P, A](
+    workflow: FastEvalEngineWorkflow[TD, EI, PD, Q, P, A],
+    prefix: AlgorithmsPrefix): Map[EX, RDD[(QX, Seq[P])]] = {
+    val cache = workflow.algorithmsCache
+    if (!cache.contains(prefix)) {
+      val result = computeAlgorithmsResult(workflow, prefix)
+      cache += Tuple2(prefix, result)
+    }
+    cache(prefix)
+  }
+
+  def getServingResult[TD, EI, PD, Q, P, A](
+    workflow: FastEvalEngineWorkflow[TD, EI, PD, Q, P, A],
+    prefix: ServingPrefix)
+  : Seq[(EI, RDD[(Q, P, A)])] = {
+    val cache = workflow.servingCache
+    if (!cache.contains(prefix)) {
+      val serving = Doer(
+        workflow.engine.servingClassMap(prefix.servingParams._1),
+        prefix.servingParams._2)
+
+      val algoPredictsMap = getAlgorithmsResult(
+        workflow = workflow,
+        prefix = new AlgorithmsPrefix(prefix))
+
+      val dataSourceResult = getDataSourceResult(
+        workflow = workflow,
+        prefix = new DataSourcePrefix(prefix))
+
+      val evalQAsMap = dataSourceResult.mapValues(_._3)
+      val evalInfoMap = dataSourceResult.mapValues(_._2)
+
+      val servingQPAMap: Map[EX, RDD[(Q, P, A)]] = algoPredictsMap
+      .map { case (ex, psMap) => {
+        val qasMap: RDD[(QX, (Q, A))] = evalQAsMap(ex)
+        val qpsaMap: RDD[(QX, Q, Seq[P], A)] = psMap.join(qasMap)
+        .map { case (qx, t) => (qx, t._2._1, t._1, t._2._2) }
+
+        val qpaMap: RDD[(Q, P, A)] = qpsaMap.map {
+          case (qx, q, ps, a) => (q, serving.serveBase(q, ps), a)
+        }
+        (ex, qpaMap)
+      }}
+
+      val servingResult = (0 until evalQAsMap.size).map { ex => {
+        (evalInfoMap(ex), servingQPAMap(ex))
+      }}
+      .toSeq
+
+      cache += Tuple2(prefix, servingResult)
+    }
+    cache(prefix)
+  }
+
+  def get[TD, EI, PD, Q, P, A](
+    workflow: FastEvalEngineWorkflow[TD, EI, PD, Q, P, A],
+    engineParamsList: Seq[EngineParams])
+  : Seq[(EngineParams, Seq[(EI, RDD[(Q, P, A)])])] = {
+    engineParamsList.map { engineParams => {
+      (engineParams,
+        getServingResult(workflow, new ServingPrefix(engineParams)))
+    }}
+  }
+}
+
+/** :: Experimental ::
+  * Workflow based on [[FastEvalEngine]]
+  *
+  * @group Evaluation
+  */
+@Experimental
+class FastEvalEngineWorkflow[TD, EI, PD, Q, P, A](
+  val engine: FastEvalEngine[TD, EI, PD, Q, P, A],
+  val sc: SparkContext,
+  val workflowParams: WorkflowParams) extends Serializable {
+
+  import org.apache.predictionio.controller.FastEvalEngineWorkflow._
+
+  type DataSourceResult = Map[EX, (TD, EI, RDD[(QX, (Q, A))])]
+  type PreparatorResult = Map[EX, PD]
+  type AlgorithmsResult = Map[EX, RDD[(QX, Seq[P])]]
+  type ServingResult = Seq[(EI, RDD[(Q, P, A)])]
+
+  val dataSourceCache = MutableHashMap[DataSourcePrefix, DataSourceResult]()
+  val preparatorCache = MutableHashMap[PreparatorPrefix, PreparatorResult]()
+  val algorithmsCache = MutableHashMap[AlgorithmsPrefix, AlgorithmsResult]()
+  val servingCache = MutableHashMap[ServingPrefix, ServingResult]()
+}
+
+
+
+/** :: Experimental ::
+  * FastEvalEngine is a subclass of [[Engine]] that exploits the immutability of
+  * controllers to optimize the evaluation process.
+  *
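+  * A minimal sketch, assuming hypothetical controller classes; substitute
+  * FastEvalEngine for Engine when constructing the engine used in an
+  * [[Evaluation]]:
+  *
+  * {{{
+  * object MyFastEngineFactory extends EngineFactory {
+  *   def apply() = new FastEvalEngine(
+  *     Map("" -> classOf[MyDataSource]),
+  *     Map("" -> classOf[MyPreparator]),
+  *     Map("my-algo" -> classOf[MyAlgorithm]),
+  *     Map("" -> classOf[MyServing]))
+  * }
+  * }}}
+  *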
+  * @group Evaluation
+  */
+@Experimental
+class FastEvalEngine[TD, EI, PD, Q, P, A](
+    dataSourceClassMap: Map[String, Class[_ <: BaseDataSource[TD, EI, Q, A]]],
+    preparatorClassMap: Map[String, Class[_ <: BasePreparator[TD, PD]]],
+    algorithmClassMap: Map[String, Class[_ <: BaseAlgorithm[PD, _, Q, P]]],
+    servingClassMap: Map[String, Class[_ <: BaseServing[Q, P]]])
+  extends Engine[TD, EI, PD, Q, P, A](
+    dataSourceClassMap,
+    preparatorClassMap,
+    algorithmClassMap,
+    servingClassMap) {
+  @transient override lazy val logger = Logger[this.type]
+
+  override def eval(
+    sc: SparkContext,
+    engineParams: EngineParams,
+    params: WorkflowParams): Seq[(EI, RDD[(Q, P, A)])] = {
+    logger.info("FastEvalEngine.eval")
+    batchEval(sc, Seq(engineParams), params).head._2
+  }
+
+  override def batchEval(
+    sc: SparkContext,
+    engineParamsList: Seq[EngineParams],
+    params: WorkflowParams)
+  : Seq[(EngineParams, Seq[(EI, RDD[(Q, P, A)])])] = {
+
+    val fastEngineWorkflow = new FastEvalEngineWorkflow(
+      this, sc, params)
+
+    FastEvalEngineWorkflow.get(
+      fastEngineWorkflow,
+      engineParamsList)
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/core/src/main/scala/org/apache/predictionio/controller/IdentityPreparator.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/predictionio/controller/IdentityPreparator.scala b/core/src/main/scala/org/apache/predictionio/controller/IdentityPreparator.scala
new file mode 100644
index 0000000..c7669ba
--- /dev/null
+++ b/core/src/main/scala/org/apache/predictionio/controller/IdentityPreparator.scala
@@ -0,0 +1,92 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package org.apache.predictionio.controller
+
+import org.apache.predictionio.core.BaseDataSource
+import org.apache.predictionio.core.BasePreparator
+import org.apache.spark.SparkContext
+
+import scala.reflect._
+
+/** A helper concrete implementation of [[org.apache.predictionio.core.BasePreparator]]
+  * that passes training data through without any special preparation. It can
+  * be used in place of both [[PPreparator]] and [[LPreparator]].
+  *
+  * @tparam TD Training data class.
+  * @group Preparator
+  */
+class IdentityPreparator[TD] extends BasePreparator[TD, TD] {
+  def prepareBase(sc: SparkContext, td: TD): TD = td
+}
+
+/** Companion object of [[IdentityPreparator]] that conveniently returns an
+  * instance of the class of [[IdentityPreparator]] for use with
+  * [[EngineFactory]].
+  *
+  * @group Preparator
+  */
+object IdentityPreparator {
+  /** Produces an instance of the class of [[IdentityPreparator]].
+    *
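+    * A minimal sketch, assuming hypothetical MyDataSource, MyAlgorithm, and
+    * MyServing classes:
+    *
+    * {{{
+    * val engine = Engine(
+    *   classOf[MyDataSource],
+    *   IdentityPreparator(classOf[MyDataSource]),
+    *   Map("my-algo" -> classOf[MyAlgorithm]),
+    *   classOf[MyServing])
+    * }}}
+    *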
+    * @param ds Instance of the class of the data source for this preparator.
+    */
+  def apply[TD](ds: Class[_ <: BaseDataSource[TD, _, _, _]]): Class[IdentityPreparator[TD]] =
+    classOf[IdentityPreparator[TD]]
+}
+
+/** DEPRECATED. Use [[IdentityPreparator]] instead.
+  *
+  * @tparam TD Training data class.
+  * @group Preparator
+  */
+@deprecated("Use IdentityPreparator instead.", "0.9.2")
+class PIdentityPreparator[TD] extends IdentityPreparator[TD]
+
+/** DEPRECATED. Use [[IdentityPreparator]] instead.
+  *
+  * @group Preparator
+  */
+@deprecated("Use IdentityPreparator instead.", "0.9.2")
+object PIdentityPreparator {
+  /** Produces an instance of the class of [[IdentityPreparator]].
+    *
+    * @param ds Instance of the class of the data source for this preparator.
+    */
+  def apply[TD](ds: Class[_ <: BaseDataSource[TD, _, _, _]]): Class[IdentityPreparator[TD]] =
+    classOf[IdentityPreparator[TD]]
+}
+
+/** DEPRECATED. Use [[IdentityPreparator]] instead.
+  *
+  * @tparam TD Training data class.
+  * @group Preparator
+  */
+@deprecated("Use IdentityPreparator instead.", "0.9.2")
+class LIdentityPreparator[TD] extends IdentityPreparator[TD]
+
+/** DEPRECATED. Use [[IdentityPreparator]] instead.
+  *
+  * @group Preparator
+  */
+@deprecated("Use IdentityPreparator instead.", "0.9.2")
+object LIdentityPreparator {
+  /** Produces an instance of the class of [[IdentityPreparator]].
+    *
+    * @param ds Instance of the class of the data source for this preparator.
+    */
+  def apply[TD](ds: Class[_ <: BaseDataSource[TD, _, _, _]]): Class[IdentityPreparator[TD]] =
+    classOf[IdentityPreparator[TD]]
+}

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/core/src/main/scala/org/apache/predictionio/controller/LAlgorithm.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/predictionio/controller/LAlgorithm.scala b/core/src/main/scala/org/apache/predictionio/controller/LAlgorithm.scala
new file mode 100644
index 0000000..664ebb7
--- /dev/null
+++ b/core/src/main/scala/org/apache/predictionio/controller/LAlgorithm.scala
@@ -0,0 +1,130 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package org.apache.predictionio.controller
+
+import _root_.org.apache.predictionio.annotation.DeveloperApi
+import org.apache.predictionio.core.BaseAlgorithm
+import org.apache.predictionio.workflow.PersistentModelManifest
+import org.apache.spark.SparkContext
+import org.apache.spark.rdd.RDD
+
+import scala.reflect._
+
+/** Base class of a local algorithm.
+  *
+  * A local algorithm runs locally within a single machine and produces a model
+  * that can fit within a single machine.
+  *
+  * If your input query class requires custom JSON4S serialization, the most
+  * idiomatic way is to implement a trait that extends [[CustomQuerySerializer]],
+  * and mix that into your algorithm class, instead of overriding
+  * [[querySerializer]] directly.
+  *
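+  * A minimal sketch, assuming hypothetical PreparedData, MyModel, MyQuery,
+  * and MyPrediction case classes:
+  *
+  * {{{
+  * class MyAlgorithm
+  *   extends LAlgorithm[PreparedData, MyModel, MyQuery, MyPrediction] {
+  *   def train(pd: PreparedData): MyModel = MyModel(pd.items)
+  *
+  *   def predict(m: MyModel, q: MyQuery): MyPrediction =
+  *     MyPrediction(m.items.contains(q.item))
+  * }
+  * }}}
+  *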
+  * @tparam PD Prepared data class.
+  * @tparam M Trained model class.
+  * @tparam Q Input query class.
+  * @tparam P Output prediction class.
+  * @group Algorithm
+  */
+abstract class LAlgorithm[PD, M : ClassTag, Q, P]
+  extends BaseAlgorithm[RDD[PD], RDD[M], Q, P] {
+
+  def trainBase(sc: SparkContext, pd: RDD[PD]): RDD[M] = pd.map(train)
+
+  /** Implement this method to produce a model from prepared data.
+    *
+    * @param pd Prepared data for model training.
+    * @return Trained model.
+    */
+  def train(pd: PD): M
+
+  def batchPredictBase(sc: SparkContext, bm: Any, qs: RDD[(Long, Q)])
+  : RDD[(Long, P)] = {
+    val mRDD = bm.asInstanceOf[RDD[M]]
+    batchPredict(mRDD, qs)
+  }
+
+  /** This is a default implementation to perform batch prediction. Override
+    * this method for a custom implementation.
+    *
+    * @param mRDD A single model wrapped inside an RDD
+    * @param qs An RDD of index-query tuples. The index is used to keep track of
+    *           predicted results with corresponding queries.
+    * @return Batch of predicted results
+    */
+  def batchPredict(mRDD: RDD[M], qs: RDD[(Long, Q)]): RDD[(Long, P)] = {
+    val glomQs: RDD[Array[(Long, Q)]] = qs.glom()
+    val cartesian: RDD[(M, Array[(Long, Q)])] = mRDD.cartesian(glomQs)
+    cartesian.flatMap { case (m, qArray) =>
+      qArray.map { case (qx, q) => (qx, predict(m, q)) }
+    }
+  }
+
+  def predictBase(localBaseModel: Any, q: Q): P = {
+    predict(localBaseModel.asInstanceOf[M], q)
+  }
+
+  /** Implement this method to produce a prediction from a query and trained
+    * model.
+    *
+    * @param m Trained model produced by [[train]].
+    * @param q An input query.
+    * @return A prediction.
+    */
+  def predict(m: M, q: Q): P
+
+  /** :: DeveloperApi ::
+    * Engine developers should not use this directly (read on to see how local
+    * algorithm models are persisted).
+    *
+    * Local algorithms produce local models. By default, models will be
+    * serialized and stored automatically. Engine developers can override this behavior by
+    * mixing the [[PersistentModel]] trait into the model class, and
+    * PredictionIO will call [[PersistentModel.save]] instead. If it returns
+    * true, a [[org.apache.predictionio.workflow.PersistentModelManifest]] will be
+    * returned so that during deployment, PredictionIO will use
+    * [[PersistentModelLoader]] to retrieve the model. Otherwise, Unit will be
+    * returned and the model will be re-trained on-the-fly.
+    *
+    * @param sc Spark context
+    * @param modelId Model ID
+    * @param algoParams Algorithm parameters that trained this model
+    * @param bm Model
+    * @return The model itself for automatic persistence, an instance of
+    *         [[org.apache.predictionio.workflow.PersistentModelManifest]] for manual
+    *         persistence, or Unit for re-training on deployment
+    */
+  @DeveloperApi
+  override
+  def makePersistentModel(
+    sc: SparkContext,
+    modelId: String,
+    algoParams: Params,
+    bm: Any): Any = {
+    // The RDD is expected to contain exactly one model
+    val m = bm.asInstanceOf[RDD[M]].first()
+    if (m.isInstanceOf[PersistentModel[_]]) {
+      if (m.asInstanceOf[PersistentModel[Params]].save(
+        modelId, algoParams, sc)) {
+        PersistentModelManifest(className = m.getClass.getName)
+      } else {
+        Unit
+      }
+    } else {
+      m
+    }
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/core/src/main/scala/org/apache/predictionio/controller/LAverageServing.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/predictionio/controller/LAverageServing.scala b/core/src/main/scala/org/apache/predictionio/controller/LAverageServing.scala
new file mode 100644
index 0000000..7fbe7ac
--- /dev/null
+++ b/core/src/main/scala/org/apache/predictionio/controller/LAverageServing.scala
@@ -0,0 +1,41 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package org.apache.predictionio.controller
+
+import org.apache.predictionio.core.BaseAlgorithm
+
+/** A concrete implementation of [[LServing]] returning the average of all
+  * algorithms' predictions, which are all expected to be of type Double.
+  *
+  * @group Serving
+  */
+class LAverageServing[Q] extends LServing[Q, Double] {
+  /** Returns the average of all algorithms' predictions. */
+  def serve(query: Q, predictions: Seq[Double]): Double = {
+    predictions.sum / predictions.length
+  }
+}
+
+/** A concrete implementation of [[LServing]] returning the average of all
+  * algorithms' predictions, which are all expected to be of type Double.
+  *
+  * @group Serving
+  */
+object LAverageServing {
+  /** Returns an instance of [[LAverageServing]]. */
+  def apply[Q](a: Class[_ <: BaseAlgorithm[_, _, Q, _]]): Class[LAverageServing[Q]] =
+    classOf[LAverageServing[Q]]
+}

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/core/src/main/scala/org/apache/predictionio/controller/LDataSource.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/predictionio/controller/LDataSource.scala b/core/src/main/scala/org/apache/predictionio/controller/LDataSource.scala
new file mode 100644
index 0000000..adb8e20
--- /dev/null
+++ b/core/src/main/scala/org/apache/predictionio/controller/LDataSource.scala
@@ -0,0 +1,67 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package org.apache.predictionio.controller
+
+import org.apache.predictionio.core.BaseDataSource
+import org.apache.spark.SparkContext
+import org.apache.spark.rdd.RDD
+
+import scala.reflect._
+
+/** Base class of a local data source.
+  *
+  * A local data source runs locally within a single machine and returns data
+  * that can fit within a single machine.
+  *
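+  * A minimal sketch, assuming a hypothetical TrainingData case class with an
+  * items field, and a hypothetical MyQuery class:
+  *
+  * {{{
+  * class MyDataSource
+  *   extends LDataSource[TrainingData, EmptyEvaluationInfo,
+  *     MyQuery, EmptyActualResult] {
+  *   def readTraining(): TrainingData = TrainingData(Seq("a", "b", "c"))
+  * }
+  * }}}
+  *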
+  * @tparam TD Training data class.
+  * @tparam EI Evaluation Info class.
+  * @tparam Q Input query class.
+  * @tparam A Actual value class.
+  * @group Data Source
+  */
+abstract class LDataSource[TD: ClassTag, EI, Q, A]
+  extends BaseDataSource[RDD[TD], EI, Q, A] {
+
+  def readTrainingBase(sc: SparkContext): RDD[TD] = {
+    sc.parallelize(Seq(None)).map(_ => readTraining())
+  }
+
+  /** Implement this method to only return training data from a data source */
+  def readTraining(): TD
+
+  def readEvalBase(sc: SparkContext): Seq[(RDD[TD], EI, RDD[(Q, A)])] = {
+    val localEvalData: Seq[(TD, EI, Seq[(Q, A)])] = readEval()
+
+    localEvalData.map { case (td, ei, qaSeq) => {
+      val tdRDD = sc.parallelize(Seq(None)).map(_ => td)
+      val qaRDD = sc.parallelize(qaSeq)
+      (tdRDD, ei, qaRDD)
+    }}
+  }
+
+  /** To provide an evaluation feature for your engine, you must override this
+    * method to return data for evaluation from a data source. Returned data can
+    * optionally include a sequence of query and actual value pairs for
+    * evaluation purposes.
+    *
+    * The default implementation returns an empty sequence as a stub, so that
+    * an engine can be compiled without implementing evaluation.
+    */
+  def readEval(): Seq[(TD, EI, Seq[(Q, A)])] = Seq[(TD, EI, Seq[(Q, A)])]()
+
+  @deprecated("Use readEval() instead.", "0.9.0")
+  def read(): Seq[(TD, EI, Seq[(Q, A)])] = readEval()
+}

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/core/src/main/scala/org/apache/predictionio/controller/LFirstServing.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/predictionio/controller/LFirstServing.scala b/core/src/main/scala/org/apache/predictionio/controller/LFirstServing.scala
new file mode 100644
index 0000000..e677743
--- /dev/null
+++ b/core/src/main/scala/org/apache/predictionio/controller/LFirstServing.scala
@@ -0,0 +1,39 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package org.apache.predictionio.controller
+
+import org.apache.predictionio.core.BaseAlgorithm
+
+/** A concrete implementation of [[LServing]] returning the first algorithm's
+  * prediction result directly without any modification.
+  *
+  * @group Serving
+  */
+class LFirstServing[Q, P] extends LServing[Q, P] {
+  /** Returns the first algorithm's prediction. */
+  def serve(query: Q, predictions: Seq[P]): P = predictions.head
+}
+
+/** A concrete implementation of [[LServing]] returning the first algorithm's
+  * prediction result directly without any modification.
+  *
+  * @group Serving
+  */
+object LFirstServing {
+  /** Returns an instance of [[LFirstServing]]. */
+  def apply[Q, P](a: Class[_ <: BaseAlgorithm[_, _, Q, P]]): Class[LFirstServing[Q, P]] =
+    classOf[LFirstServing[Q, P]]
+}

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/core/src/main/scala/org/apache/predictionio/controller/LPreparator.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/predictionio/controller/LPreparator.scala b/core/src/main/scala/org/apache/predictionio/controller/LPreparator.scala
new file mode 100644
index 0000000..32ffd5d
--- /dev/null
+++ b/core/src/main/scala/org/apache/predictionio/controller/LPreparator.scala
@@ -0,0 +1,46 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package org.apache.predictionio.controller
+
+import org.apache.predictionio.core.BasePreparator
+import org.apache.spark.SparkContext
+import org.apache.spark.rdd.RDD
+
+import scala.reflect._
+
+/** Base class of a local preparator.
+  *
+  * A local preparator runs locally within a single machine and produces
+  * prepared data that can fit within a single machine.
+  *
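+  * A minimal sketch, assuming hypothetical TrainingData and PreparedData
+  * case classes:
+  *
+  * {{{
+  * class MyPreparator extends LPreparator[TrainingData, PreparedData] {
+  *   def prepare(trainingData: TrainingData): PreparedData =
+  *     PreparedData(trainingData.items.distinct)
+  * }
+  * }}}
+  *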
+  * @tparam TD Training data class.
+  * @tparam PD Prepared data class.
+  * @group Preparator
+  */
+abstract class LPreparator[TD, PD : ClassTag]
+  extends BasePreparator[RDD[TD], RDD[PD]] {
+
+  def prepareBase(sc: SparkContext, rddTd: RDD[TD]): RDD[PD] = {
+    rddTd.map(prepare)
+  }
+
+  /** Implement this method to produce prepared data that is ready for model
+    * training.
+    *
+    * @param trainingData Training data to be prepared.
+    */
+  def prepare(trainingData: TD): PD
+}
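
For illustration, a minimal sketch of a concrete local preparator;
MyTrainingData and MyPreparedData are hypothetical types invented for this
example:

    import org.apache.predictionio.controller.LPreparator

    // Hypothetical data types for this sketch.
    case class MyTrainingData(ratings: Seq[(String, String, Double)])
    case class MyPreparedData(ratings: Seq[(String, String, Double)])

    class MyPreparator extends LPreparator[MyTrainingData, MyPreparedData] {
      // Keep only positive ratings before model training.
      def prepare(trainingData: MyTrainingData): MyPreparedData =
        MyPreparedData(trainingData.ratings.filter(_._3 > 0))
    }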

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/core/src/main/scala/org/apache/predictionio/controller/LServing.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/predictionio/controller/LServing.scala b/core/src/main/scala/org/apache/predictionio/controller/LServing.scala
new file mode 100644
index 0000000..653b998
--- /dev/null
+++ b/core/src/main/scala/org/apache/predictionio/controller/LServing.scala
@@ -0,0 +1,52 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package org.apache.predictionio.controller
+
+import org.apache.predictionio.annotation.Experimental
+import org.apache.predictionio.core.BaseServing
+
+/** Base class of serving.
+  *
+  * @tparam Q Input query class.
+  * @tparam P Output prediction class.
+  * @group Serving
+  */
+abstract class LServing[Q, P] extends BaseServing[Q, P] {
+  def supplementBase(q: Q): Q = supplement(q)
+
+  /** :: Experimental ::
+    * Implement this method to supplement the query before sending it to
+    * algorithms.
+    *
+    * @param q Query
+    * @return A supplemented Query
+    */
+  @Experimental
+  def supplement(q: Q): Q = q
+
+  def serveBase(q: Q, ps: Seq[P]): P = {
+    serve(q, ps)
+  }
+
+  /** Implement this method to combine multiple algorithms' predictions to
+    * produce a single final prediction. The query is the original query sent to
+    * the engine, not the supplemented query produced by [[LServing.supplement]].
+    *
+    * @param query Original input query.
+    * @param predictions A list of algorithms' predictions.
+    */
+  def serve(query: Q, predictions: Seq[P]): P
+}
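
For illustration, a minimal sketch of a custom serving layer that overrides
both hooks; MyQuery and MyPrediction are hypothetical types for this example:

    import org.apache.predictionio.controller.LServing

    case class MyQuery(user: String, hints: List[String] = Nil)
    case class MyPrediction(score: Double)

    class AverageServing extends LServing[MyQuery, MyPrediction] {
      // Enrich the query before it is sent to the algorithms.
      override def supplement(q: MyQuery): MyQuery =
        q.copy(hints = "default-hint" :: q.hints)

      // Combine the algorithms' predictions by simple averaging.
      def serve(query: MyQuery, predictions: Seq[MyPrediction]): MyPrediction =
        MyPrediction(predictions.map(_.score).sum / predictions.size)
    }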

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/core/src/main/scala/org/apache/predictionio/controller/LocalFileSystemPersistentModel.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/predictionio/controller/LocalFileSystemPersistentModel.scala b/core/src/main/scala/org/apache/predictionio/controller/LocalFileSystemPersistentModel.scala
new file mode 100644
index 0000000..f90e28d
--- /dev/null
+++ b/core/src/main/scala/org/apache/predictionio/controller/LocalFileSystemPersistentModel.scala
@@ -0,0 +1,74 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package org.apache.predictionio.controller
+
+import org.apache.spark.SparkContext
+
+/** This trait is a convenience helper for persisting your model to the local
+  * filesystem. This trait and [[LocalFileSystemPersistentModelLoader]] contain
+  * concrete implementations and need not be implemented by the user.
+  *
+  * The underlying implementation is [[Utils.save]].
+  *
+  * {{{
+  * class MyModel extends LocalFileSystemPersistentModel[MyParams] {
+  *   ...
+  * }
+  *
+  * object MyModel extends LocalFileSystemPersistentModelLoader[MyParams, MyModel] {
+  *   ...
+  * }
+  * }}}
+  *
+  * @tparam AP Algorithm parameters class.
+  * @see [[LocalFileSystemPersistentModelLoader]]
+  * @group Algorithm
+  */
+trait LocalFileSystemPersistentModel[AP <: Params] extends PersistentModel[AP] {
+  def save(id: String, params: AP, sc: SparkContext): Boolean = {
+    Utils.save(id, this)
+    true
+  }
+}
+
+/** Implement an object that extends this trait for PredictionIO to support
+  * loading a persisted model from the local filesystem during serving deployment.
+  *
+  * The underlying implementation is [[Utils.load]].
+  *
+  * @tparam AP Algorithm parameters class.
+  * @tparam M Model class.
+  * @see [[LocalFileSystemPersistentModel]]
+  * @group Algorithm
+  */
+trait LocalFileSystemPersistentModelLoader[AP <: Params, M]
+  extends PersistentModelLoader[AP, M] {
+  def apply(id: String, params: AP, sc: Option[SparkContext]): M = {
+    Utils.load(id).asInstanceOf[M]
+  }
+}
+
+/** DEPRECATED. Use [[LocalFileSystemPersistentModel]] instead.
+  *
+  * @group Algorithm */
+@deprecated("Use LocalFileSystemPersistentModel instead.", "0.9.2")
+trait IFSPersistentModel[AP <: Params] extends LocalFileSystemPersistentModel[AP]
+
+/** DEPRECATED. Use [[LocalFileSystemPersistentModelLoader]] instead.
+  *
+  * @group Algorithm */
+@deprecated("Use LocalFileSystemPersistentModelLoader instead.", "0.9.2")
+trait IFSPersistentModelLoader[AP <: Params, M] extends LocalFileSystemPersistentModelLoader[AP, M]
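
Completing the scaladoc skeleton above with an illustrative sketch; MyParams
and the weights field are hypothetical, and marking the model Serializable
assumes Utils.save performs standard Java object serialization:

    import org.apache.predictionio.controller.{LocalFileSystemPersistentModel,
      LocalFileSystemPersistentModelLoader, Params}

    case class MyParams() extends Params

    // Serializable so the object can be written to the local filesystem.
    class MyModel(val weights: Map[String, Double])
      extends LocalFileSystemPersistentModel[MyParams] with Serializable

    object MyModel
      extends LocalFileSystemPersistentModelLoader[MyParams, MyModel]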


[07/34] incubator-predictionio git commit: rename all except examples

Posted by do...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/data/src/test/scala/io/prediction/data/storage/PEventsSpec.scala
----------------------------------------------------------------------
diff --git a/data/src/test/scala/io/prediction/data/storage/PEventsSpec.scala b/data/src/test/scala/io/prediction/data/storage/PEventsSpec.scala
deleted file mode 100644
index 74614b2..0000000
--- a/data/src/test/scala/io/prediction/data/storage/PEventsSpec.scala
+++ /dev/null
@@ -1,210 +0,0 @@
-/** Copyright 2015 TappingStone, Inc.
-  *
-  * Licensed under the Apache License, Version 2.0 (the "License");
-  * you may not use this file except in compliance with the License.
-  * You may obtain a copy of the License at
-  *
-  *     http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-package io.prediction.data.storage
-
-import org.specs2._
-import org.specs2.specification.Step
-
-import org.apache.spark.SparkContext
-import org.apache.spark.SparkContext._
-import org.apache.spark.rdd.RDD
-
-class PEventsSpec extends Specification with TestEvents {
-
-  System.clearProperty("spark.driver.port")
-  System.clearProperty("spark.hostPort")
-  val sc = new SparkContext("local[4]", "PEventAggregatorSpec test")
-
-  val appId = 1
-  val channelId = 6
-  val dbName = "test_pio_storage_events_" + hashCode
-
-  def hbLocal = Storage.getDataObject[LEvents](
-    StorageTestUtils.hbaseSourceName,
-    dbName
-  )
-
-  def hbPar = Storage.getDataObject[PEvents](
-    StorageTestUtils.hbaseSourceName,
-    dbName
-  )
-
-  def jdbcLocal = Storage.getDataObject[LEvents](
-    StorageTestUtils.jdbcSourceName,
-    dbName
-  )
-
-  def jdbcPar = Storage.getDataObject[PEvents](
-    StorageTestUtils.jdbcSourceName,
-    dbName
-  )
-
-  def stopSpark = {
-    sc.stop()
-  }
-
-  def is = s2"""
-
-  PredictionIO Storage PEvents Specification
-
-    PEvents can be implemented by:
-    - HBPEvents ${hbPEvents}
-    - JDBCPEvents ${jdbcPEvents}
-    - (stop Spark) ${Step(sc.stop())}
-
-  """
-
-  def hbPEvents = sequential ^ s2"""
-
-    HBPEvents should
-    - behave like any PEvents implementation ${events(hbLocal, hbPar)}
-    - (table cleanup) ${Step(StorageTestUtils.dropHBaseNamespace(dbName))}
-
-  """
-
-  def jdbcPEvents = sequential ^ s2"""
-
-    JDBCPEvents should
-    - behave like any PEvents implementation ${events(jdbcLocal, jdbcPar)}
-    - (table cleanup) ${Step(StorageTestUtils.dropJDBCTable(s"${dbName}_$appId"))}
-    - (table cleanup) ${Step(StorageTestUtils.dropJDBCTable(s"${dbName}_${appId}_$channelId"))}
-
-  """
-
-  def events(localEventClient: LEvents, parEventClient: PEvents) = sequential ^ s2"""
-
-    - (init test) ${initTest(localEventClient)}
-    - (insert test events) ${insertTestEvents(localEventClient)}
-    find in default ${find(parEventClient)}
-    find in channel ${findChannel(parEventClient)}
-    aggregate user properties in default ${aggregateUserProperties(parEventClient)}
-    aggregate user properties in channel ${aggregateUserPropertiesChannel(parEventClient)}
-    write to default ${write(parEventClient)}
-    write to channel ${writeChannel(parEventClient)}
-
-  """
-
-  /* setup */
-
-  // events from TestEvents trait
-  val listOfEvents = List(u1e5, u2e2, u1e3, u1e1, u2e3, u2e1, u1e4, u1e2, r1, r2)
-  val listOfEventsChannel = List(u3e1, u3e2, u3e3, r3, r4)
-
-  def initTest(localEventClient: LEvents) = {
-    localEventClient.init(appId)
-    localEventClient.init(appId, Some(channelId))
-  }
-
-  def insertTestEvents(localEventClient: LEvents) = {
-    listOfEvents.map( localEventClient.insert(_, appId) )
-    // insert to channel
-    listOfEventsChannel.map( localEventClient.insert(_, appId, Some(channelId)) )
-    success
-  }
-
-  /* following are tests */
-
-  def find(parEventClient: PEvents) = {
-    val resultRDD: RDD[Event] = parEventClient.find(
-      appId = appId
-    )(sc)
-
-    val results = resultRDD.collect.toList
-      .map {_.copy(eventId = None)} // ignore eventId
-
-    results must containTheSameElementsAs(listOfEvents)
-  }
-
-  def findChannel(parEventClient: PEvents) = {
-    val resultRDD: RDD[Event] = parEventClient.find(
-      appId = appId,
-      channelId = Some(channelId)
-    )(sc)
-
-    val results = resultRDD.collect.toList
-      .map {_.copy(eventId = None)} // ignore eventId
-
-    results must containTheSameElementsAs(listOfEventsChannel)
-  }
-
-  def aggregateUserProperties(parEventClient: PEvents) = {
-    val resultRDD: RDD[(String, PropertyMap)] = parEventClient.aggregateProperties(
-      appId = appId,
-      entityType = "user"
-    )(sc)
-    val result: Map[String, PropertyMap] = resultRDD.collectAsMap.toMap
-
-    val expected = Map(
-      "u1" -> PropertyMap(u1, u1BaseTime, u1LastTime),
-      "u2" -> PropertyMap(u2, u2BaseTime, u2LastTime)
-    )
-
-    result must beEqualTo(expected)
-  }
-
-  def aggregateUserPropertiesChannel(parEventClient: PEvents) = {
-    val resultRDD: RDD[(String, PropertyMap)] = parEventClient.aggregateProperties(
-      appId = appId,
-      channelId = Some(channelId),
-      entityType = "user"
-    )(sc)
-    val result: Map[String, PropertyMap] = resultRDD.collectAsMap.toMap
-
-    val expected = Map(
-      "u3" -> PropertyMap(u3, u3BaseTime, u3LastTime)
-    )
-
-    result must beEqualTo(expected)
-  }
-
-  def write(parEventClient: PEvents) = {
-    val written = List(r5, r6)
-    val writtenRDD = sc.parallelize(written)
-    parEventClient.write(writtenRDD, appId)(sc)
-
-    // read back
-    val resultRDD = parEventClient.find(
-      appId = appId
-    )(sc)
-
-    val results = resultRDD.collect.toList
-      .map { _.copy(eventId = None)} // ignore eventId
-
-    val expected = listOfEvents ++ written
-
-    results must containTheSameElementsAs(expected)
-  }
-
-  def writeChannel(parEventClient: PEvents) = {
-    val written = List(r1, r5, r6)
-    val writtenRDD = sc.parallelize(written)
-    parEventClient.write(writtenRDD, appId, Some(channelId))(sc)
-
-    // read back
-    val resultRDD = parEventClient.find(
-      appId = appId,
-      channelId = Some(channelId)
-    )(sc)
-
-    val results = resultRDD.collect.toList
-      .map { _.copy(eventId = None)} // ignore eventId
-
-    val expected = listOfEventsChannel ++ written
-
-    results must containTheSameElementsAs(expected)
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/data/src/test/scala/io/prediction/data/storage/StorageTestUtils.scala
----------------------------------------------------------------------
diff --git a/data/src/test/scala/io/prediction/data/storage/StorageTestUtils.scala b/data/src/test/scala/io/prediction/data/storage/StorageTestUtils.scala
deleted file mode 100644
index 74615a1..0000000
--- a/data/src/test/scala/io/prediction/data/storage/StorageTestUtils.scala
+++ /dev/null
@@ -1,42 +0,0 @@
-/** Copyright 2015 TappingStone, Inc.
-  *
-  * Licensed under the Apache License, Version 2.0 (the "License");
-  * you may not use this file except in compliance with the License.
-  * You may obtain a copy of the License at
-  *
-  *     http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-package io.prediction.data.storage
-
-import io.prediction.data.storage.hbase.HBLEvents
-import scalikejdbc._
-
-object StorageTestUtils {
-  val hbaseSourceName = "HBASE"
-  val jdbcSourceName = "PGSQL"
-
-  def dropHBaseNamespace(namespace: String): Unit = {
-    val eventDb = Storage.getDataObject[LEvents](hbaseSourceName, namespace)
-      .asInstanceOf[HBLEvents]
-    val admin = eventDb.client.admin
-    val tableNames = admin.listTableNamesByNamespace(namespace)
-    tableNames.foreach { name =>
-      admin.disableTable(name)
-      admin.deleteTable(name)
-    }
-
-    //Only empty namespaces (no tables) can be removed.
-    admin.deleteNamespace(namespace)
-  }
-
-  def dropJDBCTable(table: String): Unit = DB autoCommit { implicit s =>
-    SQL(s"drop table $table").execute().apply()
-  }
-}

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/data/src/test/scala/io/prediction/data/storage/TestEvents.scala
----------------------------------------------------------------------
diff --git a/data/src/test/scala/io/prediction/data/storage/TestEvents.scala b/data/src/test/scala/io/prediction/data/storage/TestEvents.scala
deleted file mode 100644
index 4fc2469..0000000
--- a/data/src/test/scala/io/prediction/data/storage/TestEvents.scala
+++ /dev/null
@@ -1,263 +0,0 @@
-/** Copyright 2015 TappingStone, Inc.
-  *
-  * Licensed under the Apache License, Version 2.0 (the "License");
-  * you may not use this file except in compliance with the License.
-  * You may obtain a copy of the License at
-  *
-  *     http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-package io.prediction.data.storage
-
-import org.joda.time.DateTime
-import org.joda.time.DateTimeZone
-
-trait TestEvents {
-
-  val u1BaseTime = new DateTime(654321)
-  val u2BaseTime = new DateTime(6543210)
-  val u3BaseTime = new DateTime(6543410)
-
-  // u1 events
-  val u1e1 = Event(
-    event = "$set",
-    entityType = "user",
-    entityId = "u1",
-    properties = DataMap(
-      """{
-        "a" : 1,
-        "b" : "value2",
-        "d" : [1, 2, 3],
-      }"""),
-    eventTime = u1BaseTime
-  )
-
-  val u1e2 = u1e1.copy(
-    event = "$set",
-    properties = DataMap("""{"a" : 2}"""),
-    eventTime = u1BaseTime.plusDays(1)
-  )
-
-  val u1e3 = u1e1.copy(
-    event = "$set",
-    properties = DataMap("""{"b" : "value4"}"""),
-    eventTime = u1BaseTime.plusDays(2)
-  )
-
-  val u1e4 = u1e1.copy(
-    event = "$unset",
-    properties = DataMap("""{"b" : null}"""),
-    eventTime = u1BaseTime.plusDays(3)
-  )
-
-  val u1e5 = u1e1.copy(
-    event = "$set",
-    properties = DataMap("""{"e" : "new"}"""),
-    eventTime = u1BaseTime.plusDays(4)
-  )
-
-  val u1LastTime = u1BaseTime.plusDays(4)
-  val u1 = """{"a": 2, "d": [1, 2, 3], "e": "new"}"""
-
-  // delete event for u1
-  val u1ed = u1e1.copy(
-    event = "$delete",
-    properties = DataMap(),
-    eventTime = u1BaseTime.plusDays(5)
-  )
-
-  // u2 events
-  val u2e1 = Event(
-    event = "$set",
-    entityType = "user",
-    entityId = "u2",
-    properties = DataMap(
-      """{
-        "a" : 21,
-        "b" : "value12",
-        "d" : [7, 5, 6],
-      }"""),
-    eventTime = u2BaseTime
-  )
-
-  val u2e2 = u2e1.copy(
-    event = "$unset",
-    properties = DataMap("""{"a" : null}"""),
-    eventTime = u2BaseTime.plusDays(1)
-  )
-
-  val u2e3 = u2e1.copy(
-    event = "$set",
-    properties = DataMap("""{"b" : "value9", "g": "new11"}"""),
-    eventTime = u2BaseTime.plusDays(2)
-  )
-
-  val u2LastTime = u2BaseTime.plusDays(2)
-  val u2 = """{"b": "value9", "d": [7, 5, 6], "g": "new11"}"""
-
-  // u3 events
-  val u3e1 = Event(
-    event = "$set",
-    entityType = "user",
-    entityId = "u3",
-    properties = DataMap(
-      """{
-        "a" : 22,
-        "b" : "value13",
-        "d" : [5, 6, 1],
-      }"""),
-    eventTime = u3BaseTime
-  )
-
-  val u3e2 = u3e1.copy(
-    event = "$unset",
-    properties = DataMap("""{"a" : null}"""),
-    eventTime = u3BaseTime.plusDays(1)
-  )
-
-  val u3e3 = u3e1.copy(
-    event = "$set",
-    properties = DataMap("""{"b" : "value10", "f": "new12", "d" : [1, 3, 2]}"""),
-    eventTime = u3BaseTime.plusDays(2)
-  )
-
-  val u3LastTime = u3BaseTime.plusDays(2)
-  val u3 = """{"b": "value10", "d": [1, 3, 2], "f": "new12"}"""
-
-  // some random events
-  val r1 = Event(
-    event = "my_event",
-    entityType = "my_entity_type",
-    entityId = "my_entity_id",
-    targetEntityType = Some("my_target_entity_type"),
-    targetEntityId = Some("my_target_entity_id"),
-    properties = DataMap(
-      """{
-        "prop1" : 1,
-        "prop2" : "value2",
-        "prop3" : [1, 2, 3],
-        "prop4" : true,
-        "prop5" : ["a", "b", "c"],
-        "prop6" : 4.56
-      }"""
-    ),
-    eventTime = DateTime.now,
-    prId = Some("my_prid")
-  )
-  val r2 = Event(
-    event = "my_event2",
-    entityType = "my_entity_type2",
-    entityId = "my_entity_id2"
-  )
-  val r3 = Event(
-    event = "my_event3",
-    entityType = "my_entity_type",
-    entityId = "my_entity_id",
-    targetEntityType = Some("my_target_entity_type"),
-    targetEntityId = Some("my_target_entity_id"),
-    properties = DataMap(
-      """{
-        "propA" : 1.2345,
-        "propB" : "valueB",
-      }"""
-    ),
-    prId = Some("my_prid")
-  )
-  val r4 = Event(
-    event = "my_event4",
-    entityType = "my_entity_type4",
-    entityId = "my_entity_id4",
-    targetEntityType = Some("my_target_entity_type4"),
-    targetEntityId = Some("my_target_entity_id4"),
-    properties = DataMap(
-      """{
-        "prop1" : 1,
-        "prop2" : "value2",
-        "prop3" : [1, 2, 3],
-        "prop4" : true,
-        "prop5" : ["a", "b", "c"],
-        "prop6" : 4.56
-      }"""),
-    eventTime = DateTime.now
-  )
-  val r5 = Event(
-    event = "my_event5",
-    entityType = "my_entity_type5",
-    entityId = "my_entity_id5",
-    targetEntityType = Some("my_target_entity_type5"),
-    targetEntityId = Some("my_target_entity_id5"),
-    properties = DataMap(
-      """{
-        "prop1" : 1,
-        "prop2" : "value2",
-        "prop3" : [1, 2, 3],
-        "prop4" : true,
-        "prop5" : ["a", "b", "c"],
-        "prop6" : 4.56
-      }"""
-    ),
-    eventTime = DateTime.now
-  )
-  val r6 = Event(
-    event = "my_event6",
-    entityType = "my_entity_type6",
-    entityId = "my_entity_id6",
-    targetEntityType = Some("my_target_entity_type6"),
-    targetEntityId = Some("my_target_entity_id6"),
-    properties = DataMap(
-      """{
-        "prop1" : 6,
-        "prop2" : "value2",
-        "prop3" : [6, 7, 8],
-        "prop4" : true,
-        "prop5" : ["a", "b", "c"],
-        "prop6" : 4.56
-      }"""
-    ),
-    eventTime = DateTime.now
-  )
-
-  // timezone
-  val tz1 = Event(
-    event = "my_event",
-    entityType = "my_entity_type",
-    entityId = "my_entity_id0",
-    targetEntityType = Some("my_target_entity_type"),
-    targetEntityId = Some("my_target_entity_id"),
-    properties = DataMap(
-      """{
-        "prop1" : 1,
-        "prop2" : "value2",
-        "prop3" : [1, 2, 3],
-        "prop4" : true,
-        "prop5" : ["a", "b", "c"],
-        "prop6" : 4.56
-      }"""
-    ),
-    eventTime = new DateTime(12345678, DateTimeZone.forID("-08:00")),
-    prId = Some("my_prid")
-  )
-
-  val tz2 = Event(
-    event = "my_event",
-    entityType = "my_entity_type",
-    entityId = "my_entity_id1",
-    eventTime = new DateTime(12345678, DateTimeZone.forID("+02:00")),
-    prId = Some("my_prid")
-  )
-
-  val tz3 = Event(
-    event = "my_event",
-    entityType = "my_entity_type",
-    entityId = "my_entity_id2",
-    eventTime = new DateTime(12345678, DateTimeZone.forID("+08:00")),
-    prId = Some("my_prid")
-  )
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/data/src/test/scala/io/prediction/data/webhooks/ConnectorTestUtil.scala
----------------------------------------------------------------------
diff --git a/data/src/test/scala/io/prediction/data/webhooks/ConnectorTestUtil.scala b/data/src/test/scala/io/prediction/data/webhooks/ConnectorTestUtil.scala
deleted file mode 100644
index 4009e0f..0000000
--- a/data/src/test/scala/io/prediction/data/webhooks/ConnectorTestUtil.scala
+++ /dev/null
@@ -1,47 +0,0 @@
-/** Copyright 2015 TappingStone, Inc.
-  *
-  * Licensed under the Apache License, Version 2.0 (the "License");
-  * you may not use this file except in compliance with the License.
-  * You may obtain a copy of the License at
-  *
-  *     http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-package io.prediction.data.webhooks
-
-import org.specs2.execute.Result
-import org.specs2.mutable._
-
-import org.json4s.JObject
-import org.json4s.DefaultFormats
-import org.json4s.native.JsonMethods.parse
-import org.json4s.native.Serialization.write
-
-/** TestUtil for JsonConnector */
-trait ConnectorTestUtil extends Specification {
-
-  implicit val formats = DefaultFormats
-
-  def check(connector: JsonConnector, original: String, event: String): Result = {
-    val originalJson = parse(original).asInstanceOf[JObject]
-    val eventJson = parse(event).asInstanceOf[JObject]
-    // write and parse back to discard any JNothing field
-    val result = parse(write(connector.toEventJson(originalJson))).asInstanceOf[JObject]
-    result.obj must containTheSameElementsAs(eventJson.obj)
-  }
-
-  def check(connector: FormConnector, original: Map[String, String], event: String) = {
-
-    val eventJson = parse(event).asInstanceOf[JObject]
-    // write and parse back to discard any JNothing field
-    val result = parse(write(connector.toEventJson(original))).asInstanceOf[JObject]
-
-    result.obj must containTheSameElementsAs(eventJson.obj)
-  }
-}

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/data/src/test/scala/io/prediction/data/webhooks/exampleform/ExampleFormConnectorSpec.scala
----------------------------------------------------------------------
diff --git a/data/src/test/scala/io/prediction/data/webhooks/exampleform/ExampleFormConnectorSpec.scala b/data/src/test/scala/io/prediction/data/webhooks/exampleform/ExampleFormConnectorSpec.scala
deleted file mode 100644
index 7f6ad8f..0000000
--- a/data/src/test/scala/io/prediction/data/webhooks/exampleform/ExampleFormConnectorSpec.scala
+++ /dev/null
@@ -1,164 +0,0 @@
-/** Copyright 2015 TappingStone, Inc.
-  *
-  * Licensed under the Apache License, Version 2.0 (the "License");
-  * you may not use this file except in compliance with the License.
-  * You may obtain a copy of the License at
-  *
-  *     http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-package io.prediction.data.webhooks.exampleform
-
-import io.prediction.data.webhooks.ConnectorTestUtil
-
-import org.specs2.mutable._
-
-/** Test the ExampleFormConnector */
-class ExampleFormConnectorSpec extends Specification with ConnectorTestUtil {
-
-  "ExampleFormConnector" should {
-
-    "convert userAction to Event JSON" in {
-      // webhooks input
-      val userAction = Map(
-        "type" -> "userAction",
-        "userId" -> "as34smg4",
-        "event" -> "do_something",
-        "context[ip]" -> "24.5.68.47", // optional
-        "context[prop1]" -> "2.345", // optional
-        "context[prop2]" -> "value1", // optional
-        "anotherProperty1" -> "100",
-        "anotherProperty2"-> "optional1", // optional
-        "timestamp" -> "2015-01-02T00:30:12.984Z"
-      )
-
-      // expected converted Event JSON
-      val expected = """
-        {
-          "event": "do_something",
-          "entityType": "user",
-          "entityId": "as34smg4",
-          "properties": {
-            "context": {
-              "ip": "24.5.68.47",
-              "prop1": 2.345
-              "prop2": "value1"
-            },
-            "anotherProperty1": 100,
-            "anotherProperty2": "optional1"
-          }
-          "eventTime": "2015-01-02T00:30:12.984Z"
-        }
-      """
-
-      check(ExampleFormConnector, userAction, expected)
-    }
-
-    "convert userAction without optional fields to Event JSON" in {
-      // webhooks input
-      val userAction = Map(
-        "type" -> "userAction",
-        "userId" -> "as34smg4",
-        "event" -> "do_something",
-        "anotherProperty1" -> "100",
-        "timestamp" -> "2015-01-02T00:30:12.984Z"
-      )
-
-      // expected converted Event JSON
-      val expected = """
-        {
-          "event": "do_something",
-          "entityType": "user",
-          "entityId": "as34smg4",
-          "properties": {
-            "anotherProperty1": 100,
-          }
-          "eventTime": "2015-01-02T00:30:12.984Z"
-        }
-      """
-
-      check(ExampleFormConnector, userAction, expected)
-    }
-
-    "convert userActionItem to Event JSON" in {
-      // webhooks input
-      val userActionItem = Map(
-        "type" -> "userActionItem",
-        "userId" -> "as34smg4",
-        "event" -> "do_something_on",
-        "itemId" -> "kfjd312bc",
-        "context[ip]" -> "1.23.4.56",
-        "context[prop1]" -> "2.345",
-        "context[prop2]" -> "value1",
-        "anotherPropertyA" -> "4.567", // optional
-        "anotherPropertyB" -> "false", // optional
-        "timestamp" -> "2015-01-15T04:20:23.567Z"
-      )
-
-      // expected converted Event JSON
-      val expected = """
-        {
-          "event": "do_something_on",
-          "entityType": "user",
-          "entityId": "as34smg4",
-          "targetEntityType": "item",
-          "targetEntityId": "kfjd312bc"
-          "properties": {
-            "context": {
-              "ip": "1.23.4.56",
-              "prop1": 2.345
-              "prop2": "value1"
-            },
-            "anotherPropertyA": 4.567
-            "anotherPropertyB": false
-          }
-          "eventTime": "2015-01-15T04:20:23.567Z"
-        }
-      """
-
-      check(ExampleFormConnector, userActionItem, expected)
-    }
-
-    "convert userActionItem without optional fields to Event JSON" in {
-      // webhooks input
-      val userActionItem = Map(
-        "type" -> "userActionItem",
-        "userId" -> "as34smg4",
-        "event" -> "do_something_on",
-        "itemId" -> "kfjd312bc",
-        "context[ip]" -> "1.23.4.56",
-        "context[prop1]" -> "2.345",
-        "context[prop2]" -> "value1",
-        "timestamp" -> "2015-01-15T04:20:23.567Z"
-      )
-
-      // expected converted Event JSON
-      val expected = """
-        {
-          "event": "do_something_on",
-          "entityType": "user",
-          "entityId": "as34smg4",
-          "targetEntityType": "item",
-          "targetEntityId": "kfjd312bc"
-          "properties": {
-            "context": {
-              "ip": "1.23.4.56",
-              "prop1": 2.345
-              "prop2": "value1"
-            }
-          }
-          "eventTime": "2015-01-15T04:20:23.567Z"
-        }
-      """
-
-      check(ExampleFormConnector, userActionItem, expected)
-    }
-
-  }
-}

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/data/src/test/scala/io/prediction/data/webhooks/examplejson/ExampleJsonConnectorSpec.scala
----------------------------------------------------------------------
diff --git a/data/src/test/scala/io/prediction/data/webhooks/examplejson/ExampleJsonConnectorSpec.scala b/data/src/test/scala/io/prediction/data/webhooks/examplejson/ExampleJsonConnectorSpec.scala
deleted file mode 100644
index bdf1cc4..0000000
--- a/data/src/test/scala/io/prediction/data/webhooks/examplejson/ExampleJsonConnectorSpec.scala
+++ /dev/null
@@ -1,179 +0,0 @@
-/** Copyright 2015 TappingStone, Inc.
-  *
-  * Licensed under the Apache License, Version 2.0 (the "License");
-  * you may not use this file except in compliance with the License.
-  * You may obtain a copy of the License at
-  *
-  *     http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-package io.prediction.data.webhooks.examplejson
-
-import io.prediction.data.webhooks.ConnectorTestUtil
-
-import org.specs2.mutable._
-
-/** Test the ExampleJsonConnector */
-class ExampleJsonConnectorSpec extends Specification with ConnectorTestUtil {
-
-  "ExampleJsonConnector" should {
-
-    "convert userAction to Event JSON" in {
-      // webhooks input
-      val userAction = """
-        {
-          "type": "userAction"
-          "userId": "as34smg4",
-          "event": "do_something",
-          "context": {
-            "ip": "24.5.68.47",
-            "prop1": 2.345
-            "prop2": "value1"
-          },
-          "anotherProperty1": 100,
-          "anotherProperty2": "optional1",
-          "timestamp": "2015-01-02T00:30:12.984Z"
-        }
-      """
-
-      // expected converted Event JSON
-      val expected = """
-        {
-          "event": "do_something",
-          "entityType": "user",
-          "entityId": "as34smg4",
-          "properties": {
-            "context": {
-              "ip": "24.5.68.47",
-              "prop1": 2.345
-              "prop2": "value1"
-            },
-            "anotherProperty1": 100,
-            "anotherProperty2": "optional1"
-          }
-          "eventTime": "2015-01-02T00:30:12.984Z"
-        }
-      """
-
-      check(ExampleJsonConnector, userAction, expected)
-    }
-
-    "convert userAction without optional field to Event JSON" in {
-      // webhooks input
-      val userAction = """
-        {
-          "type": "userAction"
-          "userId": "as34smg4",
-          "event": "do_something",
-          "anotherProperty1": 100,
-          "timestamp": "2015-01-02T00:30:12.984Z"
-        }
-      """
-
-      // expected converted Event JSON
-      val expected = """
-        {
-          "event": "do_something",
-          "entityType": "user",
-          "entityId": "as34smg4",
-          "properties": {
-            "anotherProperty1": 100,
-          }
-          "eventTime": "2015-01-02T00:30:12.984Z"
-        }
-      """
-
-      check(ExampleJsonConnector, userAction, expected)
-    }
-
-    "convert userActionItem to Event JSON" in {
-      // webhooks input
-      val userActionItem = """
-        {
-          "type": "userActionItem"
-          "userId": "as34smg4",
-          "event": "do_something_on",
-          "itemId": "kfjd312bc",
-          "context": {
-            "ip": "1.23.4.56",
-            "prop1": 2.345
-            "prop2": "value1"
-          },
-          "anotherPropertyA": 4.567
-          "anotherPropertyB": false
-          "timestamp": "2015-01-15T04:20:23.567Z"
-      }
-      """
-
-      // expected converted Event JSON
-      val expected = """
-        {
-          "event": "do_something_on",
-          "entityType": "user",
-          "entityId": "as34smg4",
-          "targetEntityType": "item",
-          "targetEntityId": "kfjd312bc"
-          "properties": {
-            "context": {
-              "ip": "1.23.4.56",
-              "prop1": 2.345
-              "prop2": "value1"
-            },
-            "anotherPropertyA": 4.567
-            "anotherPropertyB": false
-          }
-          "eventTime": "2015-01-15T04:20:23.567Z"
-        }
-      """
-
-      check(ExampleJsonConnector, userActionItem, expected)
-    }
-
-    "convert userActionItem without optional fields to Event JSON" in {
-      // webhooks input
-      val userActionItem = """
-        {
-          "type": "userActionItem"
-          "userId": "as34smg4",
-          "event": "do_something_on",
-          "itemId": "kfjd312bc",
-          "context": {
-            "ip": "1.23.4.56",
-            "prop1": 2.345
-            "prop2": "value1"
-          }
-          "timestamp": "2015-01-15T04:20:23.567Z"
-      }
-      """
-
-      // expected converted Event JSON
-      val expected = """
-        {
-          "event": "do_something_on",
-          "entityType": "user",
-          "entityId": "as34smg4",
-          "targetEntityType": "item",
-          "targetEntityId": "kfjd312bc"
-          "properties": {
-            "context": {
-              "ip": "1.23.4.56",
-              "prop1": 2.345
-              "prop2": "value1"
-            }
-          }
-          "eventTime": "2015-01-15T04:20:23.567Z"
-        }
-      """
-
-      check(ExampleJsonConnector, userActionItem, expected)
-    }
-
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/data/src/test/scala/io/prediction/data/webhooks/mailchimp/MailChimpConnectorSpec.scala
----------------------------------------------------------------------
diff --git a/data/src/test/scala/io/prediction/data/webhooks/mailchimp/MailChimpConnectorSpec.scala b/data/src/test/scala/io/prediction/data/webhooks/mailchimp/MailChimpConnectorSpec.scala
deleted file mode 100644
index 56484c2..0000000
--- a/data/src/test/scala/io/prediction/data/webhooks/mailchimp/MailChimpConnectorSpec.scala
+++ /dev/null
@@ -1,254 +0,0 @@
-/** Copyright 2015 TappingStone, Inc.
-  *
-  * Licensed under the Apache License, Version 2.0 (the "License");
-  * you may not use this file except in compliance with the License.
-  * You may obtain a copy of the License at
-  *
-  *     http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-package io.prediction.data.webhooks.mailchimp
-
-import io.prediction.data.webhooks.ConnectorTestUtil
-
-import org.specs2.mutable._
-
-class MailChimpConnectorSpec extends Specification with ConnectorTestUtil {
-
-  // TODO: test other events
-  // TODO: test different optional fields
-
-  "MailChimpConnector" should {
-
-    "convert subscribe to event JSON" in {
-
-      val subscribe = Map(
-        "type" -> "subscribe",
-        "fired_at" -> "2009-03-26 21:35:57",
-        "data[id]" -> "8a25ff1d98",
-        "data[list_id]" -> "a6b5da1054",
-        "data[email]" -> "api@mailchimp.com",
-        "data[email_type]" -> "html",
-        "data[merges][EMAIL]" -> "api@mailchimp.com",
-        "data[merges][FNAME]" -> "MailChimp",
-        "data[merges][LNAME]" -> "API",
-        "data[merges][INTERESTS]" -> "Group1,Group2", //optional
-        "data[ip_opt]" -> "10.20.10.30",
-        "data[ip_signup]" -> "10.20.10.30"
-      )
-
-      val expected = """
-        {
-          "event" : "subscribe",
-          "entityType" : "user",
-          "entityId" : "8a25ff1d98",
-          "targetEntityType" : "list",
-          "targetEntityId" : "a6b5da1054",
-          "properties" : {
-            "email" : "api@mailchimp.com",
-            "email_type" : "html",
-            "merges" : {
-              "EMAIL" : "api@mailchimp.com",
-              "FNAME" : "MailChimp",
-              "LNAME" : "API"
-              "INTERESTS" : "Group1,Group2"
-            },
-            "ip_opt" : "10.20.10.30",
-            "ip_signup" : "10.20.10.30"
-          },
-          "eventTime" : "2009-03-26T21:35:57.000Z"
-        }
-      """
-
-      check(MailChimpConnector, subscribe, expected)
-    }
-
-    //check unsubscribe to event Json
-    "convert unsubscribe to event JSON" in {
-
-      val unsubscribe = Map(
-        "type" -> "unsubscribe",
-        "fired_at" -> "2009-03-26 21:40:57",
-        "data[action]" -> "unsub",
-        "data[reason]" -> "manual",
-        "data[id]" -> "8a25ff1d98",
-        "data[list_id]" -> "a6b5da1054",
-        "data[email]" -> "api+unsub@mailchimp.com",
-        "data[email_type]" -> "html",
-        "data[merges][EMAIL]" -> "api+unsub@mailchimp.com",
-        "data[merges][FNAME]" -> "MailChimp",
-        "data[merges][LNAME]" -> "API",
-        "data[merges][INTERESTS]" -> "Group1,Group2", //optional 
-        "data[ip_opt]" -> "10.20.10.30",
-        "data[campaign_id]" -> "cb398d21d2"
-      )
-
-      val expected = """
-        {
-          "event" : "unsubscribe",
-          "entityType" : "user",
-          "entityId" : "8a25ff1d98",
-          "targetEntityType" : "list",
-          "targetEntityId" : "a6b5da1054",
-          "properties" : {
-            "action" : "unsub",
-            "reason" : "manual",
-            "email" : "api+unsub@mailchimp.com",
-            "email_type" : "html",
-            "merges" : {
-              "EMAIL" : "api+unsub@mailchimp.com",
-              "FNAME" : "MailChimp",
-              "LNAME" : "API"
-              "INTERESTS" : "Group1,Group2"
-            },
-            "ip_opt" : "10.20.10.30",
-            "campaign_id" : "cb398d21d2"
-          },
-          "eventTime" : "2009-03-26T21:40:57.000Z"
-        }
-      """
-
-      check(MailChimpConnector, unsubscribe, expected)
-    }
-
-    //check profile update to event Json 
-    "convert profile update to event JSON" in {
-
-      val profileUpdate = Map(
-        "type" -> "profile",
-        "fired_at" -> "2009-03-26 21:31:21",
-        "data[id]" -> "8a25ff1d98",
-        "data[list_id]" -> "a6b5da1054",
-        "data[email]" -> "api@mailchimp.com",
-        "data[email_type]" -> "html",
-        "data[merges][EMAIL]" -> "api@mailchimp.com",
-        "data[merges][FNAME]" -> "MailChimp",
-        "data[merges][LNAME]" -> "API",
-        "data[merges][INTERESTS]" -> "Group1,Group2", //optional
-        "data[ip_opt]" -> "10.20.10.30"
-      )
-
-      val expected = """
-        {
-          "event" : "profile",
-          "entityType" : "user",
-          "entityId" : "8a25ff1d98",
-          "targetEntityType" : "list",
-          "targetEntityId" : "a6b5da1054",
-          "properties" : {
-            "email" : "api@mailchimp.com",
-            "email_type" : "html",
-            "merges" : {
-              "EMAIL" : "api@mailchimp.com",
-              "FNAME" : "MailChimp",
-              "LNAME" : "API"
-              "INTERESTS" : "Group1,Group2"
-            },
-            "ip_opt" : "10.20.10.30"
-          },
-          "eventTime" : "2009-03-26T21:31:21.000Z"
-        }
-      """
-
-      check(MailChimpConnector, profileUpdate, expected)
-    }
-
-    //check email update to event Json 
-    "convert email update to event JSON" in {
-
-      val emailUpdate = Map(
-        "type" -> "upemail",
-        "fired_at" -> "2009-03-26 22:15:09",
-        "data[list_id]" -> "a6b5da1054",
-        "data[new_id]" -> "51da8c3259",
-        "data[new_email]" -> "api+new@mailchimp.com",
-        "data[old_email]" -> "api+old@mailchimp.com"
-      )
-
-      val expected = """
-        {
-          "event" : "upemail",
-          "entityType" : "user",
-          "entityId" : "51da8c3259",
-          "targetEntityType" : "list",
-          "targetEntityId" : "a6b5da1054",
-          "properties" : {
-            "new_email" : "api+new@mailchimp.com",
-            "old_email" : "api+old@mailchimp.com"
-          },
-          "eventTime" : "2009-03-26T22:15:09.000Z"
-        }
-      """
-
-      check(MailChimpConnector, emailUpdate, expected)
-    }
-
-    //check cleaned email to event Json 
-    "convert cleaned email to event JSON" in {
-
-      val cleanedEmail = Map(
-        "type" -> "cleaned",
-        "fired_at" -> "2009-03-26 22:01:00",
-        "data[list_id]" -> "a6b5da1054",
-        "data[campaign_id]" -> "4fjk2ma9xd",
-        "data[reason]" -> "hard",
-        "data[email]" -> "api+cleaned@mailchimp.com"
-      )
-
-      val expected = """
-        {
-          "event" : "cleaned",
-          "entityType" : "list",
-          "entityId" : "a6b5da1054",
-          "properties" : {
-            "campaignId" : "4fjk2ma9xd",
-            "reason" : "hard",
-            "email" : "api+cleaned@mailchimp.com"
-          },
-          "eventTime" : "2009-03-26T22:01:00.000Z"
-        }
-      """
-
-      check(MailChimpConnector, cleanedEmail, expected)
-    }
-
-    //check campaign sending status to event Json 
-    "convert campaign sending status to event JSON" in {
-
-      val campaign = Map(
-        "type" -> "campaign",
-        "fired_at" -> "2009-03-26 22:15:09",
-        "data[id]" -> "5aa2102003",
-        "data[subject]" -> "Test Campaign Subject",
-        "data[status]" -> "sent",
-        "data[reason]" -> "",
-        "data[list_id]" -> "a6b5da1054"
-      )
-
-      val expected = """
-        {
-          "event" : "campaign",
-          "entityType" : "campaign",
-          "entityId" : "5aa2102003",
-          "targetEntityType" : "list",
-          "targetEntityId" : "a6b5da1054",
-          "properties" : {
-            "subject" : "Test Campaign Subject",
-            "status" : "sent",
-            "reason" : ""
-          },
-          "eventTime" : "2009-03-26T22:15:09.000Z"
-        }
-      """
-
-      check(MailChimpConnector, campaign, expected)
-    }
-
-  }
-}

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/data/src/test/scala/io/prediction/data/webhooks/segmentio/SegmentIOConnectorSpec.scala
----------------------------------------------------------------------
diff --git a/data/src/test/scala/io/prediction/data/webhooks/segmentio/SegmentIOConnectorSpec.scala b/data/src/test/scala/io/prediction/data/webhooks/segmentio/SegmentIOConnectorSpec.scala
deleted file mode 100644
index d7587cd..0000000
--- a/data/src/test/scala/io/prediction/data/webhooks/segmentio/SegmentIOConnectorSpec.scala
+++ /dev/null
@@ -1,335 +0,0 @@
-/** Copyright 2015 TappingStone, Inc.
-  *
-  * Licensed under the Apache License, Version 2.0 (the "License");
-  * you may not use this file except in compliance with the License.
-  * You may obtain a copy of the License at
-  *
-  *     http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-package io.prediction.data.webhooks.segmentio
-
-import io.prediction.data.webhooks.ConnectorTestUtil
-
-import org.specs2.mutable._
-
-class SegmentIOConnectorSpec extends Specification with ConnectorTestUtil {
-
-  // TODO: test different optional fields
-
-  val commonFields =
-    s"""
-       |  "anonymous_id": "id",
-       |  "sent_at": "sendAt",
-       |  "version": "2",
-     """.stripMargin
-
-  "SegmentIOConnector" should {
-
-    "convert group with context to event JSON" in {
-      val context =
-        """
-          |  "context": {
-          |    "app": {
-          |      "name": "InitechGlobal",
-          |      "version": "545",
-          |      "build": "3.0.1.545"
-          |    },
-          |    "campaign": {
-          |      "name": "TPS Innovation Newsletter",
-          |      "source": "Newsletter",
-          |      "medium": "email",
-          |      "term": "tps reports",
-          |      "content": "image link"
-          |    },
-          |    "device": {
-          |      "id": "B5372DB0-C21E-11E4-8DFC-AA07A5B093DB",
-          |      "advertising_id": "7A3CBEA0-BDF5-11E4-8DFC-AA07A5B093DB",
-          |      "ad_tracking_enabled": true,
-          |      "manufacturer": "Apple",
-          |      "model": "iPhone7,2",
-          |      "name": "maguro",
-          |      "type": "ios",
-          |      "token": "ff15bc0c20c4aa6cd50854ff165fd265c838e5405bfeb9571066395b8c9da449"
-          |    },
-          |    "ip": "8.8.8.8",
-          |    "library": {
-          |      "name": "analytics-ios",
-          |      "version": "1.8.0"
-          |    },
-          |    "network": {
-          |      "bluetooth": false,
-          |      "carrier": "T-Mobile NL",
-          |      "cellular": true,
-          |      "wifi": false
-          |    },
-          |    "location": {
-          |      "city": "San Francisco",
-          |      "country": "United States",
-          |      "latitude": 40.2964197,
-          |      "longitude": -76.9411617,
-          |      "speed": 0
-          |    },
-          |    "os": {
-          |      "name": "iPhone OS",
-          |      "version": "8.1.3"
-          |    },
-          |    "referrer": {
-          |      "id": "ABCD582CDEFFFF01919",
-          |      "type": "dataxu"
-          |    },
-          |    "screen": {
-          |      "width": 320,
-          |      "height": 568,
-          |      "density": 2
-          |    },
-          |    "timezone": "Europe/Amsterdam",
-          |    "user_agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_5)"
-          |  }
-        """.stripMargin
-
-      val group =
-        s"""
-           |{ $commonFields
-            |  "type": "group",
-            |  "group_id": "groupId",
-            |  "user_id": "userIdValue",
-            |  "timestamp" : "2012-12-02T00:30:08.276Z",
-            |  "traits": {
-            |    "name": "groupName",
-            |    "employees": 329,
-            |  },
-            |  $context
-            |}
-        """.stripMargin
-
-      val expected =
-        s"""
-          |{
-          |  "event": "group",
-          |  "entityType": "user",
-          |  "entityId": "userIdValue",
-          |  "properties": {
-          |    $context,
-          |    "group_id": "groupId",
-          |    "traits": {
-          |      "name": "groupName",
-          |      "employees": 329
-          |    },
-          |  },
-          |  "eventTime" : "2012-12-02T00:30:08.276Z"
-          |}
-        """.stripMargin
-
-      check(SegmentIOConnector, group, expected)
-    }
-
-    "convert group to event JSON" in {
-      val group =
-        s"""
-          |{ $commonFields
-          |  "type": "group",
-          |  "group_id": "groupId",
-          |  "user_id": "userIdValue",
-          |  "timestamp" : "2012-12-02T00:30:08.276Z",
-          |  "traits": {
-          |    "name": "groupName",
-          |    "employees": 329,
-          |  }
-          |}
-        """.stripMargin
-
-      val expected =
-        """
-          |{
-          |  "event": "group",
-          |  "entityType": "user",
-          |  "entityId": "userIdValue",
-          |  "properties": {
-          |    "group_id": "groupId",
-          |    "traits": {
-          |      "name": "groupName",
-          |      "employees": 329
-          |    }
-          |  },
-          |  "eventTime" : "2012-12-02T00:30:08.276Z"
-          |}
-        """.stripMargin
-
-      check(SegmentIOConnector, group, expected)
-    }
-
-    "convert screen to event JSON" in {
-      val screen =
-        s"""
-          |{ $commonFields
-          |  "type": "screen",
-          |  "name": "screenName",
-          |  "user_id": "userIdValue",
-          |  "timestamp" : "2012-12-02T00:30:08.276Z",
-          |  "properties": {
-          |    "variation": "screenVariation"
-          |  }
-          |}
-        """.stripMargin
-
-      val expected =
-        """
-          |{
-          |  "event": "screen",
-          |  "entityType": "user",
-          |  "entityId": "userIdValue",
-          |  "properties": {
-          |    "properties": {
-          |      "variation": "screenVariation"
-          |    },
-          |    "name": "screenName"
-          |  },
-          |  "eventTime" : "2012-12-02T00:30:08.276Z"
-          |}
-        """.stripMargin
-
-      check(SegmentIOConnector, screen, expected)
-    }
-
-    "convert page to event JSON" in {
-      val page =
-       s"""
-          |{ $commonFields
-          |  "type": "page",
-          |  "name": "pageName",
-          |  "user_id": "userIdValue",
-          |  "timestamp" : "2012-12-02T00:30:08.276Z",
-          |  "properties": {
-          |    "title": "pageTitle",
-          |    "url": "pageUrl"
-          |  }
-          |}
-        """.stripMargin
-
-      val expected =
-        """
-          |{
-          |  "event": "page",
-          |  "entityType": "user",
-          |  "entityId": "userIdValue",
-          |  "properties": {
-          |    "properties": {
-          |      "title": "pageTitle",
-          |      "url": "pageUrl"
-          |    },
-          |    "name": "pageName"
-          |  },
-          |  "eventTime" : "2012-12-02T00:30:08.276Z"
-          |}
-        """.stripMargin
-
-      check(SegmentIOConnector, page, expected)
-    }
-
-    "convert alias to event JSON" in {
-      val alias =
-        s"""
-          |{ $commonFields
-          |  "type": "alias",
-          |  "previous_id": "previousIdValue",
-          |  "user_id": "userIdValue",
-          |  "timestamp" : "2012-12-02T00:30:08.276Z"
-          |}
-        """.stripMargin
-
-      val expected =
-        """
-          |{
-          |  "event": "alias",
-          |  "entityType": "user",
-          |  "entityId": "userIdValue",
-          |  "properties": {
-          |    "previous_id" : "previousIdValue"
-          |  },
-          |  "eventTime" : "2012-12-02T00:30:08.276Z"
-          |}
-        """.stripMargin
-
-      check(SegmentIOConnector, alias, expected)
-    }
-
-    "convert track to event JSON" in {
-      val track =
-       s"""
-          |{ $commonFields
-          |  "user_id": "some_user_id",
-          |  "type": "track",
-          |  "event": "Registered",
-          |  "timestamp" : "2012-12-02T00:30:08.276Z",
-          |  "properties": {
-          |    "plan": "Pro Annual",
-          |    "accountType" : "Facebook"
-          |  }
-          |}
-        """.stripMargin
-
-      val expected =
-        """
-          |{
-          |  "event": "track",
-          |  "entityType": "user",
-          |  "entityId": "some_user_id",
-          |  "properties": {
-          |    "event": "Registered",
-          |    "properties": {
-          |      "plan": "Pro Annual",
-          |      "accountType": "Facebook"
-          |    }
-          |  },
-          |  "eventTime" : "2012-12-02T00:30:08.276Z"
-          |}
-        """.stripMargin
-
-      check(SegmentIOConnector, track, expected)
-    }
-
-    "convert identify to event JSON" in {
-      val identify = s"""
-        { $commonFields
-          "type"      : "identify",
-          "user_id"    : "019mr8mf4r",
-          "traits"    : {
-              "email"            : "achilles@segment.com",
-              "name"             : "Achilles",
-              "subscription_plan" : "Premium",
-              "friendCount"      : 29
-          },
-          "timestamp" : "2012-12-02T00:30:08.276Z"
-        }
-      """
-
-      val expected = """
-        {
-          "event" : "identify",
-          "entityType": "user",
-          "entityId" : "019mr8mf4r",
-          "properties" : {
-            "traits" : {
-              "email"            : "achilles@segment.com",
-              "name"             : "Achilles",
-              "subscription_plan" : "Premium",
-              "friendCount"      : 29
-            }
-          },
-          "eventTime" : "2012-12-02T00:30:08.276Z"
-        }
-      """
-
-      check(SegmentIOConnector, identify, expected)
-    }
-
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/data/src/test/scala/org/apache/predictionio/data/api/EventServiceSpec.scala
----------------------------------------------------------------------
diff --git a/data/src/test/scala/org/apache/predictionio/data/api/EventServiceSpec.scala b/data/src/test/scala/org/apache/predictionio/data/api/EventServiceSpec.scala
new file mode 100644
index 0000000..62fd89c
--- /dev/null
+++ b/data/src/test/scala/org/apache/predictionio/data/api/EventServiceSpec.scala
@@ -0,0 +1,68 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package org.apache.predictionio.data.api
+
+import org.apache.predictionio.data.storage.Storage
+
+import akka.testkit.TestProbe
+import akka.actor.ActorSystem
+import akka.actor.Props
+
+import spray.http.HttpEntity
+import spray.http.HttpResponse
+import spray.http.ContentTypes
+import spray.httpx.RequestBuilding.Get
+
+import org.specs2.mutable.Specification
+
+class EventServiceSpec extends Specification {
+
+  val system = ActorSystem("EventServiceSpecSystem")
+
+  val eventClient = Storage.getLEvents()
+  val accessKeysClient = Storage.getMetaDataAccessKeys()
+  val channelsClient = Storage.getMetaDataChannels()
+  
+  val eventServiceActor = system.actorOf(
+    Props(
+      new EventServiceActor(
+        eventClient,
+        accessKeysClient,
+        channelsClient,
+        EventServerConfig()
+      )
+    )
+  )
+
+  "GET / request" should {
+    "properly produce OK HttpResponses" in {
+      val probe = TestProbe()(system)
+      probe.send(eventServiceActor, Get("/"))
+      probe.expectMsg(
+        HttpResponse(
+          200,
+          HttpEntity(
+            contentType = ContentTypes.`application/json`,
+            string = """{"status":"alive"}"""
+          )
+        )
+      )
+      success
+    }
+  }
+
+  step(system.shutdown())
+}

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/data/src/test/scala/org/apache/predictionio/data/api/SegmentIOAuthSpec.scala
----------------------------------------------------------------------
diff --git a/data/src/test/scala/org/apache/predictionio/data/api/SegmentIOAuthSpec.scala b/data/src/test/scala/org/apache/predictionio/data/api/SegmentIOAuthSpec.scala
new file mode 100644
index 0000000..bae0f0b
--- /dev/null
+++ b/data/src/test/scala/org/apache/predictionio/data/api/SegmentIOAuthSpec.scala
@@ -0,0 +1,175 @@
+package org.apache.predictionio.data.api
+
+import akka.actor.{ActorSystem, Props}
+import akka.testkit.TestProbe
+import org.apache.predictionio.data.storage._
+import org.joda.time.DateTime
+import org.specs2.mutable.Specification
+import spray.http.HttpHeaders.RawHeader
+import spray.http.{ContentTypes, HttpEntity, HttpResponse}
+import spray.httpx.RequestBuilding._
+import sun.misc.BASE64Encoder
+
+import scala.concurrent.{Future, ExecutionContext}
+
+class SegmentIOAuthSpec extends Specification {
+
+  val system = ActorSystem("EventServiceSpecSystem")
+  sequential
+  isolated
+  val eventClient = new LEvents {
+    override def init(appId: Int, channelId: Option[Int]): Boolean = true
+
+    override def futureInsert(event: Event, appId: Int, channelId: Option[Int])
+        (implicit ec: ExecutionContext): Future[String] =
+      Future successful "event_id"
+
+    override def futureFind(
+      appId: Int, channelId: Option[Int], startTime: Option[DateTime],
+      untilTime: Option[DateTime], entityType: Option[String],
+      entityId: Option[String], eventNames: Option[Seq[String]],
+      targetEntityType: Option[Option[String]],
+      targetEntityId: Option[Option[String]], limit: Option[Int],
+      reversed: Option[Boolean])
+        (implicit ec: ExecutionContext): Future[Iterator[Event]] =
+      Future successful List.empty[Event].iterator
+
+    override def futureGet(eventId: String, appId: Int, channelId: Option[Int])
+        (implicit ec: ExecutionContext): Future[Option[Event]] =
+      Future successful None
+
+    override def remove(appId: Int, channelId: Option[Int]): Boolean = true
+
+    override def futureDelete(eventId: String, appId: Int, channelId: Option[Int])
+        (implicit ec: ExecutionContext): Future[Boolean] =
+      Future successful true
+
+    override def close(): Unit = {}
+  }
+  val appId = 0
+  val accessKeysClient = new AccessKeys {
+    override def insert(k: AccessKey): Option[String] = null
+    override def getByAppid(appid: Int): Seq[AccessKey] = null
+    override def update(k: AccessKey): Unit = {}
+    override def delete(k: String): Unit = {}
+    override def getAll(): Seq[AccessKey] = null
+
+    override def get(k: String): Option[AccessKey] =
+      k match {
+        case "abc" \u21d2 Some(AccessKey(k, appId, Seq.empty))
+        case _ \u21d2 None
+      }
+  }
+
+  val channelsClient = Storage.getMetaDataChannels()
+  val eventServiceActor = system.actorOf(
+    Props(
+      new EventServiceActor(
+        eventClient,
+        accessKeysClient,
+        channelsClient,
+        EventServerConfig()
+      )
+    )
+  )
+
+  val base64Encoder = new BASE64Encoder
+
+  "Event Service" should {
+
+    "reject with CredentialsRejected with invalid credentials" in {
+      val accessKey = "abc123:"
+      val probe = TestProbe()(system)
+      probe.send(
+        eventServiceActor,
+        Post("/webhooks/segmentio.json")
+          .withHeaders(
+            List(
+              RawHeader("Authorization", s"Basic $accessKey")
+            )
+          )
+      )
+      probe.expectMsg(
+        HttpResponse(
+          401,
+          HttpEntity(
+            contentType = ContentTypes.`application/json`,
+            string = """{"message":"Invalid accessKey."}"""
+          )
+        )
+      )
+      success
+    }
+
+    "reject with CredentialsMissed without credentials" in {
+      val probe = TestProbe()(system)
+      probe.send(
+        eventServiceActor,
+        Post("/webhooks/segmentio.json")
+      )
+      probe.expectMsg(
+        HttpResponse(
+          401,
+          HttpEntity(
+            contentType = ContentTypes.`application/json`,
+            string = """{"message":"Missing accessKey."}"""
+          )
+        )
+      )
+      success
+    }
+
+    "process SegmentIO identity request properly" in {
+      val jsonReq =
+        """
+          |{
+          |  "anonymous_id": "507f191e810c19729de860ea",
+          |  "channel": "browser",
+          |  "context": {
+          |    "ip": "8.8.8.8",
+          |    "userAgent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_5)"
+          |  },
+          |  "message_id": "022bb90c-bbac-11e4-8dfc-aa07a5b093db",
+          |  "timestamp": "2015-02-23T22:28:55.387Z",
+          |  "sent_at": "2015-02-23T22:28:55.111Z",
+          |  "traits": {
+          |    "name": "Peter Gibbons",
+          |    "email": "peter@initech.com",
+          |    "plan": "premium",
+          |    "logins": 5
+          |  },
+          |  "type": "identify",
+          |  "user_id": "97980cfea0067",
+          |  "version": "2"
+          |}
+        """.stripMargin
+
+      val accessKey = "abc:"
+      val accessKeyEncoded = base64Encoder.encodeBuffer(accessKey.getBytes)
+      val probe = TestProbe()(system)
+      probe.send(
+        eventServiceActor,
+        Post(
+          "/webhooks/segmentio.json",
+          HttpEntity(ContentTypes.`application/json`, jsonReq.getBytes)
+        ).withHeaders(
+            List(
+              RawHeader("Authorization", s"Basic $accessKeyEncoded")
+            )
+          )
+      )
+      probe.expectMsg(
+        HttpResponse(
+          201,
+          HttpEntity(
+            contentType = ContentTypes.`application/json`,
+            string = """{"eventId":"event_id"}"""
+          )
+        )
+      )
+      success
+    }
+  }
+
+  step(system.shutdown())
+}
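
Note that the spec builds its Authorization header with sun.misc.BASE64Encoder, a JDK-internal class. A minimal sketch of producing the same "Basic ..." value with the public java.util.Base64 API (Java 8+), which also avoids the trailing newline that encodeBuffer() appends:

    import java.util.Base64

    // Same key:empty-secret shape as the "abc:" credential in the spec above.
    val accessKey = "abc:"
    val encoded = Base64.getEncoder.encodeToString(accessKey.getBytes("UTF-8"))
    val authorizationValue = s"Basic $encoded"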

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/data/src/test/scala/org/apache/predictionio/data/storage/BiMapSpec.scala
----------------------------------------------------------------------
diff --git a/data/src/test/scala/org/apache/predictionio/data/storage/BiMapSpec.scala b/data/src/test/scala/org/apache/predictionio/data/storage/BiMapSpec.scala
new file mode 100644
index 0000000..c98c882
--- /dev/null
+++ b/data/src/test/scala/org/apache/predictionio/data/storage/BiMapSpec.scala
@@ -0,0 +1,196 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package org.apache.predictionio.data.storage
+
+import org.specs2.mutable._
+
+import org.apache.spark.SparkContext
+import org.apache.spark.SparkContext._
+import org.apache.spark.SparkConf
+import org.apache.spark.rdd.RDD
+
+class BiMapSpec extends Specification {
+
+  System.clearProperty("spark.driver.port")
+  System.clearProperty("spark.hostPort")
+  val sc = new SparkContext("local[4]", "BiMapSpec test")
+
+  "BiMap created with map" should {
+
+    val keys = Seq(1, 4, 6)
+    val orgValues = Seq(2, 5, 7)
+    val org = keys.zip(orgValues).toMap
+    val bi = BiMap(org)
+
+    "return correct values for each key of original map" in {
+      val biValues = keys.map(k => bi(k))
+
+      biValues must beEqualTo(orgValues)
+    }
+
+    "get return Option[V]" in {
+      val checkKeys = keys ++ Seq(12345)
+      val biValues = checkKeys.map(k => bi.get(k))
+      val expected = orgValues.map(Some(_)) ++ Seq(None)
+
+      biValues must beEqualTo(expected)
+    }
+
+    "getOrElse return value for each key of original map" in {
+      val biValues = keys.map(k => bi.getOrElse(k, -1))
+
+      biValues must beEqualTo(orgValues)
+    }
+
+    "getOrElse return default values for invalid key" in {
+      val keys = Seq(999, -1, -2)
+      val defaults = Seq(1234, 5678, 987)
+      val biValues = keys.zip(defaults).map{ case (k,d) => bi.getOrElse(k, d) }
+
+      biValues must beEqualTo(defaults)
+    }
+
+    "contains() returns true/false correctly" in {
+      val checkKeys = keys ++ Seq(12345)
+      val biValues = checkKeys.map(k => bi.contains(k))
+      val expected = orgValues.map(_ => true) ++ Seq(false)
+
+      biValues must beEqualTo(expected)
+    }
+
+    "same size as original map" in {
+      (bi.size) must beEqualTo(org.size)
+    }
+
+    "take(2) returns BiMap of size 2" in {
+      bi.take(2).size must beEqualTo(2)
+    }
+
+    "toMap contain same element as original map" in {
+      (bi.toMap) must beEqualTo(org)
+    }
+
+    "toSeq contain same element as original map" in {
+      (bi.toSeq) must containTheSameElementsAs(org.toSeq)
+    }
+
+    "inverse and return correct keys for each values of original map" in {
+      val biKeys = orgValues.map(v => bi.inverse(v))
+      biKeys must beEqualTo(keys)
+    }
+
+    "inverse with same size" in {
+      bi.inverse.size must beEqualTo(org.size)
+    }
+
+    "inverse's inverse reference back to the same original object" in {
+      // NOTE: reference equality
+      bi.inverse.inverse == bi
+    }
+  }
+
+  "BiMap created with duplicated values in map" should {
+    val dup = Map(1 -> 2, 4 -> 7, 6 -> 7)
+    "return IllegalArgumentException" in {
+      BiMap(dup) must throwA[IllegalArgumentException]
+    }
+  }
+
+  "BiMap.stringLong and stringInt" should {
+
+    "create BiMap from set of string" in {
+      val keys = Set("a", "b", "foo", "bar")
+      val values: Seq[Long] = Seq(0, 1, 2, 3)
+
+      val bi = BiMap.stringLong(keys)
+      val biValues = keys.map(k => bi(k))
+
+      val biInt = BiMap.stringInt(keys)
+      val valuesInt: Seq[Int] = values.map(_.toInt)
+      val biIntValues = keys.map(k => biInt(k))
+
+      biValues must containTheSameElementsAs(values) and
+        (biIntValues must containTheSameElementsAs(valuesInt))
+    }
+
+    "create BiMap from Array of unique string" in {
+      val keys = Array("a", "b", "foo", "bar")
+      val values: Seq[Long] = Seq(0, 1, 2, 3)
+
+      val bi = BiMap.stringLong(keys)
+      val biValues = keys.toSeq.map(k => bi(k))
+
+      val biInt = BiMap.stringInt(keys)
+      val valuesInt: Seq[Int] = values.map(_.toInt)
+      val biIntValues = keys.toSeq.map(k => biInt(k))
+
+      biValues must containTheSameElementsAs(values) and
+        (biIntValues must containTheSameElementsAs(valuesInt))
+    }
+
+    "not guarantee sequential index for Array with duplicated string" in {
+      val keys = Array("a", "b", "foo", "bar", "a", "b", "x")
+      val dupValues: Seq[Long] = Seq(0, 1, 2, 3, 4, 5, 6)
+      val values = keys.zip(dupValues).toMap.values.toSeq
+
+      val bi = BiMap.stringLong(keys)
+      val biValues = keys.toSet[String].map(k => bi(k))
+
+      val biInt = BiMap.stringInt(keys)
+      val valuesInt: Seq[Int] = values.map(_.toInt)
+      val biIntValues = keys.toSet[String].map(k => biInt(k))
+
+      biValues must containTheSameElementsAs(values) and
+        (biIntValues must containTheSameElementsAs(valuesInt))
+    }
+
+    "create BiMap from RDD[String]" in {
+
+      val keys = Seq("a", "b", "foo", "bar")
+      val values: Seq[Long] = Seq(0, 1, 2, 3)
+      val rdd = sc.parallelize(keys)
+
+      val bi = BiMap.stringLong(rdd)
+      val biValues = keys.map(k => bi(k))
+
+      val biInt = BiMap.stringInt(rdd)
+      val valuesInt: Seq[Int] = values.map(_.toInt)
+      val biIntValues = keys.map(k => biInt(k))
+
+      biValues must containTheSameElementsAs(values) and
+        (biIntValues must containTheSameElementsAs(valuesInt))
+    }
+
+    "create BiMap from RDD[String] with duplicated string" in {
+
+      val keys = Seq("a", "b", "foo", "bar", "a", "b", "x")
+      val values: Seq[Long] = Seq(0, 1, 2, 3, 4)
+      val rdd = sc.parallelize(keys)
+
+      val bi = BiMap.stringLong(rdd)
+      val biValues = keys.distinct.map(k => bi(k))
+
+      val biInt = BiMap.stringInt(rdd)
+      val valuesInt: Seq[Int] = values.map(_.toInt)
+      val biIntValues = keys.distinct.map(k => biInt(k))
+
+      biValues must containTheSameElementsAs(values) and
+        (biIntValues must containTheSameElementsAs(valuesInt))
+    }
+  }
+
+  step(sc.stop())
+}
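
A minimal usage sketch of the BiMap surface exercised above (apply, getOrElse, contains, inverse, and the string-indexing factories); the commented results follow directly from the spec's expectations:

    import org.apache.predictionio.data.storage.BiMap

    val bi = BiMap(Map("a" -> 1, "b" -> 2))
    bi("a")                  // 1
    bi.getOrElse("c", -1)    // -1 (key absent, default returned)
    bi.contains("b")         // true
    bi.inverse(2)            // "b" (reverse lookup)

    // Build a string-to-index BiMap; each distinct key gets a unique index.
    val idx = BiMap.stringInt(Set("foo", "bar"))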

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/data/src/test/scala/org/apache/predictionio/data/storage/DataMapSpec.scala
----------------------------------------------------------------------
diff --git a/data/src/test/scala/org/apache/predictionio/data/storage/DataMapSpec.scala b/data/src/test/scala/org/apache/predictionio/data/storage/DataMapSpec.scala
new file mode 100644
index 0000000..46ae8dd
--- /dev/null
+++ b/data/src/test/scala/org/apache/predictionio/data/storage/DataMapSpec.scala
@@ -0,0 +1,243 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package org.apache.predictionio.data.storage
+
+import org.specs2.mutable._
+
+class DataMapSpec extends Specification {
+
+  "DataMap" should {
+
+    val properties = DataMap("""
+      {
+        "prop1" : 1,
+        "prop2" : "value2",
+        "prop3" : [1, 2, 3],
+        "prop4" : true,
+        "prop5" : ["a", "b", "c", "c"],
+        "prop6" : 4.56
+      }
+      """)
+
+    "get Int data" in {
+      properties.get[Int]("prop1") must beEqualTo(1)
+      properties.getOpt[Int]("prop1") must beEqualTo(Some(1))
+    }
+
+    "get String data" in {
+      properties.get[String]("prop2") must beEqualTo("value2")
+      properties.getOpt[String]("prop2") must beEqualTo(Some("value2"))
+    }
+
+    "get List of Int data" in {
+      properties.get[List[Int]]("prop3") must beEqualTo(List(1,2,3))
+      properties.getOpt[List[Int]]("prop3") must beEqualTo(Some(List(1,2,3)))
+    }
+
+    "get Boolean data" in {
+      properties.get[Boolean]("prop4") must beEqualTo(true)
+      properties.getOpt[Boolean]("prop4") must beEqualTo(Some(true))
+    }
+
+    "get List of String data" in {
+      properties.get[List[String]]("prop5") must beEqualTo(List("a", "b", "c", "c"))
+      properties.getOpt[List[String]]("prop5") must beEqualTo(Some(List("a", "b", "c", "c")))
+    }
+
+    "get Set of String data" in {
+      properties.get[Set[String]]("prop5") must beEqualTo(Set("a", "b", "c"))
+      properties.getOpt[Set[String]]("prop5") must beEqualTo(Some(Set("a", "b", "c")))
+    }
+
+    "get Double data" in {
+      properties.get[Double]("prop6") must beEqualTo(4.56)
+      properties.getOpt[Double]("prop6") must beEqualTo(Some(4.56))
+    }
+
+    "get empty optional Int data" in {
+      properties.getOpt[Int]("prop9999") must beEqualTo(None)
+    }
+
+  }
+
+  "DataMap with multi-level data" should {
+    val properties = DataMap("""
+      {
+        "context": {
+          "ip": "1.23.4.56",
+          "prop1": 2.345
+          "prop2": "value1",
+          "prop4": [1, 2, 3]
+        },
+        "anotherPropertyA": 4.567,
+        "anotherPropertyB": false
+      }
+      """)
+
+    "get case class data" in {
+      val expected = DataMapSpec.Context(
+        ip = "1.23.4.56",
+        prop1 = Some(2.345),
+        prop2 = Some("value1"),
+        prop3 = None,
+        prop4 = List(1,2,3)
+      )
+
+      properties.get[DataMapSpec.Context]("context") must beEqualTo(expected)
+    }
+
+    "get empty optional case class data" in {
+      properties.getOpt[DataMapSpec.Context]("context999") must beEqualTo(None)
+    }
+
+    "get double data" in {
+      properties.get[Double]("anotherPropertyA") must beEqualTo(4.567)
+    }
+
+    "get boolean data" in {
+      properties.get[Boolean]("anotherPropertyB") must beEqualTo(false)
+    }
+  }
+
+  "DataMap extract" should {
+
+    "extract to case class object" in {
+      val properties = DataMap("""
+        {
+          "prop1" : 1,
+          "prop2" : "value2",
+          "prop3" : [1, 2, 3],
+          "prop4" : true,
+          "prop5" : ["a", "b", "c", "c"],
+          "prop6" : 4.56
+        }
+        """)
+
+      val result = properties.extract[DataMapSpec.BasicProperty]
+      val expected = DataMapSpec.BasicProperty(
+        prop1 = 1,
+        prop2 = "value2",
+        prop3 = List(1,2,3),
+        prop4 = true,
+        prop5 = List("a", "b", "c", "c"),
+        prop6 = 4.56
+      )
+
+      result must beEqualTo(expected)
+    }
+
+    "extract with optional fields" in {
+      val propertiesEmpty = DataMap("""{}""")
+      val propertiesSome = DataMap("""
+        {
+          "prop1" : 1,
+          "prop5" : ["a", "b", "c", "c"],
+          "prop6" : 4.56
+        }
+        """)
+
+      val resultEmpty = propertiesEmpty.extract[DataMapSpec.OptionProperty]
+      val expectedEmpty = DataMapSpec.OptionProperty(
+        prop1 = None,
+        prop2 = None,
+        prop3 = None,
+        prop4 = None,
+        prop5 = None,
+        prop6 = None
+      )
+
+      val resultSome = propertiesSome.extract[DataMapSpec.OptionProperty]
+      val expectedSome = DataMapSpec.OptionProperty(
+        prop1 = Some(1),
+        prop2 = None,
+        prop3 = None,
+        prop4 = None,
+        prop5 = Some(List("a", "b", "c", "c")),
+        prop6 = Some(4.56)
+      )
+
+      resultEmpty must beEqualTo(expectedEmpty)
+      resultSome must beEqualTo(expectedSome)
+    }
+
+    "extract to multi-level object" in {
+      val properties = DataMap("""
+        {
+          "context": {
+            "ip": "1.23.4.56",
+            "prop1": 2.345
+            "prop2": "value1",
+            "prop4": [1, 2, 3]
+          },
+          "anotherPropertyA": 4.567,
+          "anotherPropertyB": false
+        }
+        """)
+
+      val result = properties.extract[DataMapSpec.MultiLevelProperty]
+      val expected = DataMapSpec.MultiLevelProperty(
+        context = DataMapSpec.Context(
+          ip = "1.23.4.56",
+          prop1 = Some(2.345),
+          prop2 = Some("value1"),
+          prop3 = None,
+          prop4 = List(1,2,3)
+        ),
+        anotherPropertyA = 4.567,
+        anotherPropertyB = false
+      )
+
+      result must beEqualTo(expected)
+    }
+
+  }
+}
+
+object DataMapSpec {
+
+  // define these case classes inside an object to avoid name conflicts with other tests
+  case class Context(
+    ip: String,
+    prop1: Option[Double],
+    prop2: Option[String],
+    prop3: Option[Int],
+    prop4: List[Int]
+  )
+
+  case class BasicProperty(
+    prop1: Int,
+    prop2: String,
+    prop3: List[Int],
+    prop4: Boolean,
+    prop5: List[String],
+    prop6: Double
+  )
+
+  case class OptionProperty(
+    prop1: Option[Int],
+    prop2: Option[String],
+    prop3: Option[List[Int]],
+    prop4: Option[Boolean],
+    prop5: Option[List[String]],
+    prop6: Option[Double]
+  )
+
+  case class MultiLevelProperty(
+    context: Context,
+    anotherPropertyA: Double,
+    anotherPropertyB: Boolean
+  )
+}
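
A minimal sketch of the DataMap calls exercised above: parse a JSON string, read single fields with get/getOpt, and pull out a whole case class with extract (Profile is a hypothetical example type, not part of the codebase):

    import org.apache.predictionio.data.storage.DataMap

    case class Profile(name: String, logins: Int, plan: Option[String])

    val dm = DataMap("""{"name": "Peter", "logins": 5}""")
    dm.get[String]("name")    // "Peter"
    dm.getOpt[Int]("logins")  // Some(5)
    dm.getOpt[String]("plan") // None (field absent)
    dm.extract[Profile]       // Profile("Peter", 5, None)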

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/data/src/test/scala/org/apache/predictionio/data/storage/LEventAggregatorSpec.scala
----------------------------------------------------------------------
diff --git a/data/src/test/scala/org/apache/predictionio/data/storage/LEventAggregatorSpec.scala b/data/src/test/scala/org/apache/predictionio/data/storage/LEventAggregatorSpec.scala
new file mode 100644
index 0000000..8c02186
--- /dev/null
+++ b/data/src/test/scala/org/apache/predictionio/data/storage/LEventAggregatorSpec.scala
@@ -0,0 +1,103 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package org.apache.predictionio.data.storage
+
+import org.specs2.mutable._
+
+import org.json4s.JObject
+import org.json4s.native.JsonMethods.parse
+
+import org.joda.time.DateTime
+
+class LEventAggregatorSpec extends Specification with TestEvents {
+
+  "LEventAggregator.aggregateProperties()" should {
+
+    "aggregate two entities' properties as DataMap correctly" in {
+      val events = Vector(u1e5, u2e2, u1e3, u1e1, u2e3, u2e1, u1e4, u1e2)
+      val result: Map[String, DataMap] =
+        LEventAggregator.aggregateProperties(events.toIterator)
+
+      val expected = Map(
+        "u1" -> DataMap(u1),
+        "u2" -> DataMap(u2)
+      )
+
+      result must beEqualTo(expected)
+    }
+
+    "aggregate two entities' properties as PropertyMap correctly" in {
+      val events = Vector(u1e5, u2e2, u1e3, u1e1, u2e3, u2e1, u1e4, u1e2)
+      val result: Map[String, PropertyMap] =
+        LEventAggregator.aggregateProperties(events.toIterator)
+
+      val expected = Map(
+        "u1" -> PropertyMap(u1, u1BaseTime, u1LastTime),
+        "u2" -> PropertyMap(u2, u2BaseTime, u2LastTime)
+      )
+
+      result must beEqualTo(expected)
+    }
+
+
+    "aggregate deleted entity correctly" in {
+      val events = Vector(u1e5, u2e2, u1e3, u1ed, u1e1, u2e3, u2e1, u1e4, u1e2)
+
+      val result = LEventAggregator.aggregateProperties(events.toIterator)
+      val expected = Map(
+        "u2" -> PropertyMap(u2, u2BaseTime, u2LastTime)
+      )
+
+      result must beEqualTo(expected)
+    }
+  }
+
+
+  "LEventAggregator.aggregatePropertiesSingle()" should {
+
+    "aggregate single entity properties as DataMap correctly" in {
+        val events = Vector(u1e5, u1e3, u1e1, u1e4, u1e2)
+        val eventsIt = events.toIterator
+
+        val result: Option[DataMap] = LEventAggregator
+          .aggregatePropertiesSingle(eventsIt)
+        val expected = DataMap(u1)
+
+        result must beEqualTo(Some(expected))
+    }
+
+    "aggregate single entity properties as PropertyMap correctly" in {
+        val events = Vector(u1e5, u1e3, u1e1, u1e4, u1e2)
+        val eventsIt = events.toIterator
+
+        val result: Option[PropertyMap] = LEventAggregator
+          .aggregatePropertiesSingle(eventsIt)
+        val expected = PropertyMap(u1, u1BaseTime, u1LastTime)
+
+        result must beEqualTo(Some(expected))
+    }
+
+    "aggregate deleted entity correctly" in {
+      // put the delete event in the middle
+      val events = Vector(u1e4, u1e2, u1ed, u1e3, u1e1, u1e5)
+      val eventsIt = events.toIterator
+
+      val result = LEventAggregator.aggregatePropertiesSingle(eventsIt)
+
+      result must beEqualTo(None)
+    }
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/data/src/test/scala/org/apache/predictionio/data/storage/LEventsSpec.scala
----------------------------------------------------------------------
diff --git a/data/src/test/scala/org/apache/predictionio/data/storage/LEventsSpec.scala b/data/src/test/scala/org/apache/predictionio/data/storage/LEventsSpec.scala
new file mode 100644
index 0000000..0639613
--- /dev/null
+++ b/data/src/test/scala/org/apache/predictionio/data/storage/LEventsSpec.scala
@@ -0,0 +1,245 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package org.apache.predictionio.data.storage
+
+import org.specs2._
+import org.specs2.specification.Step
+
+class LEventsSpec extends Specification with TestEvents {
+  def is = s2"""
+
+  PredictionIO Storage LEvents Specification
+
+    Events can be implemented by:
+    - HBLEvents ${hbEvents}
+    - JDBCLEvents ${jdbcLEvents}
+
+  """
+
+  def hbEvents = sequential ^ s2"""
+
+    HBLEvents should
+    - behave like any LEvents implementation ${events(hbDO)}
+    - (table cleanup) ${Step(StorageTestUtils.dropHBaseNamespace(dbName))}
+
+  """
+
+  def jdbcLEvents = sequential ^ s2"""
+
+    JDBCLEvents should
+    - behave like any LEvents implementation ${events(jdbcDO)}
+
+  """
+
+  val appId = 1
+
+  def events(eventClient: LEvents) = sequential ^ s2"""
+
+    init default ${initDefault(eventClient)}
+    insert 3 test events and get back by event ID ${insertAndGetEvents(eventClient)}
+    insert 3 test events with timezone and get back by event ID ${insertAndGetTimezone(eventClient)}
+    insert and delete by ID ${insertAndDelete(eventClient)}
+    insert test user events ${insertTestUserEvents(eventClient)}
+    find user events ${findUserEvents(eventClient)}
+    aggregate user properties ${aggregateUserProperties(eventClient)}
+    aggregate one user properties ${aggregateOneUserProperties(eventClient)}
+    aggregate non-existent user properties ${aggregateNonExistentUserProperties(eventClient)}
+    init channel ${initChannel(eventClient)}
+    insert 2 events to channel ${insertChannel(eventClient)}
+    insert 1 event to channel and delete by ID  ${insertAndDeleteChannel(eventClient)}
+    find events from channel ${findChannel(eventClient)}
+    remove default ${removeDefault(eventClient)}
+    remove channel ${removeChannel(eventClient)}
+
+  """
+
+  val dbName = "test_pio_storage_events_" + hashCode
+  def hbDO = Storage.getDataObject[LEvents](
+    StorageTestUtils.hbaseSourceName,
+    dbName
+  )
+
+  def jdbcDO = Storage.getDataObject[LEvents](StorageTestUtils.jdbcSourceName, dbName)
+
+  def initDefault(eventClient: LEvents) = {
+    eventClient.init(appId)
+  }
+
+  def insertAndGetEvents(eventClient: LEvents) = {
+
+    // events from TestEvents trait
+    val listOfEvents = List(r1,r2,r3)
+
+    val insertResp = listOfEvents.map { eventClient.insert(_, appId) }
+
+    val insertedEventId: List[String] = insertResp
+
+    val insertedEvent: List[Option[Event]] = listOfEvents.zip(insertedEventId)
+      .map { case (e, id) => Some(e.copy(eventId = Some(id))) }
+
+    val getResp = insertedEventId.map { id => eventClient.get(id, appId) }
+
+    val getEvents = getResp
+
+    insertedEvent must containTheSameElementsAs(getEvents)
+  }
+
+  def insertAndGetTimezone(eventClient: LEvents) = {
+    val listOfEvents = List(tz1, tz2, tz3)
+
+    val insertResp = listOfEvents.map { eventClient.insert(_, appId) }
+
+    val insertedEventId: List[String] = insertResp
+
+    val insertedEvent: List[Option[Event]] = listOfEvents.zip(insertedEventId)
+      .map { case (e, id) => Some(e.copy(eventId = Some(id))) }
+
+    val getResp = insertedEventId.map { id => eventClient.get(id, appId) }
+
+    val getEvents = getResp
+
+    insertedEvent must containTheSameElementsAs(getEvents)
+  }
+
+  def insertAndDelete(eventClient: LEvents) = {
+    val eventId = eventClient.insert(r2, appId)
+
+    val resultBefore = eventClient.get(eventId, appId)
+
+    val expectedBefore = r2.copy(eventId = Some(eventId))
+
+    val deleteStatus = eventClient.delete(eventId, appId)
+
+    val resultAfter = eventClient.get(eventId, appId)
+
+    (resultBefore must beEqualTo(Some(expectedBefore))) and
+    (deleteStatus must beEqualTo(true)) and
+    (resultAfter must beEqualTo(None))
+  }
+
+  def insertTestUserEvents(eventClient: LEvents) = {
+    // events from TestEvents trait
+    val listOfEvents = Vector(u1e5, u2e2, u1e3, u1e1, u2e3, u2e1, u1e4, u1e2)
+
+    listOfEvents.map{ eventClient.insert(_, appId) }
+
+    success
+  }
+
+  def findUserEvents(eventClient: LEvents) = {
+
+    val results: List[Event] = eventClient.find(
+      appId = appId,
+      entityType = Some("user"))
+      .toList
+      .map(e => e.copy(eventId = None)) // ignore eventID
+
+    // same events in insertTestUserEvents
+    val expected = List(u1e5, u2e2, u1e3, u1e1, u2e3, u2e1, u1e4, u1e2)
+
+    results must containTheSameElementsAs(expected)
+  }
+
+  def aggregateUserProperties(eventClient: LEvents) = {
+
+    val result: Map[String, PropertyMap] = eventClient.aggregateProperties(
+      appId = appId,
+      entityType = "user")
+
+    val expected = Map(
+      "u1" -> PropertyMap(u1, u1BaseTime, u1LastTime),
+      "u2" -> PropertyMap(u2, u2BaseTime, u2LastTime)
+    )
+
+    result must beEqualTo(expected)
+  }
+
+  def aggregateOneUserProperties(eventClient: LEvents) = {
+    val result: Option[PropertyMap] = eventClient.aggregatePropertiesOfEntity(
+      appId = appId,
+      entityType = "user",
+      entityId = "u1")
+
+    val expected = Some(PropertyMap(u1, u1BaseTime, u1LastTime))
+
+    result must beEqualTo(expected)
+  }
+
+  def aggregateNonExistentUserProperties(eventClient: LEvents) = {
+    val result: Option[PropertyMap] = eventClient.aggregatePropertiesOfEntity(
+      appId = appId,
+      entityType = "user",
+      entityId = "u999999")
+
+    result must beEqualTo(None)
+  }
+
+  val channelId = 12
+
+  def initChannel(eventClient: LEvents) = {
+    eventClient.init(appId, Some(channelId))
+  }
+
+  def insertChannel(eventClient: LEvents) = {
+
+    // events from TestEvents trait
+    val listOfEvents = List(r4,r5)
+
+    listOfEvents.map( eventClient.insert(_, appId, Some(channelId)) )
+
+    success
+  }
+
+  def insertAndDeleteChannel(eventClient: LEvents) = {
+
+    val eventId = eventClient.insert(r2, appId, Some(channelId))
+
+    val resultBefore = eventClient.get(eventId, appId, Some(channelId))
+
+    val expectedBefore = r2.copy(eventId = Some(eventId))
+
+    val deleteStatus = eventClient.delete(eventId, appId, Some(channelId))
+
+    val resultAfter = eventClient.get(eventId, appId, Some(channelId))
+
+    (resultBefore must beEqualTo(Some(expectedBefore))) and
+    (deleteStatus must beEqualTo(true)) and
+    (resultAfter must beEqualTo(None))
+  }
+
+  def findChannel(eventClient: LEvents) = {
+
+    val results: List[Event] = eventClient.find(
+      appId = appId,
+      channelId = Some(channelId)
+    )
+    .toList
+    .map(e => e.copy(eventId = None)) // ignore eventId
+
+    // same events in insertChannel
+    val expected = List(r4, r5)
+
+    results must containTheSameElementsAs(expected)
+  }
+
+  def removeDefault(eventClient: LEvents) = {
+    eventClient.remove(appId)
+  }
+
+  def removeChannel(eventClient: LEvents) = {
+    eventClient.remove(appId, Some(channelId))
+  }
+}
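
The spec obtains its DAOs through Storage.getDataObject and drives the blocking LEvents API. A minimal sketch of that lifecycle, reusing the test helper StorageTestUtils.jdbcSourceName for the source name and assuming a configured JDBC storage source; someEvent stands in for any Event value:

    import org.apache.predictionio.data.storage.{Event, LEvents, Storage}

    def roundTrip(someEvent: Event): Unit = {
      val events = Storage.getDataObject[LEvents](
        StorageTestUtils.jdbcSourceName, "my_events_db")
      val appId = 1
      events.init(appId)                        // create the backing table
      val id = events.insert(someEvent, appId)  // returns the new event ID
      val fetched = events.get(id, appId)       // Some(event with eventId set)
      events.remove(appId)                      // drop everything for the app
    }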

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/data/src/test/scala/org/apache/predictionio/data/storage/PEventAggregatorSpec.scala
----------------------------------------------------------------------
diff --git a/data/src/test/scala/org/apache/predictionio/data/storage/PEventAggregatorSpec.scala b/data/src/test/scala/org/apache/predictionio/data/storage/PEventAggregatorSpec.scala
new file mode 100644
index 0000000..21790ad
--- /dev/null
+++ b/data/src/test/scala/org/apache/predictionio/data/storage/PEventAggregatorSpec.scala
@@ -0,0 +1,72 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package org.apache.predictionio.data.storage
+
+import org.specs2.mutable._
+
+import org.apache.spark.SparkContext
+import org.apache.spark.SparkContext._
+import org.apache.spark.rdd.RDD
+
+
+class PEventAggregatorSpec extends Specification with TestEvents {
+
+  System.clearProperty("spark.driver.port")
+  System.clearProperty("spark.hostPort")
+  val sc = new SparkContext("local[4]", "PEventAggregatorSpec test")
+
+  "PEventAggregator" should {
+
+    "aggregate two entities' properties as DataMap/PropertyMap correctly" in {
+      val events = sc.parallelize(Seq(
+        u1e5, u2e2, u1e3, u1e1, u2e3, u2e1, u1e4, u1e2))
+
+      val users = PEventAggregator.aggregateProperties(events)
+
+      val userMap = users.collectAsMap.toMap
+      val expectedDM = Map(
+        "u1" -> DataMap(u1),
+        "u2" -> DataMap(u2)
+      )
+
+      val expectedPM = Map(
+        "u1" -> PropertyMap(u1, u1BaseTime, u1LastTime),
+        "u2" -> PropertyMap(u2, u2BaseTime, u2LastTime)
+      )
+
+      userMap must beEqualTo(expectedDM)
+      userMap must beEqualTo(expectedPM)
+    }
+
+    "aggregate deleted entity correctly" in {
+      // put the delete event in the middle
+      val events = sc.parallelize(Seq(
+        u1e5, u2e2, u1e3, u1ed, u1e1, u2e3, u2e1, u1e4, u1e2))
+
+      val users = PEventAggregator.aggregateProperties(events)
+
+      val userMap = users.collectAsMap.toMap
+      val expectedPM = Map(
+        "u2" -> PropertyMap(u2, u2BaseTime, u2LastTime)
+      )
+
+      userMap must beEqualTo(expectedPM)
+    }
+
+  }
+
+  step(sc.stop())
+}


[09/34] incubator-predictionio git commit: rename all except examples

Posted by do...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/data/src/main/scala/org/apache/predictionio/data/storage/jdbc/JDBCApps.scala
----------------------------------------------------------------------
diff --git a/data/src/main/scala/org/apache/predictionio/data/storage/jdbc/JDBCApps.scala b/data/src/main/scala/org/apache/predictionio/data/storage/jdbc/JDBCApps.scala
new file mode 100644
index 0000000..52c8b44
--- /dev/null
+++ b/data/src/main/scala/org/apache/predictionio/data/storage/jdbc/JDBCApps.scala
@@ -0,0 +1,86 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package org.apache.predictionio.data.storage.jdbc
+
+import grizzled.slf4j.Logging
+import org.apache.predictionio.data.storage.App
+import org.apache.predictionio.data.storage.Apps
+import org.apache.predictionio.data.storage.StorageClientConfig
+import scalikejdbc._
+
+/** JDBC implementation of [[Apps]] */
+class JDBCApps(client: String, config: StorageClientConfig, prefix: String)
+  extends Apps with Logging {
+  /** Database table name for this data access object */
+  val tableName = JDBCUtils.prefixTableName(prefix, "apps")
+  DB autoCommit { implicit session =>
+    sql"""
+    create table if not exists $tableName (
+      id serial not null primary key,
+      name text not null,
+      description text)""".execute.apply()
+  }
+
+  def insert(app: App): Option[Int] = DB localTx { implicit session =>
+    val q = if (app.id == 0) {
+      sql"""
+      insert into $tableName (name, description) values(${app.name}, ${app.description})
+      """
+    } else {
+      sql"""
+      insert into $tableName values(${app.id}, ${app.name}, ${app.description})
+      """
+    }
+    Some(q.updateAndReturnGeneratedKey().apply().toInt)
+  }
+
+  def get(id: Int): Option[App] = DB readOnly { implicit session =>
+    sql"SELECT id, name, description FROM $tableName WHERE id = ${id}".map(rs =>
+      App(
+        id = rs.int("id"),
+        name = rs.string("name"),
+        description = rs.stringOpt("description"))
+    ).single().apply()
+  }
+
+  def getByName(name: String): Option[App] = DB readOnly { implicit session =>
+    sql"SELECT id, name, description FROM $tableName WHERE name = ${name}".map(rs =>
+      App(
+        id = rs.int("id"),
+        name = rs.string("name"),
+        description = rs.stringOpt("description"))
+    ).single().apply()
+  }
+
+  def getAll(): Seq[App] = DB readOnly { implicit session =>
+    sql"SELECT id, name, description FROM $tableName".map(rs =>
+      App(
+        id = rs.int("id"),
+        name = rs.string("name"),
+        description = rs.stringOpt("description"))
+    ).list().apply()
+  }
+
+  def update(app: App): Unit = DB localTx { implicit session =>
+    sql"""
+    update $tableName set name = ${app.name}, description = ${app.description}
+    where id = ${app.id}""".update().apply()
+  }
+
+  def delete(id: Int): Unit = DB localTx { implicit session =>
+    sql"DELETE FROM $tableName WHERE id = $id".update().apply()
+  }
+}
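
Note the two insert branches above: with app.id == 0 the serial primary key is generated by the database, otherwise the explicit id is written as-is. A minimal construction sketch, assuming scalikejdbc's connection pool is initialized first and that config is the StorageClientConfig your storage setup supplies:

    import scalikejdbc.ConnectionPool
    import org.apache.predictionio.data.storage.{App, StorageClientConfig}

    def bootstrap(config: StorageClientConfig): Option[Int] = {
      // Hypothetical PostgreSQL coordinates; the DAO runs its
      // "create table if not exists" DDL on construction.
      ConnectionPool.singleton("jdbc:postgresql://localhost/pio", "pio", "pio")
      val apps = new JDBCApps("jdbc", config, "pio_")
      apps.insert(App(id = 0, name = "myapp", description = None))
    }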

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/data/src/main/scala/org/apache/predictionio/data/storage/jdbc/JDBCChannels.scala
----------------------------------------------------------------------
diff --git a/data/src/main/scala/org/apache/predictionio/data/storage/jdbc/JDBCChannels.scala b/data/src/main/scala/org/apache/predictionio/data/storage/jdbc/JDBCChannels.scala
new file mode 100644
index 0000000..f94a64a
--- /dev/null
+++ b/data/src/main/scala/org/apache/predictionio/data/storage/jdbc/JDBCChannels.scala
@@ -0,0 +1,66 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package org.apache.predictionio.data.storage.jdbc
+
+import grizzled.slf4j.Logging
+import org.apache.predictionio.data.storage.Channel
+import org.apache.predictionio.data.storage.Channels
+import org.apache.predictionio.data.storage.StorageClientConfig
+import scalikejdbc._
+
+/** JDBC implementation of [[Channels]] */
+class JDBCChannels(client: String, config: StorageClientConfig, prefix: String)
+  extends Channels with Logging {
+  /** Database table name for this data access object */
+  val tableName = JDBCUtils.prefixTableName(prefix, "channels")
+  DB autoCommit { implicit session =>
+    sql"""
+    create table if not exists $tableName (
+      id serial not null primary key,
+      name text not null,
+      appid integer not null)""".execute().apply()
+  }
+
+  def insert(channel: Channel): Option[Int] = DB localTx { implicit session =>
+    val q = if (channel.id == 0) {
+      sql"INSERT INTO $tableName (name, appid) VALUES(${channel.name}, ${channel.appid})"
+    } else {
+      sql"INSERT INTO $tableName VALUES(${channel.id}, ${channel.name}, ${channel.appid})"
+    }
+    Some(q.updateAndReturnGeneratedKey().apply().toInt)
+  }
+
+  def get(id: Int): Option[Channel] = DB localTx { implicit session =>
+    sql"SELECT id, name, appid FROM $tableName WHERE id = $id".
+      map(resultToChannel).single().apply()
+  }
+
+  def getByAppid(appid: Int): Seq[Channel] = DB localTx { implicit session =>
+    sql"SELECT id, name, appid FROM $tableName WHERE appid = $appid".
+      map(resultToChannel).list().apply()
+  }
+
+  def delete(id: Int): Unit = DB localTx { implicit session =>
+    sql"DELETE FROM $tableName WHERE id = $id".update().apply()
+  }
+
+  def resultToChannel(rs: WrappedResultSet): Channel = {
+    Channel(
+      id = rs.int("id"),
+      name = rs.string("name"),
+      appid = rs.int("appid"))
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/data/src/main/scala/org/apache/predictionio/data/storage/jdbc/JDBCEngineInstances.scala
----------------------------------------------------------------------
diff --git a/data/src/main/scala/org/apache/predictionio/data/storage/jdbc/JDBCEngineInstances.scala b/data/src/main/scala/org/apache/predictionio/data/storage/jdbc/JDBCEngineInstances.scala
new file mode 100644
index 0000000..a4bd640
--- /dev/null
+++ b/data/src/main/scala/org/apache/predictionio/data/storage/jdbc/JDBCEngineInstances.scala
@@ -0,0 +1,194 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package org.apache.predictionio.data.storage.jdbc
+
+import grizzled.slf4j.Logging
+import org.apache.predictionio.data.storage.EngineInstance
+import org.apache.predictionio.data.storage.EngineInstances
+import org.apache.predictionio.data.storage.StorageClientConfig
+import scalikejdbc._
+
+/** JDBC implementation of [[EngineInstances]] */
+class JDBCEngineInstances(client: String, config: StorageClientConfig, prefix: String)
+  extends EngineInstances with Logging {
+  /** Database table name for this data access object */
+  val tableName = JDBCUtils.prefixTableName(prefix, "engineinstances")
+  DB autoCommit { implicit session =>
+    sql"""
+    create table if not exists $tableName (
+      id varchar(100) not null primary key,
+      status text not null,
+      startTime timestamp DEFAULT CURRENT_TIMESTAMP,
+      endTime timestamp DEFAULT CURRENT_TIMESTAMP,
+      engineId text not null,
+      engineVersion text not null,
+      engineVariant text not null,
+      engineFactory text not null,
+      batch text not null,
+      env text not null,
+      sparkConf text not null,
+      datasourceParams text not null,
+      preparatorParams text not null,
+      algorithmsParams text not null,
+      servingParams text not null)""".execute().apply()
+  }
+
+  def insert(i: EngineInstance): String = DB localTx { implicit session =>
+    val id = java.util.UUID.randomUUID().toString
+    sql"""
+    INSERT INTO $tableName VALUES(
+      $id,
+      ${i.status},
+      ${i.startTime},
+      ${i.endTime},
+      ${i.engineId},
+      ${i.engineVersion},
+      ${i.engineVariant},
+      ${i.engineFactory},
+      ${i.batch},
+      ${JDBCUtils.mapToString(i.env)},
+      ${JDBCUtils.mapToString(i.sparkConf)},
+      ${i.dataSourceParams},
+      ${i.preparatorParams},
+      ${i.algorithmsParams},
+      ${i.servingParams})""".update().apply()
+    id
+  }
+
+  def get(id: String): Option[EngineInstance] = DB localTx { implicit session =>
+    sql"""
+    SELECT
+      id,
+      status,
+      startTime,
+      endTime,
+      engineId,
+      engineVersion,
+      engineVariant,
+      engineFactory,
+      batch,
+      env,
+      sparkConf,
+      datasourceParams,
+      preparatorParams,
+      algorithmsParams,
+      servingParams
+    FROM $tableName WHERE id = $id""".map(resultToEngineInstance).
+      single().apply()
+  }
+
+  def getAll(): Seq[EngineInstance] = DB localTx { implicit session =>
+    sql"""
+    SELECT
+      id,
+      status,
+      startTime,
+      endTime,
+      engineId,
+      engineVersion,
+      engineVariant,
+      engineFactory,
+      batch,
+      env,
+      sparkConf,
+      datasourceParams,
+      preparatorParams,
+      algorithmsParams,
+      servingParams
+    FROM $tableName""".map(resultToEngineInstance).list().apply()
+  }
+
+  def getLatestCompleted(
+    engineId: String,
+    engineVersion: String,
+    engineVariant: String): Option[EngineInstance] =
+    getCompleted(engineId, engineVersion, engineVariant).headOption
+
+  def getCompleted(
+    engineId: String,
+    engineVersion: String,
+    engineVariant: String): Seq[EngineInstance] = DB localTx { implicit s =>
+    sql"""
+    SELECT
+      id,
+      status,
+      startTime,
+      endTime,
+      engineId,
+      engineVersion,
+      engineVariant,
+      engineFactory,
+      batch,
+      env,
+      sparkConf,
+      datasourceParams,
+      preparatorParams,
+      algorithmsParams,
+      servingParams
+    FROM $tableName
+    WHERE
+      status = 'COMPLETED' AND
+      engineId = $engineId AND
+      engineVersion = $engineVersion AND
+      engineVariant = $engineVariant
+    ORDER BY startTime DESC""".
+      map(resultToEngineInstance).list().apply()
+  }
+
+  def update(i: EngineInstance): Unit = DB localTx { implicit session =>
+    sql"""
+    update $tableName set
+      status = ${i.status},
+      startTime = ${i.startTime},
+      endTime = ${i.endTime},
+      engineId = ${i.engineId},
+      engineVersion = ${i.engineVersion},
+      engineVariant = ${i.engineVariant},
+      engineFactory = ${i.engineFactory},
+      batch = ${i.batch},
+      env = ${JDBCUtils.mapToString(i.env)},
+      sparkConf = ${JDBCUtils.mapToString(i.sparkConf)},
+      datasourceParams = ${i.dataSourceParams},
+      preparatorParams = ${i.preparatorParams},
+      algorithmsParams = ${i.algorithmsParams},
+      servingParams = ${i.servingParams}
+    where id = ${i.id}""".update().apply()
+  }
+
+  def delete(id: String): Unit = DB localTx { implicit session =>
+    sql"DELETE FROM $tableName WHERE id = $id".update().apply()
+  }
+
+  /** Convert JDBC results to [[EngineInstance]] */
+  def resultToEngineInstance(rs: WrappedResultSet): EngineInstance = {
+    EngineInstance(
+      id = rs.string("id"),
+      status = rs.string("status"),
+      startTime = rs.jodaDateTime("startTime"),
+      endTime = rs.jodaDateTime("endTime"),
+      engineId = rs.string("engineId"),
+      engineVersion = rs.string("engineVersion"),
+      engineVariant = rs.string("engineVariant"),
+      engineFactory = rs.string("engineFactory"),
+      batch = rs.string("batch"),
+      env = JDBCUtils.stringToMap(rs.string("env")),
+      sparkConf = JDBCUtils.stringToMap(rs.string("sparkConf")),
+      dataSourceParams = rs.string("datasourceParams"),
+      preparatorParams = rs.string("preparatorParams"),
+      algorithmsParams = rs.string("algorithmsParams"),
+      servingParams = rs.string("servingParams"))
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/data/src/main/scala/org/apache/predictionio/data/storage/jdbc/JDBCEngineManifests.scala
----------------------------------------------------------------------
diff --git a/data/src/main/scala/org/apache/predictionio/data/storage/jdbc/JDBCEngineManifests.scala b/data/src/main/scala/org/apache/predictionio/data/storage/jdbc/JDBCEngineManifests.scala
new file mode 100644
index 0000000..b766689
--- /dev/null
+++ b/data/src/main/scala/org/apache/predictionio/data/storage/jdbc/JDBCEngineManifests.scala
@@ -0,0 +1,111 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package org.apache.predictionio.data.storage.jdbc
+
+import grizzled.slf4j.Logging
+import org.apache.predictionio.data.storage.EngineManifest
+import org.apache.predictionio.data.storage.EngineManifests
+import org.apache.predictionio.data.storage.StorageClientConfig
+import scalikejdbc._
+
+/** JDBC implementation of [[EngineManifests]] */
+class JDBCEngineManifests(client: String, config: StorageClientConfig, prefix: String)
+  extends EngineManifests with Logging {
+  /** Database table name for this data access object */
+  val tableName = JDBCUtils.prefixTableName(prefix, "enginemanifests")
+  DB autoCommit { implicit session =>
+    sql"""
+    create table if not exists $tableName (
+      id varchar(100) not null primary key,
+      version text not null,
+      engineName text not null,
+      description text,
+      files text not null,
+      engineFactory text not null)""".execute().apply()
+  }
+
+  def insert(m: EngineManifest): Unit = DB localTx { implicit session =>
+    sql"""
+    INSERT INTO $tableName VALUES(
+      ${m.id},
+      ${m.version},
+      ${m.name},
+      ${m.description},
+      ${m.files.mkString(",")},
+      ${m.engineFactory})""".update().apply()
+  }
+
+  def get(id: String, version: String): Option[EngineManifest] = DB localTx { implicit session =>
+    sql"""
+    SELECT
+      id,
+      version,
+      engineName,
+      description,
+      files,
+      engineFactory
+    FROM $tableName WHERE id = $id AND version = $version""".
+      map(resultToEngineManifest).single().apply()
+  }
+
+  def getAll(): Seq[EngineManifest] = DB localTx { implicit session =>
+    sql"""
+    SELECT
+      id,
+      version,
+      engineName,
+      description,
+      files,
+      engineFactory
+    FROM $tableName""".map(resultToEngineManifest).list().apply()
+  }
+
+  def update(m: EngineManifest, upsert: Boolean = false): Unit = {
+    var r = 0
+    DB localTx { implicit session =>
+      r = sql"""
+      update $tableName set
+        engineName = ${m.name},
+        description = ${m.description},
+        files = ${m.files.mkString(",")},
+        engineFactory = ${m.engineFactory}
+      where id = ${m.id} and version = ${m.version}""".update().apply()
+    }
+    if (r == 0) {
+      if (upsert) {
+        insert(m)
+      } else {
+        error("Cannot find a record to update, and upsert is not enabled.")
+      }
+    }
+  }
+
+  def delete(id: String, version: String): Unit = DB localTx { implicit session =>
+    sql"DELETE FROM $tableName WHERE id = $id AND version = $version".
+      update().apply()
+  }
+
+  /** Convert JDBC results to [[EngineManifest]] */
+  def resultToEngineManifest(rs: WrappedResultSet): EngineManifest = {
+    EngineManifest(
+      id = rs.string("id"),
+      version = rs.string("version"),
+      name = rs.string("engineName"),
+      description = rs.stringOpt("description"),
+      files = rs.string("files").split(","),
+      engineFactory = rs.string("engineFactory"))
+  }
+}
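
The update method above first attempts an UPDATE keyed on (id, version); when no row matches, it falls back to insert only if upsert is enabled, and logs an error otherwise. A minimal sketch of that contract (register, manifests, and m are hypothetical names):

    import org.apache.predictionio.data.storage.{EngineManifest, EngineManifests}

    def register(manifests: EngineManifests, m: EngineManifest): Unit = {
      // No (id, version) row yet: with upsert = true this falls through to insert.
      manifests.update(m, upsert = true)
      // The row now exists, so the same call updates it in place.
      manifests.update(m.copy(description = Some("updated")), upsert = true)
    }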

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/data/src/main/scala/org/apache/predictionio/data/storage/jdbc/JDBCEvaluationInstances.scala
----------------------------------------------------------------------
diff --git a/data/src/main/scala/org/apache/predictionio/data/storage/jdbc/JDBCEvaluationInstances.scala b/data/src/main/scala/org/apache/predictionio/data/storage/jdbc/JDBCEvaluationInstances.scala
new file mode 100644
index 0000000..1811271
--- /dev/null
+++ b/data/src/main/scala/org/apache/predictionio/data/storage/jdbc/JDBCEvaluationInstances.scala
@@ -0,0 +1,162 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package org.apache.predictionio.data.storage.jdbc
+
+import grizzled.slf4j.Logging
+import org.apache.predictionio.data.storage.EvaluationInstance
+import org.apache.predictionio.data.storage.EvaluationInstances
+import org.apache.predictionio.data.storage.StorageClientConfig
+import scalikejdbc._
+
+/** JDBC implementation of [[EvaluationInstances]] */
+class JDBCEvaluationInstances(client: String, config: StorageClientConfig, prefix: String)
+  extends EvaluationInstances with Logging {
+  /** Database table name for this data access object */
+  val tableName = JDBCUtils.prefixTableName(prefix, "evaluationinstances")
+  DB autoCommit { implicit session =>
+    sql"""
+    create table if not exists $tableName (
+      id varchar(100) not null primary key,
+      status text not null,
+      startTime timestamp DEFAULT CURRENT_TIMESTAMP,
+      endTime timestamp DEFAULT CURRENT_TIMESTAMP,
+      evaluationClass text not null,
+      engineParamsGeneratorClass text not null,
+      batch text not null,
+      env text not null,
+      sparkConf text not null,
+      evaluatorResults text not null,
+      evaluatorResultsHTML text not null,
+      evaluatorResultsJSON text)""".execute().apply()
+  }
+
+  def insert(i: EvaluationInstance): String = DB localTx { implicit session =>
+    val id = java.util.UUID.randomUUID().toString
+    sql"""
+    INSERT INTO $tableName VALUES(
+      $id,
+      ${i.status},
+      ${i.startTime},
+      ${i.endTime},
+      ${i.evaluationClass},
+      ${i.engineParamsGeneratorClass},
+      ${i.batch},
+      ${JDBCUtils.mapToString(i.env)},
+      ${JDBCUtils.mapToString(i.sparkConf)},
+      ${i.evaluatorResults},
+      ${i.evaluatorResultsHTML},
+      ${i.evaluatorResultsJSON})""".update().apply()
+    id
+  }
+
+  def get(id: String): Option[EvaluationInstance] = DB localTx { implicit session =>
+    sql"""
+    SELECT
+      id,
+      status,
+      startTime,
+      endTime,
+      evaluationClass,
+      engineParamsGeneratorClass,
+      batch,
+      env,
+      sparkConf,
+      evaluatorResults,
+      evaluatorResultsHTML,
+      evaluatorResultsJSON
+    FROM $tableName WHERE id = $id
+    """.map(resultToEvaluationInstance).single().apply()
+  }
+
+  def getAll(): Seq[EvaluationInstance] = DB localTx { implicit session =>
+    sql"""
+    SELECT
+      id,
+      status,
+      startTime,
+      endTime,
+      evaluationClass,
+      engineParamsGeneratorClass,
+      batch,
+      env,
+      sparkConf,
+      evaluatorResults,
+      evaluatorResultsHTML,
+      evaluatorResultsJSON
+    FROM $tableName
+    """.map(resultToEvaluationInstance).list().apply()
+  }
+
+  def getCompleted(): Seq[EvaluationInstance] = DB localTx { implicit s =>
+    sql"""
+    SELECT
+      id,
+      status,
+      startTime,
+      endTime,
+      evaluationClass,
+      engineParamsGeneratorClass,
+      batch,
+      env,
+      sparkConf,
+      evaluatorResults,
+      evaluatorResultsHTML,
+      evaluatorResultsJSON
+    FROM $tableName
+    WHERE
+      status = 'EVALCOMPLETED'
+    ORDER BY startTime DESC
+    """.map(resultToEvaluationInstance).list().apply()
+  }
+
+  def update(i: EvaluationInstance): Unit = DB localTx { implicit session =>
+    sql"""
+    update $tableName set
+      status = ${i.status},
+      startTime = ${i.startTime},
+      endTime = ${i.endTime},
+      evaluationClass = ${i.evaluationClass},
+      engineParamsGeneratorClass = ${i.engineParamsGeneratorClass},
+      batch = ${i.batch},
+      env = ${JDBCUtils.mapToString(i.env)},
+      sparkConf = ${JDBCUtils.mapToString(i.sparkConf)},
+      evaluatorResults = ${i.evaluatorResults},
+      evaluatorResultsHTML = ${i.evaluatorResultsHTML},
+      evaluatorResultsJSON = ${i.evaluatorResultsJSON}
+    where id = ${i.id}""".update().apply()
+  }
+
+  def delete(id: String): Unit = DB localTx { implicit session =>
+    sql"DELETE FROM $tableName WHERE id = $id".update().apply()
+  }
+
+  /** Converts a JDBC result set row to an [[EvaluationInstance]] */
+  def resultToEvaluationInstance(rs: WrappedResultSet): EvaluationInstance = {
+    EvaluationInstance(
+      id = rs.string("id"),
+      status = rs.string("status"),
+      startTime = rs.jodaDateTime("startTime"),
+      endTime = rs.jodaDateTime("endTime"),
+      evaluationClass = rs.string("evaluationClass"),
+      engineParamsGeneratorClass = rs.string("engineParamsGeneratorClass"),
+      batch = rs.string("batch"),
+      env = JDBCUtils.stringToMap(rs.string("env")),
+      sparkConf = JDBCUtils.stringToMap(rs.string("sparkConf")),
+      evaluatorResults = rs.string("evaluatorResults"),
+      evaluatorResultsHTML = rs.string("evaluatorResultsHTML"),
+      evaluatorResultsJSON = rs.string("evaluatorResultsJSON"))
+  }
+}
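For reference, a minimal usage sketch of this DAO. It assumes EvaluationInstance is a case class with exactly the fields read back by resultToEvaluationInstance above, and that the DAO is normally obtained through Storage.getMetaDataEvaluationInstances(); all field values are placeholders. Note that insert() ignores the supplied ID and returns a freshly generated one:

    import org.apache.predictionio.data.storage.{EvaluationInstance, Storage}
    import org.joda.time.DateTime

    val instances = Storage.getMetaDataEvaluationInstances()
    val id = instances.insert(EvaluationInstance(
      id = "",                    // ignored; insert() generates a UUID
      status = "INIT",
      startTime = DateTime.now,
      endTime = DateTime.now,
      evaluationClass = "com.example.MyEvaluation",                  // placeholder
      engineParamsGeneratorClass = "com.example.MyParamsGenerator",  // placeholder
      batch = "",
      env = Map(),
      sparkConf = Map(),
      evaluatorResults = "",
      evaluatorResultsHTML = "",
      evaluatorResultsJSON = ""))
    instances.get(id).foreach(i => println(i.status))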

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/data/src/main/scala/org/apache/predictionio/data/storage/jdbc/JDBCLEvents.scala
----------------------------------------------------------------------
diff --git a/data/src/main/scala/org/apache/predictionio/data/storage/jdbc/JDBCLEvents.scala b/data/src/main/scala/org/apache/predictionio/data/storage/jdbc/JDBCLEvents.scala
new file mode 100644
index 0000000..945879c
--- /dev/null
+++ b/data/src/main/scala/org/apache/predictionio/data/storage/jdbc/JDBCLEvents.scala
@@ -0,0 +1,241 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package org.apache.predictionio.data.storage.jdbc
+
+import grizzled.slf4j.Logging
+import org.apache.predictionio.data.storage.DataMap
+import org.apache.predictionio.data.storage.Event
+import org.apache.predictionio.data.storage.LEvents
+import org.apache.predictionio.data.storage.StorageClientConfig
+import org.joda.time.DateTime
+import org.joda.time.DateTimeZone
+import org.json4s.JObject
+import org.json4s.native.Serialization.read
+import org.json4s.native.Serialization.write
+import scalikejdbc._
+
+import scala.concurrent.ExecutionContext
+import scala.concurrent.Future
+
+/** JDBC implementation of [[LEvents]] */
+class JDBCLEvents(
+    client: String,
+    config: StorageClientConfig,
+    namespace: String) extends LEvents with Logging {
+  implicit private val formats = org.json4s.DefaultFormats
+
+  def init(appId: Int, channelId: Option[Int] = None): Boolean = {
+
+    // To use an index, the indexed column must be a VARCHAR of at most 255 characters
+    val useIndex = config.properties.contains("INDEX") &&
+      config.properties("INDEX").equalsIgnoreCase("enabled")
+
+    val tableName = JDBCUtils.eventTableName(namespace, appId, channelId)
+    val entityIdIndexName = s"idx_${tableName}_ei"
+    val entityTypeIndexName = s"idx_${tableName}_et"
+    DB autoCommit { implicit session =>
+      if (useIndex) {
+        SQL(s"""
+      create table if not exists $tableName (
+        id varchar(32) not null primary key,
+        event varchar(255) not null,
+        entityType varchar(255) not null,
+        entityId varchar(255) not null,
+        targetEntityType text,
+        targetEntityId text,
+        properties text,
+        eventTime timestamp DEFAULT CURRENT_TIMESTAMP,
+        eventTimeZone varchar(50) not null,
+        tags text,
+        prId text,
+        creationTime timestamp DEFAULT CURRENT_TIMESTAMP,
+        creationTimeZone varchar(50) not null)""").execute().apply()
+
+        // create indexes on entityId and entityType
+        SQL(s"create index $entityIdIndexName on $tableName (entityId)").execute().apply()
+        SQL(s"create index $entityTypeIndexName on $tableName (entityType)").execute().apply()
+      } else {
+        SQL(s"""
+      create table if not exists $tableName (
+        id varchar(32) not null primary key,
+        event text not null,
+        entityType text not null,
+        entityId text not null,
+        targetEntityType text,
+        targetEntityId text,
+        properties text,
+        eventTime timestamp DEFAULT CURRENT_TIMESTAMP,
+        eventTimeZone varchar(50) not null,
+        tags text,
+        prId text,
+        creationTime timestamp DEFAULT CURRENT_TIMESTAMP,
+        creationTimeZone varchar(50) not null)""").execute().apply()
+      }
+      true
+    }
+  }
+
+  def remove(appId: Int, channelId: Option[Int] = None): Boolean =
+    DB autoCommit { implicit session =>
+      SQL(s"""
+      drop table ${JDBCUtils.eventTableName(namespace, appId, channelId)}
+      """).execute().apply()
+      true
+    }
+
+  def close(): Unit = ConnectionPool.closeAll()
+
+  def futureInsert(event: Event, appId: Int, channelId: Option[Int])(
+    implicit ec: ExecutionContext): Future[String] = Future {
+    DB localTx { implicit session =>
+      val id = event.eventId.getOrElse(JDBCUtils.generateId)
+      val tableName = sqls.createUnsafely(JDBCUtils.eventTableName(namespace, appId, channelId))
+      sql"""
+      insert into $tableName values(
+        $id,
+        ${event.event},
+        ${event.entityType},
+        ${event.entityId},
+        ${event.targetEntityType},
+        ${event.targetEntityId},
+        ${write(event.properties.toJObject)},
+        ${event.eventTime},
+        ${event.eventTime.getZone.getID},
+        ${if (event.tags.nonEmpty) Some(event.tags.mkString(",")) else None},
+        ${event.prId},
+        ${event.creationTime},
+        ${event.creationTime.getZone.getID}
+      )
+      """.update().apply()
+      id
+    }
+  }
+
+  def futureGet(eventId: String, appId: Int, channelId: Option[Int])(
+    implicit ec: ExecutionContext): Future[Option[Event]] = Future {
+    DB readOnly { implicit session =>
+      val tableName = sqls.createUnsafely(JDBCUtils.eventTableName(namespace, appId, channelId))
+      sql"""
+      select
+        id,
+        event,
+        entityType,
+        entityId,
+        targetEntityType,
+        targetEntityId,
+        properties,
+        eventTime,
+        eventTimeZone,
+        tags,
+        prId,
+        creationTime,
+        creationTimeZone
+      from $tableName
+      where id = $eventId
+      """.map(resultToEvent).single().apply()
+    }
+  }
+
+  def futureDelete(eventId: String, appId: Int, channelId: Option[Int])(
+    implicit ec: ExecutionContext): Future[Boolean] = Future {
+    DB localTx { implicit session =>
+      val tableName = sqls.createUnsafely(JDBCUtils.eventTableName(namespace, appId, channelId))
+      sql"""
+      delete from $tableName where id = $eventId
+      """.update().apply()
+      true
+    }
+  }
+
+  def futureFind(
+      appId: Int,
+      channelId: Option[Int] = None,
+      startTime: Option[DateTime] = None,
+      untilTime: Option[DateTime] = None,
+      entityType: Option[String] = None,
+      entityId: Option[String] = None,
+      eventNames: Option[Seq[String]] = None,
+      targetEntityType: Option[Option[String]] = None,
+      targetEntityId: Option[Option[String]] = None,
+      limit: Option[Int] = None,
+      reversed: Option[Boolean] = None
+    )(implicit ec: ExecutionContext): Future[Iterator[Event]] = Future {
+    DB readOnly { implicit session =>
+      val tableName = sqls.createUnsafely(JDBCUtils.eventTableName(namespace, appId, channelId))
+      val whereClause = sqls.toAndConditionOpt(
+        startTime.map(x => sqls"eventTime >= $x"),
+        untilTime.map(x => sqls"eventTime < $x"),
+        entityType.map(x => sqls"entityType = $x"),
+        entityId.map(x => sqls"entityId = $x"),
+        eventNames.map(x =>
+          sqls.toOrConditionOpt(x.map(y =>
+            Some(sqls"event = $y")
+          ): _*)
+        ).getOrElse(None),
+        targetEntityType.map(x => x.map(y => sqls"targetEntityType = $y")
+            .getOrElse(sqls"targetEntityType IS NULL")),
+        targetEntityId.map(x => x.map(y => sqls"targetEntityId = $y")
+            .getOrElse(sqls"targetEntityId IS NULL"))
+      ).map(sqls.where(_)).getOrElse(sqls"")
+      val orderByClause = reversed.map(x =>
+        if (x) sqls"eventTime desc" else sqls"eventTime asc"
+      ).getOrElse(sqls"eventTime asc")
+      val limitClause = limit.map(x =>
+        if (x < 0) sqls"" else sqls.limit(x)
+      ).getOrElse(sqls"")
+      val q = sql"""
+      select
+        id,
+        event,
+        entityType,
+        entityId,
+        targetEntityType,
+        targetEntityId,
+        properties,
+        eventTime,
+        eventTimeZone,
+        tags,
+        prId,
+        creationTime,
+        creationTimeZone
+      from $tableName
+      $whereClause
+      order by $orderByClause
+      $limitClause
+      """
+      q.map(resultToEvent).list().apply().toIterator
+    }
+  }
+
+  /** Converts a JDBC result set row to an [[Event]] */
+  private[predictionio] def resultToEvent(rs: WrappedResultSet): Event = {
+    Event(
+      eventId = rs.stringOpt("id"),
+      event = rs.string("event"),
+      entityType = rs.string("entityType"),
+      entityId = rs.string("entityId"),
+      targetEntityType = rs.stringOpt("targetEntityType"),
+      targetEntityId = rs.stringOpt("targetEntityId"),
+      properties = rs.stringOpt("properties").map(p =>
+        DataMap(read[JObject](p))).getOrElse(DataMap()),
+      eventTime = new DateTime(rs.jodaDateTime("eventTime"),
+        DateTimeZone.forID(rs.string("eventTimeZone"))),
+      tags = rs.stringOpt("tags").map(t => t.split(",").toList).getOrElse(Nil),
+      prId = rs.stringOpt("prId"),
+      creationTime = new DateTime(rs.jodaDateTime("creationTime"),
+        DateTimeZone.forID(rs.string("creationTimeZone")))
+    )
+  }
+}
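A sketch of the asynchronous access pattern this class implements. It assumes the DAO is obtained through Storage.getLEvents() and that the table for the app has been initialized; the app ID, entity names, and timeouts are placeholders:

    import org.apache.predictionio.data.storage.{Event, Storage}
    import org.joda.time.DateTime
    import scala.concurrent.Await
    import scala.concurrent.ExecutionContext.Implicits.global
    import scala.concurrent.duration.Duration

    val events = Storage.getLEvents()
    events.init(appId = 1)  // creates the event table if it does not exist
    val id = Await.result(
      events.futureInsert(
        Event(event = "view", entityType = "user", entityId = "u1",
          eventTime = DateTime.now()),
        appId = 1, channelId = None),
      Duration(10, "seconds"))
    val views = Await.result(
      events.futureFind(appId = 1, entityId = Some("u1")),
      Duration(10, "seconds")).toList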

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/data/src/main/scala/org/apache/predictionio/data/storage/jdbc/JDBCModels.scala
----------------------------------------------------------------------
diff --git a/data/src/main/scala/org/apache/predictionio/data/storage/jdbc/JDBCModels.scala b/data/src/main/scala/org/apache/predictionio/data/storage/jdbc/JDBCModels.scala
new file mode 100644
index 0000000..01ed6ca
--- /dev/null
+++ b/data/src/main/scala/org/apache/predictionio/data/storage/jdbc/JDBCModels.scala
@@ -0,0 +1,52 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package org.apache.predictionio.data.storage.jdbc
+
+import grizzled.slf4j.Logging
+import org.apache.predictionio.data.storage.Model
+import org.apache.predictionio.data.storage.Models
+import org.apache.predictionio.data.storage.StorageClientConfig
+import scalikejdbc._
+
+/** JDBC implementation of [[Models]] */
+class JDBCModels(client: String, config: StorageClientConfig, prefix: String)
+  extends Models with Logging {
+  /** Database table name for this data access object */
+  val tableName = JDBCUtils.prefixTableName(prefix, "models")
+
+  /** Determines binary column type based on JDBC driver type */
+  val binaryColumnType = JDBCUtils.binaryColumnType(client)
+  DB autoCommit { implicit session =>
+    sql"""
+    create table if not exists $tableName (
+      id varchar(100) not null primary key,
+      models $binaryColumnType not null)""".execute().apply()
+  }
+
+  def insert(i: Model): Unit = DB localTx { implicit session =>
+    sql"insert into $tableName values(${i.id}, ${i.models})".update().apply()
+  }
+
+  def get(id: String): Option[Model] = DB readOnly { implicit session =>
+    sql"select id, models from $tableName where id = $id".map { r =>
+      Model(id = r.string("id"), models = r.bytes("models"))
+    }.single().apply()
+  }
+
+  def delete(id: String): Unit = DB localTx { implicit session =>
+    sql"delete from $tableName where id = $id".execute().apply()
+  }
+}
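A minimal round-trip sketch, assuming the DAO is obtained through Storage.getModelDataModels() and that Model is a simple case class of an ID and a byte array, as used above; the ID is a placeholder:

    import org.apache.predictionio.data.storage.{Model, Storage}

    val models = Storage.getModelDataModels()
    models.insert(Model(id = "engine-instance-1", models = Array[Byte](1, 2, 3)))
    val loaded: Option[Array[Byte]] = models.get("engine-instance-1").map(_.models)
    models.delete("engine-instance-1")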

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/data/src/main/scala/org/apache/predictionio/data/storage/jdbc/JDBCPEvents.scala
----------------------------------------------------------------------
diff --git a/data/src/main/scala/org/apache/predictionio/data/storage/jdbc/JDBCPEvents.scala b/data/src/main/scala/org/apache/predictionio/data/storage/jdbc/JDBCPEvents.scala
new file mode 100644
index 0000000..c01989c
--- /dev/null
+++ b/data/src/main/scala/org/apache/predictionio/data/storage/jdbc/JDBCPEvents.scala
@@ -0,0 +1,160 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package org.apache.predictionio.data.storage.jdbc
+
+import java.sql.{DriverManager, ResultSet}
+
+import com.github.nscala_time.time.Imports._
+import org.apache.predictionio.data.storage.{DataMap, Event, PEvents, StorageClientConfig}
+import org.apache.spark.SparkContext
+import org.apache.spark.rdd.{JdbcRDD, RDD}
+import org.apache.spark.sql.{SQLContext, SaveMode}
+import org.json4s.JObject
+import org.json4s.native.Serialization
+
+/** JDBC implementation of [[PEvents]] */
+class JDBCPEvents(client: String, config: StorageClientConfig, namespace: String) extends PEvents {
+  @transient private implicit lazy val formats = org.json4s.DefaultFormats
+  def find(
+    appId: Int,
+    channelId: Option[Int] = None,
+    startTime: Option[DateTime] = None,
+    untilTime: Option[DateTime] = None,
+    entityType: Option[String] = None,
+    entityId: Option[String] = None,
+    eventNames: Option[Seq[String]] = None,
+    targetEntityType: Option[Option[String]] = None,
+    targetEntityId: Option[Option[String]] = None)(sc: SparkContext): RDD[Event] = {
+    val lower = startTime.map(_.getMillis).getOrElse(0.toLong)
+    /** Change the default upper bound from +100 years to +1 year because
+      * MySQL's FROM_UNIXTIME(t) returns NULL for timestamps +100 years out.
+      */
+    val upper = untilTime.map(_.getMillis).getOrElse((DateTime.now + 1.years).getMillis)
+    val par = scala.math.min(
+      new Duration(upper - lower).getStandardDays,
+      config.properties.getOrElse("PARTITIONS", "4").toLong).toInt
+    val entityTypeClause = entityType.map(x => s"and entityType = '$x'").getOrElse("")
+    val entityIdClause = entityId.map(x => s"and entityId = '$x'").getOrElse("")
+    val eventNamesClause =
+      eventNames.map("and (" + _.map(y => s"event = '$y'").mkString(" or ") + ")").getOrElse("")
+    val targetEntityTypeClause = targetEntityType.map(
+      _.map(x => s"and targetEntityType = '$x'"
+    ).getOrElse("and targetEntityType is null")).getOrElse("")
+    val targetEntityIdClause = targetEntityId.map(
+      _.map(x => s"and targetEntityId = '$x'"
+    ).getOrElse("and targetEntityId is null")).getOrElse("")
+    val q = s"""
+      select
+        id,
+        event,
+        entityType,
+        entityId,
+        targetEntityType,
+        targetEntityId,
+        properties,
+        eventTime,
+        eventTimeZone,
+        tags,
+        prId,
+        creationTime,
+        creationTimeZone
+      from ${JDBCUtils.eventTableName(namespace, appId, channelId)}
+      where
+        eventTime >= ${JDBCUtils.timestampFunction(client)}(?) and
+        eventTime < ${JDBCUtils.timestampFunction(client)}(?)
+      $entityTypeClause
+      $entityIdClause
+      $eventNamesClause
+      $targetEntityTypeClause
+      $targetEntityIdClause
+      """.replace("\n", " ")
+    new JdbcRDD(
+      sc,
+      () => {
+        DriverManager.getConnection(
+          client,
+          config.properties("USERNAME"),
+          config.properties("PASSWORD"))
+      },
+      q,
+      lower / 1000,
+      upper / 1000,
+      par,
+      (r: ResultSet) => {
+        Event(
+          eventId = Option(r.getString("id")),
+          event = r.getString("event"),
+          entityType = r.getString("entityType"),
+          entityId = r.getString("entityId"),
+          targetEntityType = Option(r.getString("targetEntityType")),
+          targetEntityId = Option(r.getString("targetEntityId")),
+          properties = Option(r.getString("properties")).map(x =>
+            DataMap(Serialization.read[JObject](x))).getOrElse(DataMap()),
+          eventTime = new DateTime(r.getTimestamp("eventTime").getTime,
+            DateTimeZone.forID(r.getString("eventTimeZone"))),
+          tags = Option(r.getString("tags")).map(x =>
+            x.split(",").toList).getOrElse(Nil),
+          prId = Option(r.getString("prId")),
+          creationTime = new DateTime(r.getTimestamp("creationTime").getTime,
+            DateTimeZone.forID(r.getString("creationTimeZone"))))
+      }).cache()
+  }
+
+  def write(events: RDD[Event], appId: Int, channelId: Option[Int])(sc: SparkContext): Unit = {
+    val sqlContext = new SQLContext(sc)
+
+    import sqlContext.implicits._
+
+    val tableName = JDBCUtils.eventTableName(namespace, appId, channelId)
+
+    val eventTableColumns = Seq[String](
+        "id"
+      , "event"
+      , "entityType"
+      , "entityId"
+      , "targetEntityType"
+      , "targetEntityId"
+      , "properties"
+      , "eventTime"
+      , "eventTimeZone"
+      , "tags"
+      , "prId"
+      , "creationTime"
+      , "creationTimeZone")
+
+    val eventDF = events.map { event =>
+      (event.eventId.getOrElse(JDBCUtils.generateId)
+        , event.event
+        , event.entityType
+        , event.entityId
+        , event.targetEntityType.orNull
+        , event.targetEntityId.orNull
+        , if (!event.properties.isEmpty) Serialization.write(event.properties.toJObject) else null
+        , new java.sql.Timestamp(event.eventTime.getMillis)
+        , event.eventTime.getZone.getID
+        , if (event.tags.nonEmpty) Some(event.tags.mkString(",")) else None
+        , event.prId
+        , new java.sql.Timestamp(event.creationTime.getMillis)
+        , event.creationTime.getZone.getID)
+    }.toDF(eventTableColumns:_*)
+
+    // DataFrameWriter.jdbc requires Spark 1.4.0 or higher
+    val prop = new java.util.Properties
+    prop.setProperty("user", config.properties("USERNAME"))
+    prop.setProperty("password", config.properties("PASSWORD"))
+    eventDF.write.mode(SaveMode.Append).jdbc(client, tableName, prop)
+  }
+}
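A sketch of reading events through this class, assuming the implementation is obtained through Storage.getPEvents() (which requires URL, USERNAME, and PASSWORD to be configured); the app ID and event names are placeholders. Note that find() partitions the JdbcRDD on eventTime, with roughly one partition per day of data, capped by the PARTITIONS property (default 4):

    import org.apache.predictionio.data.storage.Storage
    import org.apache.spark.{SparkConf, SparkContext}
    import org.joda.time.DateTime

    val sc = new SparkContext(new SparkConf().setAppName("pevents-sketch"))
    val pEvents = Storage.getPEvents()
    val rdd = pEvents.find(
      appId = 1,                                        // placeholder
      startTime = Some(DateTime.now().minusDays(7)),
      eventNames = Some(Seq("view", "buy")))(sc)        // placeholders
    println(rdd.count())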

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/data/src/main/scala/org/apache/predictionio/data/storage/jdbc/JDBCUtils.scala
----------------------------------------------------------------------
diff --git a/data/src/main/scala/org/apache/predictionio/data/storage/jdbc/JDBCUtils.scala b/data/src/main/scala/org/apache/predictionio/data/storage/jdbc/JDBCUtils.scala
new file mode 100644
index 0000000..e95b49b
--- /dev/null
+++ b/data/src/main/scala/org/apache/predictionio/data/storage/jdbc/JDBCUtils.scala
@@ -0,0 +1,103 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package org.apache.predictionio.data.storage.jdbc
+
+import scalikejdbc._
+
+/** JDBC related utilities */
+object JDBCUtils {
+  /** Extract JDBC driver type from URL
+    *
+    * @param url JDBC URL
+    * @return The driver type, e.g. postgresql
+    */
+  def driverType(url: String): String = {
+    val capture = """jdbc:([^:]+):""".r
+    capture findFirstIn url match {
+      case Some(capture(driverType)) => driverType
+      case None => ""
+    }
+  }
+
+  /** Determines binary column type from JDBC URL
+    *
+    * @param url JDBC URL
+    * @return Binary column type as SQLSyntax, e.g. LONGBLOB
+    */
+  def binaryColumnType(url: String): SQLSyntax = {
+    driverType(url) match {
+      case "postgresql" => sqls"bytea"
+      case "mysql" => sqls"longblob"
+      case _ => sqls"longblob"
+    }
+  }
+
+  /** Determines UNIX timestamp conversion function from JDBC URL
+    *
+    * @param url JDBC URL
+    * @return Timestamp conversion function, e.g. TO_TIMESTAMP
+    */
+  def timestampFunction(url: String): String = {
+    driverType(url) match {
+      case "postgresql" => "to_timestamp"
+      case "mysql" => "from_unixtime"
+      case _ => "from_unixtime"
+    }
+  }
+
+  /** Converts Map of String to String to comma-separated list of key=value
+    *
+    * @param m Map of String to String
+    * @return Comma-separated list, e.g. FOO=BAR,X=Y,...
+    */
+  def mapToString(m: Map[String, String]): String = {
+    m.map(t => s"${t._1}=${t._2}").mkString(",")
+  }
+
+  /** Inverse of mapToString
+    *
+    * @param str Comma-separated list, e.g. FOO=BAR,X=Y,...
+    * @return Map of String to String, e.g. Map("FOO" -> "BAR", "X" -> "Y", ...)
+    */
+  def stringToMap(str: String): Map[String, String] = {
+    str.split(",").map { x =>
+      val y = x.split("=")
+      y(0) -> y(1)
+    }.toMap[String, String]
+  }
+
+  /** Generate 32-character random ID using UUID with - stripped */
+  def generateId: String = java.util.UUID.randomUUID().toString.replace("-", "")
+
+  /** Prefix a table name
+    *
+    * @param prefix Table prefix
+    * @param table Table name
+    * @return Prefixed table name
+    */
+  def prefixTableName(prefix: String, table: String): SQLSyntax =
+    sqls.createUnsafely(s"${prefix}_$table")
+
+  /** Derive event table name
+    *
+    * @param namespace Namespace of event tables
+    * @param appId App ID
+    * @param channelId Optional channel ID
+    * @return Full event table name
+    */
+  def eventTableName(namespace: String, appId: Int, channelId: Option[Int]): String =
+    s"${namespace}_${appId}${channelId.map("_" + _).getOrElse("")}"
+}
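These helpers are pure functions, so their behavior follows directly from the definitions above; a few examples:

    JDBCUtils.driverType("jdbc:postgresql://localhost/pio")  // "postgresql"
    JDBCUtils.mapToString(Map("FOO" -> "BAR", "X" -> "Y"))   // "FOO=BAR,X=Y"
    JDBCUtils.stringToMap("FOO=BAR,X=Y")                     // Map("FOO" -> "BAR", "X" -> "Y")
    JDBCUtils.eventTableName("pio_event", 1, Some(2))        // "pio_event_1_2"
    JDBCUtils.eventTableName("pio_event", 1, None)           // "pio_event_1"

Note that stringToMap is only a partial inverse of mapToString: it assumes no key or value contains '=' or ','.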

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/data/src/main/scala/org/apache/predictionio/data/storage/jdbc/StorageClient.scala
----------------------------------------------------------------------
diff --git a/data/src/main/scala/org/apache/predictionio/data/storage/jdbc/StorageClient.scala b/data/src/main/scala/org/apache/predictionio/data/storage/jdbc/StorageClient.scala
new file mode 100644
index 0000000..6015870
--- /dev/null
+++ b/data/src/main/scala/org/apache/predictionio/data/storage/jdbc/StorageClient.scala
@@ -0,0 +1,50 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package org.apache.predictionio.data.storage.jdbc
+
+import grizzled.slf4j.Logging
+import org.apache.predictionio.data.storage.BaseStorageClient
+import org.apache.predictionio.data.storage.StorageClientConfig
+import org.apache.predictionio.data.storage.StorageClientException
+import scalikejdbc._
+
+/** JDBC implementation of [[BaseStorageClient]] */
+class StorageClient(val config: StorageClientConfig)
+  extends BaseStorageClient with Logging {
+  override val prefix = "JDBC"
+
+  if (!config.properties.contains("URL")) {
+    throw new StorageClientException("The URL variable is not set!", null)
+  }
+  if (!config.properties.contains("USERNAME")) {
+    throw new StorageClientException("The USERNAME variable is not set!", null)
+  }
+  if (!config.properties.contains("PASSWORD")) {
+    throw new StorageClientException("The PASSWORD variable is not set!", null)
+  }
+
+  // set max size of connection pool
+  val maxSize: Int = config.properties.getOrElse("CONNECTIONS", "8").toInt
+  val settings = ConnectionPoolSettings(maxSize = maxSize)
+
+  ConnectionPool.singleton(
+    config.properties("URL"),
+    config.properties("USERNAME"),
+    config.properties("PASSWORD"),
+    settings)
+  /** JDBC connection URL. Connections are managed by ScalikeJDBC. */
+  val client = config.properties("URL")
+}
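A construction sketch, assuming StorageClientConfig carries a properties map populated from the storage configuration environment; the credentials are placeholders:

    import org.apache.predictionio.data.storage.StorageClientConfig

    val config = StorageClientConfig(properties = Map(
      "URL" -> "jdbc:postgresql://localhost/pio",  // placeholder
      "USERNAME" -> "pio",                         // placeholder
      "PASSWORD" -> "pio",                         // placeholder
      "CONNECTIONS" -> "16"))                      // optional; default 8
    val storageClient = new StorageClient(config)  // initializes the ScalikeJDBC pool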

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/data/src/main/scala/org/apache/predictionio/data/storage/jdbc/package.scala
----------------------------------------------------------------------
diff --git a/data/src/main/scala/org/apache/predictionio/data/storage/jdbc/package.scala b/data/src/main/scala/org/apache/predictionio/data/storage/jdbc/package.scala
new file mode 100644
index 0000000..c423b29
--- /dev/null
+++ b/data/src/main/scala/org/apache/predictionio/data/storage/jdbc/package.scala
@@ -0,0 +1,23 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package org.apache.predictionio.data.storage
+
+/** JDBC implementation of storage traits, supporting metadata, event data, and
+  * model data
+  *
+  * @group Implementation
+  */
+package object jdbc {}

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/data/src/main/scala/org/apache/predictionio/data/storage/localfs/LocalFSModels.scala
----------------------------------------------------------------------
diff --git a/data/src/main/scala/org/apache/predictionio/data/storage/localfs/LocalFSModels.scala b/data/src/main/scala/org/apache/predictionio/data/storage/localfs/LocalFSModels.scala
new file mode 100644
index 0000000..82989aa
--- /dev/null
+++ b/data/src/main/scala/org/apache/predictionio/data/storage/localfs/LocalFSModels.scala
@@ -0,0 +1,59 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package org.apache.predictionio.data.storage.localfs
+
+import java.io.File
+import java.io.FileNotFoundException
+import java.io.FileOutputStream
+
+import grizzled.slf4j.Logging
+import org.apache.predictionio.data.storage.Model
+import org.apache.predictionio.data.storage.Models
+import org.apache.predictionio.data.storage.StorageClientConfig
+
+import scala.io.Source
+
+class LocalFSModels(f: File, config: StorageClientConfig, prefix: String)
+  extends Models with Logging {
+
+  def insert(i: Model): Unit = {
+    try {
+      val fos = new FileOutputStream(new File(f, s"${prefix}${i.id}"))
+      fos.write(i.models)
+      fos.close()
+    } catch {
+      case e: FileNotFoundException => error(e.getMessage)
+    }
+  }
+
+  def get(id: String): Option[Model] = {
+    try {
+      Some(Model(
+        id = id,
+        models = Source.fromFile(new File(f, s"${prefix}${id}"))(
+          scala.io.Codec.ISO8859).map(_.toByte).toArray))
+    } catch {
+      case e: Throwable =>
+        error(e.getMessage)
+        None
+    }
+  }
+
+  def delete(id: String): Unit = {
+    val m = new File(f, s"${prefix}${id}")
+    if (!m.delete) error(s"Unable to delete ${m.getCanonicalPath}!")
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/data/src/main/scala/org/apache/predictionio/data/storage/localfs/StorageClient.scala
----------------------------------------------------------------------
diff --git a/data/src/main/scala/org/apache/predictionio/data/storage/localfs/StorageClient.scala b/data/src/main/scala/org/apache/predictionio/data/storage/localfs/StorageClient.scala
new file mode 100644
index 0000000..8206384
--- /dev/null
+++ b/data/src/main/scala/org/apache/predictionio/data/storage/localfs/StorageClient.scala
@@ -0,0 +1,43 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package org.apache.predictionio.data.storage.localfs
+
+import java.io.File
+
+import grizzled.slf4j.Logging
+import org.apache.predictionio.data.storage.BaseStorageClient
+import org.apache.predictionio.data.storage.StorageClientConfig
+import org.apache.predictionio.data.storage.StorageClientException
+
+class StorageClient(val config: StorageClientConfig) extends BaseStorageClient
+    with Logging {
+  override val prefix = "LocalFS"
+  val f = new File(
+    config.properties.getOrElse("PATH", config.properties("HOSTS")))
+  if (f.exists) {
+    if (!f.isDirectory) throw new StorageClientException(
+      s"${f} already exists but it is not a directory!",
+      null)
+    if (!f.canWrite) throw new StorageClientException(
+      s"${f} already exists but it is not writable!",
+      null)
+  } else {
+    if (!f.mkdirs) throw new StorageClientException(
+      s"${f} does not exist and automatic creation failed!",
+      null)
+  }
+  val client = f
+}

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/data/src/main/scala/org/apache/predictionio/data/storage/localfs/package.scala
----------------------------------------------------------------------
diff --git a/data/src/main/scala/org/apache/predictionio/data/storage/localfs/package.scala b/data/src/main/scala/org/apache/predictionio/data/storage/localfs/package.scala
new file mode 100644
index 0000000..f245a06
--- /dev/null
+++ b/data/src/main/scala/org/apache/predictionio/data/storage/localfs/package.scala
@@ -0,0 +1,22 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package org.apache.predictionio.data.storage
+
+/** Local file system implementation of storage traits, supporting model data only
+  *
+  * @group Implementation
+  */
+package object localfs {}

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/data/src/main/scala/org/apache/predictionio/data/storage/package.scala
----------------------------------------------------------------------
diff --git a/data/src/main/scala/org/apache/predictionio/data/storage/package.scala b/data/src/main/scala/org/apache/predictionio/data/storage/package.scala
new file mode 100644
index 0000000..09e6fa3
--- /dev/null
+++ b/data/src/main/scala/org/apache/predictionio/data/storage/package.scala
@@ -0,0 +1,26 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package org.apache.predictionio.data
+
+/** If you are an engine developer, please refer to the [[store]] package.
+  *
+  * This package provides convenient access to underlying data access objects.
+  * The common entry point is [[Storage]].
+  *
+  * Developer APIs are available to advanced developers to add support of other
+  * data store backends.
+  */
+package object storage {}

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/data/src/main/scala/org/apache/predictionio/data/store/Common.scala
----------------------------------------------------------------------
diff --git a/data/src/main/scala/org/apache/predictionio/data/store/Common.scala b/data/src/main/scala/org/apache/predictionio/data/store/Common.scala
new file mode 100644
index 0000000..81b4b28
--- /dev/null
+++ b/data/src/main/scala/org/apache/predictionio/data/store/Common.scala
@@ -0,0 +1,50 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package org.apache.predictionio.data.store
+
+import org.apache.predictionio.data.storage.Storage
+import grizzled.slf4j.Logger
+
+private[predictionio] object Common {
+
+  @transient lazy val logger = Logger[this.type]
+  @transient lazy private val appsDb = Storage.getMetaDataApps()
+  @transient lazy private val channelsDb = Storage.getMetaDataChannels()
+
+  /* Resolves an app name and optional channel name to their IDs.
+     Throws IllegalArgumentException if either name is invalid. */
+  def appNameToId(appName: String, channelName: Option[String]): (Int, Option[Int]) = {
+    val appOpt = appsDb.getByName(appName)
+
+    appOpt.map { app =>
+      val channelMap: Map[String, Int] = channelsDb.getByAppid(app.id)
+        .map(c => (c.name, c.id)).toMap
+
+      val channelId: Option[Int] = channelName.map { ch =>
+        if (channelMap.contains(ch)) {
+          channelMap(ch)
+        } else {
+          logger.error(s"Invalid channel name ${ch}.")
+          throw new IllegalArgumentException(s"Invalid channel name ${ch}.")
+        }
+      }
+
+      (app.id, channelId)
+    }.getOrElse {
+      logger.error(s"Invalid app name ${appName}")
+      throw new IllegalArgumentException(s"Invalid app name ${appName}")
+    }
+  }
+}
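A sketch of the resolution this performs for callers in the same package; the app and channel names are placeholders:

    // Resolves names to numeric IDs, or throws IllegalArgumentException
    // if the app or channel does not exist.
    val (appId, channelId) = Common.appNameToId("MyApp", Some("android"))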

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/data/src/main/scala/org/apache/predictionio/data/store/LEventStore.scala
----------------------------------------------------------------------
diff --git a/data/src/main/scala/org/apache/predictionio/data/store/LEventStore.scala b/data/src/main/scala/org/apache/predictionio/data/store/LEventStore.scala
new file mode 100644
index 0000000..ae38e7b
--- /dev/null
+++ b/data/src/main/scala/org/apache/predictionio/data/store/LEventStore.scala
@@ -0,0 +1,142 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package org.apache.predictionio.data.store
+
+import org.apache.predictionio.data.storage.Storage
+import org.apache.predictionio.data.storage.Event
+
+import org.joda.time.DateTime
+
+import scala.concurrent.Await
+import scala.concurrent.ExecutionContext.Implicits.global
+import scala.concurrent.duration.Duration
+
+/** This object provides a set of operations to access the Event Store
+  * without going through Spark's parallelization
+  */
+object LEventStore {
+
+  private val defaultTimeout = Duration(60, "seconds")
+
+  @transient lazy private val eventsDb = Storage.getLEvents()
+
+  /** Reads events of the specified entity. Use this in an Algorithm's predict()
+    * or in Serving logic for fast event store access.
+    *
+    * @param appName return events of this app
+    * @param entityType return events of this entityType
+    * @param entityId return events of this entityId
+    * @param channelName return events of this channel (default channel if it's None)
+    * @param eventNames return events with any of these event names.
+    * @param targetEntityType return events of this targetEntityType:
+    *   - None means no restriction on targetEntityType
+    *   - Some(None) means no targetEntityType for this event
+    *   - Some(Some(x)) means targetEntityType should match x.
+    * @param targetEntityId return events of this targetEntityId
+    *   - None means no restriction on targetEntityId
+    *   - Some(None) means no targetEntityId for this event
+    *   - Some(Some(x)) means targetEntityId should match x.
+    * @param startTime return events with eventTime >= startTime
+    * @param untilTime return events with eventTime < untilTime
+    * @param limit Limit number of events. Get all events if None or Some(-1)
+    * @param latest Return latest event first (default true)
+    * @return Iterator[Event]
+    */
+  def findByEntity(
+    appName: String,
+    entityType: String,
+    entityId: String,
+    channelName: Option[String] = None,
+    eventNames: Option[Seq[String]] = None,
+    targetEntityType: Option[Option[String]] = None,
+    targetEntityId: Option[Option[String]] = None,
+    startTime: Option[DateTime] = None,
+    untilTime: Option[DateTime] = None,
+    limit: Option[Int] = None,
+    latest: Boolean = true,
+    timeout: Duration = defaultTimeout): Iterator[Event] = {
+
+    val (appId, channelId) = Common.appNameToId(appName, channelName)
+
+    Await.result(eventsDb.futureFind(
+      appId = appId,
+      channelId = channelId,
+      startTime = startTime,
+      untilTime = untilTime,
+      entityType = Some(entityType),
+      entityId = Some(entityId),
+      eventNames = eventNames,
+      targetEntityType = targetEntityType,
+      targetEntityId = targetEntityId,
+      limit = limit,
+      reversed = Some(latest)),
+      timeout)
+  }
+
+  /** Reads events generically. If entityType or entityId is not specified, it
+    * results in a full table scan.
+    *
+    * @param appName return events of this app
+    * @param entityType return events of this entityType
+    *   - None means no restriction on entityType
+    *   - Some(x) means entityType should match x.
+    * @param entityId return events of this entityId
+    *   - None means no restriction on entityId
+    *   - Some(x) means entityId should match x.
+    * @param channelName return events of this channel (default channel if it's None)
+    * @param eventNames return events with any of these event names.
+    * @param targetEntityType return events of this targetEntityType:
+    *   - None means no restriction on targetEntityType
+    *   - Some(None) means no targetEntityType for this event
+    *   - Some(Some(x)) means targetEntityType should match x.
+    * @param targetEntityId return events of this targetEntityId
+    *   - None means no restriction on targetEntityId
+    *   - Some(None) means no targetEntityId for this event
+    *   - Some(Some(x)) means targetEntityId should match x.
+    * @param startTime return events with eventTime >= startTime
+    * @param untilTime return events with eventTime < untilTime
+    * @param limit Limit number of events. Get all events if None or Some(-1)
+    * @return Iterator[Event]
+    */
+  def find(
+    appName: String,
+    entityType: Option[String] = None,
+    entityId: Option[String] = None,
+    channelName: Option[String] = None,
+    eventNames: Option[Seq[String]] = None,
+    targetEntityType: Option[Option[String]] = None,
+    targetEntityId: Option[Option[String]] = None,
+    startTime: Option[DateTime] = None,
+    untilTime: Option[DateTime] = None,
+    limit: Option[Int] = None,
+    timeout: Duration = defaultTimeout): Iterator[Event] = {
+
+    val (appId, channelId) = Common.appNameToId(appName, channelName)
+
+    Await.result(eventsDb.futureFind(
+      appId = appId,
+      channelId = channelId,
+      startTime = startTime,
+      untilTime = untilTime,
+      entityType = entityType,
+      entityId = entityId,
+      eventNames = eventNames,
+      targetEntityType = targetEntityType,
+      targetEntityId = targetEntityId,
+      limit = limit), timeout)
+  }
+
+}
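A usage sketch, for example inside an algorithm's predict(); the app name, entity, and event names are placeholders:

    import org.apache.predictionio.data.store.LEventStore
    import scala.concurrent.duration.Duration

    val recentViews = LEventStore.findByEntity(
      appName = "MyApp",        // placeholder
      entityType = "user",
      entityId = "u1",
      eventNames = Some(Seq("view")),
      limit = Some(10),
      latest = true,
      timeout = Duration(200, "millis")).toList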

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/data/src/main/scala/org/apache/predictionio/data/store/PEventStore.scala
----------------------------------------------------------------------
diff --git a/data/src/main/scala/org/apache/predictionio/data/store/PEventStore.scala b/data/src/main/scala/org/apache/predictionio/data/store/PEventStore.scala
new file mode 100644
index 0000000..b8f0037
--- /dev/null
+++ b/data/src/main/scala/org/apache/predictionio/data/store/PEventStore.scala
@@ -0,0 +1,116 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package org.apache.predictionio.data.store
+
+import org.apache.predictionio.data.storage.Storage
+import org.apache.predictionio.data.storage.Event
+import org.apache.predictionio.data.storage.PropertyMap
+
+import org.joda.time.DateTime
+
+import org.apache.spark.SparkContext
+import org.apache.spark.rdd.RDD
+
+/** This object provides a set of operations to access the Event Store
+  * with Spark's parallelization
+  */
+object PEventStore {
+
+  @transient lazy private val eventsDb = Storage.getPEvents()
+
+  /** Read events from Event Store
+    *
+    * @param appName return events of this app
+    * @param channelName return events of this channel (default channel if it's None)
+    * @param startTime return events with eventTime >= startTime
+    * @param untilTime return events with eventTime < untilTime
+    * @param entityType return events of this entityType
+    * @param entityId return events of this entityId
+    * @param eventNames return events with any of these event names.
+    * @param targetEntityType return events of this targetEntityType:
+    *   - None means no restriction on targetEntityType
+    *   - Some(None) means no targetEntityType for this event
+    *   - Some(Some(x)) means targetEntityType should match x.
+    * @param targetEntityId return events of this targetEntityId
+    *   - None means no restriction on targetEntityId
+    *   - Some(None) means no targetEntityId for this event
+    *   - Some(Some(x)) means targetEntityId should match x.
+    * @param sc Spark context
+    * @return RDD[Event]
+    */
+  def find(
+    appName: String,
+    channelName: Option[String] = None,
+    startTime: Option[DateTime] = None,
+    untilTime: Option[DateTime] = None,
+    entityType: Option[String] = None,
+    entityId: Option[String] = None,
+    eventNames: Option[Seq[String]] = None,
+    targetEntityType: Option[Option[String]] = None,
+    targetEntityId: Option[Option[String]] = None
+  )(sc: SparkContext): RDD[Event] = {
+
+    val (appId, channelId) = Common.appNameToId(appName, channelName)
+
+    eventsDb.find(
+      appId = appId,
+      channelId = channelId,
+      startTime = startTime,
+      untilTime = untilTime,
+      entityType = entityType,
+      entityId = entityId,
+      eventNames = eventNames,
+      targetEntityType = targetEntityType,
+      targetEntityId = targetEntityId
+    )(sc)
+
+  }
+
+  /** Aggregate properties of entities based on these special events:
+    * \$set, \$unset, \$delete events.
+    *
+    * @param appName use events of this app
+    * @param entityType aggregate properties of the entities of this entityType
+    * @param channelName use events of this channel (default channel if it's None)
+    * @param startTime use events with eventTime >= startTime
+    * @param untilTime use events with eventTime < untilTime
+    * @param required only keep entities with these required properties defined
+    * @param sc Spark context
+    * @return RDD[(String, PropertyMap)] RDD of (entityId, PropertyMap) pairs
+    */
+  def aggregateProperties(
+    appName: String,
+    entityType: String,
+    channelName: Option[String] = None,
+    startTime: Option[DateTime] = None,
+    untilTime: Option[DateTime] = None,
+    required: Option[Seq[String]] = None)
+    (sc: SparkContext): RDD[(String, PropertyMap)] = {
+
+      val (appId, channelId) = Common.appNameToId(appName, channelName)
+
+      eventsDb.aggregateProperties(
+        appId = appId,
+        entityType = entityType,
+        channelId = channelId,
+        startTime = startTime,
+        untilTime = untilTime,
+        required = required
+      )(sc)
+
+    }
+
+}
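A sketch of typical use from an engine's DataSource, with placeholder names:

    import org.apache.predictionio.data.store.PEventStore
    import org.apache.spark.SparkContext

    def readEvents(sc: SparkContext) = {
      // all "view" events, regardless of entity
      val viewEvents = PEventStore.find(
        appName = "MyApp",                  // placeholder
        eventNames = Some(Seq("view")))(sc)
      // current properties of "user" entities, built from $set/$unset/$delete
      val users = PEventStore.aggregateProperties(
        appName = "MyApp",
        entityType = "user")(sc)
      (viewEvents, users)
    }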

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/data/src/main/scala/org/apache/predictionio/data/store/java/LJavaEventStore.scala
----------------------------------------------------------------------
diff --git a/data/src/main/scala/org/apache/predictionio/data/store/java/LJavaEventStore.scala b/data/src/main/scala/org/apache/predictionio/data/store/java/LJavaEventStore.scala
new file mode 100644
index 0000000..fa14daf
--- /dev/null
+++ b/data/src/main/scala/org/apache/predictionio/data/store/java/LJavaEventStore.scala
@@ -0,0 +1,142 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package org.apache.predictionio.data.store.java
+
+import org.apache.predictionio.data.storage.Event
+import org.apache.predictionio.data.store.LEventStore
+import org.joda.time.DateTime
+
+import scala.collection.JavaConversions
+import scala.concurrent.duration.Duration
+
+/** This Java-friendly object provides a set of operations to access the Event
+  * Store
+  * without going through Spark's parallelization
+  */
+object LJavaEventStore {
+
+  /** Reads events of the specified entity. Use this in an Algorithm's predict()
+    * or in Serving logic for fast event store access.
+    *
+    * @param appName return events of this app
+    * @param entityType return events of this entityType
+    * @param entityId return events of this entityId
+    * @param channelName return events of this channel (default channel if it's None)
+    * @param eventNames return events with any of these event names.
+    * @param targetEntityType return events of this targetEntityType:
+    *   - None means no restriction on targetEntityType
+    *   - Some(None) means no targetEntityType for this event
+    *   - Some(Some(x)) means targetEntityType should match x.
+    * @param targetEntityId return events of this targetEntityId
+    *   - None means no restriction on targetEntityId
+    *   - Some(None) means no targetEntityId for this event
+    *   - Some(Some(x)) means targetEntityId should match x.
+    * @param startTime return events with eventTime >= startTime
+    * @param untilTime return events with eventTime < untilTime
+    * @param limit Limit number of events. Get all events if None or Some(-1)
+    * @param latest Return latest event first
+    * @return java.util.List[Event]
+    */
+  def findByEntity(
+    appName: String,
+    entityType: String,
+    entityId: String,
+    channelName: Option[String],
+    eventNames: Option[java.util.List[String]],
+    targetEntityType: Option[Option[String]],
+    targetEntityId: Option[Option[String]],
+    startTime: Option[DateTime],
+    untilTime: Option[DateTime],
+    limit: Option[Integer],
+    latest: Boolean,
+    timeout: Duration): java.util.List[Event] = {
+
+    val eventNamesSeq = eventNames.map(JavaConversions.asScalaBuffer(_).toSeq)
+    val limitInt = limit.map(_.intValue())
+
+    JavaConversions.seqAsJavaList(
+      LEventStore.findByEntity(
+        appName,
+        entityType,
+        entityId,
+        channelName,
+        eventNamesSeq,
+        targetEntityType,
+        targetEntityId,
+        startTime,
+        untilTime,
+        limitInt,
+        latest,
+        timeout
+      ).toSeq)
+  }
+
+  /** Reads events generically. If entityType or entityId is not specified, it
+    * results in a full table scan.
+    *
+    * @param appName return events of this app
+    * @param entityType return events of this entityType
+    *   - None means no restriction on entityType
+    *   - Some(x) means entityType should match x.
+    * @param entityId return events of this entityId
+    *   - None means no restriction on entityId
+    *   - Some(x) means entityId should match x.
+    * @param channelName return events of this channel (default channel if it's None)
+    * @param eventNames return events with any of these event names.
+    * @param targetEntityType return events of this targetEntityType:
+    *   - None means no restriction on targetEntityType
+    *   - Some(None) means no targetEntityType for this event
+    *   - Some(Some(x)) means targetEntityType should match x.
+    * @param targetEntityId return events of this targetEntityId
+    *   - None means no restriction on targetEntityId
+    *   - Some(None) means no targetEntityId for this event
+    *   - Some(Some(x)) means targetEntityId should match x.
+    * @param startTime return events with eventTime >= startTime
+    * @param untilTime return events with eventTime < untilTime
+    * @param limit Limit number of events. Get all events if None or Some(-1)
+    * @return java.util.List[Event]
+    */
+  def find(
+    appName: String,
+    entityType: Option[String],
+    entityId: Option[String],
+    channelName: Option[String],
+    eventNames: Option[java.util.List[String]],
+    targetEntityType: Option[Option[String]],
+    targetEntityId: Option[Option[String]],
+    startTime: Option[DateTime],
+    untilTime: Option[DateTime],
+    limit: Option[Integer],
+    timeout: Duration): java.util.List[Event] = {
+
+    val eventNamesSeq = eventNames.map(JavaConversions.asScalaBuffer(_).toSeq)
+    val limitInt = limit.map(_.intValue())
+
+    JavaConversions.seqAsJavaList(
+      LEventStore.find(
+        appName,
+        entityType,
+        entityId,
+        channelName,
+        eventNamesSeq,
+        targetEntityType,
+        targetEntityId,
+        startTime,
+        untilTime,
+        limitInt,
+        timeout
+      ).toSeq)
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/data/src/main/scala/org/apache/predictionio/data/store/java/OptionHelper.scala
----------------------------------------------------------------------
diff --git a/data/src/main/scala/org/apache/predictionio/data/store/java/OptionHelper.scala b/data/src/main/scala/org/apache/predictionio/data/store/java/OptionHelper.scala
new file mode 100644
index 0000000..b6d174b
--- /dev/null
+++ b/data/src/main/scala/org/apache/predictionio/data/store/java/OptionHelper.scala
@@ -0,0 +1,29 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package org.apache.predictionio.data.store.java
+
+/** Used by Java-based engines to construct Scala's Some and None */
+object OptionHelper {
+  /** Mimics Scala's None for Java-based engines */
+  def none[T]: Option[T] = {
+    // return None directly; Option(null.asInstanceOf[T]) would yield
+    // Some(0) for primitive T, which does not mimic None
+    None
+  }
+
+  /** Mimics Scala's Some for Java-based engines */
+  def some[T](value: T): Option[T] = {
+    Some(value)
+  }
+}
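From Java these helpers stand in for Scala's Option constructors, e.g. OptionHelper.<String>none() and OptionHelper.some("android") when calling LJavaEventStore above. Shown in Scala for consistency with the rest of this patch:

    import org.apache.predictionio.data.store.java.OptionHelper

    val noChannel: Option[String] = OptionHelper.none[String]
    val channel: Option[String] = OptionHelper.some("android")  // placeholder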

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/data/src/main/scala/org/apache/predictionio/data/store/java/PJavaEventStore.scala
----------------------------------------------------------------------
diff --git a/data/src/main/scala/org/apache/predictionio/data/store/java/PJavaEventStore.scala b/data/src/main/scala/org/apache/predictionio/data/store/java/PJavaEventStore.scala
new file mode 100644
index 0000000..c47032c
--- /dev/null
+++ b/data/src/main/scala/org/apache/predictionio/data/store/java/PJavaEventStore.scala
@@ -0,0 +1,109 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package org.apache.predictionio.data.store.java
+
+import org.apache.predictionio.data.storage.Event
+import org.apache.predictionio.data.storage.PropertyMap
+import org.apache.predictionio.data.store.PEventStore
+import org.apache.spark.SparkContext
+import org.apache.spark.api.java.JavaRDD
+import org.joda.time.DateTime
+
+import scala.collection.JavaConversions
+
+/** This Java-friendly object provides a set of operations to access the
+  * Event Store with Spark's parallelization
+  */
+object PJavaEventStore {
+
+  /** Read events from Event Store
+    *
+    * @param appName return events of this app
+    * @param channelName return events of this channel (default channel if it's None)
+    * @param startTime return events with eventTime >= startTime
+    * @param untilTime return events with eventTime < untilTime
+    * @param entityType return events of this entityType
+    * @param entityId return events of this entityId
+    * @param eventNames return events with any of these event names.
+    * @param targetEntityType return events of this targetEntityType:
+    *   - None means no restriction on targetEntityType
+    *   - Some(None) means no targetEntityType for this event
+    *   - Some(Some(x)) means targetEntityType should match x.
+    * @param targetEntityId return events of this targetEntityId
+    *   - None means no restriction on targetEntityId
+    *   - Some(None) means no targetEntityId for this event
+    *   - Some(Some(x)) means targetEntityId should match x.
+    * @param sc Spark context
+    * @return JavaRDD[Event]
+    */
+  def find(
+    appName: String,
+    channelName: Option[String],
+    startTime: Option[DateTime],
+    untilTime: Option[DateTime],
+    entityType: Option[String],
+    entityId: Option[String],
+    eventNames: Option[java.util.List[String]],
+    targetEntityType: Option[Option[String]],
+    targetEntityId: Option[Option[String]],
+    sc: SparkContext): JavaRDD[Event] = {
+
+    val eventNamesSeq = eventNames.map(JavaConversions.asScalaBuffer(_).toSeq)
+
+    PEventStore.find(
+      appName,
+      channelName,
+      startTime,
+      untilTime,
+      entityType,
+      entityId,
+      eventNamesSeq,
+      targetEntityType,
+      targetEntityId
+    )(sc)
+  }
+
+  /** Aggregate properties of entities based on these special events:
+    * \$set, \$unset, \$delete events.
+    *
+    * @param appName use events of this app
+    * @param entityType aggregate properties of the entities of this entityType
+    * @param channelName use events of this channel (default channel if it's None)
+    * @param startTime use events with eventTime >= startTime
+    * @param untilTime use events with eventTime < untilTime
+    * @param required only keep entities with these required properties defined
+    * @param sc Spark context
+    * @return JavaRDD[(String, PropertyMap)] JavaRDD of entityId and PropertyMap pairs
+    */
+  def aggregateProperties(
+    appName: String,
+    entityType: String,
+    channelName: Option[String],
+    startTime: Option[DateTime],
+    untilTime: Option[DateTime],
+    required: Option[java.util.List[String]],
+    sc: SparkContext): JavaRDD[(String, PropertyMap)] = {
+
+    PEventStore.aggregateProperties(
+      appName,
+      entityType,
+      channelName,
+      startTime,
+      untilTime,
+      required.map(JavaConversions.asScalaBuffer(_).toSeq)
+    )(sc)
+  }
+
+}
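
A minimal sketch of driving the parallel store from Scala, assuming a live
SparkContext and an app named "MyApp" (both illustrative):

    import org.apache.predictionio.data.storage.Event
    import org.apache.predictionio.data.store.java.{OptionHelper, PJavaEventStore}
    import org.apache.spark.SparkContext
    import org.apache.spark.api.java.JavaRDD
    import org.joda.time.DateTime

    // Read every "buy" event of the app into a JavaRDD for distributed work.
    def readBuyEvents(sc: SparkContext): JavaRDD[Event] =
      PJavaEventStore.find(
        "MyApp",                                            // appName
        OptionHelper.none[String],                          // channelName
        OptionHelper.none[DateTime],                        // startTime
        OptionHelper.none[DateTime],                        // untilTime
        OptionHelper.none[String],                          // entityType
        OptionHelper.none[String],                          // entityId
        OptionHelper.some(java.util.Arrays.asList("buy")),  // eventNames
        OptionHelper.none[Option[String]],                  // targetEntityType
        OptionHelper.none[Option[String]],                  // targetEntityId
        sc)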

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/data/src/main/scala/org/apache/predictionio/data/store/package.scala
----------------------------------------------------------------------
diff --git a/data/src/main/scala/org/apache/predictionio/data/store/package.scala b/data/src/main/scala/org/apache/predictionio/data/store/package.scala
new file mode 100644
index 0000000..36c592f
--- /dev/null
+++ b/data/src/main/scala/org/apache/predictionio/data/store/package.scala
@@ -0,0 +1,21 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package org.apache.predictionio.data
+
+/** Provides high level interfaces to the Event Store from within a prediction
+  * engine.
+  */
+package object store {}

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/data/src/main/scala/org/apache/predictionio/data/view/DataView.scala
----------------------------------------------------------------------
diff --git a/data/src/main/scala/org/apache/predictionio/data/view/DataView.scala b/data/src/main/scala/org/apache/predictionio/data/view/DataView.scala
new file mode 100644
index 0000000..31937d5
--- /dev/null
+++ b/data/src/main/scala/org/apache/predictionio/data/view/DataView.scala
@@ -0,0 +1,110 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package org.apache.predictionio.data.view
+
+import org.apache.predictionio.annotation.Experimental
+import org.apache.predictionio.data.storage.Event
+
+import grizzled.slf4j.Logger
+import org.apache.predictionio.data.store.PEventStore
+
+import org.apache.spark.rdd.RDD
+import org.apache.spark.sql.DataFrame
+import org.apache.spark.sql.SQLContext
+import org.joda.time.DateTime
+
+import scala.reflect.ClassTag
+import scala.reflect.runtime.universe._
+import scala.util.hashing.MurmurHash3
+
+/**
+ * :: Experimental ::
+ */
+@Experimental
+object DataView {
+  /**
+    * :: Experimental ::
+    *
+    * Create a DataFrame from events of a specified app.
+    *
+    * @param appName return events of this app
+    * @param channelName use events of this channel (default channel if it's None)
+    * @param startTime return events with eventTime >= startTime
+    * @param untilTime return events with eventTime < untilTime
+    * @param conversionFunction a function that turns raw Events into events of interest.
+    *                           If conversionFunction returns None, such events are dropped.
+    * @param name identify the DataFrame created
+    * @param version used to track changes to the conversionFunction, e.g. version = "20150413"
+    *                and update whenever the function is changed.
+    * @param sqlContext SQL context
+    * @tparam E the output type of the conversion function. The type needs to extend Product
+    *           (e.g. case class)
+    * @return a DataFrame of events
+    */
+  @Experimental
+  def create[E <: Product: TypeTag: ClassTag](
+    appName: String,
+    channelName: Option[String] = None,
+    startTime: Option[DateTime] = None,
+    untilTime: Option[DateTime] = None,
+    conversionFunction: Event => Option[E],
+    name: String = "",
+    version: String = "")(sqlContext: SQLContext): DataFrame = {
+
+    @transient lazy val logger = Logger[this.type]
+
+    val sc = sqlContext.sparkContext
+
+    val beginTime = startTime match {
+      case Some(t) => t
+      case None => new DateTime(0L)
+    }
+    val endTime = untilTime match {
+      case Some(t) => t
+      case None => DateTime.now() // pin the current time so the cache key stays stable
+    }
+    // detect changes to the case class
+    val uid = java.io.ObjectStreamClass.lookup(implicitly[reflect.ClassTag[E]].runtimeClass)
+        .getSerialVersionUID
+    val hash = MurmurHash3.stringHash(s"$beginTime-$endTime-$version-$uid")
+    val baseDir = s"${sys.env("PIO_FS_BASEDIR")}/view"
+    val fileName = s"$baseDir/$name-$appName-$hash.parquet"
+    try {
+      sqlContext.parquetFile(fileName)
+    } catch {
+      case e: java.io.FileNotFoundException =>
+        logger.info("Cached copy not found, reading from DB.")
+        // no cached copy, so read events from storage and cache the result
+        val result: RDD[E] = PEventStore.find(
+            appName = appName,
+            channelName = channelName,
+            startTime = startTime,
+            untilTime = Some(endTime))(sc)
+          .flatMap((e) => conversionFunction(e))
+        import sqlContext.implicits._ // needed for RDD.toDF()
+        val resultDF = result.toDF()
+
+        resultDF.saveAsParquetFile(fileName)
+        sqlContext.parquetFile(fileName)
+      case e: java.lang.RuntimeException =>
+        if (e.toString.contains("is not a Parquet file")) {
+          logger.error(s"$fileName does not contain a valid Parquet file. " +
+            "Please delete it and try again.")
+        }
+        throw e
+    }
+  }
+}
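
A minimal sketch of the intended call pattern, assuming PIO_FS_BASEDIR is set
(the cache path depends on it) and using an illustrative Rating case class and
app name:

    import org.apache.predictionio.data.storage.Event
    import org.apache.predictionio.data.view.DataView
    import org.apache.spark.sql.{DataFrame, SQLContext}

    // Events of interest as a Product type, so Spark SQL can infer a schema.
    case class Rating(user: String, item: String, rating: Double)

    // Keep only "rate" events; returning None drops an event.
    def toRating(e: Event): Option[Rating] =
      if (e.event == "rate") {
        Some(Rating(e.entityId, e.targetEntityId.getOrElse(""),
          e.properties.get[Double]("rating")))
      } else None

    def ratings(sqlContext: SQLContext): DataFrame =
      DataView.create(
        appName = "MyApp",
        conversionFunction = toRating,
        name = "ratings",
        version = "20150413")(sqlContext)

Bumping version whenever toRating changes gives the cached Parquet file a new
hash, so stale caches are never reused.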



[17/34] incubator-predictionio git commit: rename all except examples

Posted by do...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/core/src/test/scala/org/apache/predictionio/workflow/EvaluationWorkflowTest.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/predictionio/workflow/EvaluationWorkflowTest.scala b/core/src/test/scala/org/apache/predictionio/workflow/EvaluationWorkflowTest.scala
new file mode 100644
index 0000000..2d5939f
--- /dev/null
+++ b/core/src/test/scala/org/apache/predictionio/workflow/EvaluationWorkflowTest.scala
@@ -0,0 +1,61 @@
+package org.apache.predictionio.workflow
+
+import org.apache.predictionio.controller._
+
+import org.scalatest.FunSuite
+import org.scalatest.Matchers._
+
+class EvaluationWorkflowSuite extends FunSuite with SharedSparkContext {
+
+  test("Evaluation return best engine params, simple result type: Double") {
+    val engine = new Engine1()
+    val ep0 = EngineParams(dataSourceParams = Engine1.DSP(0.2))
+    val ep1 = EngineParams(dataSourceParams = Engine1.DSP(0.3))
+    val ep2 = EngineParams(dataSourceParams = Engine1.DSP(0.3))
+    val ep3 = EngineParams(dataSourceParams = Engine1.DSP(-0.2))
+    val engineParamsList = Seq(ep0, ep1, ep2, ep3)
+
+    val evaluator = MetricEvaluator(new Metric0())
+
+    object Eval extends Evaluation {
+      engineEvaluator = (new Engine1(), MetricEvaluator(new Metric0()))
+    }
+
+    val result = EvaluationWorkflow.runEvaluation(
+      sc,
+      Eval,
+      engine,
+      engineParamsList,
+      evaluator,
+      WorkflowParams())
+
+    result.bestScore.score shouldBe 0.3
+    result.bestEngineParams shouldBe ep1
+  }
+
+  test("Evaluation return best engine params, complex result type") {
+    val engine = new Engine1()
+    val ep0 = EngineParams(dataSourceParams = Engine1.DSP(0.2))
+    val ep1 = EngineParams(dataSourceParams = Engine1.DSP(0.3))
+    val ep2 = EngineParams(dataSourceParams = Engine1.DSP(0.3))
+    val ep3 = EngineParams(dataSourceParams = Engine1.DSP(-0.2))
+    val engineParamsList = Seq(ep0, ep1, ep2, ep3)
+
+    val evaluator = MetricEvaluator(new Metric1())
+
+    object Eval extends Evaluation {
+      engineEvaluator = (new Engine1(), MetricEvaluator(new Metric1()))
+    }
+
+    val result = EvaluationWorkflow.runEvaluation(
+      sc,
+      Eval,
+      engine,
+      engineParamsList,
+      evaluator,
+      WorkflowParams())
+
+    result.bestScore.score shouldBe Metric1.Result(0, 0.3)
+    result.bestEngineParams shouldBe ep1
+  }
+}
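
Since engineParamsList is an ordinary Seq, a parameter sweep can be generated
instead of enumerated by hand; a small sketch reusing the Engine1.DSP test
fixture from the controller test helpers (the fixture itself is not shown in
this diff):

    import org.apache.predictionio.controller.EngineParams

    // runEvaluation scores every candidate and returns the EngineParams
    // whose metric score is highest (0.3 for the values below).
    val engineParamsList: Seq[EngineParams] =
      Seq(-0.2, 0.2, 0.3).map(v => EngineParams(dataSourceParams = Engine1.DSP(v)))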

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/core/src/test/scala/org/apache/predictionio/workflow/JsonExtractorSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/predictionio/workflow/JsonExtractorSuite.scala b/core/src/test/scala/org/apache/predictionio/workflow/JsonExtractorSuite.scala
new file mode 100644
index 0000000..217f416
--- /dev/null
+++ b/core/src/test/scala/org/apache/predictionio/workflow/JsonExtractorSuite.scala
@@ -0,0 +1,383 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package org.apache.predictionio.workflow
+
+import org.apache.predictionio.controller.EngineParams
+import org.apache.predictionio.controller.Params
+import org.apache.predictionio.controller.Utils
+import org.json4s.CustomSerializer
+import org.json4s.JsonAST.JField
+import org.json4s.JsonAST.JObject
+import org.json4s.JsonAST.JString
+import org.json4s.MappingException
+import org.json4s.native.JsonMethods.compact
+import org.json4s.native.JsonMethods.render
+import org.scalatest.FunSuite
+import org.scalatest.Matchers
+
+class JsonExtractorSuite extends FunSuite with Matchers {
+
+  test("Extract Scala object using option Json4sNative works with optional and default value " +
+    "provided") {
+
+    val json = """{"string": "query string", "optional": "optional string", "default": "d"}"""
+
+    val query = JsonExtractor.extract(
+      JsonExtractorOption.Json4sNative,
+      json,
+      classOf[ScalaQuery])
+
+    query should be (ScalaQuery("query string", Some("optional string"), "d"))
+  }
+
+  test("Extract Scala object using option Json4sNative works with no optional and no default " +
+    "value provided") {
+
+    val json = """{"string": "query string"}"""
+
+    val query = JsonExtractor.extract(
+      JsonExtractorOption.Json4sNative,
+      json,
+      classOf[ScalaQuery])
+
+    query should be (ScalaQuery("query string", None, "default"))
+  }
+
+  test("Extract Scala object using option Json4sNative works with null optional and null default" +
+    " value") {
+
+    val json = """{"string": "query string", "optional": null, "default": null}"""
+
+    val query = JsonExtractor.extract(
+      JsonExtractorOption.Json4sNative,
+      json,
+      classOf[ScalaQuery])
+
+    query should be (ScalaQuery("query string", None, "default"))
+  }
+
+  test("Extract Scala object using option Both works with optional and default value provided") {
+
+    val json = """{"string": "query string", "optional": "optional string", "default": "d"}"""
+
+    val query = JsonExtractor.extract(
+      JsonExtractorOption.Both,
+      json,
+      classOf[ScalaQuery])
+
+    query should be (ScalaQuery("query string", Some("optional string"), "d"))
+  }
+
+  test("Extract Scala object using option Both works with no optional and no default value " +
+    "provided") {
+
+    val json = """{"string": "query string"}"""
+
+    val query = JsonExtractor.extract(
+      JsonExtractorOption.Both,
+      json,
+      classOf[ScalaQuery])
+
+    query should be (ScalaQuery("query string", None, "default"))
+  }
+
+  test("Extract Scala object using option Both works with null optional and null default value") {
+
+    val json = """{"string": "query string", "optional": null, "default": null}"""
+
+    val query = JsonExtractor.extract(
+      JsonExtractorOption.Both,
+      json,
+      classOf[ScalaQuery])
+
+    query should be (ScalaQuery("query string", None, "default"))
+  }
+
+  test("Extract Scala object using option Gson should not get default value and optional none" +
+    " value") {
+
+    val json = """{"string": "query string"}"""
+    val query = JsonExtractor.extract(
+      JsonExtractorOption.Gson,
+      json,
+      classOf[ScalaQuery])
+
+    query should be (ScalaQuery("query string", null, null))
+  }
+
+  test("Extract Scala object using option Gson should throw an exception with optional " +
+    "value provided") {
+
+    val json = """{"string": "query string", "optional": "o", "default": "d"}"""
+    intercept[RuntimeException] {
+      JsonExtractor.extract(
+        JsonExtractorOption.Gson,
+        json,
+        classOf[ScalaQuery])
+    }
+  }
+
+  test("Extract Java object using option Gson works") {
+
+    val json = """{"q": "query string"}"""
+
+    val query = JsonExtractor.extract(
+      JsonExtractorOption.Gson,
+      json,
+      classOf[JavaQuery])
+
+    query should be (new JavaQuery("query string"))
+  }
+
+  test("Extract Java object using option Both works") {
+
+    val json = """{"q": "query string"}"""
+
+    val query = JsonExtractor.extract(
+      JsonExtractorOption.Both,
+      json,
+      classOf[JavaQuery])
+
+    query should be (new JavaQuery("query string"))
+  }
+
+  test("Extract Java object using option Json4sNative should throw an exception") {
+
+    val json = """{"q": "query string"}"""
+
+    intercept[MappingException] {
+      JsonExtractor.extract(
+        JsonExtractorOption.Json4sNative,
+        json,
+        classOf[JavaQuery])
+    }
+  }
+
+  test("Extract Scala object using option Json4sNative with custom deserializer") {
+    val json = """{"string": "query string", "optional": "o", "default": "d"}"""
+
+    val query = JsonExtractor.extract(
+      JsonExtractorOption.Json4sNative,
+      json,
+      classOf[ScalaQuery],
+      Utils.json4sDefaultFormats + new UpperCaseFormat
+    )
+
+    query should be(ScalaQuery("QUERY STRING", Some("O"), "D"))
+  }
+
+  test("Extract Java object usingoption Gson with custom deserializer") {
+    val json = """{"q": "query string"}"""
+
+    val query = JsonExtractor.extract(
+      extractorOption = JsonExtractorOption.Gson,
+      json = json,
+      clazz = classOf[JavaQuery],
+      gsonTypeAdapterFactories = Seq(new JavaQueryTypeAdapterFactory)
+    )
+
+    query should be(new JavaQuery("QUERY STRING"))
+  }
+
+  test("Java object to JValue using option Both works") {
+    val query = new JavaQuery("query string")
+    val jValue = JsonExtractor.toJValue(JsonExtractorOption.Both, query)
+
+    compact(render(jValue)) should be ("""{"q":"query string"}""")
+  }
+
+  test("Java object to JValue using option Gson works") {
+    val query = new JavaQuery("query string")
+    val jValue = JsonExtractor.toJValue(JsonExtractorOption.Gson, query)
+
+    compact(render(jValue)) should be ("""{"q":"query string"}""")
+  }
+
+  test("Java object to JValue using option Json4sNative results in empty Json") {
+    val query = new JavaQuery("query string")
+    val jValue = JsonExtractor.toJValue(JsonExtractorOption.Json4sNative, query)
+
+    compact(render(jValue)) should be ("""{}""")
+  }
+
+  test("Scala object to JValue using option Both works") {
+    val query = new ScalaQuery("query string", Some("option"))
+    val jValue = JsonExtractor.toJValue(JsonExtractorOption.Both, query)
+
+    compact(render(jValue)) should
+      be ("""{"string":"query string","optional":"option","default":"default"}""")
+  }
+
+  test("Scala object to JValue using option Gson does not serialize optional") {
+    val query = new ScalaQuery("query string", Some("option"))
+    val jValue = JsonExtractor.toJValue(JsonExtractorOption.Gson, query)
+
+    compact(render(jValue)) should
+      be ("""{"string":"query string","optional":{},"default":"default"}""")
+  }
+
+  test("Scala object to JValue using option Json4sNative works") {
+    val query = new ScalaQuery("query string", Some("option"))
+    val jValue = JsonExtractor.toJValue(JsonExtractorOption.Json4sNative, query)
+
+    compact(render(jValue)) should
+      be ("""{"string":"query string","optional":"option","default":"default"}""")
+  }
+
+  test("Scala object to JValue using option Json4sNative with custom serializer") {
+    val query = new ScalaQuery("query string", Some("option"))
+    val jValue = JsonExtractor.toJValue(
+      JsonExtractorOption.Json4sNative,
+      query,
+      Utils.json4sDefaultFormats + new UpperCaseFormat
+    )
+
+    compact(render(jValue)) should
+      be ("""{"string":"QUERY STRING","optional":"OPTION","default":"DEFAULT"}""")
+  }
+
+  test("Java object to JValue using option Gson with custom serializer") {
+    val query = new JavaQuery("query string")
+    val jValue = JsonExtractor.toJValue(
+      extractorOption = JsonExtractorOption.Gson,
+      o = query,
+      gsonTypeAdapterFactories = Seq(new JavaQueryTypeAdapterFactory)
+    )
+
+    compact(render(jValue)) should be ("""{"q":"QUERY STRING"}""")
+  }
+
+  test("Java Param to Json using option Both") {
+    val param = ("algo", new JavaParams("parameter"))
+    val json = JsonExtractor.paramToJson(JsonExtractorOption.Both, param)
+
+    json should be ("""{"algo":{"p":"parameter"}}""")
+  }
+
+  test("Java Param to Json using option Gson") {
+    val param = ("algo", new JavaParams("parameter"))
+    val json = JsonExtractor.paramToJson(JsonExtractorOption.Gson, param)
+
+    json should be ("""{"algo":{"p":"parameter"}}""")
+  }
+
+  test("Scala Param to Json using option Both") {
+    val param = ("algo", AlgorithmParams("parameter"))
+    val json = JsonExtractor.paramToJson(JsonExtractorOption.Both, param)
+
+    json should be ("""{"algo":{"a":"parameter"}}""")
+  }
+
+  test("Scala Param to Json using option Json4sNative") {
+    val param = ("algo", AlgorithmParams("parameter"))
+    val json = JsonExtractor.paramToJson(JsonExtractorOption.Json4sNative, param)
+
+    json should be ("""{"algo":{"a":"parameter"}}""")
+  }
+
+  test("Java Params to Json using option Both") {
+    val params = Seq(("algo", new JavaParams("parameter")), ("algo2", new JavaParams("parameter2")))
+    val json = JsonExtractor.paramsToJson(JsonExtractorOption.Both, params)
+
+    json should be ("""[{"algo":{"p":"parameter"}},{"algo2":{"p":"parameter2"}}]""")
+  }
+
+  test("Java Params to Json using option Gson") {
+    val params = Seq(("algo", new JavaParams("parameter")), ("algo2", new JavaParams("parameter2")))
+    val json = JsonExtractor.paramsToJson(JsonExtractorOption.Gson, params)
+
+    json should be ("""[{"algo":{"p":"parameter"}},{"algo2":{"p":"parameter2"}}]""")
+  }
+
+  test("Scala Params to Json using option Both") {
+    val params =
+      Seq(("algo", AlgorithmParams("parameter")), ("algo2", AlgorithmParams("parameter2")))
+    val json = JsonExtractor.paramsToJson(JsonExtractorOption.Both, params)
+
+    json should be (org.json4s.native.Serialization.write(params)(Utils.json4sDefaultFormats))
+  }
+
+  test("Scala Params to Json using option Json4sNative") {
+    val params =
+      Seq(("algo", AlgorithmParams("parameter")), ("algo2", AlgorithmParams("parameter2")))
+    val json = JsonExtractor.paramsToJson(JsonExtractorOption.Json4sNative, params)
+
+    json should be (org.json4s.native.Serialization.write(params)(Utils.json4sDefaultFormats))
+  }
+
+  test("Mixed Java and Scala Params to Json using option Both") {
+    val params =
+      Seq(("scala", AlgorithmParams("parameter")), ("java", new JavaParams("parameter2")))
+    val json = JsonExtractor.paramsToJson(JsonExtractorOption.Both, params)
+
+    json should be ("""[{"scala":{"a":"parameter"}},{"java":{"p":"parameter2"}}]""")
+  }
+
+  test("Serializing Scala EngineParams works using option Json4sNative") {
+    val ep = new EngineParams(
+      dataSourceParams = ("ds", DataSourceParams("dsp")),
+      algorithmParamsList = Seq(("a0", AlgorithmParams("ap"))))
+
+    val json = JsonExtractor.engineParamsToJson(JsonExtractorOption.Json4sNative, ep)
+
+    json should be (
+      """{"dataSourceParams":{"ds":{"a":"dsp"}},"preparatorParams":{"":{}},""" +
+        """"algorithmParamsList":[{"a0":{"a":"ap"}}],"servingParams":{"":{}}}""")
+  }
+
+  test("Serializing Java EngineParams works using option Gson") {
+    val ep = new EngineParams(
+      dataSourceParams = ("ds", new JavaParams("dsp")),
+      algorithmParamsList = Seq(("a0", new JavaParams("ap")), ("a1", new JavaParams("ap2"))))
+
+    val json = JsonExtractor.engineParamsToJson(JsonExtractorOption.Gson, ep)
+
+    json should be (
+      """{"dataSourceParams":{"ds":{"p":"dsp"}},"preparatorParams":{"":{}},""" +
+        """"algorithmParamsList":[{"a0":{"p":"ap"}},{"a1":{"p":"ap2"}}],"servingParams":{"":{}}}""")
+  }
+
+  test("Serializing Java EngineParams works using option Both") {
+    val ep = new EngineParams(
+      dataSourceParams = ("ds", new JavaParams("dsp")),
+      algorithmParamsList = Seq(("a0", new JavaParams("ap")), ("a1", new JavaParams("ap2"))))
+
+    val json = JsonExtractor.engineParamsToJson(JsonExtractorOption.Both, ep)
+
+    json should be (
+      """{"dataSourceParams":{"ds":{"p":"dsp"}},"preparatorParams":{"":{}},""" +
+        """"algorithmParamsList":[{"a0":{"p":"ap"}},{"a1":{"p":"ap2"}}],"servingParams":{"":{}}}""")
+  }
+}
+
+private case class AlgorithmParams(a: String) extends Params
+
+private case class DataSourceParams(a: String) extends Params
+
+private case class ScalaQuery(string: String, optional: Option[String], default: String = "default")
+
+private class UpperCaseFormat extends CustomSerializer[ScalaQuery](format => ( {
+  case JObject(JField("string", JString(string)) ::
+    JField("optional", JString(optional)) ::
+    JField("default", JString(default)) ::
+    Nil) => ScalaQuery(string.toUpperCase, Some(optional.toUpperCase), default.toUpperCase)
+}, {
+  case x: ScalaQuery =>
+    JObject(
+      JField("string", JString(x.string.toUpperCase)),
+      JField("optional", JString(x.optional.get.toUpperCase)),
+      JField("default", JString(x.default.toUpperCase)))
+}))
\ No newline at end of file
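
As a usage note, the asymmetry this suite pins down matters when choosing an
extractor: Json4s understands Scala Option and default parameters, while Gson
does not. A minimal sketch within this suite's scope (ScalaQuery is the private
case class defined above):

    import org.apache.predictionio.workflow.{JsonExtractor, JsonExtractorOption}

    // A missing "optional" becomes None and a missing "default" takes its
    // declared value when extracted with Json4sNative.
    val q = JsonExtractor.extract(
      JsonExtractorOption.Json4sNative,
      """{"string": "query string"}""",
      classOf[ScalaQuery])
    assert(q == ScalaQuery("query string", None, "default"))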

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/data/src/main/scala/io/prediction/data/Utils.scala
----------------------------------------------------------------------
diff --git a/data/src/main/scala/io/prediction/data/Utils.scala b/data/src/main/scala/io/prediction/data/Utils.scala
deleted file mode 100644
index 78b71cc..0000000
--- a/data/src/main/scala/io/prediction/data/Utils.scala
+++ /dev/null
@@ -1,50 +0,0 @@
-/** Copyright 2015 TappingStone, Inc.
-  *
-  * Licensed under the Apache License, Version 2.0 (the "License");
-  * you may not use this file except in compliance with the License.
-  * You may obtain a copy of the License at
-  *
-  *     http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-package io.prediction.data
-
-import org.joda.time.DateTime
-import org.joda.time.format.ISODateTimeFormat
-
-import java.lang.IllegalArgumentException
-
-private[prediction] object Utils {
-
-  // use dateTime() for strict ISO8601 format
-  val dateTimeFormatter = ISODateTimeFormat.dateTime().withOffsetParsed()
-
-  val dateTimeNoMillisFormatter =
-    ISODateTimeFormat.dateTimeNoMillis().withOffsetParsed()
-
-  def stringToDateTime(dt: String): DateTime = {
-    // We accept two formats.
-    // 1. "yyyy-MM-dd'T'HH:mm:ss.SSSZZ"
-    // 2. "yyyy-MM-dd'T'HH:mm:ssZZ"
-    // The first one also takes milliseconds into account.
-    try {
-      // formatting for "yyyy-MM-dd'T'HH:mm:ss.SSSZZ"
-      dateTimeFormatter.parseDateTime(dt)
-    } catch {
-      case e: IllegalArgumentException => {
-        // handle when the datetime string doesn't specify milliseconds.
-        dateTimeNoMillisFormatter.parseDateTime(dt)
-      }
-    }
-  }
-
-  def dateTimeToString(dt: DateTime): String = dateTimeFormatter.print(dt)
-    // dt.toString
-
-}
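
For reference, the removed helper accepted both ISO 8601 variants; a minimal
sketch of its behavior (callable only from within the io.prediction package,
since Utils is package-private):

    import io.prediction.data.Utils

    // Both forms parse to the same instant; the millisecond format is tried
    // first, then the no-millisecond fallback.
    val withMillis    = Utils.stringToDateTime("2015-04-13T21:39:45.618Z")
    val withoutMillis = Utils.stringToDateTime("2015-04-13T21:39:45Z")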

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/data/src/main/scala/io/prediction/data/api/Common.scala
----------------------------------------------------------------------
diff --git a/data/src/main/scala/io/prediction/data/api/Common.scala b/data/src/main/scala/io/prediction/data/api/Common.scala
deleted file mode 100644
index 6681a1d..0000000
--- a/data/src/main/scala/io/prediction/data/api/Common.scala
+++ /dev/null
@@ -1,80 +0,0 @@
-/** Copyright 2015 TappingStone, Inc.
-  *
-  * Licensed under the Apache License, Version 2.0 (the "License");
-  * you may not use this file except in compliance with the License.
-  * You may obtain a copy of the License at
-  *
-  *     http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-package io.prediction.data.api
-
-import io.prediction.data.webhooks.ConnectorException
-import io.prediction.data.storage.StorageException
-
-import spray.routing._
-import spray.routing.Directives._
-import spray.routing.Rejection
-import spray.http.StatusCodes
-import spray.http.StatusCode
-import spray.httpx.Json4sSupport
-
-import org.json4s.Formats
-import org.json4s.DefaultFormats
-
-object Common {
-
-  object Json4sProtocol extends Json4sSupport {
-    implicit def json4sFormats: Formats = DefaultFormats
-  }
-
-  import Json4sProtocol._
-
-  val rejectionHandler = RejectionHandler {
-    case MalformedRequestContentRejection(msg, _) :: _ =>
-      complete(StatusCodes.BadRequest, Map("message" -> msg))
-    case MissingQueryParamRejection(msg) :: _ =>
-      complete(StatusCodes.NotFound,
-        Map("message" -> s"missing required query parameter ${msg}."))
-    case AuthenticationFailedRejection(cause, challengeHeaders) :: _ => {
-      val msg = cause match {
-        case AuthenticationFailedRejection.CredentialsRejected =>
-          "Invalid accessKey."
-        case AuthenticationFailedRejection.CredentialsMissing =>
-          "Missing accessKey."
-      }
-      complete(StatusCodes.Unauthorized, challengeHeaders, Map("message" -> msg))
-    }
-    case ChannelRejection(msg) :: _ =>
-      complete(StatusCodes.Unauthorized, Map("message" -> msg))
-    case NonExistentAppRejection(msg) :: _ =>
-      complete(StatusCodes.Unauthorized, Map("message" -> msg))
-  }
-
-  val exceptionHandler = ExceptionHandler {
-    case e: ConnectorException => {
-      val msg = s"${e.getMessage()}"
-      complete(StatusCodes.BadRequest, Map("message" -> msg))
-    }
-    case e: StorageException => {
-      val msg = s"${e.getMessage()}"
-      complete(StatusCodes.InternalServerError, Map("message" -> msg))
-    }
-    case e: Exception => {
-      val msg = s"${e.getMessage()}"
-      complete(StatusCodes.InternalServerError, Map("message" -> msg))
-    }
-  }
-}
-
-/** invalid channel */
-case class ChannelRejection(msg: String) extends Rejection
-
-/** the app doesn't exist */
-case class NonExistentAppRejection(msg: String) extends Rejection
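
For reference, a route raising one of these rejections produced a JSON error
body via Common.rejectionHandler; a minimal sketch (the "check" path is
illustrative):

    import io.prediction.data.api.{ChannelRejection, Common}
    import spray.routing.Directives._

    // Rejecting with ChannelRejection yields 401 {"message": "Invalid channel 'x'."}
    val route =
      handleRejections(Common.rejectionHandler) {
        path("check") {
          reject(ChannelRejection("Invalid channel 'x'."))
        }
      }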

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/data/src/main/scala/io/prediction/data/api/EventInfo.scala
----------------------------------------------------------------------
diff --git a/data/src/main/scala/io/prediction/data/api/EventInfo.scala b/data/src/main/scala/io/prediction/data/api/EventInfo.scala
deleted file mode 100644
index 1e324c2..0000000
--- a/data/src/main/scala/io/prediction/data/api/EventInfo.scala
+++ /dev/null
@@ -1,24 +0,0 @@
-/** Copyright 2015 TappingStone, Inc.
-  *
-  * Licensed under the Apache License, Version 2.0 (the "License");
-  * you may not use this file except in compliance with the License.
-  * You may obtain a copy of the License at
-  *
-  *     http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-package io.prediction.data.api
-
-import io.prediction.data.storage.Event
-
-case class EventInfo(
-  appId: Int,
-  channelId: Option[Int],
-  event: Event)
-

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/data/src/main/scala/io/prediction/data/api/EventServer.scala
----------------------------------------------------------------------
diff --git a/data/src/main/scala/io/prediction/data/api/EventServer.scala b/data/src/main/scala/io/prediction/data/api/EventServer.scala
deleted file mode 100644
index 139f964..0000000
--- a/data/src/main/scala/io/prediction/data/api/EventServer.scala
+++ /dev/null
@@ -1,640 +0,0 @@
-/** Copyright 2015 TappingStone, Inc.
-  *
-  * Licensed under the Apache License, Version 2.0 (the "License");
-  * you may not use this file except in compliance with the License.
-  * You may obtain a copy of the License at
-  *
-  *     http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-package io.prediction.data.api
-
-import akka.event.Logging
-import sun.misc.BASE64Decoder
-
-import java.util.concurrent.TimeUnit
-
-import akka.actor._
-import akka.io.IO
-import akka.pattern.ask
-import akka.util.Timeout
-import io.prediction.data.Utils
-import io.prediction.data.storage.AccessKeys
-import io.prediction.data.storage.Channels
-import io.prediction.data.storage.DateTimeJson4sSupport
-import io.prediction.data.storage.Event
-import io.prediction.data.storage.EventJson4sSupport
-import io.prediction.data.storage.BatchEventsJson4sSupport
-import io.prediction.data.storage.LEvents
-import io.prediction.data.storage.Storage
-import org.json4s.DefaultFormats
-import org.json4s.Formats
-import org.json4s.JObject
-import org.json4s.native.JsonMethods.parse
-import spray.can.Http
-import spray.http.FormData
-import spray.http.MediaTypes
-import spray.http.StatusCodes
-import spray.httpx.Json4sSupport
-import spray.routing._
-import spray.routing.authentication.Authentication
-
-import scala.concurrent.ExecutionContext
-import scala.concurrent.Future
-import scala.util.{Try, Success, Failure}
-
-class EventServiceActor(
-    val eventClient: LEvents,
-    val accessKeysClient: AccessKeys,
-    val channelsClient: Channels,
-    val config: EventServerConfig) extends HttpServiceActor {
-
-  object Json4sProtocol extends Json4sSupport {
-    implicit def json4sFormats: Formats = DefaultFormats +
-      new EventJson4sSupport.APISerializer +
-      new BatchEventsJson4sSupport.APISerializer +
-      // NOTE: don't use Json4s JodaTimeSerializers since it has issues,
-      // some format not converted, or timezone not correct
-      new DateTimeJson4sSupport.Serializer
-  }
-
-
-  val MaxNumberOfEventsPerBatchRequest = 50
-
-  val logger = Logging(context.system, this)
-
-  // we use the enclosing ActorContext's or ActorSystem's dispatcher for our
-  // Futures
-  implicit def executionContext: ExecutionContext = context.dispatcher
-
-  implicit val timeout = Timeout(5, TimeUnit.SECONDS)
-
-  val rejectionHandler = Common.rejectionHandler
-
-  val jsonPath = """(.+)\.json$""".r
-  val formPath = """(.+)\.form$""".r
-
-  val pluginContext = EventServerPluginContext(logger)
-
-  private lazy val base64Decoder = new BASE64Decoder
-
-  case class AuthData(appId: Int, channelId: Option[Int], events: Seq[String])
-
-  /* with accessKey in query/header, return appId if succeed */
-  def withAccessKey: RequestContext => Future[Authentication[AuthData]] = {
-    ctx: RequestContext =>
-      val accessKeyParamOpt = ctx.request.uri.query.get("accessKey")
-      val channelParamOpt = ctx.request.uri.query.get("channel")
-      Future {
-        // with accessKey in query, return appId if succeed
-        accessKeyParamOpt.map { accessKeyParam =>
-          accessKeysClient.get(accessKeyParam).map { k =>
-            channelParamOpt.map { ch =>
-              val channelMap =
-                channelsClient.getByAppid(k.appid)
-                .map(c => (c.name, c.id)).toMap
-              if (channelMap.contains(ch)) {
-                Right(AuthData(k.appid, Some(channelMap(ch)), k.events))
-              } else {
-                Left(ChannelRejection(s"Invalid channel '$ch'."))
-              }
-            }.getOrElse{
-              Right(AuthData(k.appid, None, k.events))
-            }
-          }.getOrElse(FailedAuth)
-        }.getOrElse {
-          // with accessKey in header, return appId if succeed
-          ctx.request.headers.find(_.name == "Authorization").map { authHeader ⇒
-            authHeader.value.split("Basic ") match {
-              case Array(_, value) ⇒
-                val appAccessKey =
-                  new String(base64Decoder.decodeBuffer(value)).trim.split(":")(0)
-                accessKeysClient.get(appAccessKey) match {
-                  case Some(k) ⇒ Right(AuthData(k.appid, None, k.events))
-                  case None ⇒ FailedAuth
-                }
-
-              case _ ⇒ FailedAuth
-            }
-          }.getOrElse(MissedAuth)
-        }
-      }
-  }
-
-  private val FailedAuth = Left(
-    AuthenticationFailedRejection(
-      AuthenticationFailedRejection.CredentialsRejected, List()
-    )
-  )
-
-  private val MissedAuth = Left(
-    AuthenticationFailedRejection(
-      AuthenticationFailedRejection.CredentialsMissing, List()
-    )
-  )
-
-  lazy val statsActorRef = actorRefFactory.actorSelection("/user/StatsActor")
-  lazy val pluginsActorRef = actorRefFactory.actorSelection("/user/PluginsActor")
-
-  val route: Route =
-    pathSingleSlash {
-      import Json4sProtocol._
-
-      get {
-        respondWithMediaType(MediaTypes.`application/json`) {
-          complete(Map("status" -> "alive"))
-        }
-      }
-    } ~
-    path("plugins.json") {
-      import Json4sProtocol._
-      get {
-        respondWithMediaType(MediaTypes.`application/json`) {
-          complete {
-            Map("plugins" -> Map(
-              "inputblockers" -> pluginContext.inputBlockers.map { case (n, p) =>
-                n -> Map(
-                  "name" -> p.pluginName,
-                  "description" -> p.pluginDescription,
-                  "class" -> p.getClass.getName)
-              },
-              "inputsniffers" -> pluginContext.inputSniffers.map { case (n, p) =>
-                n -> Map(
-                  "name" -> p.pluginName,
-                  "description" -> p.pluginDescription,
-                  "class" -> p.getClass.getName)
-              }
-            ))
-          }
-        }
-      }
-    } ~
-    path("plugins" / Segments) { segments =>
-      get {
-        handleExceptions(Common.exceptionHandler) {
-          authenticate(withAccessKey) { authData =>
-            respondWithMediaType(MediaTypes.`application/json`) {
-              complete {
-                val pluginArgs = segments.drop(2)
-                val pluginType = segments(0)
-                val pluginName = segments(1)
-                pluginType match {
-                  case EventServerPlugin.inputBlocker =>
-                    pluginContext.inputBlockers(pluginName).handleREST(
-                      authData.appId,
-                      authData.channelId,
-                      pluginArgs)
-                  case EventServerPlugin.inputSniffer =>
-                    pluginsActorRef ? PluginsActor.HandleREST(
-                      appId = authData.appId,
-                      channelId = authData.channelId,
-                      pluginName = pluginName,
-                      pluginArgs = pluginArgs) map {
-                      _.asInstanceOf[String]
-                    }
-                }
-              }
-            }
-          }
-        }
-      }
-    } ~
-    path("events" / jsonPath ) { eventId =>
-
-      import Json4sProtocol._
-
-      get {
-        handleExceptions(Common.exceptionHandler) {
-          handleRejections(rejectionHandler) {
-            authenticate(withAccessKey) { authData =>
-              val appId = authData.appId
-              val channelId = authData.channelId
-              respondWithMediaType(MediaTypes.`application/json`) {
-                complete {
-                  logger.debug(s"GET event ${eventId}.")
-                  val data = eventClient.futureGet(eventId, appId, channelId).map { eventOpt =>
-                    eventOpt.map( event =>
-                      (StatusCodes.OK, event)
-                    ).getOrElse(
-                      (StatusCodes.NotFound, Map("message" -> "Not Found"))
-                    )
-                  }
-                  data
-                }
-              }
-            }
-          }
-        }
-      } ~
-      delete {
-        handleExceptions(Common.exceptionHandler) {
-          handleRejections(rejectionHandler) {
-            authenticate(withAccessKey) { authData =>
-              val appId = authData.appId
-              val channelId = authData.channelId
-              respondWithMediaType(MediaTypes.`application/json`) {
-                complete {
-                  logger.debug(s"DELETE event ${eventId}.")
-                  val data = eventClient.futureDelete(eventId, appId, channelId).map { found =>
-                    if (found) {
-                      (StatusCodes.OK, Map("message" -> "Found"))
-                    } else {
-                      (StatusCodes.NotFound, Map("message" -> "Not Found"))
-                    }
-                  }
-                  data
-                }
-              }
-            }
-          }
-        }
-      }
-    } ~
-    path("events.json") {
-
-      import Json4sProtocol._
-
-      post {
-        handleExceptions(Common.exceptionHandler) {
-          handleRejections(rejectionHandler) {
-            authenticate(withAccessKey) { authData =>
-              val appId = authData.appId
-              val channelId = authData.channelId
-              val events = authData.events
-              entity(as[Event]) { event =>
-                complete {
-                  if (events.isEmpty || authData.events.contains(event.event)) {
-                    pluginContext.inputBlockers.values.foreach(
-                      _.process(EventInfo(
-                        appId = appId,
-                        channelId = channelId,
-                        event = event), pluginContext))
-                    val data = eventClient.futureInsert(event, appId, channelId).map { id =>
-                      pluginsActorRef ! EventInfo(
-                        appId = appId,
-                        channelId = channelId,
-                        event = event)
-                      val result = (StatusCodes.Created, Map("eventId" -> s"${id}"))
-                      if (config.stats) {
-                        statsActorRef ! Bookkeeping(appId, result._1, event)
-                      }
-                      result
-                    }
-                    data
-                  } else {
-                    (StatusCodes.Forbidden,
-                      Map("message" -> s"${event.event} events are not allowed"))
-                  }
-                }
-              }
-            }
-          }
-        }
-      } ~
-      get {
-        handleExceptions(Common.exceptionHandler) {
-          handleRejections(rejectionHandler) {
-            authenticate(withAccessKey) { authData =>
-              val appId = authData.appId
-              val channelId = authData.channelId
-              parameters(
-                'startTime.as[Option[String]],
-                'untilTime.as[Option[String]],
-                'entityType.as[Option[String]],
-                'entityId.as[Option[String]],
-                'event.as[Option[String]],
-                'targetEntityType.as[Option[String]],
-                'targetEntityId.as[Option[String]],
-                'limit.as[Option[Int]],
-                'reversed.as[Option[Boolean]]) {
-                (startTimeStr, untilTimeStr, entityType, entityId,
-                  eventName,  // only support one event name
-                  targetEntityType, targetEntityId,
-                  limit, reversed) =>
-                respondWithMediaType(MediaTypes.`application/json`) {
-                  complete {
-                    logger.debug(
-                      s"GET events of appId=${appId} " +
-                      s"st=${startTimeStr} ut=${untilTimeStr} " +
-                      s"et=${entityType} eid=${entityId} " +
-                      s"li=${limit} rev=${reversed} ")
-
-                    require(!((reversed == Some(true))
-                      && (entityType.isEmpty || entityId.isEmpty)),
-                      "the parameter reversed can only be used with" +
-                      " both entityType and entityId specified.")
-
-                    val parseTime = Future {
-                      val startTime = startTimeStr.map(Utils.stringToDateTime(_))
-                      val untilTime = untilTimeStr.map(Utils.stringToDateTime(_))
-                      (startTime, untilTime)
-                    }
-
-
-                    parseTime.flatMap { case (startTime, untilTime) =>
-                      val data = eventClient.futureFind(
-                        appId = appId,
-                        channelId = channelId,
-                        startTime = startTime,
-                        untilTime = untilTime,
-                        entityType = entityType,
-                        entityId = entityId,
-                        eventNames = eventName.map(List(_)),
-                        targetEntityType = targetEntityType.map(Some(_)),
-                        targetEntityId = targetEntityId.map(Some(_)),
-                        limit = limit.orElse(Some(20)),
-                        reversed = reversed)
-                        .map { eventIter =>
-                          if (eventIter.hasNext) {
-                            (StatusCodes.OK, eventIter.toArray)
-                          } else {
-                            (StatusCodes.NotFound,
-                              Map("message" -> "Not Found"))
-                          }
-                        }
-                      data
-                    }.recover {
-                      case e: Exception =>
-                        (StatusCodes.BadRequest, Map("message" -> s"${e}"))
-                    }
-                  }
-                }
-              }
-            }
-          }
-        }
-      }
-    } ~
-    path("batch" / "events.json") {
-
-      import Json4sProtocol._
-
-      post {
-        handleExceptions(Common.exceptionHandler) {
-          handleRejections(rejectionHandler) {
-            authenticate(withAccessKey) { authData =>
-              val appId = authData.appId
-              val channelId = authData.channelId
-              val allowedEvents = authData.events
-              val handleEvent: PartialFunction[Try[Event], Future[Map[String, Any]]] = {
-                case Success(event) => {
-                  if (allowedEvents.isEmpty || allowedEvents.contains(event.event)) {
-                    pluginContext.inputBlockers.values.foreach(
-                      _.process(EventInfo(
-                        appId = appId,
-                        channelId = channelId,
-                        event = event), pluginContext))
-                    val data = eventClient.futureInsert(event, appId, channelId).map { id =>
-                      pluginsActorRef ! EventInfo(
-                        appId = appId,
-                        channelId = channelId,
-                        event = event)
-                      val status = StatusCodes.Created
-                      val result = Map(
-                        "status" -> status.intValue,
-                        "eventId" -> s"${id}")
-                      if (config.stats) {
-                        statsActorRef ! Bookkeeping(appId, status, event)
-                      }
-                      result
-                    }.recover { case exception =>
-                      Map(
-                        "status" -> StatusCodes.InternalServerError.intValue,
-                        "message" -> s"${exception.getMessage()}")
-                    }
-                    data
-                  } else {
-                    Future.successful(Map(
-                      "status" -> StatusCodes.Forbidden.intValue,
-                      "message" -> s"${event.event} events are not allowed"))
-                  }
-                }
-                case Failure(exception) => {
-                  Future.successful(Map(
-                    "status" -> StatusCodes.BadRequest.intValue,
-                    "message" -> s"${exception.getMessage()}"))
-                }
-              }
-
-              entity(as[Seq[Try[Event]]]) { events =>
-                complete {
-                  if (events.length <= MaxNumberOfEventsPerBatchRequest) {
-                    Future.traverse(events)(handleEvent)
-                  } else {
-                    (StatusCodes.BadRequest,
-                      Map("message" -> (s"Batch request must have less than or equal to " +
-                        s"${MaxNumberOfEventsPerBatchRequest} events")))
-                  }
-                }
-              }
-            }
-          }
-        }
-      }
-    } ~
-    path("stats.json") {
-
-      import Json4sProtocol._
-
-      get {
-        handleExceptions(Common.exceptionHandler) {
-          handleRejections(rejectionHandler) {
-            authenticate(withAccessKey) { authData =>
-              val appId = authData.appId
-              respondWithMediaType(MediaTypes.`application/json`) {
-                if (config.stats) {
-                  complete {
-                    statsActorRef ? GetStats(appId) map {
-                      _.asInstanceOf[Map[String, StatsSnapshot]]
-                    }
-                  }
-                } else {
-                  complete(
-                    StatusCodes.NotFound,
-                    parse("""{"message": "To see stats, launch Event Server """ +
-                      """with --stats argument."}"""))
-                }
-              }
-            }
-          }
-        }
-      }  // stats.json get
-    } ~
-    path("webhooks" / jsonPath ) { web =>
-      import Json4sProtocol._
-
-      post {
-        handleExceptions(Common.exceptionHandler) {
-          handleRejections(rejectionHandler) {
-            authenticate(withAccessKey) { authData =>
-              val appId = authData.appId
-              val channelId = authData.channelId
-              respondWithMediaType(MediaTypes.`application/json`) {
-                entity(as[JObject]) { jObj =>
-                  complete {
-                    Webhooks.postJson(
-                      appId = appId,
-                      channelId = channelId,
-                      web = web,
-                      data = jObj,
-                      eventClient = eventClient,
-                      log = logger,
-                      stats = config.stats,
-                      statsActorRef = statsActorRef)
-                  }
-                }
-              }
-            }
-          }
-        }
-      } ~
-      get {
-        handleExceptions(Common.exceptionHandler) {
-          handleRejections(rejectionHandler) {
-            authenticate(withAccessKey) { authData =>
-              val appId = authData.appId
-              val channelId = authData.channelId
-              respondWithMediaType(MediaTypes.`application/json`) {
-                complete {
-                  Webhooks.getJson(
-                    appId = appId,
-                    channelId = channelId,
-                    web = web,
-                    log = logger)
-                }
-              }
-            }
-          }
-        }
-      }
-    } ~
-    path("webhooks" / formPath ) { web =>
-      post {
-        handleExceptions(Common.exceptionHandler) {
-          handleRejections(rejectionHandler) {
-            authenticate(withAccessKey) { authData =>
-              val appId = authData.appId
-              val channelId = authData.channelId
-              respondWithMediaType(MediaTypes.`application/json`) {
-                entity(as[FormData]){ formData =>
-                  // logger.debug(formData.toString)
-                  complete {
-                    // respond with JSON
-                    import Json4sProtocol._
-
-                    Webhooks.postForm(
-                      appId = appId,
-                      channelId = channelId,
-                      web = web,
-                      data = formData,
-                      eventClient = eventClient,
-                      log = logger,
-                      stats = config.stats,
-                      statsActorRef = statsActorRef)
-                  }
-                }
-              }
-            }
-          }
-        }
-      } ~
-      get {
-        handleExceptions(Common.exceptionHandler) {
-          handleRejections(rejectionHandler) {
-            authenticate(withAccessKey) { authData =>
-              val appId = authData.appId
-              val channelId = authData.channelId
-              respondWithMediaType(MediaTypes.`application/json`) {
-                complete {
-                  // respond with JSON
-                  import Json4sProtocol._
-
-                  Webhooks.getForm(
-                    appId = appId,
-                    channelId = channelId,
-                    web = web,
-                    log = logger)
-                }
-              }
-            }
-          }
-        }
-      }
-
-    }
-
-  def receive: Actor.Receive = runRoute(route)
-}
-
-
-
-/* message */
-case class StartServer(host: String, port: Int)
-
-class EventServerActor(
-    val eventClient: LEvents,
-    val accessKeysClient: AccessKeys,
-    val channelsClient: Channels,
-    val config: EventServerConfig) extends Actor with ActorLogging {
-  val child = context.actorOf(
-    Props(classOf[EventServiceActor],
-      eventClient,
-      accessKeysClient,
-      channelsClient,
-      config),
-    "EventServiceActor")
-  implicit val system = context.system
-
-  def receive: Actor.Receive = {
-    case StartServer(host, portNum) => {
-      IO(Http) ! Http.Bind(child, interface = host, port = portNum)
-    }
-    case m: Http.Bound => log.info("Bound received. EventServer is ready.")
-    case m: Http.CommandFailed => log.error("Command failed.")
-    case _ => log.error("Unknown message.")
-  }
-}
-
-case class EventServerConfig(
-  ip: String = "localhost",
-  port: Int = 7070,
-  plugins: String = "plugins",
-  stats: Boolean = false)
-
-object EventServer {
-  def createEventServer(config: EventServerConfig): Unit = {
-    implicit val system = ActorSystem("EventServerSystem")
-
-    val eventClient = Storage.getLEvents()
-    val accessKeysClient = Storage.getMetaDataAccessKeys()
-    val channelsClient = Storage.getMetaDataChannels()
-
-    val serverActor = system.actorOf(
-      Props(
-        classOf[EventServerActor],
-        eventClient,
-        accessKeysClient,
-        channelsClient,
-        config),
-      "EventServerActor"
-    )
-    if (config.stats) system.actorOf(Props[StatsActor], "StatsActor")
-    system.actorOf(Props[PluginsActor], "PluginsActor")
-    serverActor ! StartServer(config.ip, config.port)
-    system.awaitTermination()
-  }
-}
-
-object Run {
-  def main(args: Array[String]) {
-    EventServer.createEventServer(EventServerConfig(
-      ip = "0.0.0.0",
-      port = 7070))
-  }
-}
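
For reference, the server above can also be started programmatically; a minimal sketch using only the API defined in this file (createEventServer blocks on awaitTermination, and the object name here is illustrative):

import io.prediction.data.api.{EventServer, EventServerConfig}

object LocalEventServer {
  def main(args: Array[String]): Unit = {
    // Bind on all interfaces with hourly stats collection enabled;
    // createEventServer does not return until the actor system terminates.
    EventServer.createEventServer(EventServerConfig(
      ip = "0.0.0.0",
      port = 7070,
      stats = true))
  }
}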

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/data/src/main/scala/io/prediction/data/api/EventServerPlugin.scala
----------------------------------------------------------------------
diff --git a/data/src/main/scala/io/prediction/data/api/EventServerPlugin.scala b/data/src/main/scala/io/prediction/data/api/EventServerPlugin.scala
deleted file mode 100644
index a87fc84..0000000
--- a/data/src/main/scala/io/prediction/data/api/EventServerPlugin.scala
+++ /dev/null
@@ -1,33 +0,0 @@
-/** Copyright 2015 TappingStone, Inc.
-  *
-  * Licensed under the Apache License, Version 2.0 (the "License");
-  * you may not use this file except in compliance with the License.
-  * You may obtain a copy of the License at
-  *
-  *     http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-package io.prediction.data.api
-
-trait EventServerPlugin {
-  val pluginName: String
-  val pluginDescription: String
-  val pluginType: String
-
-  def start(context: EventServerPluginContext): Unit
-
-  def process(eventInfo: EventInfo, context: EventServerPluginContext)
-
-  def handleREST(appId: Int, channelId: Option[Int], arguments: Seq[String]): String
-}
-
-object EventServerPlugin {
-  val inputBlocker = "inputblocker"
-  val inputSniffer = "inputsniffer"
-}
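
A minimal sketch of an implementation of this trait (the class and its strings are illustrative, not part of this commit); pluginType decides which registry in EventServerPluginContext the plugin joins:

import io.prediction.data.api.{EventInfo, EventServerPlugin, EventServerPluginContext}

class LoggingSniffer extends EventServerPlugin {
  val pluginName = "logging-sniffer"
  val pluginDescription = "Logs every event the server accepts"
  val pluginType = EventServerPlugin.inputSniffer

  def start(context: EventServerPluginContext): Unit =
    context.log.info(s"$pluginName started")

  // Called for each accepted event when registered as an input sniffer.
  def process(eventInfo: EventInfo, context: EventServerPluginContext): Unit =
    context.log.info(s"sniffed: $eventInfo")

  // Answers ad-hoc REST queries routed to this plugin by PluginsActor.
  def handleREST(appId: Int, channelId: Option[Int], arguments: Seq[String]): String =
    s"""{"plugin":"$pluginName","appId":$appId}"""
}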

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/data/src/main/scala/io/prediction/data/api/EventServerPluginContext.scala
----------------------------------------------------------------------
diff --git a/data/src/main/scala/io/prediction/data/api/EventServerPluginContext.scala b/data/src/main/scala/io/prediction/data/api/EventServerPluginContext.scala
deleted file mode 100644
index 1d8d36e..0000000
--- a/data/src/main/scala/io/prediction/data/api/EventServerPluginContext.scala
+++ /dev/null
@@ -1,49 +0,0 @@
-/** Copyright 2015 TappingStone, Inc.
-  *
-  * Licensed under the Apache License, Version 2.0 (the "License");
-  * you may not use this file except in compliance with the License.
-  * You may obtain a copy of the License at
-  *
-  *     http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-package io.prediction.data.api
-
-import java.util.ServiceLoader
-
-import akka.event.LoggingAdapter
-import grizzled.slf4j.Logging
-
-import scala.collection.JavaConversions._
-import scala.collection.mutable
-
-class EventServerPluginContext(
-    val plugins: mutable.Map[String, mutable.Map[String, EventServerPlugin]],
-    val log: LoggingAdapter) {
-  def inputBlockers: Map[String, EventServerPlugin] =
-    plugins.getOrElse(EventServerPlugin.inputBlocker, Map()).toMap
-
-  def inputSniffers: Map[String, EventServerPlugin] =
-    plugins.getOrElse(EventServerPlugin.inputSniffer, Map()).toMap
-}
-
-object EventServerPluginContext extends Logging {
-  def apply(log: LoggingAdapter): EventServerPluginContext = {
-    val plugins = mutable.Map[String, mutable.Map[String, EventServerPlugin]](
-      EventServerPlugin.inputBlocker -> mutable.Map(),
-      EventServerPlugin.inputSniffer -> mutable.Map())
-    val serviceLoader = ServiceLoader.load(classOf[EventServerPlugin])
-    serviceLoader foreach { service =>
-      plugins(service.pluginType) += service.pluginName -> service
-    }
-    new EventServerPluginContext(
-      plugins,
-      log)
-  }
-}
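
Because discovery goes through java.util.ServiceLoader, a plugin jar must also ship a provider-configuration resource, or the loop above never sees it. For a hypothetical implementation class com.example.LoggingSniffer, that is a file on the classpath at

  META-INF/services/io.prediction.data.api.EventServerPlugin

containing one line with the implementation's fully qualified class name:

  com.example.LoggingSniffer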

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/data/src/main/scala/io/prediction/data/api/PluginsActor.scala
----------------------------------------------------------------------
diff --git a/data/src/main/scala/io/prediction/data/api/PluginsActor.scala b/data/src/main/scala/io/prediction/data/api/PluginsActor.scala
deleted file mode 100644
index 7883adf..0000000
--- a/data/src/main/scala/io/prediction/data/api/PluginsActor.scala
+++ /dev/null
@@ -1,52 +0,0 @@
-/** Copyright 2015 TappingStone, Inc.
-  *
-  * Licensed under the Apache License, Version 2.0 (the "License");
-  * you may not use this file except in compliance with the License.
-  * You may obtain a copy of the License at
-  *
-  *     http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-package io.prediction.data.api
-
-import akka.actor.Actor
-import akka.event.Logging
-
-class PluginsActor() extends Actor {
-  implicit val system = context.system
-  val log = Logging(system, this)
-
-  val pluginContext = EventServerPluginContext(log)
-
-  def receive: PartialFunction[Any, Unit] = {
-    case e: EventInfo =>
-      pluginContext.inputSniffers.values.foreach(_.process(e, pluginContext))
-    case h: PluginsActor.HandleREST =>
-      try {
-        sender() ! pluginContext.inputSniffers(h.pluginName).handleREST(
-          h.appId,
-          h.channelId,
-          h.pluginArgs)
-      } catch {
-        case e: Exception =>
-          sender() ! s"""{"message":"${e.getMessage}"}"""
-      }
-    case _ =>
-      log.error("Unknown message sent to Event Server input sniffer plugin host.")
-  }
-}
-
-object PluginsActor {
-  case class HandleREST(
-    pluginName: String,
-    appId: Int,
-    channelId: Option[Int],
-    pluginArgs: Seq[String])
-}
-
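
The actor replies with plain strings (the plugin's JSON on success, a {"message": ...} wrapper on error), so callers use the ask pattern. A sketch, assuming an ActorSystem named system in which EventServer.createEventServer has registered the actor under the name "PluginsActor", and a hypothetical plugin name:

import scala.concurrent.duration._
import akka.pattern.ask
import akka.util.Timeout

implicit val timeout = Timeout(5.seconds)
val pluginsActor = system.actorSelection("/user/PluginsActor")
val reply = (pluginsActor ? PluginsActor.HandleREST(
  pluginName = "logging-sniffer",
  appId = 1,
  channelId = None,
  pluginArgs = Seq("status"))).mapTo[String]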

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/data/src/main/scala/io/prediction/data/api/Stats.scala
----------------------------------------------------------------------
diff --git a/data/src/main/scala/io/prediction/data/api/Stats.scala b/data/src/main/scala/io/prediction/data/api/Stats.scala
deleted file mode 100644
index ca5f05e..0000000
--- a/data/src/main/scala/io/prediction/data/api/Stats.scala
+++ /dev/null
@@ -1,79 +0,0 @@
-/** Copyright 2015 TappingStone, Inc.
-  *
-  * Licensed under the Apache License, Version 2.0 (the "License");
-  * you may not use this file except in compliance with the License.
-  * You may obtain a copy of the License at
-  *
-  *     http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-package io.prediction.data.api
-
-import io.prediction.data.storage.Event
-
-import spray.http.StatusCode
-
-import scala.collection.mutable.{ HashMap => MHashMap }
-import scala.collection.mutable
-
-import com.github.nscala_time.time.Imports.DateTime
-
-case class EntityTypesEvent(
-  val entityType: String,
-  val targetEntityType: Option[String],
-  val event: String) {
-
-  def this(e: Event) = this(
-    e.entityType,
-    e.targetEntityType,
-    e.event)
-}
-
-case class KV[K, V](key: K, value: V)
-
-case class StatsSnapshot(
-  val startTime: DateTime,
-  val endTime: Option[DateTime],
-  val basic: Seq[KV[EntityTypesEvent, Long]],
-  val statusCode: Seq[KV[StatusCode, Long]]
-)
-
-
-class Stats(val startTime: DateTime) {
-  private[this] var _endTime: Option[DateTime] = None
-  var statusCodeCount = MHashMap[(Int, StatusCode), Long]().withDefaultValue(0L)
-  var eteCount = MHashMap[(Int, EntityTypesEvent), Long]().withDefaultValue(0L)
-
-  def cutoff(endTime: DateTime) {
-    _endTime = Some(endTime)
-  }
-
-  def update(appId: Int, statusCode: StatusCode, event: Event) {
-    statusCodeCount((appId, statusCode)) += 1
-    eteCount((appId, new EntityTypesEvent(event))) += 1
-  }
-
-  def extractByAppId[K, V](appId: Int, m: mutable.Map[(Int, K), V])
-  : Seq[KV[K, V]] = {
-    m
-    .toSeq
-    .flatMap { case (k, v) =>
-      if (k._1 == appId) { Seq(KV(k._2, v)) } else { Seq() }
-    }
-  }
-
-  def get(appId: Int): StatsSnapshot = {
-    StatsSnapshot(
-      startTime,
-      _endTime,
-      extractByAppId(appId, eteCount),
-      extractByAppId(appId, statusCodeCount)
-    )
-  }
-}
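
The class is a plain accumulator keyed by app ID. A minimal sketch of one update/read cycle (this assumes the optional fields of storage.Event carry defaults, so only the three mandatory fields are given):

import io.prediction.data.storage.Event
import spray.http.StatusCodes
import com.github.nscala_time.time.Imports.DateTime

val stats = new Stats(DateTime.now)
val event = Event(event = "rate", entityType = "user", entityId = "u1")
stats.update(appId = 1, statusCode = StatusCodes.Created, event = event)
val snapshot = stats.get(1)  // only the KV pairs recorded for app 1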

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/data/src/main/scala/io/prediction/data/api/StatsActor.scala
----------------------------------------------------------------------
diff --git a/data/src/main/scala/io/prediction/data/api/StatsActor.scala b/data/src/main/scala/io/prediction/data/api/StatsActor.scala
deleted file mode 100644
index 857352f..0000000
--- a/data/src/main/scala/io/prediction/data/api/StatsActor.scala
+++ /dev/null
@@ -1,74 +0,0 @@
-/** Copyright 2015 TappingStone, Inc.
-  *
-  * Licensed under the Apache License, Version 2.0 (the "License");
-  * you may not use this file except in compliance with the License.
-  * You may obtain a copy of the License at
-  *
-  *     http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-package io.prediction.data.api
-
-import io.prediction.data.storage.Event
-
-import spray.http.StatusCode
-
-import akka.actor.Actor
-import akka.event.Logging
-
-import com.github.nscala_time.time.Imports.DateTime
-
-/* message to StatsActor */
-case class Bookkeeping(val appId: Int, statusCode: StatusCode, event: Event)
-
-/* message to StatsActor */
-case class GetStats(val appId: Int)
-
-class StatsActor extends Actor {
-  implicit val system = context.system
-  val log = Logging(system, this)
-
-  def getCurrent: DateTime = {
-    DateTime.now.
-      withMinuteOfHour(0).
-      withSecondOfMinute(0).
-      withMillisOfSecond(0)
-  }
-
-  var longLiveStats = new Stats(DateTime.now)
-  var hourlyStats = new Stats(getCurrent)
-
-  var prevHourlyStats = new Stats(getCurrent.minusHours(1))
-  prevHourlyStats.cutoff(hourlyStats.startTime)
-
-  def bookkeeping(appId: Int, statusCode: StatusCode, event: Event) {
-    val current = getCurrent
-    // If the current hour is different from the stats start time, we create
-    // another stats instance, and move the current to prev.
-    if (current != hourlyStats.startTime) {
-      prevHourlyStats = hourlyStats
-      prevHourlyStats.cutoff(current)
-      hourlyStats = new Stats(current)
-    }
-
-    hourlyStats.update(appId, statusCode, event)
-    longLiveStats.update(appId, statusCode, event)
-  }
-
-  def receive: Actor.Receive = {
-    case Bookkeeping(appId, statusCode, event) =>
-      bookkeeping(appId, statusCode, event)
-    case GetStats(appId) => sender() ! Map(
-      "time" -> DateTime.now,
-      "currentHour" -> hourlyStats.get(appId),
-      "prevHour" -> prevHourlyStats.get(appId),
-      "longLive" -> longLiveStats.get(appId))
-    case _ => log.error("Unknown message.")
-  }
-}
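
Queries use the ask pattern and get back the Map built in receive above. A sketch, assuming the ActorSystem system in which EventServer.createEventServer ran (the actor is only registered when config.stats is true):

import scala.concurrent.duration._
import akka.pattern.ask
import akka.util.Timeout

implicit val timeout = Timeout(5.seconds)
val statsActor = system.actorSelection("/user/StatsActor")
// Replies with keys "time", "currentHour", "prevHour" and "longLive".
val report = statsActor ? GetStats(appId = 1)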

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/data/src/main/scala/io/prediction/data/api/Webhooks.scala
----------------------------------------------------------------------
diff --git a/data/src/main/scala/io/prediction/data/api/Webhooks.scala b/data/src/main/scala/io/prediction/data/api/Webhooks.scala
deleted file mode 100644
index ff18888..0000000
--- a/data/src/main/scala/io/prediction/data/api/Webhooks.scala
+++ /dev/null
@@ -1,151 +0,0 @@
-/** Copyright 2015 TappingStone, Inc.
-  *
-  * Licensed under the Apache License, Version 2.0 (the "License");
-  * you may not use this file except in compliance with the License.
-  * You may obtain a copy of the License at
-  *
-  *     http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-package io.prediction.data.api
-
-import io.prediction.data.webhooks.JsonConnector
-import io.prediction.data.webhooks.FormConnector
-import io.prediction.data.webhooks.ConnectorUtil
-import io.prediction.data.storage.Event
-import io.prediction.data.storage.EventJson4sSupport
-import io.prediction.data.storage.LEvents
-
-import spray.routing._
-import spray.routing.Directives._
-import spray.http.StatusCodes
-import spray.http.StatusCode
-import spray.http.FormData
-import spray.httpx.Json4sSupport
-
-import org.json4s.Formats
-import org.json4s.DefaultFormats
-import org.json4s.JObject
-
-import akka.event.LoggingAdapter
-import akka.actor.ActorSelection
-
-import scala.concurrent.{ExecutionContext, Future}
-
-
-private[prediction] object Webhooks {
-
-  def postJson(
-    appId: Int,
-    channelId: Option[Int],
-    web: String,
-    data: JObject,
-    eventClient: LEvents,
-    log: LoggingAdapter,
-    stats: Boolean,
-    statsActorRef: ActorSelection
-  )(implicit ec: ExecutionContext): Future[(StatusCode, Map[String, String])] = {
-
-    val eventFuture = Future {
-      WebhooksConnectors.json.get(web).map { connector =>
-        ConnectorUtil.toEvent(connector, data)
-      }
-    }
-
-    eventFuture.flatMap { eventOpt =>
-      if (eventOpt.isEmpty) {
-        Future successful {
-          val message = s"webhooks connection for ${web} is not supported."
-          (StatusCodes.NotFound, Map("message" -> message))
-        }
-      } else {
-        val event = eventOpt.get
-        val data = eventClient.futureInsert(event, appId, channelId).map { id =>
-          val result = (StatusCodes.Created, Map("eventId" -> s"${id}"))
-
-          if (stats) {
-            statsActorRef ! Bookkeeping(appId, result._1, event)
-          }
-          result
-        }
-        data
-      }
-    }
-  }
-
-  def getJson(
-    appId: Int,
-    channelId: Option[Int],
-    web: String,
-    log: LoggingAdapter
-  )(implicit ec: ExecutionContext): Future[(StatusCode, Map[String, String])] = {
-    Future {
-      WebhooksConnectors.json.get(web).map { connector =>
-        (StatusCodes.OK, Map("message" -> "Ok"))
-      }.getOrElse {
-        val message = s"webhooks connection for ${web} is not supported."
-        (StatusCodes.NotFound, Map("message" -> message))
-      }
-    }
-  }
-
-  def postForm(
-    appId: Int,
-    channelId: Option[Int],
-    web: String,
-    data: FormData,
-    eventClient: LEvents,
-    log: LoggingAdapter,
-    stats: Boolean,
-    statsActorRef: ActorSelection
-  )(implicit ec: ExecutionContext): Future[(StatusCode, Map[String, String])] = {
-    val eventFuture = Future {
-      WebhooksConnectors.form.get(web).map { connector =>
-        ConnectorUtil.toEvent(connector, data.fields.toMap)
-      }
-    }
-
-    eventFuture.flatMap { eventOpt =>
-      if (eventOpt.isEmpty) {
-        Future {
-          val message = s"webhooks connection for ${web} is not supported."
-          (StatusCodes.NotFound, Map("message" -> message))
-        }
-      } else {
-        val event = eventOpt.get
-        val data = eventClient.futureInsert(event, appId, channelId).map { id =>
-          val result = (StatusCodes.Created, Map("eventId" -> s"${id}"))
-
-          if (stats) {
-            statsActorRef ! Bookkeeping(appId, result._1, event)
-          }
-          result
-        }
-        data
-      }
-    }
-  }
-
-  def getForm(
-    appId: Int,
-    channelId: Option[Int],
-    web: String,
-    log: LoggingAdapter
-  )(implicit ec: ExecutionContext): Future[(StatusCode, Map[String, String])] = {
-    Future {
-      WebhooksConnectors.form.get(web).map { connector =>
-        (StatusCodes.OK, Map("message" -> "Ok"))
-      }.getOrElse {
-        val message = s"webhooks connection for ${web} is not supported."
-        (StatusCodes.NotFound, Map("message" -> message))
-      }
-    }
-  }
-
-}
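
All four methods share one shape: look up a connector for web, then either convert-and-insert (the posts) or simply acknowledge (the gets). A sketch of the lookup-only path; the object is private[prediction], so this must run inside that package, and the logger and execution context are borrowed from a throwaway ActorSystem:

import akka.actor.ActorSystem
import akka.event.Logging

implicit val system = ActorSystem("webhooks-check")
import system.dispatcher  // implicit ExecutionContext for the Future

val check = Webhooks.getJson(
  appId = 1,
  channelId = None,
  web = "segmentio",  // registered in WebhooksConnectors.json
  log = Logging(system, "webhooks"))
check.foreach { case (status, body) => println(s"$status: $body") }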

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/data/src/main/scala/io/prediction/data/api/WebhooksConnectors.scala
----------------------------------------------------------------------
diff --git a/data/src/main/scala/io/prediction/data/api/WebhooksConnectors.scala b/data/src/main/scala/io/prediction/data/api/WebhooksConnectors.scala
deleted file mode 100644
index 97c9775..0000000
--- a/data/src/main/scala/io/prediction/data/api/WebhooksConnectors.scala
+++ /dev/null
@@ -1,34 +0,0 @@
-/** Copyright 2015 TappingStone, Inc.
-  *
-  * Licensed under the Apache License, Version 2.0 (the "License");
-  * you may not use this file except in compliance with the License.
-  * You may obtain a copy of the License at
-  *
-  *     http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-package io.prediction.data.api
-
-import io.prediction.data.webhooks.JsonConnector
-import io.prediction.data.webhooks.FormConnector
-
-import io.prediction.data.webhooks.segmentio.SegmentIOConnector
-import io.prediction.data.webhooks.mailchimp.MailChimpConnector
-
-private[prediction] object WebhooksConnectors {
-
-  val json: Map[String, JsonConnector] = Map(
-    "segmentio" -> SegmentIOConnector
-  )
-
-  val form: Map[String, FormConnector] = Map(
-    "mailchimp" -> MailChimpConnector
-  )
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/data/src/main/scala/io/prediction/data/package.scala
----------------------------------------------------------------------
diff --git a/data/src/main/scala/io/prediction/data/package.scala b/data/src/main/scala/io/prediction/data/package.scala
deleted file mode 100644
index afbe573..0000000
--- a/data/src/main/scala/io/prediction/data/package.scala
+++ /dev/null
@@ -1,21 +0,0 @@
-/** Copyright 2015 TappingStone, Inc.
-  *
-  * Licensed under the Apache License, Version 2.0 (the "License");
-  * you may not use this file except in compliance with the License.
-  * You may obtain a copy of the License at
-  *
-  *     http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-package io.prediction
-
-/** Provides data access for PredictionIO and any engines running on top of
-  * PredictionIO
-  */
-package object data {}

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/data/src/main/scala/io/prediction/data/storage/AccessKeys.scala
----------------------------------------------------------------------
diff --git a/data/src/main/scala/io/prediction/data/storage/AccessKeys.scala b/data/src/main/scala/io/prediction/data/storage/AccessKeys.scala
deleted file mode 100644
index f197e78..0000000
--- a/data/src/main/scala/io/prediction/data/storage/AccessKeys.scala
+++ /dev/null
@@ -1,71 +0,0 @@
-/** Copyright 2015 TappingStone, Inc.
-  *
-  * Licensed under the Apache License, Version 2.0 (the "License");
-  * you may not use this file except in compliance with the License.
-  * You may obtain a copy of the License at
-  *
-  *     http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-package io.prediction.data.storage
-
-import java.security.SecureRandom
-
-import io.prediction.annotation.DeveloperApi
-import org.apache.commons.codec.binary.Base64
-
-/** :: DeveloperApi ::
-  * Stores mapping of access keys, app IDs, and lists of allowed event names
-  *
-  * @param key Access key
-  * @param appid App ID
-  * @param events List of allowed events for this particular app key
-  * @group Meta Data
-  */
-@DeveloperApi
-case class AccessKey(
-  key: String,
-  appid: Int,
-  events: Seq[String])
-
-/** :: DeveloperApi ::
-  * Base trait of the [[AccessKey]] data access object
-  *
-  * @group Meta Data
-  */
-@DeveloperApi
-trait AccessKeys {
-  /** Insert a new [[AccessKey]]. If the key field is empty, a key will be
-    * generated.
-    */
-  def insert(k: AccessKey): Option[String]
-
-  /** Get an [[AccessKey]] by key */
-  def get(k: String): Option[AccessKey]
-
-  /** Get all [[AccessKey]]s */
-  def getAll(): Seq[AccessKey]
-
-  /** Get all [[AccessKey]]s for a particular app ID */
-  def getByAppid(appid: Int): Seq[AccessKey]
-
-  /** Update an [[AccessKey]] */
-  def update(k: AccessKey): Unit
-
-  /** Delete an [[AccessKey]] */
-  def delete(k: String): Unit
-
-  /** Default implementation of key generation */
-  def generateKey: String = {
-    val sr = SecureRandom.getInstanceStrong
-    val srBytes = Array.fill(48)(0.toByte)
-    sr.nextBytes(srBytes)
-    Base64.encodeBase64URLSafeString(srBytes)
-  }
-}
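
A sketch of the generation contract documented on insert (an empty key asks the backend to call generateKey), using the Storage.getMetaDataAccessKeys() accessor seen in EventServer above; the app ID and event names are illustrative:

import io.prediction.data.storage.{AccessKey, Storage}

val accessKeysClient = Storage.getMetaDataAccessKeys()
// key = "" requests a generated key, which is returned on success.
val generated: Option[String] = accessKeysClient.insert(
  AccessKey(key = "", appid = 1, events = Seq("rate", "buy")))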

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/data/src/main/scala/io/prediction/data/storage/Apps.scala
----------------------------------------------------------------------
diff --git a/data/src/main/scala/io/prediction/data/storage/Apps.scala b/data/src/main/scala/io/prediction/data/storage/Apps.scala
deleted file mode 100644
index 32343e1..0000000
--- a/data/src/main/scala/io/prediction/data/storage/Apps.scala
+++ /dev/null
@@ -1,58 +0,0 @@
-/** Copyright 2015 TappingStone, Inc.
-  *
-  * Licensed under the Apache License, Version 2.0 (the "License");
-  * you may not use this file except in compliance with the License.
-  * You may obtain a copy of the License at
-  *
-  *     http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-package io.prediction.data.storage
-
-import io.prediction.annotation.DeveloperApi
-
-/** :: DeveloperApi ::
-  * Stores mapping of app IDs and names
-  *
-  * @param id ID of the app.
-  * @param name Name of the app.
-  * @param description Long description of the app.
-  * @group Meta Data
-  */
-@DeveloperApi
-case class App(
-  id: Int,
-  name: String,
-  description: Option[String])
-
-/** :: DeveloperApi ::
-  * Base trait of the [[App]] data access object
-  *
-  * @group Meta Data
-  */
-@DeveloperApi
-trait Apps {
-  /** Insert a new [[App]]. Returns a generated app ID if the supplied app ID is 0. */
-  def insert(app: App): Option[Int]
-
-  /** Get an [[App]] by app ID */
-  def get(id: Int): Option[App]
-
-  /** Get an [[App]] by app name */
-  def getByName(name: String): Option[App]
-
-  /** Get all [[App]]s */
-  def getAll(): Seq[App]
-
-  /** Update an [[App]] */
-  def update(app: App): Unit
-
-  /** Delete an [[App]] */
-  def delete(id: Int): Unit
-}
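
The ID contract mirrors AccessKeys. A sketch, assuming the analogous Storage.getMetaDataApps() accessor (not part of this diff) and an illustrative app name:

import io.prediction.data.storage.{App, Storage}

val appsClient = Storage.getMetaDataApps()
// id = 0 asks the backend to allocate an app ID, returned on success.
val newId: Option[Int] = appsClient.insert(
  App(id = 0, name = "MyApp", description = Some("demo app")))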

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/data/src/main/scala/io/prediction/data/storage/BiMap.scala
----------------------------------------------------------------------
diff --git a/data/src/main/scala/io/prediction/data/storage/BiMap.scala b/data/src/main/scala/io/prediction/data/storage/BiMap.scala
deleted file mode 100644
index cbf3e12..0000000
--- a/data/src/main/scala/io/prediction/data/storage/BiMap.scala
+++ /dev/null
@@ -1,164 +0,0 @@
-/** Copyright 2015 TappingStone, Inc.
-  *
-  * Licensed under the Apache License, Version 2.0 (the "License");
-  * you may not use this file except in compliance with the License.
-  * You may obtain a copy of the License at
-  *
-  *     http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-package io.prediction.data.storage
-
-import scala.collection.immutable.HashMap
-
-import org.apache.spark.rdd.RDD
-
-/** Immutable Bi-directional Map
-  *
-  */
-class BiMap[K, V] private[prediction] (
-  private val m: Map[K, V],
-  private val i: Option[BiMap[V, K]] = None
-  ) extends Serializable {
-
-  // NOTE: make inverse's inverse point back to current BiMap
-  val inverse: BiMap[V, K] = i.getOrElse {
-    val rev = m.map(_.swap)
-    require((rev.size == m.size),
-      s"Failed to create reversed map. Cannot have duplicated values.")
-    new BiMap(rev, Some(this))
-  }
-
-  def get(k: K): Option[V] = m.get(k)
-
-  def getOrElse(k: K, default: => V): V = m.getOrElse(k, default)
-
-  def contains(k: K): Boolean = m.contains(k)
-
-  def apply(k: K): V = m.apply(k)
-
-  /** Converts to a map.
-    * @return a map of type immutable.Map[K, V]
-    */
-  def toMap: Map[K, V] = m
-
-  /** Converts to a sequence.
-    * @return a sequence containing all elements of this map
-    */
-  def toSeq: Seq[(K, V)] = m.toSeq
-
-  def size: Int = m.size
-
-  def take(n: Int): BiMap[K, V] = BiMap(m.take(n))
-
-  override def toString: String = m.toString
-}
-
-object BiMap {
-
-  def apply[K, V](x: Map[K, V]): BiMap[K, V] = new BiMap(x)
-
-  /** Create a BiMap[String, Long] from a set of String. The Long index starts
-    * from 0.
-    * @param keys a set of String
-    * @return a String to Long BiMap
-    */
-  def stringLong(keys: Set[String]): BiMap[String, Long] = {
-    val hm = HashMap(keys.toSeq.zipWithIndex.map(t => (t._1, t._2.toLong)) : _*)
-    new BiMap(hm)
-  }
-
-  /** Create a BiMap[String, Long] from an array of String.
-    * NOTE: the array cannot have duplicate elements.
-    * The Long index starts from 0.
-    * @param keys an array of String
-    * @return a String to Long BiMap
-    */
-  def stringLong(keys: Array[String]): BiMap[String, Long] = {
-    val hm = HashMap(keys.zipWithIndex.map(t => (t._1, t._2.toLong)) : _*)
-    new BiMap(hm)
-  }
-
-  /** Create a BiMap[String, Long] from RDD[String]. The Long index starts
-    * from 0.
-    * @param keys RDD of String
-    * @return a String to Long BiMap
-    */
-  def stringLong(keys: RDD[String]): BiMap[String, Long] = {
-    stringLong(keys.distinct.collect)
-  }
-
-  /** Create a BiMap[String, Int] from a set of String. The Int index starts
-    * from 0.
-    * @param keys a set of String
-    * @return a String to Int BiMap
-    */
-  def stringInt(keys: Set[String]): BiMap[String, Int] = {
-    val hm = HashMap(keys.toSeq.zipWithIndex : _*)
-    new BiMap(hm)
-  }
-
-  /** Create a BiMap[String, Int] from an array of String.
-    * NOTE: the array cannot have duplicate elements.
-    * The Int index starts from 0.
-    * @param keys an array of String
-    * @return a String to Int BiMap
-    */
-  def stringInt(keys: Array[String]): BiMap[String, Int] = {
-    val hm = HashMap(keys.zipWithIndex : _*)
-    new BiMap(hm)
-  }
-
-  /** Create a BiMap[String, Int] from RDD[String]. The Int index starts
-    * from 0.
-    * @param keys RDD of String
-    * @return a String to Int BiMap
-    */
-  def stringInt(keys: RDD[String]): BiMap[String, Int] = {
-    stringInt(keys.distinct.collect)
-  }
-
-  private[this] def stringDoubleImpl(keys: Seq[String])
-  : BiMap[String, Double] = {
-    val ki = keys.zipWithIndex.map(e => (e._1, e._2.toDouble))
-    new BiMap(HashMap(ki : _*))
-  }
-
-  /** Create a BiMap[String, Double] from a set of String. The Double index
-    * starts from 0.
-    * @param keys a set of String
-    * @return a String to Double BiMap
-    */
-  def stringDouble(keys: Set[String]): BiMap[String, Double] = {
-    // val hm = HashMap(keys.toSeq.zipWithIndex.map(_.toDouble) : _*)
-    // new BiMap(hm)
-    stringDoubleImpl(keys.toSeq)
-  }
-
-  /** Create a BiMap[String, Double] from an array of String.
-    * NOTE: the array cannot have duplicate elements.
-    * The Double index starts from 0.
-    * @param keys an array of String
-    * @return a String to Double BiMap
-    */
-  def stringDouble(keys: Array[String]): BiMap[String, Double] = {
-    // val hm = HashMap(keys.zipWithIndex.mapValues(_.toDouble) : _*)
-    // new BiMap(hm)
-    stringDoubleImpl(keys.toSeq)
-  }
-
-  /** Create a BiMap[String, Double] from RDD[String]. The Double index starts
-    * from 0.
-    * @param keys RDD of String
-    * @return a String to Double BiMap
-    */
-  def stringDouble(keys: RDD[String]): BiMap[String, Double] = {
-    stringDoubleImpl(keys.distinct.collect)
-  }
-}
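
A short sketch of the round trip these factories provide, including the inverse-of-inverse guarantee noted in the constructor:

val idx = BiMap.stringInt(Set("apple", "banana", "cherry"))
val i: Int = idx("banana")           // an index in 0..2
val back: String = idx.inverse(i)    // "banana"
assert(idx.inverse.inverse eq idx)   // inverse's inverse is this BiMap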

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/data/src/main/scala/io/prediction/data/storage/Channels.scala
----------------------------------------------------------------------
diff --git a/data/src/main/scala/io/prediction/data/storage/Channels.scala b/data/src/main/scala/io/prediction/data/storage/Channels.scala
deleted file mode 100644
index 3fa7aef..0000000
--- a/data/src/main/scala/io/prediction/data/storage/Channels.scala
+++ /dev/null
@@ -1,79 +0,0 @@
-/** Copyright 2015 TappingStone, Inc.
-  *
-  * Licensed under the Apache License, Version 2.0 (the "License");
-  * you may not use this file except in compliance with the License.
-  * You may obtain a copy of the License at
-  *
-  *     http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-package io.prediction.data.storage
-
-import io.prediction.annotation.DeveloperApi
-
-/** :: DeveloperApi ::
-  * Stores mapping of channel IDs, names and app ID
-  *
-  * @param id ID of the channel
-  * @param name Name of the channel (must be unique within the same app)
-  * @param appid ID of the app which this channel belongs to
-  * @group Meta Data
-  */
-@DeveloperApi
-case class Channel(
-  id: Int,
-  name: String, // must be unique within the same app
-  appid: Int
-) {
-  require(Channel.isValidName(name),
-    "Invalid channel name: ${name}. ${Channel.nameConstraint}")
-}
-
-/** :: DeveloperApi ::
-  * Companion object of [[Channel]]
-  *
-  * @group Meta Data
-  */
-@DeveloperApi
-object Channel {
-  /** Examine whether the supplied channel name is valid. A valid channel name
-    * must consist of 1 to 16 alphanumeric and '-' characters.
-    *
-    * @param s Channel name to examine
-    * @return true if channel name is valid, false otherwise
-    */
-  def isValidName(s: String): Boolean = {
-    // note: update channelNameConstraint if this rule is changed
-    s.matches("^[a-zA-Z0-9-]{1,16}$")
-  }
-
-  /** For consistent error message display */
-  val nameConstraint: String =
-    "Only alphanumeric and - characters are allowed and max length is 16."
-}
-
-/** :: DeveloperApi ::
-  * Base trait of the [[Channel]] data access object
-  *
-  * @group Meta Data
-  */
-@DeveloperApi
-trait Channels {
-  /** Insert a new [[Channel]]. Returns a generated channel ID if original ID is 0. */
-  def insert(channel: Channel): Option[Int]
-
-  /** Get a [[Channel]] by channel ID */
-  def get(id: Int): Option[Channel]
-
-  /** Get all [[Channel]] by app ID */
-  def getByAppid(appid: Int): Seq[Channel]
-
-  /** Delete a [[Channel]] */
-  def delete(id: Int): Unit
-}
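
The name rule above in a few concrete cases:

Channel.isValidName("my-channel-01")  // true: alphanumeric plus '-'
Channel.isValidName("bad_name")       // false: '_' is not allowed
Channel.isValidName("x" * 17)         // false: longer than 16 characters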

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/data/src/main/scala/io/prediction/data/storage/DataMap.scala
----------------------------------------------------------------------
diff --git a/data/src/main/scala/io/prediction/data/storage/DataMap.scala b/data/src/main/scala/io/prediction/data/storage/DataMap.scala
deleted file mode 100644
index 91a0ba5..0000000
--- a/data/src/main/scala/io/prediction/data/storage/DataMap.scala
+++ /dev/null
@@ -1,241 +0,0 @@
-/** Copyright 2015 TappingStone, Inc.
-  *
-  * Licensed under the Apache License, Version 2.0 (the "License");
-  * you may not use this file except in compliance with the License.
-  * You may obtain a copy of the License at
-  *
-  *     http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-package io.prediction.data.storage
-
-import org.json4s._
-import org.json4s.native.JsonMethods.parse
-
-import scala.collection.GenTraversableOnce
-import scala.collection.JavaConversions
-
-/** Exception class for [[DataMap]]
-  *
-  * @group Event Data
-  */
-case class DataMapException(msg: String, cause: Exception)
-  extends Exception(msg, cause) {
-  def this(msg: String) = this(msg, null)
-}
-
-/** A DataMap stores properties of the event or entity. Internally it is a Map
-  * whose keys are property names and values are corresponding JSON values
-  * respectively. Use the [[get]] method to retrieve the value of a mandatory
-  * property or use [[getOpt]] to retrieve the value of an optional property.
-  *
-  * @param fields Map of property name to JValue
-  * @group Event Data
-  */
-class DataMap (
-  val fields: Map[String, JValue]
-) extends Serializable {
-  @transient lazy implicit private val formats = DefaultFormats +
-    new DateTimeJson4sSupport.Serializer
-
-  /** Check the existence of a required property name. Throw an exception if
-    * it does not exist.
-    *
-    * @param name The property name
-    */
-  def require(name: String): Unit = {
-    if (!fields.contains(name)) {
-      throw new DataMapException(s"The field $name is required.")
-    }
-  }
-
-  /** Check if this DataMap contains a specific property.
-    *
-    * @param name The property name
-    * @return Return true if the property exists, else false.
-    */
-  def contains(name: String): Boolean = {
-    fields.contains(name)
-  }
-
-  /** Get the value of a mandatory property. Exception is thrown if the property
-    * does not exist.
-    *
-    * @tparam T The type of the property value
-    * @param name The property name
-    * @return Return the property value of type T
-    */
-  def get[T: Manifest](name: String): T = {
-    require(name)
-    fields(name) match {
-      case JNull => throw new DataMapException(
-        s"The required field $name cannot be null.")
-      case x: JValue => x.extract[T]
-    }
-  }
-
-  /** Get the value of an optional property. Return None if the property does
-    * not exist.
-    *
-    * @tparam T The type of the property value
-    * @param name The property name
-    * @return Return the property value of type Option[T]
-    */
-  def getOpt[T: Manifest](name: String): Option[T] = {
-    // either the field doesn't exist or its value is null
-    fields.get(name).flatMap(_.extract[Option[T]])
-  }
-
-  /** Get the value of an optional property. Return default value if the
-    * property does not exist.
-    *
-    * @tparam T The type of the property value
-    * @param name The property name
-    * @param default The default property value of type T
-    * @return Return the property value of type T
-    */
-  def getOrElse[T: Manifest](name: String, default: T): T = {
-    getOpt[T](name).getOrElse(default)
-  }
-
-  /** Java-friendly method for getting the value of a property. Return null if the
-    * property does not exist.
-    *
-    * @tparam T The type of the property value
-    * @param name The property name
-    * @param clazz The class of the type of the property value
-    * @return Return the property value of type T
-    */
-  def get[T](name: String, clazz: java.lang.Class[T]): T = {
-    val manifest =  new Manifest[T] {
-      override def erasure: Class[_] = clazz
-      override def runtimeClass: Class[_] = clazz
-    }
-
-    fields.get(name) match {
-      case None => null.asInstanceOf[T]
-      case Some(JNull) => null.asInstanceOf[T]
-      case Some(x) => x.extract[T](formats, manifest)
-    }
-  }
-
-  /** Java-friendly method for getting a list of values of a property. Return null if the
-    * property does not exist.
-    *
-    * @param name The property name
-    * @return Return the list of property values
-    */
-  def getStringList(name: String): java.util.List[String] = {
-    fields.get(name) match {
-      case None => null
-      case Some(JNull) => null
-      case Some(x) =>
-        JavaConversions.seqAsJavaList(x.extract[List[String]](formats, manifest[List[String]]))
-    }
-  }
-
-  /** Return a new DataMap with elements containing elements from the left hand
-    * side operand followed by elements from the right hand side operand.
-    *
-    * @param that Right hand side DataMap
-    * @return A new DataMap
-    */
-  def ++ (that: DataMap): DataMap = DataMap(this.fields ++ that.fields)
-
-  /** Creates a new DataMap from this DataMap by removing all elements of
-    * another collection.
-    *
-    * @param that A collection containing the removed property names
-    * @return A new DataMap
-    */
-  def -- (that: GenTraversableOnce[String]): DataMap =
-    DataMap(this.fields -- that)
-
-  /** Tests whether the DataMap is empty.
-    *
-    * @return true if the DataMap is empty, false otherwise.
-    */
-  def isEmpty: Boolean = fields.isEmpty
-
-  /** Collects all property names of this DataMap in a set.
-    *
-    * @return a set containing all property names of this DataMap.
-    */
-  def keySet: Set[String] = this.fields.keySet
-
-  /** Converts this DataMap to a List.
-    *
-    * @return a list of (property name, JSON value) tuples.
-    */
-  def toList(): List[(String, JValue)] = fields.toList
-
-  /** Converts this DataMap to a JObject.
-    *
-    * @return the JObject initialized by this DataMap.
-    */
-  def toJObject(): JObject = JObject(toList())
-
-  /** Converts this DataMap to case class of type T.
-    *
-    * @return the object of type T.
-    */
-  def extract[T: Manifest]: T = {
-    toJObject().extract[T]
-  }
-
-  override
-  def toString: String = s"DataMap($fields)"
-
-  override
-  def hashCode: Int = 41 + fields.hashCode
-
-  override
-  def equals(other: Any): Boolean = other match {
-    case that: DataMap => that.canEqual(this) && this.fields.equals(that.fields)
-    case _ => false
-  }
-
-  def canEqual(other: Any): Boolean = other.isInstanceOf[DataMap]
-}
-
-/** Companion object of the [[DataMap]] class
-  *
-  * @group Event Data
-  */
-object DataMap {
-  /** Create an empty DataMap
-    * @return an empty DataMap
-    */
-  def apply(): DataMap = new DataMap(Map[String, JValue]())
-
-  /** Create a DataMap from a Map of String to JValue
-    * @param fields a Map of String to JValue
-    * @return a new DataMap initialized by fields
-    */
-  def apply(fields: Map[String, JValue]): DataMap = new DataMap(fields)
-
-  /** Create a DataMap from a JObject
-    * @param jObj JObject
-    * @return a new DataMap initialized by a JObject
-    */
-  def apply(jObj: JObject): DataMap = {
-    if (jObj == null) {
-      apply()
-    } else {
-      new DataMap(jObj.obj.toMap)
-    }
-  }
-
-  /** Create a DataMap from a JSON String
-    * @param js JSON String. eg """{ "a": 1, "b": "foo" }"""
-    * @return a new DataMap initialized by a JSON string
-    */
-  def apply(js: String): DataMap = apply(parse(js).asInstanceOf[JObject])
-
-}
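
A minimal sketch tying the accessors and factories together (property names are illustrative):

import io.prediction.data.storage.DataMap

val dm = DataMap("""{ "a": 1, "b": "foo" }""")
val a: Int = dm.get[Int]("a")                         // 1
val missing: Option[String] = dm.getOpt[String]("c")  // None
val merged = dm ++ DataMap("""{ "c": true }""")       // adds property "c"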



[26/34] incubator-predictionio git commit: rename all except examples

Posted by do...@apache.org.
rename all except examples


Project: http://git-wip-us.apache.org/repos/asf/incubator-predictionio/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-predictionio/commit/4f03388e
Tree: http://git-wip-us.apache.org/repos/asf/incubator-predictionio/tree/4f03388e
Diff: http://git-wip-us.apache.org/repos/asf/incubator-predictionio/diff/4f03388e

Branch: refs/heads/develop
Commit: 4f03388ef551133149603933bba8a6eac30fd333
Parents: ec79f20
Author: Xusen Yin <yi...@gmail.com>
Authored: Wed Jul 6 22:14:00 2016 -0700
Committer: Xusen Yin <yi...@gmail.com>
Committed: Wed Jul 6 22:14:00 2016 -0700

----------------------------------------------------------------------
 .../io/prediction/annotation/DeveloperApi.java  |   34 -
 .../io/prediction/annotation/Experimental.java  |   35 -
 .../authentication/KeyAuthentication.scala      |   55 -
 .../configuration/SSLConfiguration.scala        |   71 -
 .../predictionio/annotation/DeveloperApi.java   |   34 +
 .../predictionio/annotation/Experimental.java   |   35 +
 .../authentication/KeyAuthentication.scala      |   55 +
 .../configuration/SSLConfiguration.scala        |   71 +
 .../controller/CustomQuerySerializer.scala      |   37 -
 .../io/prediction/controller/Deployment.scala   |   56 -
 .../scala/io/prediction/controller/Engine.scala |  829 ------------
 .../prediction/controller/EngineFactory.scala   |   41 -
 .../io/prediction/controller/EngineParams.scala |  149 --
 .../controller/EngineParamsGenerator.scala      |   43 -
 .../io/prediction/controller/Evaluation.scala   |  122 --
 .../prediction/controller/FastEvalEngine.scala  |  343 -----
 .../controller/IdentityPreparator.scala         |   92 --
 .../io/prediction/controller/LAlgorithm.scala   |  130 --
 .../prediction/controller/LAverageServing.scala |   41 -
 .../io/prediction/controller/LDataSource.scala  |   67 -
 .../prediction/controller/LFirstServing.scala   |   39 -
 .../io/prediction/controller/LPreparator.scala  |   46 -
 .../io/prediction/controller/LServing.scala     |   52 -
 .../LocalFileSystemPersistentModel.scala        |   74 -
 .../scala/io/prediction/controller/Metric.scala |  266 ----
 .../prediction/controller/MetricEvaluator.scala |  260 ----
 .../io/prediction/controller/P2LAlgorithm.scala |  121 --
 .../io/prediction/controller/PAlgorithm.scala   |  126 --
 .../io/prediction/controller/PDataSource.scala  |   57 -
 .../io/prediction/controller/PPreparator.scala  |   44 -
 .../scala/io/prediction/controller/Params.scala |   31 -
 .../prediction/controller/PersistentModel.scala |  112 --
 .../io/prediction/controller/SanityCheck.scala  |   30 -
 .../scala/io/prediction/controller/Utils.scala  |   69 -
 .../java/JavaEngineParamsGenerator.scala        |   39 -
 .../controller/java/JavaEvaluation.scala        |   66 -
 .../controller/java/LJavaAlgorithm.scala        |   31 -
 .../controller/java/LJavaDataSource.scala       |   31 -
 .../controller/java/LJavaPreparator.scala       |   29 -
 .../controller/java/LJavaServing.scala          |   26 -
 .../controller/java/P2LJavaAlgorithm.scala      |   33 -
 .../controller/java/PJavaAlgorithm.scala        |   28 -
 .../controller/java/PJavaDataSource.scala       |   28 -
 .../controller/java/PJavaPreparator.scala       |   26 -
 .../java/SerializableComparator.scala           |   20 -
 .../io/prediction/controller/package.scala      |  168 ---
 .../scala/io/prediction/core/AbstractDoer.scala |   66 -
 .../io/prediction/core/BaseAlgorithm.scala      |  123 --
 .../io/prediction/core/BaseDataSource.scala     |   52 -
 .../scala/io/prediction/core/BaseEngine.scala   |  100 --
 .../io/prediction/core/BaseEvaluator.scala      |   72 -
 .../io/prediction/core/BasePreparator.scala     |   42 -
 .../scala/io/prediction/core/BaseServing.scala  |   51 -
 .../main/scala/io/prediction/core/package.scala |   21 -
 core/src/main/scala/io/prediction/package.scala |   19 -
 .../io/prediction/workflow/CoreWorkflow.scala   |  163 ---
 .../io/prediction/workflow/CreateServer.scala   |  737 ----------
 .../io/prediction/workflow/CreateWorkflow.scala |  274 ----
 .../workflow/EngineServerPlugin.scala           |   40 -
 .../workflow/EngineServerPluginContext.scala    |   88 --
 .../workflow/EngineServerPluginsActor.scala     |   46 -
 .../workflow/EvaluationWorkflow.scala           |   42 -
 .../io/prediction/workflow/FakeWorkflow.scala   |  106 --
 .../io/prediction/workflow/JsonExtractor.scala  |  164 ---
 .../workflow/JsonExtractorOption.scala          |   23 -
 .../workflow/PersistentModelManifest.scala      |   18 -
 .../scala/io/prediction/workflow/Workflow.scala |  135 --
 .../prediction/workflow/WorkflowContext.scala   |   45 -
 .../io/prediction/workflow/WorkflowParams.scala |   42 -
 .../io/prediction/workflow/WorkflowUtils.scala  |  419 ------
 .../controller/CustomQuerySerializer.scala      |   37 +
 .../predictionio/controller/Deployment.scala    |   56 +
 .../apache/predictionio/controller/Engine.scala |  829 ++++++++++++
 .../predictionio/controller/EngineFactory.scala |   41 +
 .../predictionio/controller/EngineParams.scala  |  149 ++
 .../controller/EngineParamsGenerator.scala      |   43 +
 .../predictionio/controller/Evaluation.scala    |  122 ++
 .../controller/FastEvalEngine.scala             |  343 +++++
 .../controller/IdentityPreparator.scala         |   92 ++
 .../predictionio/controller/LAlgorithm.scala    |  130 ++
 .../controller/LAverageServing.scala            |   41 +
 .../predictionio/controller/LDataSource.scala   |   67 +
 .../predictionio/controller/LFirstServing.scala |   39 +
 .../predictionio/controller/LPreparator.scala   |   46 +
 .../predictionio/controller/LServing.scala      |   52 +
 .../LocalFileSystemPersistentModel.scala        |   74 +
 .../apache/predictionio/controller/Metric.scala |  266 ++++
 .../controller/MetricEvaluator.scala            |  260 ++++
 .../predictionio/controller/P2LAlgorithm.scala  |  121 ++
 .../predictionio/controller/PAlgorithm.scala    |  126 ++
 .../predictionio/controller/PDataSource.scala   |   57 +
 .../predictionio/controller/PPreparator.scala   |   44 +
 .../apache/predictionio/controller/Params.scala |   31 +
 .../controller/PersistentModel.scala            |  112 ++
 .../predictionio/controller/SanityCheck.scala   |   30 +
 .../apache/predictionio/controller/Utils.scala  |   69 +
 .../java/JavaEngineParamsGenerator.scala        |   39 +
 .../controller/java/JavaEvaluation.scala        |   66 +
 .../controller/java/LJavaAlgorithm.scala        |   31 +
 .../controller/java/LJavaDataSource.scala       |   31 +
 .../controller/java/LJavaPreparator.scala       |   29 +
 .../controller/java/LJavaServing.scala          |   26 +
 .../controller/java/P2LJavaAlgorithm.scala      |   33 +
 .../controller/java/PJavaAlgorithm.scala        |   28 +
 .../controller/java/PJavaDataSource.scala       |   28 +
 .../controller/java/PJavaPreparator.scala       |   26 +
 .../java/SerializableComparator.scala           |   20 +
 .../predictionio/controller/package.scala       |  168 +++
 .../apache/predictionio/core/AbstractDoer.scala |   66 +
 .../predictionio/core/BaseAlgorithm.scala       |  123 ++
 .../predictionio/core/BaseDataSource.scala      |   52 +
 .../apache/predictionio/core/BaseEngine.scala   |  100 ++
 .../predictionio/core/BaseEvaluator.scala       |   72 +
 .../predictionio/core/BasePreparator.scala      |   42 +
 .../apache/predictionio/core/BaseServing.scala  |   51 +
 .../org/apache/predictionio/core/package.scala  |   21 +
 .../scala/org/apache/predictionio/package.scala |   19 +
 .../predictionio/workflow/CoreWorkflow.scala    |  163 +++
 .../predictionio/workflow/CreateServer.scala    |  737 ++++++++++
 .../predictionio/workflow/CreateWorkflow.scala  |  274 ++++
 .../workflow/EngineServerPlugin.scala           |   40 +
 .../workflow/EngineServerPluginContext.scala    |   88 ++
 .../workflow/EngineServerPluginsActor.scala     |   46 +
 .../workflow/EvaluationWorkflow.scala           |   42 +
 .../predictionio/workflow/FakeWorkflow.scala    |  106 ++
 .../predictionio/workflow/JsonExtractor.scala   |  164 +++
 .../workflow/JsonExtractorOption.scala          |   23 +
 .../workflow/PersistentModelManifest.scala      |   18 +
 .../apache/predictionio/workflow/Workflow.scala |  135 ++
 .../predictionio/workflow/WorkflowContext.scala |   45 +
 .../predictionio/workflow/WorkflowParams.scala  |   42 +
 .../predictionio/workflow/WorkflowUtils.scala   |  419 ++++++
 .../controller/metric_evaluator.scala.html      |   95 --
 .../io/prediction/workflow/index.scala.html     |   92 --
 .../controller/metric_evaluator.scala.html      |   95 ++
 .../predictionio/workflow/index.scala.html      |   92 ++
 .../java/io/prediction/workflow/JavaParams.java |   30 -
 .../java/io/prediction/workflow/JavaQuery.java  |   46 -
 .../workflow/JavaQueryTypeAdapterFactory.java   |   60 -
 .../predictionio/workflow/JavaParams.java       |   30 +
 .../apache/predictionio/workflow/JavaQuery.java |   46 +
 .../workflow/JavaQueryTypeAdapterFactory.java   |   60 +
 .../io/prediction/controller/EngineTest.scala   |  615 ---------
 .../prediction/controller/EvaluationTest.scala  |   46 -
 .../prediction/controller/EvaluatorTest.scala   |   93 --
 .../controller/FastEvalEngineTest.scala         |  181 ---
 .../controller/MetricEvaluatorTest.scala        |   52 -
 .../io/prediction/controller/MetricTest.scala   |  143 --
 .../io/prediction/controller/SampleEngine.scala |  472 -------
 .../scala/io/prediction/workflow/BaseTest.scala |   75 -
 .../workflow/EngineWorkflowTest.scala           |    0
 .../workflow/EvaluationWorkflowTest.scala       |   61 -
 .../workflow/JsonExtractorSuite.scala           |  383 ------
 .../predictionio/controller/EngineTest.scala    |  615 +++++++++
 .../controller/EvaluationTest.scala             |   46 +
 .../predictionio/controller/EvaluatorTest.scala |   93 ++
 .../controller/FastEvalEngineTest.scala         |  181 +++
 .../controller/MetricEvaluatorTest.scala        |   52 +
 .../predictionio/controller/MetricTest.scala    |  143 ++
 .../predictionio/controller/SampleEngine.scala  |  472 +++++++
 .../apache/predictionio/workflow/BaseTest.scala |   75 +
 .../workflow/EngineWorkflowTest.scala           |    0
 .../workflow/EvaluationWorkflowTest.scala       |   61 +
 .../workflow/JsonExtractorSuite.scala           |  383 ++++++
 .../main/scala/io/prediction/data/Utils.scala   |   50 -
 .../scala/io/prediction/data/api/Common.scala   |   80 --
 .../io/prediction/data/api/EventInfo.scala      |   24 -
 .../io/prediction/data/api/EventServer.scala    |  640 ---------
 .../prediction/data/api/EventServerPlugin.scala |   33 -
 .../data/api/EventServerPluginContext.scala     |   49 -
 .../io/prediction/data/api/PluginsActor.scala   |   52 -
 .../scala/io/prediction/data/api/Stats.scala    |   79 --
 .../io/prediction/data/api/StatsActor.scala     |   74 -
 .../scala/io/prediction/data/api/Webhooks.scala |  151 ---
 .../data/api/WebhooksConnectors.scala           |   34 -
 .../main/scala/io/prediction/data/package.scala |   21 -
 .../io/prediction/data/storage/AccessKeys.scala |   71 -
 .../scala/io/prediction/data/storage/Apps.scala |   58 -
 .../io/prediction/data/storage/BiMap.scala      |  164 ---
 .../io/prediction/data/storage/Channels.scala   |   79 --
 .../io/prediction/data/storage/DataMap.scala    |  241 ----
 .../data/storage/DateTimeJson4sSupport.scala    |   47 -
 .../data/storage/EngineInstances.scala          |  177 ---
 .../data/storage/EngineManifests.scala          |  117 --
 .../io/prediction/data/storage/EntityMap.scala  |   98 --
 .../data/storage/EvaluationInstances.scala      |  135 --
 .../io/prediction/data/storage/Event.scala      |  164 ---
 .../data/storage/EventJson4sSupport.scala       |  236 ----
 .../data/storage/LEventAggregator.scala         |  145 --
 .../io/prediction/data/storage/LEvents.scala    |  489 -------
 .../io/prediction/data/storage/Models.scala     |   80 --
 .../data/storage/PEventAggregator.scala         |  209 ---
 .../io/prediction/data/storage/PEvents.scala    |  182 ---
 .../prediction/data/storage/PropertyMap.scala   |   96 --
 .../io/prediction/data/storage/Storage.scala    |  403 ------
 .../io/prediction/data/storage/Utils.scala      |   47 -
 .../storage/elasticsearch/ESAccessKeys.scala    |  116 --
 .../data/storage/elasticsearch/ESApps.scala     |  127 --
 .../data/storage/elasticsearch/ESChannels.scala |  114 --
 .../elasticsearch/ESEngineInstances.scala       |  155 ---
 .../elasticsearch/ESEngineManifests.scala       |   81 --
 .../elasticsearch/ESEvaluationInstances.scala   |  133 --
 .../storage/elasticsearch/ESSequences.scala     |   61 -
 .../data/storage/elasticsearch/ESUtils.scala    |   45 -
 .../storage/elasticsearch/StorageClient.scala   |   47 -
 .../data/storage/elasticsearch/package.scala    |   22 -
 .../data/storage/hbase/HBEventsUtil.scala       |  412 ------
 .../data/storage/hbase/HBLEvents.scala          |  192 ---
 .../data/storage/hbase/HBPEvents.scala          |  112 --
 .../data/storage/hbase/PIOHBaseUtil.scala       |   28 -
 .../data/storage/hbase/StorageClient.scala      |   83 --
 .../prediction/data/storage/hbase/package.scala |   22 -
 .../data/storage/hbase/upgrade/HB_0_8_0.scala   |  190 ---
 .../data/storage/hbase/upgrade/Upgrade.scala    |   72 -
 .../storage/hbase/upgrade/Upgrade_0_8_3.scala   |  221 ---
 .../data/storage/hdfs/HDFSModels.scala          |   60 -
 .../data/storage/hdfs/StorageClient.scala       |   33 -
 .../prediction/data/storage/hdfs/package.scala  |   22 -
 .../data/storage/jdbc/JDBCAccessKeys.scala      |   84 --
 .../prediction/data/storage/jdbc/JDBCApps.scala |   86 --
 .../data/storage/jdbc/JDBCChannels.scala        |   66 -
 .../data/storage/jdbc/JDBCEngineInstances.scala |  194 ---
 .../data/storage/jdbc/JDBCEngineManifests.scala |  111 --
 .../storage/jdbc/JDBCEvaluationInstances.scala  |  162 ---
 .../data/storage/jdbc/JDBCLEvents.scala         |  241 ----
 .../data/storage/jdbc/JDBCModels.scala          |   52 -
 .../data/storage/jdbc/JDBCPEvents.scala         |  160 ---
 .../data/storage/jdbc/JDBCUtils.scala           |  103 --
 .../data/storage/jdbc/StorageClient.scala       |   50 -
 .../prediction/data/storage/jdbc/package.scala  |   23 -
 .../data/storage/localfs/LocalFSModels.scala    |   59 -
 .../data/storage/localfs/StorageClient.scala    |   43 -
 .../data/storage/localfs/package.scala          |   22 -
 .../io/prediction/data/storage/package.scala    |   26 -
 .../scala/io/prediction/data/store/Common.scala |   50 -
 .../io/prediction/data/store/LEventStore.scala  |  142 --
 .../io/prediction/data/store/PEventStore.scala  |  116 --
 .../data/store/java/LJavaEventStore.scala       |  142 --
 .../data/store/java/OptionHelper.scala          |   29 -
 .../data/store/java/PJavaEventStore.scala       |  109 --
 .../io/prediction/data/store/package.scala      |   21 -
 .../io/prediction/data/view/DataView.scala      |  110 --
 .../io/prediction/data/view/LBatchView.scala    |  200 ---
 .../io/prediction/data/view/PBatchView.scala    |  209 ---
 .../io/prediction/data/view/QuickTest.scala     |   94 --
 .../data/webhooks/ConnectorException.scala      |   31 -
 .../data/webhooks/ConnectorUtil.scala           |   46 -
 .../data/webhooks/FormConnector.scala           |   32 -
 .../data/webhooks/JsonConnector.scala           |   31 -
 .../exampleform/ExampleFormConnector.scala      |  123 --
 .../examplejson/ExampleJsonConnector.scala      |  153 ---
 .../webhooks/mailchimp/MailChimpConnector.scala |  305 -----
 .../webhooks/segmentio/SegmentIOConnector.scala |  306 -----
 .../org/apache/predictionio/data/Utils.scala    |   50 +
 .../apache/predictionio/data/api/Common.scala   |   80 ++
 .../predictionio/data/api/EventInfo.scala       |   24 +
 .../predictionio/data/api/EventServer.scala     |  640 +++++++++
 .../data/api/EventServerPlugin.scala            |   33 +
 .../data/api/EventServerPluginContext.scala     |   49 +
 .../predictionio/data/api/PluginsActor.scala    |   52 +
 .../apache/predictionio/data/api/Stats.scala    |   79 ++
 .../predictionio/data/api/StatsActor.scala      |   74 +
 .../apache/predictionio/data/api/Webhooks.scala |  151 +++
 .../data/api/WebhooksConnectors.scala           |   34 +
 .../org/apache/predictionio/data/package.scala  |   21 +
 .../predictionio/data/storage/AccessKeys.scala  |   71 +
 .../apache/predictionio/data/storage/Apps.scala |   58 +
 .../predictionio/data/storage/BiMap.scala       |  164 +++
 .../predictionio/data/storage/Channels.scala    |   79 ++
 .../predictionio/data/storage/DataMap.scala     |  241 ++++
 .../data/storage/DateTimeJson4sSupport.scala    |   47 +
 .../data/storage/EngineInstances.scala          |  177 +++
 .../data/storage/EngineManifests.scala          |  117 ++
 .../predictionio/data/storage/EntityMap.scala   |   98 ++
 .../data/storage/EvaluationInstances.scala      |  135 ++
 .../predictionio/data/storage/Event.scala       |  164 +++
 .../data/storage/EventJson4sSupport.scala       |  236 ++++
 .../data/storage/LEventAggregator.scala         |  145 ++
 .../predictionio/data/storage/LEvents.scala     |  489 +++++++
 .../predictionio/data/storage/Models.scala      |   80 ++
 .../data/storage/PEventAggregator.scala         |  209 +++
 .../predictionio/data/storage/PEvents.scala     |  182 +++
 .../predictionio/data/storage/PropertyMap.scala |   96 ++
 .../predictionio/data/storage/Storage.scala     |  403 ++++++
 .../predictionio/data/storage/Utils.scala       |   47 +
 .../storage/elasticsearch/ESAccessKeys.scala    |  116 ++
 .../data/storage/elasticsearch/ESApps.scala     |  127 ++
 .../data/storage/elasticsearch/ESChannels.scala |  114 ++
 .../elasticsearch/ESEngineInstances.scala       |  155 +++
 .../elasticsearch/ESEngineManifests.scala       |   81 ++
 .../elasticsearch/ESEvaluationInstances.scala   |  133 ++
 .../storage/elasticsearch/ESSequences.scala     |   61 +
 .../data/storage/elasticsearch/ESUtils.scala    |   45 +
 .../storage/elasticsearch/StorageClient.scala   |   47 +
 .../data/storage/elasticsearch/package.scala    |   22 +
 .../data/storage/hbase/HBEventsUtil.scala       |  412 ++++++
 .../data/storage/hbase/HBLEvents.scala          |  192 +++
 .../data/storage/hbase/HBPEvents.scala          |  112 ++
 .../data/storage/hbase/PIOHBaseUtil.scala       |   28 +
 .../data/storage/hbase/StorageClient.scala      |   83 ++
 .../data/storage/hbase/package.scala            |   22 +
 .../data/storage/hbase/upgrade/HB_0_8_0.scala   |  190 +++
 .../data/storage/hbase/upgrade/Upgrade.scala    |   72 +
 .../storage/hbase/upgrade/Upgrade_0_8_3.scala   |  221 +++
 .../data/storage/hdfs/HDFSModels.scala          |   60 +
 .../data/storage/hdfs/StorageClient.scala       |   33 +
 .../data/storage/hdfs/package.scala             |   22 +
 .../data/storage/jdbc/JDBCAccessKeys.scala      |   84 ++
 .../data/storage/jdbc/JDBCApps.scala            |   86 ++
 .../data/storage/jdbc/JDBCChannels.scala        |   66 +
 .../data/storage/jdbc/JDBCEngineInstances.scala |  194 +++
 .../data/storage/jdbc/JDBCEngineManifests.scala |  111 ++
 .../storage/jdbc/JDBCEvaluationInstances.scala  |  162 +++
 .../data/storage/jdbc/JDBCLEvents.scala         |  241 ++++
 .../data/storage/jdbc/JDBCModels.scala          |   52 +
 .../data/storage/jdbc/JDBCPEvents.scala         |  160 +++
 .../data/storage/jdbc/JDBCUtils.scala           |  103 ++
 .../data/storage/jdbc/StorageClient.scala       |   50 +
 .../data/storage/jdbc/package.scala             |   23 +
 .../data/storage/localfs/LocalFSModels.scala    |   59 +
 .../data/storage/localfs/StorageClient.scala    |   43 +
 .../data/storage/localfs/package.scala          |   22 +
 .../predictionio/data/storage/package.scala     |   26 +
 .../apache/predictionio/data/store/Common.scala |   50 +
 .../predictionio/data/store/LEventStore.scala   |  142 ++
 .../predictionio/data/store/PEventStore.scala   |  116 ++
 .../data/store/java/LJavaEventStore.scala       |  142 ++
 .../data/store/java/OptionHelper.scala          |   29 +
 .../data/store/java/PJavaEventStore.scala       |  109 ++
 .../predictionio/data/store/package.scala       |   21 +
 .../predictionio/data/view/DataView.scala       |  110 ++
 .../predictionio/data/view/LBatchView.scala     |  200 +++
 .../predictionio/data/view/PBatchView.scala     |  209 +++
 .../predictionio/data/view/QuickTest.scala      |   94 ++
 .../data/webhooks/ConnectorException.scala      |   31 +
 .../data/webhooks/ConnectorUtil.scala           |   46 +
 .../data/webhooks/FormConnector.scala           |   32 +
 .../data/webhooks/JsonConnector.scala           |   31 +
 .../exampleform/ExampleFormConnector.scala      |  123 ++
 .../examplejson/ExampleJsonConnector.scala      |  153 +++
 .../webhooks/mailchimp/MailChimpConnector.scala |  305 +++++
 .../webhooks/segmentio/SegmentIOConnector.scala |  306 +++++
 .../prediction/data/api/EventServiceSpec.scala  |   68 -
 .../prediction/data/api/SegmentIOAuthSpec.scala |  175 ---
 .../io/prediction/data/storage/BiMapSpec.scala  |  196 ---
 .../prediction/data/storage/DataMapSpec.scala   |  243 ----
 .../data/storage/LEventAggregatorSpec.scala     |  103 --
 .../prediction/data/storage/LEventsSpec.scala   |  245 ----
 .../data/storage/PEventAggregatorSpec.scala     |   72 -
 .../prediction/data/storage/PEventsSpec.scala   |  210 ---
 .../data/storage/StorageTestUtils.scala         |   42 -
 .../io/prediction/data/storage/TestEvents.scala |  263 ----
 .../data/webhooks/ConnectorTestUtil.scala       |   47 -
 .../exampleform/ExampleFormConnectorSpec.scala  |  164 ---
 .../examplejson/ExampleJsonConnectorSpec.scala  |  179 ---
 .../mailchimp/MailChimpConnectorSpec.scala      |  254 ----
 .../segmentio/SegmentIOConnectorSpec.scala      |  335 -----
 .../data/api/EventServiceSpec.scala             |   68 +
 .../data/api/SegmentIOAuthSpec.scala            |  175 +++
 .../predictionio/data/storage/BiMapSpec.scala   |  196 +++
 .../predictionio/data/storage/DataMapSpec.scala |  243 ++++
 .../data/storage/LEventAggregatorSpec.scala     |  103 ++
 .../predictionio/data/storage/LEventsSpec.scala |  245 ++++
 .../data/storage/PEventAggregatorSpec.scala     |   72 +
 .../predictionio/data/storage/PEventsSpec.scala |  210 +++
 .../data/storage/StorageTestUtils.scala         |   42 +
 .../predictionio/data/storage/TestEvents.scala  |  263 ++++
 .../data/webhooks/ConnectorTestUtil.scala       |   47 +
 .../exampleform/ExampleFormConnectorSpec.scala  |  164 +++
 .../examplejson/ExampleJsonConnectorSpec.scala  |  179 +++
 .../mailchimp/MailChimpConnectorSpec.scala      |  254 ++++
 .../segmentio/SegmentIOConnectorSpec.scala      |  335 +++++
 .../prediction/e2/engine/BinaryVectorizer.scala |   61 -
 .../e2/engine/CategoricalNaiveBayes.scala       |  176 ---
 .../io/prediction/e2/engine/MarkovChain.scala   |   89 --
 .../e2/evaluation/CrossValidation.scala         |   64 -
 .../main/scala/io/prediction/e2/package.scala   |   22 -
 e2/src/main/scala/io/prediction/package.scala   |   21 -
 .../e2/engine/BinaryVectorizer.scala            |   61 +
 .../e2/engine/CategoricalNaiveBayes.scala       |  176 +++
 .../predictionio/e2/engine/MarkovChain.scala    |   89 ++
 .../e2/evaluation/CrossValidation.scala         |   64 +
 .../org/apache/predictionio/e2/package.scala    |   22 +
 .../scala/org/apache/predictionio/package.scala |   21 +
 .../e2/engine/BinaryVectorizerTest.scala        |   56 -
 .../e2/engine/CategoricalNaiveBayesTest.scala   |  132 --
 .../prediction/e2/engine/MarkovChainTest.scala  |   49 -
 .../e2/evaluation/CrossValidationTest.scala     |  111 --
 .../e2/fixture/BinaryVectorizerFixture.scala    |   59 -
 .../e2/fixture/MarkovChainFixture.scala         |   39 -
 .../e2/fixture/NaiveBayesFixture.scala          |   48 -
 .../e2/fixture/SharedSparkContext.scala         |   51 -
 .../e2/engine/BinaryVectorizerTest.scala        |   56 +
 .../e2/engine/CategoricalNaiveBayesTest.scala   |  132 ++
 .../e2/engine/MarkovChainTest.scala             |   49 +
 .../e2/evaluation/CrossValidationTest.scala     |  111 ++
 .../e2/fixture/BinaryVectorizerFixture.scala    |   59 +
 .../e2/fixture/MarkovChainFixture.scala         |   39 +
 .../e2/fixture/NaiveBayesFixture.scala          |   48 +
 .../e2/fixture/SharedSparkContext.scala         |   51 +
 .../io/prediction/tools/RegisterEngine.scala    |   84 --
 .../scala/io/prediction/tools/RunServer.scala   |  178 ---
 .../scala/io/prediction/tools/RunWorkflow.scala |  212 ---
 .../main/scala/io/prediction/tools/Runner.scala |  211 ---
 .../io/prediction/tools/admin/AdminAPI.scala    |  156 ---
 .../prediction/tools/admin/CommandClient.scala  |  160 ---
 .../scala/io/prediction/tools/admin/README.md   |  161 ---
 .../io/prediction/tools/console/AccessKey.scala |   83 --
 .../scala/io/prediction/tools/console/App.scala |  537 --------
 .../io/prediction/tools/console/Console.scala   | 1277 ------------------
 .../io/prediction/tools/console/Export.scala    |   42 -
 .../io/prediction/tools/console/Import.scala    |   39 -
 .../io/prediction/tools/console/Template.scala  |  429 ------
 .../tools/dashboard/CorsSupport.scala           |   75 -
 .../prediction/tools/dashboard/Dashboard.scala  |  156 ---
 .../prediction/tools/export/EventsToFile.scala  |  104 --
 .../prediction/tools/imprt/FileToEvents.scala   |  103 --
 .../predictionio/tools/RegisterEngine.scala     |   84 ++
 .../apache/predictionio/tools/RunServer.scala   |  178 +++
 .../apache/predictionio/tools/RunWorkflow.scala |  212 +++
 .../org/apache/predictionio/tools/Runner.scala  |  211 +++
 .../predictionio/tools/admin/AdminAPI.scala     |  156 +++
 .../tools/admin/CommandClient.scala             |  160 +++
 .../apache/predictionio/tools/admin/README.md   |  161 +++
 .../predictionio/tools/console/AccessKey.scala  |   83 ++
 .../apache/predictionio/tools/console/App.scala |  537 ++++++++
 .../predictionio/tools/console/Console.scala    | 1277 ++++++++++++++++++
 .../predictionio/tools/console/Export.scala     |   42 +
 .../predictionio/tools/console/Import.scala     |   39 +
 .../predictionio/tools/console/Template.scala   |  429 ++++++
 .../tools/dashboard/CorsSupport.scala           |   75 +
 .../tools/dashboard/Dashboard.scala             |  156 +++
 .../tools/export/EventsToFile.scala             |  104 ++
 .../predictionio/tools/imprt/FileToEvents.scala |  103 ++
 .../tools/console/accesskey.scala.txt           |   20 -
 .../tools/console/adminserver.scala.txt         |    6 -
 .../io/prediction/tools/console/app.scala.txt   |   74 -
 .../io/prediction/tools/console/build.scala.txt |   11 -
 .../tools/console/dashboard.scala.txt           |    6 -
 .../prediction/tools/console/deploy.scala.txt   |   29 -
 .../io/prediction/tools/console/eval.scala.txt  |   10 -
 .../tools/console/eventserver.scala.txt         |    8 -
 .../prediction/tools/console/export.scala.txt   |   14 -
 .../io/prediction/tools/console/imprt.scala.txt |   12 -
 .../io/prediction/tools/console/main.scala.txt  |   52 -
 .../io/prediction/tools/console/run.scala.txt   |   17 -
 .../prediction/tools/console/status.scala.txt   |    3 -
 .../prediction/tools/console/template.scala.txt |   25 -
 .../io/prediction/tools/console/train.scala.txt |   28 -
 .../prediction/tools/console/upgrade.scala.txt  |   15 -
 .../prediction/tools/console/version.scala.txt  |    3 -
 .../prediction/tools/dashboard/index.scala.html |   99 --
 .../itemrank/params/algorithmsJson.scala.txt    |   16 -
 .../itemrank/params/datasourceJson.scala.txt    |   26 -
 .../itemrank/params/preparatorJson.scala.txt    |   10 -
 .../itemrank/params/servingJson.scala.txt       |    1 -
 .../itemrec/params/algorithmsJson.scala.txt     |   15 -
 .../itemrec/params/datasourceJson.scala.txt     |   26 -
 .../itemrec/params/preparatorJson.scala.txt     |   10 -
 .../itemrec/params/servingJson.scala.txt        |    1 -
 .../itemsim/params/algorithmsJson.scala.txt     |   13 -
 .../itemsim/params/datasourceJson.scala.txt     |   26 -
 .../itemsim/params/preparatorJson.scala.txt     |   10 -
 .../itemsim/params/servingJson.scala.txt        |    1 -
 .../tools/templates/scala/buildSbt.scala.txt    |   14 -
 .../tools/templates/scala/engineJson.scala.txt  |    9 -
 .../templates/scala/manifestJson.scala.txt      |    9 -
 .../scala/project/assemblySbt.scala.txt         |    1 -
 .../scala/src/main/scala/engine.scala.txt       |   76 --
 .../tools/console/accesskey.scala.txt           |   20 +
 .../tools/console/adminserver.scala.txt         |    6 +
 .../predictionio/tools/console/app.scala.txt    |   74 +
 .../predictionio/tools/console/build.scala.txt  |   11 +
 .../tools/console/dashboard.scala.txt           |    6 +
 .../predictionio/tools/console/deploy.scala.txt |   29 +
 .../predictionio/tools/console/eval.scala.txt   |   10 +
 .../tools/console/eventserver.scala.txt         |    8 +
 .../predictionio/tools/console/export.scala.txt |   14 +
 .../predictionio/tools/console/imprt.scala.txt  |   12 +
 .../predictionio/tools/console/main.scala.txt   |   52 +
 .../predictionio/tools/console/run.scala.txt    |   17 +
 .../predictionio/tools/console/status.scala.txt |    3 +
 .../tools/console/template.scala.txt            |   25 +
 .../predictionio/tools/console/train.scala.txt  |   28 +
 .../tools/console/upgrade.scala.txt             |   15 +
 .../tools/console/version.scala.txt             |    3 +
 .../tools/dashboard/index.scala.html            |   99 ++
 .../itemrank/params/algorithmsJson.scala.txt    |   16 +
 .../itemrank/params/datasourceJson.scala.txt    |   26 +
 .../itemrank/params/preparatorJson.scala.txt    |   10 +
 .../itemrank/params/servingJson.scala.txt       |    1 +
 .../itemrec/params/algorithmsJson.scala.txt     |   15 +
 .../itemrec/params/datasourceJson.scala.txt     |   26 +
 .../itemrec/params/preparatorJson.scala.txt     |   10 +
 .../itemrec/params/servingJson.scala.txt        |    1 +
 .../itemsim/params/algorithmsJson.scala.txt     |   13 +
 .../itemsim/params/datasourceJson.scala.txt     |   26 +
 .../itemsim/params/preparatorJson.scala.txt     |   10 +
 .../itemsim/params/servingJson.scala.txt        |    1 +
 .../tools/templates/scala/buildSbt.scala.txt    |   14 +
 .../tools/templates/scala/engineJson.scala.txt  |    9 +
 .../templates/scala/manifestJson.scala.txt      |    9 +
 .../scala/project/assemblySbt.scala.txt         |    1 +
 .../scala/src/main/scala/engine.scala.txt       |   76 ++
 .../prediction/tools/admin/AdminAPISpec.scala   |   66 -
 .../predictionio/tools/admin/AdminAPISpec.scala |   66 +
 506 files changed, 28276 insertions(+), 28276 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/common/src/main/scala/io/prediction/annotation/DeveloperApi.java
----------------------------------------------------------------------
diff --git a/common/src/main/scala/io/prediction/annotation/DeveloperApi.java b/common/src/main/scala/io/prediction/annotation/DeveloperApi.java
deleted file mode 100644
index 63536ef..0000000
--- a/common/src/main/scala/io/prediction/annotation/DeveloperApi.java
+++ /dev/null
@@ -1,34 +0,0 @@
-/** Copyright 2015 TappingStone, Inc.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package io.prediction.annotation;
-
-import java.lang.annotation.*;
-
-/**
- * A lower-level, unstable API intended for developers.
- *
- * Developer APIs might change or be removed in minor versions of PredictionIO.
- *
- * NOTE: If there exists a Scaladoc comment that immediately precedes this
- * annotation, the first line of the comment must be ":: DeveloperApi ::" with
- * no trailing blank line. This is because of the known issue that Scaladoc
- * displays only either the annotation or the comment, whichever comes first.
- */
-@Retention(RetentionPolicy.RUNTIME)
-@Target({ElementType.TYPE, ElementType.FIELD, ElementType.METHOD,
-        ElementType.PARAMETER, ElementType.CONSTRUCTOR, ElementType.LOCAL_VARIABLE,
-        ElementType.PACKAGE})
-public @interface DeveloperApi {}

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/common/src/main/scala/io/prediction/annotation/Experimental.java
----------------------------------------------------------------------
diff --git a/common/src/main/scala/io/prediction/annotation/Experimental.java b/common/src/main/scala/io/prediction/annotation/Experimental.java
deleted file mode 100644
index 86ec052..0000000
--- a/common/src/main/scala/io/prediction/annotation/Experimental.java
+++ /dev/null
@@ -1,35 +0,0 @@
-/** Copyright 2015 TappingStone, Inc.
-  *
-  * Licensed under the Apache License, Version 2.0 (the "License");
-  * you may not use this file except in compliance with the License.
-  * You may obtain a copy of the License at
-  *
-  *     http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-package io.prediction.annotation;
-
-import java.lang.annotation.*;
-
-/**
- * An experimental user-facing API.
- *
- * Experimental APIs might change or be removed, or be adopted as first-class
- * APIs.
- *
- * NOTE: If there exists a Scaladoc comment that immediately precedes this
- * annotation, the first line of the comment must be ":: Experimental ::" with
- * no trailing blank line. This is because of the known issue that Scaladoc
- * displays only either the annotation or the comment, whichever comes first.
- */
-@Retention(RetentionPolicy.RUNTIME)
-@Target({ElementType.TYPE, ElementType.FIELD, ElementType.METHOD,
-  ElementType.PARAMETER, ElementType.CONSTRUCTOR, ElementType.LOCAL_VARIABLE,
-  ElementType.PACKAGE})
-public @interface Experimental {}

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/common/src/main/scala/io/prediction/authentication/KeyAuthentication.scala
----------------------------------------------------------------------
diff --git a/common/src/main/scala/io/prediction/authentication/KeyAuthentication.scala b/common/src/main/scala/io/prediction/authentication/KeyAuthentication.scala
deleted file mode 100644
index 752b5e1..0000000
--- a/common/src/main/scala/io/prediction/authentication/KeyAuthentication.scala
+++ /dev/null
@@ -1,55 +0,0 @@
-/** Copyright 2015 TappingStone, Inc.
-  *
-  * Licensed under the Apache License, Version 2.0 (the "License");
-  * you may not use this file except in compliance with the License.
-  * You may obtain a copy of the License at
-  *
-  *     http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-package io.prediction.authentication
-
-/**
-  * This is a (very) simple authentication scheme for the dashboard and
-  * engine servers. It is highly recommended to implement a stronger
-  * authentication mechanism.
-  */
-
-import com.typesafe.config.ConfigFactory
-import spray.http.HttpRequest
-import spray.routing.authentication._
-import spray.routing.{AuthenticationFailedRejection, RequestContext}
-
-import scala.concurrent.ExecutionContext.Implicits.global
-import scala.concurrent.Future
-
-trait KeyAuthentication {
-
-  object ServerKey {
-    private val config = ConfigFactory.load("server.conf")
-    val get = config.getString("io.prediction.server.accessKey")
-    val param = "accessKey"
-  }
-
-  def withAccessKeyFromFile: RequestContext => Future[Authentication[HttpRequest]] = {
-    ctx: RequestContext =>
-      val accessKeyParamOpt = ctx.request.uri.query.get(ServerKey.param)
-      Future {
-
-        val passedKey = accessKeyParamOpt.getOrElse {
-          Left(AuthenticationFailedRejection(
-            AuthenticationFailedRejection.CredentialsRejected, List()))
-        }
-
-        if (passedKey.equals(ServerKey.get)) Right(ctx.request)
-        else Left(AuthenticationFailedRejection(
-          AuthenticationFailedRejection.CredentialsRejected, List()))
-
-      }
-  }
-}

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/common/src/main/scala/io/prediction/configuration/SSLConfiguration.scala
----------------------------------------------------------------------
diff --git a/common/src/main/scala/io/prediction/configuration/SSLConfiguration.scala b/common/src/main/scala/io/prediction/configuration/SSLConfiguration.scala
deleted file mode 100644
index f784130..0000000
--- a/common/src/main/scala/io/prediction/configuration/SSLConfiguration.scala
+++ /dev/null
@@ -1,71 +0,0 @@
-/** Copyright 2015 TappingStone, Inc.
-  *
-  * Licensed under the Apache License, Version 2.0 (the "License");
-  * you may not use this file except in compliance with the License.
-  * You may obtain a copy of the License at
-  *
-  *     http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-package io.prediction.configuration
-
-/**
-  * Created by ykhodorkovsky on 2/26/16.
-  */
-
-import java.io.FileInputStream
-import java.security.KeyStore
-import javax.net.ssl.{KeyManagerFactory, SSLContext, TrustManagerFactory}
-
-import com.typesafe.config.ConfigFactory
-import spray.io.ServerSSLEngineProvider
-
-trait SSLConfiguration {
-
-  private val serverConfig = ConfigFactory.load("server.conf")
-
-  private val keyStoreResource =
-    serverConfig.getString("io.prediction.server.ssl-keystore-resource")
-  private val password = serverConfig.getString("io.prediction.server.ssl-keystore-pass")
-  private val keyAlias = serverConfig.getString("io.prediction.server.ssl-key-alias")
-
-  private val keyStore = {
-
-    // Loading keystore from specified file
-    val clientStore = KeyStore.getInstance("JKS")
-    val inputStream = new FileInputStream(
-      getClass().getClassLoader().getResource(keyStoreResource).getFile())
-    clientStore.load(inputStream, password.toCharArray)
-    inputStream.close()
-    clientStore
-  }
-
-  // Creating SSL context
-  implicit def sslContext: SSLContext = {
-    val context = SSLContext.getInstance("TLS")
-    val tmf = TrustManagerFactory.getInstance(TrustManagerFactory.getDefaultAlgorithm)
-    val kmf = KeyManagerFactory.getInstance(KeyManagerFactory.getDefaultAlgorithm)
-    kmf.init(keyStore, password.toCharArray)
-    tmf.init(keyStore)
-    context.init(kmf.getKeyManagers, tmf.getTrustManagers, null)
-    context
-  }
-
-  // provide implicit SSLEngine with some protocols
-  implicit def sslEngineProvider: ServerSSLEngineProvider = {
-    ServerSSLEngineProvider { engine =>
-      engine.setEnabledCipherSuites(Array(
-        "TLS_RSA_WITH_AES_256_CBC_SHA",
-        "TLS_ECDH_ECDSA_WITH_RC4_128_SHA",
-        "TLS_RSA_WITH_AES_128_CBC_SHA"))
-      engine.setEnabledProtocols(Array("TLSv1", "TLSv1.2", "TLSv1.1"))
-      engine
-    }
-  }
-}

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/common/src/main/scala/org/apache/predictionio/annotation/DeveloperApi.java
----------------------------------------------------------------------
diff --git a/common/src/main/scala/org/apache/predictionio/annotation/DeveloperApi.java b/common/src/main/scala/org/apache/predictionio/annotation/DeveloperApi.java
new file mode 100644
index 0000000..d372184
--- /dev/null
+++ b/common/src/main/scala/org/apache/predictionio/annotation/DeveloperApi.java
@@ -0,0 +1,34 @@
+/** Copyright 2015 TappingStone, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.predictionio.annotation;
+
+import java.lang.annotation.*;
+
+/**
+ * A lower-level, unstable API intended for developers.
+ *
+ * Developer APIs might change or be removed in minor versions of PredictionIO.
+ *
+ * NOTE: If there exists a Scaladoc comment that immediately precedes this
+ * annotation, the first line of the comment must be ":: DeveloperApi ::" with
+ * no trailing blank line. This is because of the known issue that Scaladoc
+ * displays only either the annotation or the comment, whichever comes first.
+ */
+@Retention(RetentionPolicy.RUNTIME)
+@Target({ElementType.TYPE, ElementType.FIELD, ElementType.METHOD,
+        ElementType.PARAMETER, ElementType.CONSTRUCTOR, ElementType.LOCAL_VARIABLE,
+        ElementType.PACKAGE})
+public @interface DeveloperApi {}

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/common/src/main/scala/org/apache/predictionio/annotation/Experimental.java
----------------------------------------------------------------------
diff --git a/common/src/main/scala/org/apache/predictionio/annotation/Experimental.java b/common/src/main/scala/org/apache/predictionio/annotation/Experimental.java
new file mode 100644
index 0000000..a304505
--- /dev/null
+++ b/common/src/main/scala/org/apache/predictionio/annotation/Experimental.java
@@ -0,0 +1,35 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package org.apache.predictionio.annotation;
+
+import java.lang.annotation.*;
+
+/**
+ * An experimental user-facing API.
+ *
+ * Experimental APIs might change or be removed, or be adopted as first-class
+ * APIs.
+ *
+ * NOTE: If there exists a Scaladoc comment that immediately precedes this
+ * annotation, the first line of the comment must be ":: Experimental ::" with
+ * no trailing blank line. This is because of the known issue that Scaladoc
+ * displays only either the annotation or the comment, whichever comes first.
+ */
+@Retention(RetentionPolicy.RUNTIME)
+@Target({ElementType.TYPE, ElementType.FIELD, ElementType.METHOD,
+  ElementType.PARAMETER, ElementType.CONSTRUCTOR, ElementType.LOCAL_VARIABLE,
+  ElementType.PACKAGE})
+public @interface Experimental {}
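
Both annotations are consumed from Scala code elsewhere in the tree. As a
usage illustration only (hypothetical classes, not part of this commit), the
Scaladoc convention described in the comments above looks like this in
practice:

    import org.apache.predictionio.annotation.{DeveloperApi, Experimental}

    /** :: DeveloperApi ::
      * A hypothetical low-level hook; the ":: DeveloperApi ::" marker is the
      * first line, with no trailing blank line, so Scaladoc renders both the
      * badge and the comment.
      */
    @DeveloperApi
    class ModelHook

    object Features {
      /** :: Experimental ::
        * A hypothetical API that may change, disappear, or be promoted.
        */
      @Experimental
      def riskyFeature(): Unit = ()
    }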

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/common/src/main/scala/org/apache/predictionio/authentication/KeyAuthentication.scala
----------------------------------------------------------------------
diff --git a/common/src/main/scala/org/apache/predictionio/authentication/KeyAuthentication.scala b/common/src/main/scala/org/apache/predictionio/authentication/KeyAuthentication.scala
new file mode 100644
index 0000000..0553952
--- /dev/null
+++ b/common/src/main/scala/org/apache/predictionio/authentication/KeyAuthentication.scala
@@ -0,0 +1,55 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package org.apache.predictionio.authentication
+
+/**
+  * This is a (very) simple authentication scheme for the dashboard and
+  * engine servers. It is highly recommended to implement a stronger
+  * authentication mechanism.
+  */
+
+import com.typesafe.config.ConfigFactory
+import spray.http.HttpRequest
+import spray.routing.authentication._
+import spray.routing.{AuthenticationFailedRejection, RequestContext}
+
+import scala.concurrent.ExecutionContext.Implicits.global
+import scala.concurrent.Future
+
+trait KeyAuthentication {
+
+  object ServerKey {
+    private val config = ConfigFactory.load("server.conf")
+    val get = config.getString("io.prediction.server.accessKey")
+    val param = "accessKey"
+  }
+
+  def withAccessKeyFromFile: RequestContext => Future[Authentication[HttpRequest]] = {
+    ctx: RequestContext =>
+      val accessKeyParamOpt = ctx.request.uri.query.get(ServerKey.param)
+      Future {
+
+        val passedKey = accessKeyParamOpt.getOrElse {
+          Left(AuthenticationFailedRejection(
+            AuthenticationFailedRejection.CredentialsRejected, List()))
+        }
+
+        if (passedKey.equals(ServerKey.get)) Right(ctx.request)
+        else Left(AuthenticationFailedRejection(
+          AuthenticationFailedRejection.CredentialsRejected, List()))
+
+      }
+  }
+}
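
For illustration, a minimal sketch of wiring this trait into a route
(assumptions: spray-routing's authenticate directive, which accepts a
RequestContext => Future[Authentication[T]] such as withAccessKeyFromFile;
the SecuredService trait and the "admin" path are hypothetical):

    import org.apache.predictionio.authentication.KeyAuthentication
    import spray.routing.HttpService

    trait SecuredService extends HttpService with KeyAuthentication {
      implicit def executionContext = actorRefFactory.dispatcher

      // Rejects any request whose ?accessKey=... query parameter does not
      // match the key configured in server.conf.
      val secured = path("admin") {
        authenticate(withAccessKeyFromFile) { _ =>
          complete("authorized")
        }
      }
    }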

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/common/src/main/scala/org/apache/predictionio/configuration/SSLConfiguration.scala
----------------------------------------------------------------------
diff --git a/common/src/main/scala/org/apache/predictionio/configuration/SSLConfiguration.scala b/common/src/main/scala/org/apache/predictionio/configuration/SSLConfiguration.scala
new file mode 100644
index 0000000..2a9344d
--- /dev/null
+++ b/common/src/main/scala/org/apache/predictionio/configuration/SSLConfiguration.scala
@@ -0,0 +1,71 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package org.apache.predictionio.configuration
+
+/**
+  * Created by ykhodorkovsky on 2/26/16.
+  */
+
+import java.io.FileInputStream
+import java.security.KeyStore
+import javax.net.ssl.{KeyManagerFactory, SSLContext, TrustManagerFactory}
+
+import com.typesafe.config.ConfigFactory
+import spray.io.ServerSSLEngineProvider
+
+trait SSLConfiguration {
+
+  private val serverConfig = ConfigFactory.load("server.conf")
+
+  private val keyStoreResource =
+    serverConfig.getString("io.prediction.server.ssl-keystore-resource")
+  private val password = serverConfig.getString("io.prediction.server.ssl-keystore-pass")
+  private val keyAlias = serverConfig.getString("io.prediction.server.ssl-key-alias")
+
+  private val keyStore = {
+
+    // Loading keystore from specified file
+    val clientStore = KeyStore.getInstance("JKS")
+    val inputStream = new FileInputStream(
+      getClass().getClassLoader().getResource(keyStoreResource).getFile())
+    clientStore.load(inputStream, password.toCharArray)
+    inputStream.close()
+    clientStore
+  }
+
+  // Creating SSL context
+  implicit def sslContext: SSLContext = {
+    val context = SSLContext.getInstance("TLS")
+    val tmf = TrustManagerFactory.getInstance(TrustManagerFactory.getDefaultAlgorithm)
+    val kmf = KeyManagerFactory.getInstance(KeyManagerFactory.getDefaultAlgorithm)
+    kmf.init(keyStore, password.toCharArray)
+    tmf.init(keyStore)
+    context.init(kmf.getKeyManagers, tmf.getTrustManagers, null)
+    context
+  }
+
+  // provide implicit SSLEngine with some protocols
+  implicit def sslEngineProvider: ServerSSLEngineProvider = {
+    ServerSSLEngineProvider { engine =>
+      engine.setEnabledCipherSuites(Array(
+        "TLS_RSA_WITH_AES_256_CBC_SHA",
+        "TLS_ECDH_ECDSA_WITH_RC4_128_SHA",
+        "TLS_RSA_WITH_AES_128_CBC_SHA"))
+      engine.setEnabledProtocols(Array("TLSv1", "TLSv1.2", "TLSv1.1"))
+      engine
+    }
+  }
+}
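
A rough sketch of how this trait might be mixed into a spray-can boot object
(assumptions: spray-can 1.3's Http.Bind and ServerSettings with an
sslEncryption flag; EchoActor is a placeholder handler invented for the
sketch):

    import akka.actor.{Actor, ActorSystem, Props}
    import akka.io.IO
    import spray.can.Http
    import spray.can.server.ServerSettings
    import org.apache.predictionio.configuration.SSLConfiguration

    // Placeholder request handler so the sketch is self-contained.
    class EchoActor extends Actor {
      def receive = { case _ => () }
    }

    object SecureBoot extends App with SSLConfiguration {
      implicit val system = ActorSystem("secure-server")
      val service = system.actorOf(Props[EchoActor], "service")
      // The implicit sslContext and sslEngineProvider supplied by
      // SSLConfiguration take effect once ssl-encryption is switched on.
      IO(Http) ! Http.Bind(service, interface = "0.0.0.0", port = 8443,
        settings = Some(ServerSettings(system).copy(sslEncryption = true)))
    }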

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/core/src/main/scala/io/prediction/controller/CustomQuerySerializer.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/io/prediction/controller/CustomQuerySerializer.scala b/core/src/main/scala/io/prediction/controller/CustomQuerySerializer.scala
deleted file mode 100644
index 35ab6c3..0000000
--- a/core/src/main/scala/io/prediction/controller/CustomQuerySerializer.scala
+++ /dev/null
@@ -1,37 +0,0 @@
-/** Copyright 2015 TappingStone, Inc.
-  *
-  * Licensed under the Apache License, Version 2.0 (the "License");
-  * you may not use this file except in compliance with the License.
-  * You may obtain a copy of the License at
-  *
-  *     http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-package io.prediction.controller
-
-import io.prediction.core.BaseQuerySerializer
-
-/** If your query class cannot be automatically serialized/deserialized to/from
-  * JSON, implement a trait that extends this trait and overrides the
-  * `querySerializer` member with your
-  * [[https://github.com/json4s/json4s#serializing-non-supported-types custom JSON4S serializer]].
-  * Algorithm and serving classes using your query class then only need to mix
-  * in the trait to enable the custom serializer.
-  *
-  * @group Helper
-  */
-trait CustomQuerySerializer extends BaseQuerySerializer
-
-/** DEPRECATED. Use [[CustomQuerySerializer]] instead.
-  *
-  * @group Helper
-  */
-@deprecated("Use CustomQuerySerializer instead.", "0.9.2")
-trait WithQuerySerializer extends CustomQuerySerializer
-
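
To make the trait's intent concrete, a hypothetical sketch (assumptions:
querySerializer is a lazy json4s Formats member as the scaladoc describes;
the renamed org.apache.predictionio package is used; Temperature is an
invented type):

    import org.json4s._
    import org.apache.predictionio.controller.CustomQuerySerializer

    // A query field that json4s cannot map automatically.
    case class Temperature(celsius: Double)

    // Custom json4s (de)serializer for Temperature.
    class TemperatureSerializer extends CustomSerializer[Temperature](_ => (
      { case JDouble(c) => Temperature(c) },
      { case Temperature(c) => JDouble(c) }
    ))

    // Algorithm and serving classes mix this in to enable the serializer.
    trait WithTemperatureSerializer extends CustomQuerySerializer {
      @transient override lazy val querySerializer =
        DefaultFormats + new TemperatureSerializer
    }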

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/core/src/main/scala/io/prediction/controller/Deployment.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/io/prediction/controller/Deployment.scala b/core/src/main/scala/io/prediction/controller/Deployment.scala
deleted file mode 100644
index 49e14d5..0000000
--- a/core/src/main/scala/io/prediction/controller/Deployment.scala
+++ /dev/null
@@ -1,56 +0,0 @@
-/** Copyright 2015 TappingStone, Inc.
-  *
-  * Licensed under the Apache License, Version 2.0 (the "License");
-  * you may not use this file except in compliance with the License.
-  * You may obtain a copy of the License at
-  *
-  *     http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-package io.prediction.controller
-
-import io.prediction.core.BaseEngine
-
-import scala.language.implicitConversions
-
-/** Defines a deployment that contains an [[Engine]]
-  *
-  * @group Engine
-  */
-trait Deployment extends EngineFactory {
-  protected[this] var _engine: BaseEngine[_, _, _, _] = _
-  protected[this] var engineSet: Boolean = false
-
-  /** Returns the [[Engine]] of this [[Deployment]] */
-  def apply(): BaseEngine[_, _, _, _] = {
-    assert(engineSet, "Engine not set")
-    _engine
-  }
-
-  /** Returns the [[Engine]] contained in this [[Deployment]]. */
-  private [prediction]
-  def engine: BaseEngine[_, _, _, _] = {
-    assert(engineSet, "Engine not set")
-    _engine
-  }
-
-  /** Sets the [[Engine]] for this [[Deployment]]
-    *
-    * @param engine An implementation of [[Engine]]
-    * @tparam EI Evaluation information class
-    * @tparam Q Query class
-    * @tparam P Predicted result class
-    * @tparam A Actual result class
-    */
-  def engine_=[EI, Q, P, A](engine: BaseEngine[EI, Q, P, A]) {
-    assert(!engineSet, "Engine can be set at most once")
-    _engine = engine
-    engineSet = true
-  }
-}
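
A two-line hypothetical sketch of the set-once semantics, reusing the
ItemRankEngine factory from the Engine scaladoc example just below (so it
compiles only alongside that example):

    import org.apache.predictionio.controller.Deployment

    object ItemRankDeployment extends Deployment {
      // engine_= asserts the engine is assigned at most once; a second
      // assignment trips the "Engine can be set at most once" assertion.
      engine = ItemRankEngine()
    }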

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/core/src/main/scala/io/prediction/controller/Engine.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/io/prediction/controller/Engine.scala b/core/src/main/scala/io/prediction/controller/Engine.scala
deleted file mode 100644
index 5cc2e31..0000000
--- a/core/src/main/scala/io/prediction/controller/Engine.scala
+++ /dev/null
@@ -1,829 +0,0 @@
-/** Copyright 2015 TappingStone, Inc.
-  *
-  * Licensed under the Apache License, Version 2.0 (the "License");
-  * you may not use this file except in compliance with the License.
-  * You may obtain a copy of the License at
-  *
-  *     http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-package io.prediction.controller
-
-import grizzled.slf4j.Logger
-import io.prediction.core.BaseAlgorithm
-import io.prediction.core.BaseDataSource
-import io.prediction.core.BaseEngine
-import io.prediction.core.BasePreparator
-import io.prediction.core.BaseServing
-import io.prediction.core.Doer
-import io.prediction.data.storage.EngineInstance
-import io.prediction.data.storage.StorageClientException
-import io.prediction.workflow.CreateWorkflow
-import io.prediction.workflow.EngineLanguage
-import io.prediction.workflow.JsonExtractorOption.JsonExtractorOption
-import io.prediction.workflow.NameParamsSerializer
-import io.prediction.workflow.PersistentModelManifest
-import io.prediction.workflow.SparkWorkflowUtils
-import io.prediction.workflow.StopAfterPrepareInterruption
-import io.prediction.workflow.StopAfterReadInterruption
-import io.prediction.workflow.WorkflowParams
-import io.prediction.workflow.WorkflowUtils
-import org.apache.spark.SparkContext
-import org.apache.spark.SparkContext._
-import org.apache.spark.rdd.RDD
-import org.json4s._
-import org.json4s.native.JsonMethods._
-import org.json4s.native.Serialization.read
-
-import scala.collection.JavaConversions
-import scala.language.implicitConversions
-
-/** This class chains up the entire data process. PredictionIO uses this
-  * information to create workflows and deployments. In Scala, you should
-  * implement an object that extends the [[EngineFactory]] trait similar to the
-  * following example.
-  *
-  * {{{
-  * object ItemRankEngine extends EngineFactory {
-  *   def apply() = {
-  *     new Engine(
-  *       classOf[ItemRankDataSource],
-  *       classOf[ItemRankPreparator],
-  *       Map(
-  *         "knn" -> classOf[KNNAlgorithm],
-  *         "rand" -> classOf[RandomAlgorithm],
-  *         "mahoutItemBased" -> classOf[MahoutItemBasedAlgorithm]),
-  *       classOf[ItemRankServing])
-  *   }
-  * }
-  * }}}
-  *
-  * @see [[EngineFactory]]
-  * @tparam TD Training data class.
-  * @tparam EI Evaluation info class.
-  * @tparam PD Prepared data class.
-  * @tparam Q Input query class.
-  * @tparam P Output prediction class.
-  * @tparam A Actual value class.
-  * @param dataSourceClassMap Map of data source names to class.
-  * @param preparatorClassMap Map of preparator names to class.
-  * @param algorithmClassMap Map of algorithm names to classes.
-  * @param servingClassMap Map of serving names to class.
-  * @group Engine
-  */
-class Engine[TD, EI, PD, Q, P, A](
-    val dataSourceClassMap: Map[String,
-      Class[_ <: BaseDataSource[TD, EI, Q, A]]],
-    val preparatorClassMap: Map[String, Class[_ <: BasePreparator[TD, PD]]],
-    val algorithmClassMap: Map[String, Class[_ <: BaseAlgorithm[PD, _, Q, P]]],
-    val servingClassMap: Map[String, Class[_ <: BaseServing[Q, P]]])
-  extends BaseEngine[EI, Q, P, A] {
-
-  private[prediction]
-  implicit lazy val formats = Utils.json4sDefaultFormats +
-    new NameParamsSerializer
-
-  @transient lazy protected val logger = Logger[this.type]
-
-  /** This auxiliary constructor is provided for backward compatibility.
-    *
-    * @param dataSourceClass Data source class.
-    * @param preparatorClass Preparator class.
-    * @param algorithmClassMap Map of algorithm names to classes.
-    * @param servingClass Serving class.
-    */
-  def this(
-    dataSourceClass: Class[_ <: BaseDataSource[TD, EI, Q, A]],
-    preparatorClass: Class[_ <: BasePreparator[TD, PD]],
-    algorithmClassMap: Map[String, Class[_ <: BaseAlgorithm[PD, _, Q, P]]],
-    servingClass: Class[_ <: BaseServing[Q, P]]) = this(
-      Map("" -> dataSourceClass),
-      Map("" -> preparatorClass),
-      algorithmClassMap,
-      Map("" -> servingClass)
-    )
-
-  /** Java-friendly constructor
-    *
-    * @param dataSourceClass Data source class.
-    * @param preparatorClass Preparator class.
-    * @param algorithmClassMap Map of algorithm names to classes.
-    * @param servingClass Serving class.
-    */
-  def this(dataSourceClass: Class[_ <: BaseDataSource[TD, EI, Q, A]],
-    preparatorClass: Class[_ <: BasePreparator[TD, PD]],
-    algorithmClassMap: _root_.java.util.Map[String, Class[_ <: BaseAlgorithm[PD, _, Q, P]]],
-    servingClass: Class[_ <: BaseServing[Q, P]]) = this(
-    Map("" -> dataSourceClass),
-    Map("" -> preparatorClass),
-    JavaConversions.mapAsScalaMap(algorithmClassMap).toMap,
-    Map("" -> servingClass)
-  )
-
-  /** Returns a new Engine instance, mimicking case class's copy method behavior.
-    */
-  def copy(
-    dataSourceClassMap: Map[String, Class[_ <: BaseDataSource[TD, EI, Q, A]]]
-      = dataSourceClassMap,
-    preparatorClassMap: Map[String, Class[_ <: BasePreparator[TD, PD]]]
-      = preparatorClassMap,
-    algorithmClassMap: Map[String, Class[_ <: BaseAlgorithm[PD, _, Q, P]]]
-      = algorithmClassMap,
-    servingClassMap: Map[String, Class[_ <: BaseServing[Q, P]]]
-      = servingClassMap): Engine[TD, EI, PD, Q, P, A] = {
-    new Engine(
-      dataSourceClassMap,
-      preparatorClassMap,
-      algorithmClassMap,
-      servingClassMap)
-  }
-
-  /** Training this engine returns a list of models.
-    *
-    * @param sc An instance of SparkContext.
-    * @param engineParams An instance of [[EngineParams]] for running a single training.
-    * @param params An instance of [[WorkflowParams]] that controls the workflow.
-    * @return A list of models.
-    */
-  def train(
-      sc: SparkContext,
-      engineParams: EngineParams,
-      engineInstanceId: String,
-      params: WorkflowParams): Seq[Any] = {
-    val (dataSourceName, dataSourceParams) = engineParams.dataSourceParams
-    val dataSource = Doer(dataSourceClassMap(dataSourceName), dataSourceParams)
-
-    val (preparatorName, preparatorParams) = engineParams.preparatorParams
-    val preparator = Doer(preparatorClassMap(preparatorName), preparatorParams)
-
-    val algoParamsList = engineParams.algorithmParamsList
-    require(
-      algoParamsList.size > 0,
-      "EngineParams.algorithmParamsList must have at least 1 element.")
-
-    val algorithms = algoParamsList.map { case (algoName, algoParams) =>
-      Doer(algorithmClassMap(algoName), algoParams)
-    }
-
-    val models = Engine.train(
-      sc, dataSource, preparator, algorithms, params)
-
-    val algoCount = algorithms.size
-    val algoTuples: Seq[(String, Params, BaseAlgorithm[_, _, _, _], Any)] =
-    (0 until algoCount).map { ax => {
-      val (name, params) = algoParamsList(ax)
-      (name, params, algorithms(ax), models(ax))
-    }}
-
-    makeSerializableModels(
-      sc,
-      engineInstanceId = engineInstanceId,
-      algoTuples = algoTuples)
-  }
-
-  /** Algorithm models can be persisted before deploy. However, it is also
-    * possible that models are not persisted. This method retrains
-    * non-persisted models and returns a list of models that can be used
-    * directly in deploy.
-    */
-  private[prediction]
-  def prepareDeploy(
-    sc: SparkContext,
-    engineParams: EngineParams,
-    engineInstanceId: String,
-    persistedModels: Seq[Any],
-    params: WorkflowParams): Seq[Any] = {
-
-    val algoParamsList = engineParams.algorithmParamsList
-    val algorithms = algoParamsList.map { case (algoName, algoParams) =>
-      Doer(algorithmClassMap(algoName), algoParams)
-    }
-
-    val models = if (persistedModels.exists(m => m.isInstanceOf[Unit.type])) {
-      // If any of persistedModels is Unit, we need to re-train the model.
-      logger.info("Some persisted models are Unit, need to re-train.")
-      val (dataSourceName, dataSourceParams) = engineParams.dataSourceParams
-      val dataSource = Doer(dataSourceClassMap(dataSourceName), dataSourceParams)
-
-      val (preparatorName, preparatorParams) = engineParams.preparatorParams
-      val preparator = Doer(preparatorClassMap(preparatorName), preparatorParams)
-
-      val td = dataSource.readTrainingBase(sc)
-      val pd = preparator.prepareBase(sc, td)
-
-      val models = algorithms.zip(persistedModels).map { case (algo, m) =>
-        m match {
-          case Unit => algo.trainBase(sc, pd)
-          case _ => m
-        }
-      }
-      models
-    } else {
-      logger.info("Using persisted model")
-      persistedModels
-    }
-
-    models
-    .zip(algorithms)
-    .zip(algoParamsList)
-    .zipWithIndex
-    .map {
-      case (((model, algo), (algoName, algoParams)), ax) => {
-        model match {
-          case modelManifest: PersistentModelManifest => {
-            logger.info("Custom-persisted model detected for algorithm " +
-              algo.getClass.getName)
-            SparkWorkflowUtils.getPersistentModel(
-              modelManifest,
-              Seq(engineInstanceId, ax, algoName).mkString("-"),
-              algoParams,
-              Some(sc),
-              getClass.getClassLoader)
-          }
-          case m => {
-            try {
-              logger.info(
-                s"Loaded model ${m.getClass.getName} for algorithm " +
-                s"${algo.getClass.getName}")
-              sc.stop
-              m
-            } catch {
-              case e: NullPointerException =>
-                logger.warn(
-                  s"Null model detected for algorithm ${algo.getClass.getName}")
-                m
-            }
-          }
-        }  // model match
-      }
-    }
-  }
-
-  /** Extract models for the persistence layer.
-    *
-    * PredictionIO persists models for future use, and allows custom
-    * implementations of model persistence. You need to implement the
-    * [[io.prediction.controller.PersistentModel]] interface. This method
-    * traverses all models in the workflow. If a model is a
-    * [[io.prediction.controller.PersistentModel]], it calls the save method
-    * for custom persistence logic.
-    *
-    * For models that do not support custom logic, PredictionIO serializes the
-    * whole model if the corresponding algorithm is local. On the other hand,
-    * if the model is parallel (i.e. a model associated with a number of huge
-    * RDDs), this method returns Unit, in which case PredictionIO will retrain
-    * the whole model from scratch the next time it is used.
-    */
-  private def makeSerializableModels(
-    sc: SparkContext,
-    engineInstanceId: String,
-    // AlgoName, Algo, Model
-    algoTuples: Seq[(String, Params, BaseAlgorithm[_, _, _, _], Any)]
-  ): Seq[Any] = {
-
-    logger.info(s"engineInstanceId=$engineInstanceId")
-
-    algoTuples
-    .zipWithIndex
-    .map { case ((name, params, algo, model), ax) =>
-      algo.makePersistentModel(
-        sc = sc,
-        modelId = Seq(engineInstanceId, ax, name).mkString("-"),
-        algoParams = params,
-        bm = model)
-    }
-  }
-
-  /** This is implemented such that [[io.prediction.controller.Evaluation]] can
-    * use this method to generate inputs for [[io.prediction.controller.Metric]].
-    *
-    * @param sc An instance of SparkContext.
-    * @param engineParams An instance of [[EngineParams]] for running a single evaluation.
-    * @param params An instance of [[WorkflowParams]] that controls the workflow.
-    * @return A list of (evaluation information, RDD of (query, predicted
-    *         result, actual result) tuples) pairs.
-    */
-  def eval(
-    sc: SparkContext,
-    engineParams: EngineParams,
-    params: WorkflowParams)
-  : Seq[(EI, RDD[(Q, P, A)])] = {
-    val (dataSourceName, dataSourceParams) = engineParams.dataSourceParams
-    val dataSource = Doer(dataSourceClassMap(dataSourceName), dataSourceParams)
-
-    val (preparatorName, preparatorParams) = engineParams.preparatorParams
-    val preparator = Doer(preparatorClassMap(preparatorName), preparatorParams)
-
-    val algoParamsList = engineParams.algorithmParamsList
-    require(
-      algoParamsList.size > 0,
-      "EngineParams.algorithmParamsList must have at least 1 element.")
-
-    val algorithms = algoParamsList.map { case (algoName, algoParams) => {
-      try {
-        Doer(algorithmClassMap(algoName), algoParams)
-      } catch {
-        case e: NoSuchElementException => {
-          if (algoName == "") {
-            logger.error("Empty algorithm name supplied but it could not " +
-              "match with any algorithm in the engine's definition. " +
-              "Existing algorithm name(s) are: " +
-              s"${algorithmClassMap.keys.mkString(", ")}. Aborting.")
-          } else {
-            logger.error(s"$algoName cannot be found in the engine's " +
-              "definition. Existing algorithm name(s) are: " +
-              s"${algorithmClassMap.keys.mkString(", ")}. Aborting.")
-          }
-          sys.exit(1)
-        }
-      }
-    }}
-
-    val (servingName, servingParams) = engineParams.servingParams
-    val serving = Doer(servingClassMap(servingName), servingParams)
-
-    Engine.eval(sc, dataSource, preparator, algorithms, serving)
-  }
-
-  override def jValueToEngineParams(
-    variantJson: JValue,
-    jsonExtractor: JsonExtractorOption): EngineParams = {
-
-    val engineLanguage = EngineLanguage.Scala
-    // Extract EngineParams
-    logger.info(s"Extracting datasource params...")
-    val dataSourceParams: (String, Params) =
-      WorkflowUtils.getParamsFromJsonByFieldAndClass(
-        variantJson,
-        "datasource",
-        dataSourceClassMap,
-        engineLanguage,
-        jsonExtractor)
-    logger.info(s"Datasource params: $dataSourceParams")
-
-    logger.info(s"Extracting preparator params...")
-    val preparatorParams: (String, Params) =
-      WorkflowUtils.getParamsFromJsonByFieldAndClass(
-        variantJson,
-        "preparator",
-        preparatorClassMap,
-        engineLanguage,
-        jsonExtractor)
-    logger.info(s"Preparator params: $preparatorParams")
-
-    val algorithmsParams: Seq[(String, Params)] =
-      variantJson findField {
-        case JField("algorithms", _) => true
-        case _ => false
-      } map { jv =>
-        val algorithmsParamsJson = jv._2
-        algorithmsParamsJson match {
-          case JArray(s) => s.map { algorithmParamsJValue =>
-            val eap = algorithmParamsJValue.extract[CreateWorkflow.AlgorithmParams]
-            (
-              eap.name,
-              WorkflowUtils.extractParams(
-                engineLanguage,
-                compact(render(eap.params)),
-                algorithmClassMap(eap.name),
-                jsonExtractor)
-            )
-          }
-          case _ => Nil
-        }
-      } getOrElse Seq(("", EmptyParams()))
-
-    logger.info(s"Extracting serving params...")
-    val servingParams: (String, Params) =
-      WorkflowUtils.getParamsFromJsonByFieldAndClass(
-        variantJson,
-        "serving",
-        servingClassMap,
-        engineLanguage,
-        jsonExtractor)
-    logger.info(s"Serving params: $servingParams")
-
-    new EngineParams(
-      dataSourceParams = dataSourceParams,
-      preparatorParams = preparatorParams,
-      algorithmParamsList = algorithmsParams,
-      servingParams = servingParams)
-  }
-
-  private[prediction] def engineInstanceToEngineParams(
-    engineInstance: EngineInstance,
-    jsonExtractor: JsonExtractorOption): EngineParams = {
-
-    implicit val formats = DefaultFormats
-    val engineLanguage = EngineLanguage.Scala
-
-    val dataSourceParamsWithName: (String, Params) = {
-      val (name, params) =
-        read[(String, JValue)](engineInstance.dataSourceParams)
-      if (!dataSourceClassMap.contains(name)) {
-        logger.error(s"Unable to find datasource class with name '$name'" +
-          " defined in Engine.")
-        sys.exit(1)
-      }
-      val extractedParams = WorkflowUtils.extractParams(
-        engineLanguage,
-        compact(render(params)),
-        dataSourceClassMap(name),
-        jsonExtractor)
-      (name, extractedParams)
-    }
-
-    val preparatorParamsWithName: (String, Params) = {
-      val (name, params) =
-        read[(String, JValue)](engineInstance.preparatorParams)
-      if (!preparatorClassMap.contains(name)) {
-        logger.error(s"Unable to find preparator class with name '$name'" +
-          " defined in Engine.")
-        sys.exit(1)
-      }
-      val extractedParams = WorkflowUtils.extractParams(
-        engineLanguage,
-        compact(render(params)),
-        preparatorClassMap(name),
-        jsonExtractor)
-      (name, extractedParams)
-    }
-
-    val algorithmsParamsWithNames =
-      read[Seq[(String, JValue)]](engineInstance.algorithmsParams).map {
-        case (algoName, params) =>
-          val extractedParams = WorkflowUtils.extractParams(
-            engineLanguage,
-            compact(render(params)),
-            algorithmClassMap(algoName),
-            jsonExtractor)
-          (algoName, extractedParams)
-      }
-
-    val servingParamsWithName: (String, Params) = {
-      val (name, params) = read[(String, JValue)](engineInstance.servingParams)
-      if (!servingClassMap.contains(name)) {
-        logger.error(s"Unable to find serving class with name '$name'" +
-          " defined in Engine.")
-        sys.exit(1)
-      }
-      val extractedParams = WorkflowUtils.extractParams(
-        engineLanguage,
-        compact(render(params)),
-        servingClassMap(name),
-        jsonExtractor)
-      (name, extractedParams)
-    }
-
-    new EngineParams(
-      dataSourceParams = dataSourceParamsWithName,
-      preparatorParams = preparatorParamsWithName,
-      algorithmParamsList = algorithmsParamsWithNames,
-      servingParams = servingParamsWithName)
-  }
-}
-
-/** This object contains concrete implementations of some methods of the
-  * [[Engine]] class.
-  *
-  * @group Engine
-  */
-object Engine {
-  private type EX = Int // index of an evaluation data set (data source split)
-  private type AX = Int // index of an algorithm in the algorithm list
-  private type QX = Long // unique index of a query within an evaluation split
-
-  @transient lazy private val logger = Logger[this.type]
-
-  /** Helper class to accept either a single data source, or a map of data
-    * sources, with a companion object providing implicit conversions, so
-    * using this class directly is not necessary.
-    *
-    * @tparam TD Training data class
-    * @tparam EI Evaluation information class
-    * @tparam Q Input query class
-    * @tparam A Actual result class
-    */
-  class DataSourceMap[TD, EI, Q, A](
-    val m: Map[String, Class[_ <: BaseDataSource[TD, EI, Q, A]]]) {
-    def this(c: Class[_ <: BaseDataSource[TD, EI, Q, A]]) = this(Map("" -> c))
-  }
-
-  /** Companion object providing implicit conversions, so using this directly
-    * is not necessary.
-    */
-  object DataSourceMap {
-    implicit def cToMap[TD, EI, Q, A](
-      c: Class[_ <: BaseDataSource[TD, EI, Q, A]]):
-      DataSourceMap[TD, EI, Q, A] = new DataSourceMap(c)
-    implicit def mToMap[TD, EI, Q, A](
-      m: Map[String, Class[_ <: BaseDataSource[TD, EI, Q, A]]]):
-      DataSourceMap[TD, EI, Q, A] = new DataSourceMap(m)
-  }
-
-  /** Helper class to accept either a single preparator, or a map of
-    * preparators, with a companion object providing implicit conversions, so
-    * using this class directly is not necessary.
-    *
-    * @tparam TD Training data class
-    * @tparam PD Prepared data class
-    */
-  class PreparatorMap[TD, PD](
-    val m: Map[String, Class[_ <: BasePreparator[TD, PD]]]) {
-    def this(c: Class[_ <: BasePreparator[TD, PD]]) = this(Map("" -> c))
-  }
-
-  /** Companion object providing implicit conversions, so using this directly
-    * is not necessary.
-    */
-  object PreparatorMap {
-    implicit def cToMap[TD, PD](
-      c: Class[_ <: BasePreparator[TD, PD]]):
-      PreparatorMap[TD, PD] = new PreparatorMap(c)
-    implicit def mToMap[TD, PD](
-      m: Map[String, Class[_ <: BasePreparator[TD, PD]]]):
-      PreparatorMap[TD, PD] = new PreparatorMap(m)
-  }
-
-  /** Helper class to accept either a single serving class, or a map of
-    * serving classes, with a companion object providing implicit conversions,
-    * so using this class directly is not necessary.
-    *
-    * @tparam Q Input query class
-    * @tparam P Predicted result class
-    */
-  class ServingMap[Q, P](
-    val m: Map[String, Class[_ <: BaseServing[Q, P]]]) {
-    def this(c: Class[_ <: BaseServing[Q, P]]) = this(Map("" -> c))
-  }
-
-  /** Companion object providing implicit conversions, so using this directly
-    * is not necessary.
-    */
-  object ServingMap {
-    implicit def cToMap[Q, P](
-      c: Class[_ <: BaseServing[Q, P]]): ServingMap[Q, P] =
-        new ServingMap(c)
-    implicit def mToMap[Q, P](
-      m: Map[String, Class[_ <: BaseServing[Q, P]]]): ServingMap[Q, P] =
-        new ServingMap(m)
-  }
-
-  /** Convenience method for returning an instance of [[Engine]].
-    *
-    * @param dataSourceMap Accepts either an instance of Class of the data
-    *                      source, or a Map of data source classes (implicitly
-    *                      converted to [[DataSourceMap]]).
-    * @param preparatorMap Accepts either an instance of Class of the
-    *                      preparator, or a Map of preparator classes
-    *                      (implicitly converted to [[PreparatorMap]]).
-    * @param algorithmClassMap Accepts a Map of algorithm classes.
-    * @param servingMap Accepts either an instance of Class of the serving, or
-    *                   a Map of serving classes (implicitly converted to
-    *                   [[ServingMap]]).
-    * @tparam TD Training data class
-    * @tparam EI Evaluation information class
-    * @tparam PD Prepared data class
-    * @tparam Q Input query class
-    * @tparam P Predicted result class
-    * @tparam A Actual result class
-    * @return An instance of [[Engine]]
-    */
-  def apply[TD, EI, PD, Q, P, A](
-    dataSourceMap: DataSourceMap[TD, EI, Q, A],
-    preparatorMap: PreparatorMap[TD, PD],
-    algorithmClassMap: Map[String, Class[_ <: BaseAlgorithm[PD, _, Q, P]]],
-    servingMap: ServingMap[Q, P]): Engine[TD, EI, PD, Q, P, A] = new Engine(
-      dataSourceMap.m,
-      preparatorMap.m,
-      algorithmClassMap,
-      servingMap.m
-    )
-
-  /** Provides concrete implementation of training for [[Engine]].
-    *
-    * @param sc An instance of SparkContext
-    * @param dataSource An instance of data source
-    * @param preparator An instance of preparator
-    * @param algorithmList A list of algorithm instances
-    * @param params An instance of [[WorkflowParams]] that controls the training
-    *               process.
-    * @tparam TD Training data class
-    * @tparam PD Prepared data class
-    * @tparam Q Input query class
-    * @return A list of trained models
-    */
-  def train[TD, PD, Q](
-      sc: SparkContext,
-      dataSource: BaseDataSource[TD, _, Q, _],
-      preparator: BasePreparator[TD, PD],
-      algorithmList: Seq[BaseAlgorithm[PD, _, Q, _]],
-      params: WorkflowParams
-    ): Seq[Any] = {
-    logger.info("EngineWorkflow.train")
-    logger.info(s"DataSource: $dataSource")
-    logger.info(s"Preparator: $preparator")
-    logger.info(s"AlgorithmList: $algorithmList")
-
-    if (params.skipSanityCheck) {
-      logger.info("Data sanity check is off.")
-    } else {
-      logger.info("Data sanity check is on.")
-    }
-
-    val td = try {
-      dataSource.readTrainingBase(sc)
-    } catch {
-      case e: StorageClientException =>
-        logger.error(s"Error occured reading from data source. (Reason: " +
-          e.getMessage + ") Please see the log for debugging details.", e)
-        sys.exit(1)
-    }
-
-    if (!params.skipSanityCheck) {
-      td match {
-        case sanityCheckable: SanityCheck => {
-          logger.info(s"${td.getClass.getName} supports data sanity" +
-            " check. Performing check.")
-          sanityCheckable.sanityCheck()
-        }
-        case _ => {
-          logger.info(s"${td.getClass.getName} does not support" +
-            " data sanity check. Skipping check.")
-        }
-      }
-    }
-
-    if (params.stopAfterRead) {
-      logger.info("Stopping here because --stop-after-read is set.")
-      throw StopAfterReadInterruption()
-    }
-
-    val pd = preparator.prepareBase(sc, td)
-
-    if (!params.skipSanityCheck) {
-      pd match {
-        case sanityCheckable: SanityCheck => {
-          logger.info(s"${pd.getClass.getName} supports data sanity" +
-            " check. Performing check.")
-          sanityCheckable.sanityCheck()
-        }
-        case _ => {
-          logger.info(s"${pd.getClass.getName} does not support" +
-            " data sanity check. Skipping check.")
-        }
-      }
-    }
-
-    if (params.stopAfterPrepare) {
-      logger.info("Stopping here because --stop-after-prepare is set.")
-      throw StopAfterPrepareInterruption()
-    }
-
-    val models: Seq[Any] = algorithmList.map(_.trainBase(sc, pd))
-
-    if (!params.skipSanityCheck) {
-      models.foreach { model => {
-        model match {
-          case sanityCheckable: SanityCheck => {
-            logger.info(s"${model.getClass.getName} supports data sanity" +
-              " check. Performing check.")
-            sanityCheckable.sanityCheck()
-          }
-          case _ => {
-            logger.info(s"${model.getClass.getName} does not support" +
-              " data sanity check. Skipping check.")
-          }
-        }
-      }}
-    }
-
-    logger.info("EngineWorkflow.train completed")
-    models
-  }
-
-  /** Provides concrete implementation of evaluation for [[Engine]].
-    *
-    * @param sc An instance of SparkContext
-    * @param dataSource An instance of data source
-    * @param preparator An instance of preparator
-    * @param algorithmList A list of algorithm instances
-    * @param serving An instance of serving
-    * @tparam TD Training data class
-    * @tparam PD Prepared data class
-    * @tparam Q Input query class
-    * @tparam P Predicted result class
-    * @tparam A Actual result class
-    * @tparam EI Evaluation information class
-    * @return A list of tuples, each containing evaluation information and an
-    *         RDD of (query, predicted result, actual result) tuples.
-    */
-  def eval[TD, PD, Q, P, A, EI](
-      sc: SparkContext,
-      dataSource: BaseDataSource[TD, EI, Q, A],
-      preparator: BasePreparator[TD, PD],
-      algorithmList: Seq[BaseAlgorithm[PD, _, Q, P]],
-      serving: BaseServing[Q, P]): Seq[(EI, RDD[(Q, P, A)])] = {
-    logger.info(s"DataSource: $dataSource")
-    logger.info(s"Preparator: $preparator")
-    logger.info(s"AlgorithmList: $algorithmList")
-    logger.info(s"Serving: $serving")
-
-    val algoMap: Map[AX, BaseAlgorithm[PD, _, Q, P]] = algorithmList
-      .zipWithIndex
-      .map(_.swap)
-      .toMap
-    val algoCount = algoMap.size
-
-    val evalTupleMap: Map[EX, (TD, EI, RDD[(Q, A)])] = dataSource
-      .readEvalBase(sc)
-      .zipWithIndex
-      .map(_.swap)
-      .toMap
-
-    val evalCount = evalTupleMap.size
-
-    val evalTrainMap: Map[EX, TD] = evalTupleMap.mapValues(_._1)
-    val evalInfoMap: Map[EX, EI] = evalTupleMap.mapValues(_._2)
-    val evalQAsMap: Map[EX, RDD[(QX, (Q, A))]] = evalTupleMap
-      .mapValues(_._3)
-      .mapValues{ _.zipWithUniqueId().map(_.swap) }
-
-    val preparedMap: Map[EX, PD] = evalTrainMap.mapValues { td => {
-      preparator.prepareBase(sc, td)
-    }}
-
-    val algoModelsMap: Map[EX, Map[AX, Any]] = preparedMap.mapValues { pd => {
-      algoMap.mapValues(_.trainBase(sc,pd))
-    }}
-
-    val suppQAsMap: Map[EX, RDD[(QX, (Q, A))]] = evalQAsMap.mapValues { qas =>
-      qas.map { case (qx, (q, a)) => (qx, (serving.supplementBase(q), a)) }
-    }
-
-    val algoPredictsMap: Map[EX, RDD[(QX, Seq[P])]] = (0 until evalCount)
-    .map { ex => {
-      val modelMap: Map[AX, Any] = algoModelsMap(ex)
-
-      val qs: RDD[(QX, Q)] = suppQAsMap(ex).mapValues(_._1)
-
-      val algoPredicts: Seq[RDD[(QX, (AX, P))]] = (0 until algoCount)
-      .map { ax => {
-        val algo = algoMap(ax)
-        val model = modelMap(ax)
-        val rawPredicts: RDD[(QX, P)] = algo.batchPredictBase(sc, model, qs)
-        val predicts: RDD[(QX, (AX, P))] = rawPredicts.map { case (qx, p) => {
-          (qx, (ax, p))
-        }}
-        predicts
-      }}
-
-      val unionAlgoPredicts: RDD[(QX, Seq[P])] = sc.union(algoPredicts)
-      .groupByKey()
-      .mapValues { ps => {
-        assert (ps.size == algoCount, "Must have same length as algoCount")
-        // TODO. Check size == algoCount
-        ps.toSeq.sortBy(_._1).map(_._2)
-      }}
-
-      (ex, unionAlgoPredicts)
-    }}
-    .toMap
-
-    val servingQPAMap: Map[EX, RDD[(Q, P, A)]] = algoPredictsMap
-    .map { case (ex, psMap) => {
-      // The query passed to serving.serve is the original one, not
-      // supplemented.
-      val qasMap: RDD[(QX, (Q, A))] = evalQAsMap(ex)
-      val qpsaMap: RDD[(QX, Q, Seq[P], A)] = psMap.join(qasMap)
-      .map { case (qx, t) => (qx, t._2._1, t._1, t._2._2) }
-
-      val qpaMap: RDD[(Q, P, A)] = qpsaMap.map {
-        case (qx, q, ps, a) => (q, serving.serveBase(q, ps), a)
-      }
-      (ex, qpaMap)
-    }}
-
-    (0 until evalCount).map { ex => {
-      (evalInfoMap(ex), servingQPAMap(ex))
-    }}
-    .toSeq
-  }
-}
-
-/** Mix in this trait for queries that contain prId (PredictedResultId).
-  * This is useful when your engine expects queries to also be associated with
-  * prId keys when the feedback loop is enabled.
-  *
-  * @group Helper
-  */
-@deprecated("To be removed in future releases.", "0.9.2")
-trait WithPrId {
-  val prId: String = ""
-}

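For reference, a minimal sketch of building the WorkflowParams consumed by
train() above, exercising the flags it checks. This assumes WorkflowParams is
io.prediction.controller.WorkflowParams and that its run label field is named
batch; treat it as an illustration rather than canonical usage.

    import io.prediction.controller.WorkflowParams

    // Skip the sanity checks and abort right after the data source read,
    // mirroring the skipSanityCheck and stopAfterRead branches above.
    val debugParams = WorkflowParams(
      batch = "debug-run",    // assumed field: a label for this run
      skipSanityCheck = true,
      stopAfterRead = true)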
http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/core/src/main/scala/io/prediction/controller/EngineFactory.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/io/prediction/controller/EngineFactory.scala b/core/src/main/scala/io/prediction/controller/EngineFactory.scala
deleted file mode 100644
index f6988c7..0000000
--- a/core/src/main/scala/io/prediction/controller/EngineFactory.scala
+++ /dev/null
@@ -1,41 +0,0 @@
-/** Copyright 2015 TappingStone, Inc.
-  *
-  * Licensed under the Apache License, Version 2.0 (the "License");
-  * you may not use this file except in compliance with the License.
-  * You may obtain a copy of the License at
-  *
-  *     http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-package io.prediction.controller
-
-import io.prediction.core.BaseEngine
-
-import scala.language.implicitConversions
-
-/** If you intend to let PredictionIO create the workflow and deploy serving
-  * automatically, you will need to implement an object that extends this
-  * class and returns an [[Engine]].
-  *
-  * @group Engine
-  */
-abstract class EngineFactory {
-  /** Creates an instance of an [[Engine]]. */
-  def apply(): BaseEngine[_, _, _, _]
-
-  /** Override this method to programmatically return engine parameters. */
-  def engineParams(key: String): EngineParams = EngineParams()
-}
-
-/** DEPRECATED. Use [[EngineFactory]] instead.
-  *
-  * @group Engine
-  */
-@deprecated("Use EngineFactory instead.", "0.9.2")
-trait IEngineFactory extends EngineFactory

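To tie Engine and EngineFactory together, below is a minimal self-contained
sketch of a factory that wires trivial components through Engine.apply and the
implicit class-to-map conversions documented above. Every My* name, the type
choices (String models and training data), and the "default" algorithm label
are invented for illustration only.

    import io.prediction.controller._
    import org.apache.spark.SparkContext

    case class Query(user: String)
    case class PredictedResult(score: Double)

    // Placeholder components; a real engine would read from the event store
    // and train an actual model.
    class MyDataSource
      extends PDataSource[String, EmptyEvaluationInfo, Query, EmptyActualResult] {
      def readTraining(sc: SparkContext): String = "training-data"
    }

    class MyPreparator extends PPreparator[String, String] {
      def prepare(sc: SparkContext, td: String): String = td
    }

    class MyAlgorithm extends P2LAlgorithm[String, String, Query, PredictedResult] {
      def train(sc: SparkContext, pd: String): String = pd
      def predict(model: String, query: Query): PredictedResult =
        PredictedResult(1.0)
    }

    class MyServing extends LServing[Query, PredictedResult] {
      def serve(query: Query, predictions: Seq[PredictedResult]): PredictedResult =
        predictions.head
    }

    object MyEngineFactory extends EngineFactory {
      // Single classes are implicitly lifted into DataSourceMap, PreparatorMap
      // and ServingMap by the companion objects shown earlier.
      def apply() = Engine(
        classOf[MyDataSource],
        classOf[MyPreparator],
        Map("default" -> classOf[MyAlgorithm]),
        classOf[MyServing])
    }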

[13/34] incubator-predictionio git commit: rename all except examples

Posted by do...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/data/src/main/scala/io/prediction/data/store/LEventStore.scala
----------------------------------------------------------------------
diff --git a/data/src/main/scala/io/prediction/data/store/LEventStore.scala b/data/src/main/scala/io/prediction/data/store/LEventStore.scala
deleted file mode 100644
index be543eb..0000000
--- a/data/src/main/scala/io/prediction/data/store/LEventStore.scala
+++ /dev/null
@@ -1,142 +0,0 @@
-/** Copyright 2015 TappingStone, Inc.
-  *
-  * Licensed under the Apache License, Version 2.0 (the "License");
-  * you may not use this file except in compliance with the License.
-  * You may obtain a copy of the License at
-  *
-  *     http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-package io.prediction.data.store
-
-import io.prediction.data.storage.Storage
-import io.prediction.data.storage.Event
-
-import org.joda.time.DateTime
-
-import scala.concurrent.Await
-import scala.concurrent.ExecutionContext.Implicits.global
-import scala.concurrent.duration.Duration
-
-/** This object provides a set of operations to access the Event Store
-  * without going through Spark's parallelization
-  */
-object LEventStore {
-
-  private val defaultTimeout = Duration(60, "seconds")
-
-  @transient lazy private val eventsDb = Storage.getLEvents()
-
-  /** Reads events of the specified entity. You may use this in an Algorithm's
-    * predict() or in Serving logic for fast event store access.
-    *
-    * @param appName return events of this app
-    * @param entityType return events of this entityType
-    * @param entityId return events of this entityId
-    * @param channelName return events of this channel (default channel if it's None)
-    * @param eventNames return events with any of these event names.
-    * @param targetEntityType return events of this targetEntityType:
-    *   - None means no restriction on targetEntityType
-    *   - Some(None) means no targetEntityType for this event
-    *   - Some(Some(x)) means targetEntityType should match x.
-    * @param targetEntityId return events of this targetEntityId
-    *   - None means no restriction on targetEntityId
-    *   - Some(None) means no targetEntityId for this event
-    *   - Some(Some(x)) means targetEntityId should match x.
-    * @param startTime return events with eventTime >= startTime
-    * @param untilTime return events with eventTime < untilTime
-    * @param limit Limit number of events. Get all events if None or Some(-1)
-    * @param latest Return latest event first (default true)
-    * @return Iterator[Event]
-    */
-  def findByEntity(
-    appName: String,
-    entityType: String,
-    entityId: String,
-    channelName: Option[String] = None,
-    eventNames: Option[Seq[String]] = None,
-    targetEntityType: Option[Option[String]] = None,
-    targetEntityId: Option[Option[String]] = None,
-    startTime: Option[DateTime] = None,
-    untilTime: Option[DateTime] = None,
-    limit: Option[Int] = None,
-    latest: Boolean = true,
-    timeout: Duration = defaultTimeout): Iterator[Event] = {
-
-    val (appId, channelId) = Common.appNameToId(appName, channelName)
-
-    Await.result(eventsDb.futureFind(
-      appId = appId,
-      channelId = channelId,
-      startTime = startTime,
-      untilTime = untilTime,
-      entityType = Some(entityType),
-      entityId = Some(entityId),
-      eventNames = eventNames,
-      targetEntityType = targetEntityType,
-      targetEntityId = targetEntityId,
-      limit = limit,
-      reversed = Some(latest)),
-      timeout)
-  }
-
-  /** Reads events generically. If entityType or entityId is not specified,
-    * the call results in a full table scan.
-    *
-    * @param appName return events of this app
-    * @param entityType return events of this entityType
-    *   - None means no restriction on entityType
-    *   - Some(x) means entityType should match x.
-    * @param entityId return events of this entityId
-    *   - None means no restriction on entityId
-    *   - Some(x) means entityId should match x.
-    * @param channelName return events of this channel (default channel if it's None)
-    * @param eventNames return events with any of these event names.
-    * @param targetEntityType return events of this targetEntityType:
-    *   - None means no restriction on targetEntityType
-    *   - Some(None) means no targetEntityType for this event
-    *   - Some(Some(x)) means targetEntityType should match x.
-    * @param targetEntityId return events of this targetEntityId
-    *   - None means no restriction on targetEntityId
-    *   - Some(None) means no targetEntityId for this event
-    *   - Some(Some(x)) means targetEntityId should match x.
-    * @param startTime return events with eventTime >= startTime
-    * @param untilTime return events with eventTime < untilTime
-    * @param limit Limit number of events. Get all events if None or Some(-1)
-    * @return Iterator[Event]
-    */
-  def find(
-    appName: String,
-    entityType: Option[String] = None,
-    entityId: Option[String] = None,
-    channelName: Option[String] = None,
-    eventNames: Option[Seq[String]] = None,
-    targetEntityType: Option[Option[String]] = None,
-    targetEntityId: Option[Option[String]] = None,
-    startTime: Option[DateTime] = None,
-    untilTime: Option[DateTime] = None,
-    limit: Option[Int] = None,
-    timeout: Duration = defaultTimeout): Iterator[Event] = {
-
-    val (appId, channelId) = Common.appNameToId(appName, channelName)
-
-    Await.result(eventsDb.futureFind(
-      appId = appId,
-      channelId = channelId,
-      startTime = startTime,
-      untilTime = untilTime,
-      entityType = entityType,
-      entityId = entityId,
-      eventNames = eventNames,
-      targetEntityType = targetEntityType,
-      targetEntityId = targetEntityId,
-      limit = limit), timeout)
-  }
-
-}

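A sketch of the intended call site, for example inside an algorithm's
predict() or a custom serving class; the app name, entity values, event names
and timeout below are placeholders:

    import io.prediction.data.store.LEventStore
    import scala.concurrent.duration._

    // Fetch the 10 most recent "view" events of one user, with a timeout
    // tighter than the 60-second default for serving-time use.
    val recentViews = LEventStore.findByEntity(
      appName = "MyApp",
      entityType = "user",
      entityId = "u123",
      eventNames = Some(Seq("view")),
      targetEntityType = Some(Some("item")),
      limit = Some(10),
      latest = true,
      timeout = 200.millis)

    // Collect the distinct items the user viewed
    val viewedItems = recentViews.flatMap(_.targetEntityId).toSet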
http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/data/src/main/scala/io/prediction/data/store/PEventStore.scala
----------------------------------------------------------------------
diff --git a/data/src/main/scala/io/prediction/data/store/PEventStore.scala b/data/src/main/scala/io/prediction/data/store/PEventStore.scala
deleted file mode 100644
index cd20da9..0000000
--- a/data/src/main/scala/io/prediction/data/store/PEventStore.scala
+++ /dev/null
@@ -1,116 +0,0 @@
-/** Copyright 2015 TappingStone, Inc.
-  *
-  * Licensed under the Apache License, Version 2.0 (the "License");
-  * you may not use this file except in compliance with the License.
-  * You may obtain a copy of the License at
-  *
-  *     http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-package io.prediction.data.store
-
-import io.prediction.data.storage.Storage
-import io.prediction.data.storage.Event
-import io.prediction.data.storage.PropertyMap
-
-import org.joda.time.DateTime
-
-import org.apache.spark.SparkContext
-import org.apache.spark.rdd.RDD
-
-/** This object provides a set of operations to access the Event Store
-  * with Spark's parallelization
-  */
-object PEventStore {
-
-  @transient lazy private val eventsDb = Storage.getPEvents()
-
-  /** Read events from Event Store
-    *
-    * @param appName return events of this app
-    * @param channelName return events of this channel (default channel if it's None)
-    * @param startTime return events with eventTime >= startTime
-    * @param untilTime return events with eventTime < untilTime
-    * @param entityType return events of this entityType
-    * @param entityId return events of this entityId
-    * @param eventNames return events with any of these event names.
-    * @param targetEntityType return events of this targetEntityType:
-    *   - None means no restriction on targetEntityType
-    *   - Some(None) means no targetEntityType for this event
-    *   - Some(Some(x)) means targetEntityType should match x.
-    * @param targetEntityId return events of this targetEntityId
-    *   - None means no restriction on targetEntityId
-    *   - Some(None) means no targetEntityId for this event
-    *   - Some(Some(x)) means targetEntityId should match x.
-    * @param sc Spark context
-    * @return RDD[Event]
-    */
-  def find(
-    appName: String,
-    channelName: Option[String] = None,
-    startTime: Option[DateTime] = None,
-    untilTime: Option[DateTime] = None,
-    entityType: Option[String] = None,
-    entityId: Option[String] = None,
-    eventNames: Option[Seq[String]] = None,
-    targetEntityType: Option[Option[String]] = None,
-    targetEntityId: Option[Option[String]] = None
-  )(sc: SparkContext): RDD[Event] = {
-
-    val (appId, channelId) = Common.appNameToId(appName, channelName)
-
-    eventsDb.find(
-      appId = appId,
-      channelId = channelId,
-      startTime = startTime,
-      untilTime = untilTime,
-      entityType = entityType,
-      entityId = entityId,
-      eventNames = eventNames,
-      targetEntityType = targetEntityType,
-      targetEntityId = targetEntityId
-    )(sc)
-
-  }
-
-  /** Aggregate properties of entities based on these special events:
-    * \$set, \$unset, \$delete events.
-    *
-    * @param appName use events of this app
-    * @param entityType aggregate properties of the entities of this entityType
-    * @param channelName use events of this channel (default channel if it's None)
-    * @param startTime use events with eventTime >= startTime
-    * @param untilTime use events with eventTime < untilTime
-    * @param required only keep entities with these required properties defined
-    * @param sc Spark context
-    * @return RDD[(String, PropertyMap)] RDD of entityId and PropertyMap pairs
-    */
-  def aggregateProperties(
-    appName: String,
-    entityType: String,
-    channelName: Option[String] = None,
-    startTime: Option[DateTime] = None,
-    untilTime: Option[DateTime] = None,
-    required: Option[Seq[String]] = None)
-    (sc: SparkContext): RDD[(String, PropertyMap)] = {
-
-      val (appId, channelId) = Common.appNameToId(appName, channelName)
-
-      eventsDb.aggregateProperties(
-        appId = appId,
-        entityType = entityType,
-        channelId = channelId,
-        startTime = startTime,
-        untilTime = untilTime,
-        required = required
-      )(sc)
-
-    }
-
-}

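A sketch of how a data source might use both calls above during training; the
app, entity and event names are placeholders:

    import io.prediction.data.storage.{Event, PropertyMap}
    import io.prediction.data.store.PEventStore
    import org.apache.spark.SparkContext
    import org.apache.spark.rdd.RDD

    object TrainingReader {
      def read(sc: SparkContext): (RDD[(String, PropertyMap)], RDD[Event]) = {
        // Entities of type "user" that have a "gender" property $set on them
        val users = PEventStore.aggregateProperties(
          appName = "MyApp",
          entityType = "user",
          required = Some(Seq("gender")))(sc)

        // All "view" events of users on items
        val views = PEventStore.find(
          appName = "MyApp",
          entityType = Some("user"),
          eventNames = Some(Seq("view")),
          targetEntityType = Some(Some("item")))(sc)

        (users, views)
      }
    }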
http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/data/src/main/scala/io/prediction/data/store/java/LJavaEventStore.scala
----------------------------------------------------------------------
diff --git a/data/src/main/scala/io/prediction/data/store/java/LJavaEventStore.scala b/data/src/main/scala/io/prediction/data/store/java/LJavaEventStore.scala
deleted file mode 100644
index d619f65..0000000
--- a/data/src/main/scala/io/prediction/data/store/java/LJavaEventStore.scala
+++ /dev/null
@@ -1,142 +0,0 @@
-/** Copyright 2015 TappingStone, Inc.
-  *
-  * Licensed under the Apache License, Version 2.0 (the "License");
-  * you may not use this file except in compliance with the License.
-  * You may obtain a copy of the License at
-  *
-  *     http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-package io.prediction.data.store.java
-
-import io.prediction.data.storage.Event
-import io.prediction.data.store.LEventStore
-import org.joda.time.DateTime
-
-import scala.collection.JavaConversions
-import scala.concurrent.duration.Duration
-
-/** This Java-friendly object provides a set of operations to access the
-  * Event Store without going through Spark's parallelization
-  */
-object LJavaEventStore {
-
-  /** Reads events of the specified entity. You may use this in an Algorithm's
-    * predict() or in Serving logic for fast event store access.
-    *
-    * @param appName return events of this app
-    * @param entityType return events of this entityType
-    * @param entityId return events of this entityId
-    * @param channelName return events of this channel (default channel if it's None)
-    * @param eventNames return events with any of these event names.
-    * @param targetEntityType return events of this targetEntityType:
-    *   - None means no restriction on targetEntityType
-    *   - Some(None) means no targetEntityType for this event
-    *   - Some(Some(x)) means targetEntityType should match x.
-    * @param targetEntityId return events of this targetEntityId
-    *   - None means no restriction on targetEntityId
-    *   - Some(None) means no targetEntityId for this event
-    *   - Some(Some(x)) means targetEntityId should match x.
-    * @param startTime return events with eventTime >= startTime
-    * @param untilTime return events with eventTime < untilTime
-    * @param limit Limit number of events. Get all events if None or Some(-1)
-    * @param latest Return latest event first
-    * @return java.util.List[Event]
-    */
-  def findByEntity(
-    appName: String,
-    entityType: String,
-    entityId: String,
-    channelName: Option[String],
-    eventNames: Option[java.util.List[String]],
-    targetEntityType: Option[Option[String]],
-    targetEntityId: Option[Option[String]],
-    startTime: Option[DateTime],
-    untilTime: Option[DateTime],
-    limit: Option[Integer],
-    latest: Boolean,
-    timeout: Duration): java.util.List[Event] = {
-
-    val eventNamesSeq = eventNames.map(JavaConversions.asScalaBuffer(_).toSeq)
-    val limitInt = limit.map(_.intValue())
-
-    JavaConversions.seqAsJavaList(
-      LEventStore.findByEntity(
-        appName,
-        entityType,
-        entityId,
-        channelName,
-        eventNamesSeq,
-        targetEntityType,
-        targetEntityId,
-        startTime,
-        untilTime,
-        limitInt,
-        latest,
-        timeout
-      ).toSeq)
-  }
-
-  /** Reads events generically. If entityType or entityId is not specified,
-    * the call results in a full table scan.
-    *
-    * @param appName return events of this app
-    * @param entityType return events of this entityType
-    *   - None means no restriction on entityType
-    *   - Some(x) means entityType should match x.
-    * @param entityId return events of this entityId
-    *   - None means no restriction on entityId
-    *   - Some(x) means entityId should match x.
-    * @param channelName return events of this channel (default channel if it's None)
-    * @param eventNames return events with any of these event names.
-    * @param targetEntityType return events of this targetEntityType:
-    *   - None means no restriction on targetEntityType
-    *   - Some(None) means no targetEntityType for this event
-    *   - Some(Some(x)) means targetEntityType should match x.
-    * @param targetEntityId return events of this targetEntityId
-    *   - None means no restriction on targetEntityId
-    *   - Some(None) means no targetEntityId for this event
-    *   - Some(Some(x)) means targetEntityId should match x.
-    * @param startTime return events with eventTime >= startTime
-    * @param untilTime return events with eventTime < untilTime
-    * @param limit Limit number of events. Get all events if None or Some(-1)
-    * @return java.util.List[Event]
-    */
-  def find(
-    appName: String,
-    entityType: Option[String],
-    entityId: Option[String],
-    channelName: Option[String],
-    eventNames: Option[java.util.List[String]],
-    targetEntityType: Option[Option[String]],
-    targetEntityId: Option[Option[String]],
-    startTime: Option[DateTime],
-    untilTime: Option[DateTime],
-    limit: Option[Integer],
-    timeout: Duration): java.util.List[Event] = {
-
-    val eventNamesSeq = eventNames.map(JavaConversions.asScalaBuffer(_).toSeq)
-    val limitInt = limit.map(_.intValue())
-
-    JavaConversions.seqAsJavaList(
-      LEventStore.find(
-        appName,
-        entityType,
-        entityId,
-        channelName,
-        eventNamesSeq,
-        targetEntityType,
-        targetEntityId,
-        startTime,
-        untilTime,
-        limitInt,
-        timeout
-      ).toSeq)
-  }
-}

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/data/src/main/scala/io/prediction/data/store/java/OptionHelper.scala
----------------------------------------------------------------------
diff --git a/data/src/main/scala/io/prediction/data/store/java/OptionHelper.scala b/data/src/main/scala/io/prediction/data/store/java/OptionHelper.scala
deleted file mode 100644
index dee608d..0000000
--- a/data/src/main/scala/io/prediction/data/store/java/OptionHelper.scala
+++ /dev/null
@@ -1,29 +0,0 @@
-/** Copyright 2015 TappingStone, Inc.
-  *
-  * Licensed under the Apache License, Version 2.0 (the "License");
-  * you may not use this file except in compliance with the License.
-  * You may obtain a copy of the License at
-  *
-  *     http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-package io.prediction.data.store.java
-
-/** Used by Java-based engines to construct Some and None values */
-object OptionHelper {
-  /** Mimics a None from a Java-based engine */
-  def none[T]: Option[T] = {
-    Option(null.asInstanceOf[T])
-  }
-
-  /** Mimics a Some from a Java-based engine */
-  def some[T](value: T): Option[T] = {
-    Some(value)
-  }
-}

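Although these wrappers target Java engines, the sketch below (in Scala, for
consistency with the rest of this post) mirrors the call shape a Java engine
would use: every optional argument must be supplied via OptionHelper because
the Java-friendly signatures carry no default values. App and entity names
are placeholders.

    import io.prediction.data.store.java.{LJavaEventStore, OptionHelper}
    import org.joda.time.DateTime
    import scala.concurrent.duration.Duration

    val events = LJavaEventStore.findByEntity(
      "MyApp",                                    // placeholder app name
      "user",
      "u123",
      OptionHelper.none[String],                  // channelName
      OptionHelper.some(java.util.Arrays.asList("view")),
      OptionHelper.none[Option[String]],          // targetEntityType
      OptionHelper.none[Option[String]],          // targetEntityId
      OptionHelper.none[DateTime],                // startTime
      OptionHelper.none[DateTime],                // untilTime
      OptionHelper.some[Integer](10),             // limit
      true,                                       // latest
      Duration(5, "seconds"))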
http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/data/src/main/scala/io/prediction/data/store/java/PJavaEventStore.scala
----------------------------------------------------------------------
diff --git a/data/src/main/scala/io/prediction/data/store/java/PJavaEventStore.scala b/data/src/main/scala/io/prediction/data/store/java/PJavaEventStore.scala
deleted file mode 100644
index c0657d2..0000000
--- a/data/src/main/scala/io/prediction/data/store/java/PJavaEventStore.scala
+++ /dev/null
@@ -1,109 +0,0 @@
-/** Copyright 2015 TappingStone, Inc.
-  *
-  * Licensed under the Apache License, Version 2.0 (the "License");
-  * you may not use this file except in compliance with the License.
-  * You may obtain a copy of the License at
-  *
-  *     http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-package io.prediction.data.store.java
-
-import io.prediction.data.storage.Event
-import io.prediction.data.storage.PropertyMap
-import io.prediction.data.store.PEventStore
-import org.apache.spark.SparkContext
-import org.apache.spark.api.java.JavaRDD
-import org.joda.time.DateTime
-
-import scala.collection.JavaConversions
-
-/** This Java-friendly object provides a set of operations to access the
-  * Event Store with Spark's parallelization
-  */
-object PJavaEventStore {
-
-  /** Read events from Event Store
-    *
-    * @param appName return events of this app
-    * @param channelName return events of this channel (default channel if it's None)
-    * @param startTime return events with eventTime >= startTime
-    * @param untilTime return events with eventTime < untilTime
-    * @param entityType return events of this entityType
-    * @param entityId return events of this entityId
-    * @param eventNames return events with any of these event names.
-    * @param targetEntityType return events of this targetEntityType:
-    *   - None means no restriction on targetEntityType
-    *   - Some(None) means no targetEntityType for this event
-    *   - Some(Some(x)) means targetEntityType should match x.
-    * @param targetEntityId return events of this targetEntityId
-    *   - None means no restriction on targetEntityId
-    *   - Some(None) means no targetEntityId for this event
-    *   - Some(Some(x)) means targetEntityId should match x.
-    * @param sc Spark context
-    * @return JavaRDD[Event]
-    */
-  def find(
-    appName: String,
-    channelName: Option[String],
-    startTime: Option[DateTime],
-    untilTime: Option[DateTime],
-    entityType: Option[String],
-    entityId: Option[String],
-    eventNames: Option[java.util.List[String]],
-    targetEntityType: Option[Option[String]],
-    targetEntityId: Option[Option[String]],
-    sc: SparkContext): JavaRDD[Event] = {
-
-    val eventNamesSeq = eventNames.map(JavaConversions.asScalaBuffer(_).toSeq)
-
-    PEventStore.find(
-      appName,
-      channelName,
-      startTime,
-      untilTime,
-      entityType,
-      entityId,
-      eventNamesSeq,
-      targetEntityType,
-      targetEntityId
-    )(sc)
-  }
-
-  /** Aggregate properties of entities based on these special events:
-    * \$set, \$unset, \$delete events.
-    *
-    * @param appName use events of this app
-    * @param entityType aggregate properties of the entities of this entityType
-    * @param channelName use events of this channel (default channel if it's None)
-    * @param startTime use events with eventTime >= startTime
-    * @param untilTime use events with eventTime < untilTime
-    * @param required only keep entities with these required properties defined
-    * @param sc Spark context
-    * @return JavaRDD[(String, PropertyMap)] JavaRDD of entityId and PropertyMap pairs
-    */
-  def aggregateProperties(
-    appName: String,
-    entityType: String,
-    channelName: Option[String],
-    startTime: Option[DateTime],
-    untilTime: Option[DateTime],
-    required: Option[java.util.List[String]],
-    sc: SparkContext): JavaRDD[(String, PropertyMap)] = {
-
-    val requiredSeq = required.map(JavaConversions.asScalaBuffer(_).toSeq)
-
-    PEventStore.aggregateProperties(
-      appName,
-      entityType,
-      channelName,
-      startTime,
-      untilTime,
-      requiredSeq
-    )(sc)
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/data/src/main/scala/io/prediction/data/store/package.scala
----------------------------------------------------------------------
diff --git a/data/src/main/scala/io/prediction/data/store/package.scala b/data/src/main/scala/io/prediction/data/store/package.scala
deleted file mode 100644
index 4856416..0000000
--- a/data/src/main/scala/io/prediction/data/store/package.scala
+++ /dev/null
@@ -1,21 +0,0 @@
-/** Copyright 2015 TappingStone, Inc.
-  *
-  * Licensed under the Apache License, Version 2.0 (the "License");
-  * you may not use this file except in compliance with the License.
-  * You may obtain a copy of the License at
-  *
-  *     http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-package io.prediction.data
-
-/** Provides high-level interfaces to the Event Store from within a prediction
-  * engine.
-  */
-package object store {}

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/data/src/main/scala/io/prediction/data/view/DataView.scala
----------------------------------------------------------------------
diff --git a/data/src/main/scala/io/prediction/data/view/DataView.scala b/data/src/main/scala/io/prediction/data/view/DataView.scala
deleted file mode 100644
index 52a67fd..0000000
--- a/data/src/main/scala/io/prediction/data/view/DataView.scala
+++ /dev/null
@@ -1,110 +0,0 @@
-/** Copyright 2015 TappingStone, Inc.
-  *
-  * Licensed under the Apache License, Version 2.0 (the "License");
-  * you may not use this file except in compliance with the License.
-  * You may obtain a copy of the License at
-  *
-  *     http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-package io.prediction.data.view
-
-import io.prediction.annotation.Experimental
-import io.prediction.data.storage.Event
-
-import grizzled.slf4j.Logger
-import io.prediction.data.store.PEventStore
-
-import org.apache.spark.rdd.RDD
-import org.apache.spark.sql.DataFrame
-import org.apache.spark.sql.SQLContext
-import org.joda.time.DateTime
-
-import scala.reflect.ClassTag
-import scala.reflect.runtime.universe._
-import scala.util.hashing.MurmurHash3
-
-/**
- * :: Experimental ::
- */
-@Experimental
-object DataView {
-  /**
-    * :: Experimental ::
-    *
-    * Create a DataFrame from events of a specified app.
-    *
-    * @param appName return events of this app
-    * @param channelName use events of this channel (default channel if it's None)
-    * @param startTime return events with eventTime >= startTime
-    * @param untilTime return events with eventTime < untilTime
-    * @param conversionFunction a function that turns raw Events into events of interest.
-    *                           If conversionFunction returns None, such events are dropped.
-    * @param name identify the DataFrame created
-    * @param version used to track changes to the conversionFunction, e.g. version = "20150413"
-    *                and update whenever the function is changed.
-    * @param sqlContext SQL context
-    * @tparam E the output type of the conversion function. The type needs to extend Product
-    *           (e.g. case class)
-    * @return a DataFrame of events
-    */
-  @Experimental
-  def create[E <: Product: TypeTag: ClassTag](
-    appName: String,
-    channelName: Option[String] = None,
-    startTime: Option[DateTime] = None,
-    untilTime: Option[DateTime] = None,
-    conversionFunction: Event => Option[E],
-    name: String = "",
-    version: String = "")(sqlContext: SQLContext): DataFrame = {
-
-    @transient lazy val logger = Logger[this.type]
-
-    val sc = sqlContext.sparkContext
-
-    val beginTime = startTime match {
-      case Some(t) => t
-      case None => new DateTime(0L)
-    }
-    val endTime = untilTime match {
-      case Some(t) => t
-      case None => DateTime.now() // freeze the current time for this run
-    }
-    // detect changes to the case class
-    val uid = java.io.ObjectStreamClass.lookup(implicitly[reflect.ClassTag[E]].runtimeClass)
-        .getSerialVersionUID
-    val hash = MurmurHash3.stringHash(s"$beginTime-$endTime-$version-$uid")
-    val baseDir = s"${sys.env("PIO_FS_BASEDIR")}/view"
-    val fileName = s"$baseDir/$name-$appName-$hash.parquet"
-    try {
-      sqlContext.parquetFile(fileName)
-    } catch {
-      case e: java.io.FileNotFoundException =>
-        // no cached copy; read from the event store and cache it as Parquet
-        logger.info("Cached copy not found, reading from DB.")
-        val result: RDD[E] = PEventStore.find(
-            appName = appName,
-            channelName = channelName,
-            startTime = startTime,
-            untilTime = Some(endTime))(sc)
-          .flatMap((e) => conversionFunction(e))
-        import sqlContext.implicits._ // needed for RDD.toDF()
-        val resultDF = result.toDF()
-
-        resultDF.saveAsParquetFile(fileName)
-        sqlContext.parquetFile(fileName)
-      case e: java.lang.RuntimeException =>
-        if (e.toString.contains("is not a Parquet file")) {
-          logger.error(s"$fileName does not contain a valid Parquet file. " +
-            "Please delete it and try again.")
-        }
-        throw e
-    }
-  }
-}

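A sketch of building a cached DataFrame of hypothetical "rate" events. Rating,
the app name, the event name and the "rating" property key are placeholders,
and PIO_FS_BASEDIR must be set because the result is cached as Parquet
under it:

    import io.prediction.data.storage.Event
    import io.prediction.data.view.DataView
    import org.apache.spark.sql.{DataFrame, SQLContext}

    // The conversion target must be a case class (a Product)
    case class Rating(user: String, item: String, rating: Double)

    object RatingView {
      def ratings(sqlContext: SQLContext): DataFrame = DataView.create[Rating](
        appName = "MyApp",
        conversionFunction = (e: Event) =>
          if (e.event == "rate") {
            Some(Rating(
              e.entityId,
              e.targetEntityId.getOrElse(""),
              e.properties.get[Double]("rating")))
          } else {
            None
          },
        name = "ratings",
        version = "1")(sqlContext)
    }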
http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/data/src/main/scala/io/prediction/data/view/LBatchView.scala
----------------------------------------------------------------------
diff --git a/data/src/main/scala/io/prediction/data/view/LBatchView.scala b/data/src/main/scala/io/prediction/data/view/LBatchView.scala
deleted file mode 100644
index f806056..0000000
--- a/data/src/main/scala/io/prediction/data/view/LBatchView.scala
+++ /dev/null
@@ -1,200 +0,0 @@
-/** Copyright 2015 TappingStone, Inc.
-  *
-  * Licensed under the Apache License, Version 2.0 (the "License");
-  * you may not use this file except in compliance with the License.
-  * You may obtain a copy of the License at
-  *
-  *     http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-package io.prediction.data.view
-
-import io.prediction.data.storage.Event
-import io.prediction.data.storage.EventValidation
-import io.prediction.data.storage.DataMap
-import io.prediction.data.storage.Storage
-
-import org.joda.time.DateTime
-import scala.language.implicitConversions
-
-import scala.concurrent.ExecutionContext.Implicits.global // TODO
-
-@deprecated("Use LEvents or LEventStore instead.", "0.9.2")
-object ViewPredicates {
-  def getStartTimePredicate(startTimeOpt: Option[DateTime])
-  : (Event => Boolean) = {
-    startTimeOpt.map(getStartTimePredicate).getOrElse(_ => true)
-  }
-
-  def getStartTimePredicate(startTime: DateTime): (Event => Boolean) = {
-    e => (!(e.eventTime.isBefore(startTime) || e.eventTime.isEqual(startTime)))
-  }
-
-  def getUntilTimePredicate(untilTimeOpt: Option[DateTime])
-  : (Event => Boolean) = {
-    untilTimeOpt.map(getUntilTimePredicate).getOrElse(_ => true)
-  }
-
-  def getUntilTimePredicate(untilTime: DateTime): (Event => Boolean) = {
-    _.eventTime.isBefore(untilTime)
-  }
-
-  def getEntityTypePredicate(entityTypeOpt: Option[String]): (Event => Boolean)
-  = {
-    entityTypeOpt.map(getEntityTypePredicate).getOrElse(_ => true)
-  }
-
-  def getEntityTypePredicate(entityType: String): (Event => Boolean) = {
-    (_.entityType == entityType)
-  }
-
-  def getEventPredicate(eventOpt: Option[String]): (Event => Boolean)
-  = {
-    eventOpt.map(getEventPredicate).getOrElse(_ => true)
-  }
-
-  def getEventPredicate(event: String): (Event => Boolean) = {
-    (_.event == event)
-  }
-}
-
-@deprecated("Use LEvents instead.", "0.9.2")
-object ViewAggregators {
-  def getDataMapAggregator(): ((Option[DataMap], Event) => Option[DataMap]) = {
-    (p, e) => {
-      e.event match {
-        case "$set" => {
-          if (p == None) {
-            Some(e.properties)
-          } else {
-            p.map(_ ++ e.properties)
-          }
-        }
-        case "$unset" => {
-          if (p == None) {
-            None
-          } else {
-            p.map(_ -- e.properties.keySet)
-          }
-        }
-        case "$delete" => None
-        case _ => p // do nothing for others
-      }
-    }
-  }
-}
-
-@deprecated("Use LEvents instead.", "0.9.2")
-object EventSeq {
-  // Importing scala.language.implicitConversions is needed to enable these
-  // implicit conversions. Import it only in the code where necessary, to
-  // avoid confusion.
-  implicit def eventSeqToList(es: EventSeq): List[Event] = es.events
-  implicit def listToEventSeq(l: List[Event]): EventSeq = new EventSeq(l)
-}
-
-
-@deprecated("Use LEvents instead.", "0.9.2")
-class EventSeq(val events: List[Event]) {
-  def filter(
-    eventOpt: Option[String] = None,
-    entityTypeOpt: Option[String] = None,
-    startTimeOpt: Option[DateTime] = None,
-    untilTimeOpt: Option[DateTime] = None): EventSeq = {
-
-    events
-    .filter(ViewPredicates.getEventPredicate(eventOpt))
-    .filter(ViewPredicates.getStartTimePredicate(startTimeOpt))
-    .filter(ViewPredicates.getUntilTimePredicate(untilTimeOpt))
-    .filter(ViewPredicates.getEntityTypePredicate(entityTypeOpt))
-  }
-
-  def filter(p: (Event => Boolean)): EventSeq = events.filter(p)
-
-  def aggregateByEntityOrdered[T](init: T, op: (T, Event) => T)
-  : Map[String, T] = {
-    events
-    .groupBy( _.entityId )
-    .mapValues( _.sortBy(_.eventTime.getMillis).foldLeft[T](init)(op))
-    .toMap
-  }
-
-
-}
-
-
-@deprecated("Use LEventStore instead.", "0.9.2")
-class LBatchView(
-  val appId: Int,
-  val startTime: Option[DateTime],
-  val untilTime: Option[DateTime]) {
-
-  @transient lazy val eventsDb = Storage.getLEvents()
-
-  @transient lazy val _events = eventsDb.find(
-    appId = appId,
-    startTime = startTime,
-    untilTime = untilTime).toList
-
-  @transient lazy val events: EventSeq = new EventSeq(_events)
-
-  /* Aggregate event data
-   *
-   * @param entityType only aggregate event with entityType
-   * @param startTimeOpt if specified, only aggregate event after (inclusive)
-   * startTimeOpt
-   * @param untilTimeOpt if specified, only aggregate event until (exclusive)
-   * endTimeOpt
-   */
-  def aggregateProperties(
-      entityType: String,
-      startTimeOpt: Option[DateTime] = None,
-      untilTimeOpt: Option[DateTime] = None
-      ): Map[String, DataMap] = {
-
-    events
-    .filter(entityTypeOpt = Some(entityType))
-    .filter(e => EventValidation.isSpecialEvents(e.event))
-    .aggregateByEntityOrdered(
-      init = None,
-      op = ViewAggregators.getDataMapAggregator())
-    .filter{ case (k, v) => (v != None) }
-    .mapValues(_.get)
-
-  }
-
-  /*
-  def aggregateByEntityOrdered[T](
-    predicate: Event => Boolean,
-    init: T,
-    op: (T, Event) => T): Map[String, T] = {
-
-    _events
-      .filter( predicate(_) )
-      .groupBy( _.entityId )
-      .mapValues( _.sortBy(_.eventTime.getMillis).foldLeft[T](init)(op))
-      .toMap
-
-  }
-  */
-
-  /*
-  def groupByEntityOrdered[T](
-    predicate: Event => Boolean,
-    map: Event => T): Map[String, Seq[T]] = {
-
-    _events
-      .filter( predicate(_) )
-      .groupBy( _.entityId )
-      .mapValues( _.sortBy(_.eventTime.getMillis).map(map(_)) )
-      .toMap
-  }
-  */
-}

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/data/src/main/scala/io/prediction/data/view/PBatchView.scala
----------------------------------------------------------------------
diff --git a/data/src/main/scala/io/prediction/data/view/PBatchView.scala b/data/src/main/scala/io/prediction/data/view/PBatchView.scala
deleted file mode 100644
index 5b0f878..0000000
--- a/data/src/main/scala/io/prediction/data/view/PBatchView.scala
+++ /dev/null
@@ -1,209 +0,0 @@
-/** Copyright 2015 TappingStone, Inc.
-  *
-  * Licensed under the Apache License, Version 2.0 (the "License");
-  * you may not use this file except in compliance with the License.
-  * You may obtain a copy of the License at
-  *
-  *     http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-package io.prediction.data.view
-
-import io.prediction.data.storage.hbase.HBPEvents
-import io.prediction.data.storage.Event
-import io.prediction.data.storage.EventValidation
-import io.prediction.data.storage.DataMap
-import io.prediction.data.storage.Storage
-
-import org.joda.time.DateTime
-
-import org.json4s.JValue
-
-import org.apache.spark.SparkContext
-import org.apache.spark.SparkContext._
-import org.apache.spark.rdd.RDD
-
-
-// each JValue data associated with the time it is set
-private[prediction] case class PropTime(val d: JValue, val t: Long) extends Serializable
-
-private[prediction] case class SetProp (
-  val fields: Map[String, PropTime],
-  // last set time. Note: fields could be empty with valid set time
-  val t: Long) extends Serializable {
-
-  def ++ (that: SetProp): SetProp = {
-    val commonKeys = fields.keySet.intersect(that.fields.keySet)
-
-    val common: Map[String, PropTime] = commonKeys.map { k =>
-      val thisData = this.fields(k)
-      val thatData = that.fields(k)
-      // only keep the value with latest time
-      val v = if (thisData.t > thatData.t) thisData else thatData
-      (k, v)
-    }.toMap
-
-    val combinedFields = common ++
-      (this.fields -- commonKeys) ++ (that.fields -- commonKeys)
-
-    // keep the latest set time
-    val combinedT = if (this.t > that.t) this.t else that.t
-
-    SetProp(
-      fields = combinedFields,
-      t = combinedT
-    )
-  }
-}
-
-private[prediction] case class UnsetProp (fields: Map[String, Long]) extends Serializable {
-  def ++ (that: UnsetProp): UnsetProp = {
-    val commonKeys = fields.keySet.intersect(that.fields.keySet)
-
-    val common: Map[String, Long] = commonKeys.map { k =>
-      val thisData = this.fields(k)
-      val thatData = that.fields(k)
-      // only keep the value with latest time
-      val v = if (thisData > thatData) thisData else thatData
-      (k, v)
-    }.toMap
-
-    val combinedFields = common ++
-      (this.fields -- commonKeys) ++ (that.fields -- commonKeys)
-
-    UnsetProp(
-      fields = combinedFields
-    )
-  }
-}
-
-private[prediction] case class DeleteEntity (t: Long) extends Serializable {
-  def ++ (that: DeleteEntity): DeleteEntity = {
-    if (this.t > that.t) this else that
-  }
-}
-
-private[prediction] case class EventOp (
-  val setProp: Option[SetProp] = None,
-  val unsetProp: Option[UnsetProp] = None,
-  val deleteEntity: Option[DeleteEntity] = None
-) extends Serializable {
-
-  def ++ (that: EventOp): EventOp = {
-    EventOp(
-      setProp = (setProp ++ that.setProp).reduceOption(_ ++ _),
-      unsetProp = (unsetProp ++ that.unsetProp).reduceOption(_ ++ _),
-      deleteEntity = (deleteEntity ++ that.deleteEntity).reduceOption(_ ++ _)
-    )
-  }
-
-  def toDataMap(): Option[DataMap] = {
-    setProp.flatMap { set =>
-
-      // guard with get() so an $unset on a never-$set field cannot throw
-      val unsetKeys: Set[String] = unsetProp.map( unset =>
-        unset.fields.filter { case (k, v) =>
-          set.fields.get(k).exists(v >= _.t)
-        }.keySet
-      ).getOrElse(Set())
-
-      val combinedFields = deleteEntity.map { delete =>
-        if (delete.t >= set.t) {
-          None
-        } else {
-          val deleteKeys: Set[String] = set.fields
-            .filter { case (k, PropTime(kv, t)) =>
-              (delete.t >= t)
-            }.keySet
-          Some(set.fields -- unsetKeys -- deleteKeys)
-        }
-      }.getOrElse{
-        Some(set.fields -- unsetKeys)
-      }
-
-      // Note: mapValues() doesn't return concrete Map and causes
-      // NotSerializableException issue. Use map(identity) to work around this.
-      // see https://issues.scala-lang.org/browse/SI-7005
-      combinedFields.map(f => DataMap(f.mapValues(_.d).map(identity)))
-    }
-  }
-
-}
-
-private[prediction] object EventOp {
-  def apply(e: Event): EventOp = {
-    val t = e.eventTime.getMillis
-    e.event match {
-      case "$set" => {
-        val fields = e.properties.fields.mapValues(jv =>
-          PropTime(jv, t)
-        ).map(identity)
-
-        EventOp(
-          setProp = Some(SetProp(fields = fields, t = t))
-        )
-      }
-      case "$unset" => {
-        val fields = e.properties.fields.mapValues(jv => t).map(identity)
-        EventOp(
-          unsetProp = Some(UnsetProp(fields = fields))
-        )
-      }
-      case "$delete" => {
-        EventOp(
-          deleteEntity = Some(DeleteEntity(t))
-        )
-      }
-      case _ => {
-        EventOp()
-      }
-    }
-  }
-}
-
-@deprecated("Use PEvents or PEventStore instead.", "0.9.2")
-class PBatchView(
-  val appId: Int,
-  val startTime: Option[DateTime],
-  val untilTime: Option[DateTime],
-  val sc: SparkContext) {
-
-  // NOTE: parallel Events DB interface
-  @transient lazy val eventsDb = Storage.getPEvents()
-
-  @transient lazy val _events: RDD[Event] =
-    eventsDb.getByAppIdAndTimeAndEntity(
-      appId = appId,
-      startTime = startTime,
-      untilTime = untilTime,
-      entityType = None,
-      entityId = None)(sc)
-
-  // TODO: change to use EventSeq?
-  @transient lazy val events: RDD[Event] = _events
-
-  def aggregateProperties(
-    entityType: String,
-    startTimeOpt: Option[DateTime] = None,
-    untilTimeOpt: Option[DateTime] = None
-  ): RDD[(String, DataMap)] = {
-
-    _events
-      .filter( e => ((e.entityType == entityType) &&
-        (EventValidation.isSpecialEvents(e.event))) )
-      .map( e => (e.entityId, EventOp(e) ))
-      .aggregateByKey[EventOp](EventOp())(
-        // within same partition
-        seqOp = { case (u, v) => u ++ v },
-        // across partition
-        combOp = { case (accu, u) => accu ++ u }
-      )
-      .mapValues(_.toDataMap)
-      .filter{ case (k, v) => v.isDefined }
-      .map{ case (k, v) => (k, v.get) }
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/data/src/main/scala/io/prediction/data/view/QuickTest.scala
----------------------------------------------------------------------
diff --git a/data/src/main/scala/io/prediction/data/view/QuickTest.scala b/data/src/main/scala/io/prediction/data/view/QuickTest.scala
deleted file mode 100644
index 68ade1d..0000000
--- a/data/src/main/scala/io/prediction/data/view/QuickTest.scala
+++ /dev/null
@@ -1,94 +0,0 @@
-/** Copyright 2015 TappingStone, Inc.
-  *
-  * Licensed under the Apache License, Version 2.0 (the "License");
-  * you may not use this file except in compliance with the License.
-  * You may obtain a copy of the License at
-  *
-  *     http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-package io.prediction.data.view
-
-import io.prediction.data.storage.Event
-import io.prediction.data.storage.LEvents
-import io.prediction.data.storage.EventValidation
-import io.prediction.data.storage.DataMap
-import io.prediction.data.storage.Storage
-
-import scala.concurrent.ExecutionContext.Implicits.global // TODO
-
-import grizzled.slf4j.Logger
-import org.joda.time.DateTime
-
-import scala.language.implicitConversions
-
-class TestHBLEvents() {
-  @transient lazy val eventsDb = Storage.getLEvents()
-
-  def run(): Unit = {
-    val r = eventsDb.find(
-      appId = 1,
-      startTime = None,
-      untilTime = None,
-      entityType = Some("pio_user"),
-      entityId = Some("3")).toList
-    println(r)
-  }
-}
-
-class TestSource(val appId: Int) {
-  @transient lazy val logger = Logger[this.type]
-  @transient lazy val batchView = new LBatchView(appId,
-    None, None)
-
-  def run(): Unit = {
-    println(batchView.events)
-  }
-}
-
-object QuickTest {
-
-  def main(args: Array[String]) {
-    val t = new TestHBLEvents()
-    t.run()
-
-    // val ts = new TestSource(args(0).toInt)
-    // ts.run()
-  }
-}
-
-object TestEventTime {
-  @transient lazy val batchView = new LBatchView(9, None, None)
-
-  // implicit def back2list(es: EventSeq) = es.events
-
-  def main(args: Array[String]) {
-    val e = batchView.events.filter(
-      eventOpt = Some("rate"),
-      startTimeOpt = Some(new DateTime(1998, 1, 1, 0, 0))
-      // untilTimeOpt = Some(new DateTime(1997, 1, 1, 0, 0))
-    )
-      // untilTimeOpt = Some(new DateTime(2000, 1, 1, 0, 0)))
-
-    e.foreach { println }
-    println()
-    println()
-    println()
-    val u = batchView.aggregateProperties("pio_item")
-    u.foreach { println }
-    println()
-    println()
-    println()
-
-    // val l: Seq[Event] = e
-    val l = e.map { _.entityId }
-    l.foreach { println }
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/data/src/main/scala/io/prediction/data/webhooks/ConnectorException.scala
----------------------------------------------------------------------
diff --git a/data/src/main/scala/io/prediction/data/webhooks/ConnectorException.scala b/data/src/main/scala/io/prediction/data/webhooks/ConnectorException.scala
deleted file mode 100644
index 0b64afb..0000000
--- a/data/src/main/scala/io/prediction/data/webhooks/ConnectorException.scala
+++ /dev/null
@@ -1,31 +0,0 @@
-/** Copyright 2015 TappingStone, Inc.
-  *
-  * Licensed under the Apache License, Version 2.0 (the "License");
-  * you may not use this file except in compliance with the License.
-  * You may obtain a copy of the License at
-  *
-  *     http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-package io.prediction.data.webhooks
-
-/** Webhooks Connector Exception
-  *
-  * @param message the detail message
-  * @param cause the cause
-  */
-private[prediction] class ConnectorException(message: String, cause: Throwable)
-  extends Exception(message, cause) {
-
-  /** Webhooks Connector Exception with cause being set to null
-    *
-    * @param message the detail message
-    */
-  def this(message: String) = this(message, null)
-}

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/data/src/main/scala/io/prediction/data/webhooks/ConnectorUtil.scala
----------------------------------------------------------------------
diff --git a/data/src/main/scala/io/prediction/data/webhooks/ConnectorUtil.scala b/data/src/main/scala/io/prediction/data/webhooks/ConnectorUtil.scala
deleted file mode 100644
index 424b6ba..0000000
--- a/data/src/main/scala/io/prediction/data/webhooks/ConnectorUtil.scala
+++ /dev/null
@@ -1,46 +0,0 @@
-/** Copyright 2015 TappingStone, Inc.
-  *
-  * Licensed under the Apache License, Version 2.0 (the "License");
-  * you may not use this file except in compliance with the License.
-  * You may obtain a copy of the License at
-  *
-  *     http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-package io.prediction.data.webhooks
-
-import io.prediction.data.storage.Event
-import io.prediction.data.storage.EventJson4sSupport
-
-import org.json4s.Formats
-import org.json4s.DefaultFormats
-import org.json4s.JObject
-import org.json4s.native.Serialization.read
-import org.json4s.native.Serialization.write
-
-
-private[prediction] object ConnectorUtil {
-
-  implicit val eventJson4sFormats: Formats = DefaultFormats +
-    new EventJson4sSupport.APISerializer
-
-  // intentionally use EventJson4sSupport.APISerializer to convert
-  // from JSON to Event object. Don't allow connector directly create
-  // Event object so that the Event object formation is consistent
-  // by enforcing JSON format
-
-  def toEvent(connector: JsonConnector, data: JObject): Event = {
-    read[Event](write(connector.toEventJson(data)))
-  }
-
-  def toEvent(connector: FormConnector, data: Map[String, String]): Event = {
-    read[Event](write(connector.toEventJson(data)))
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/data/src/main/scala/io/prediction/data/webhooks/FormConnector.scala
----------------------------------------------------------------------
diff --git a/data/src/main/scala/io/prediction/data/webhooks/FormConnector.scala b/data/src/main/scala/io/prediction/data/webhooks/FormConnector.scala
deleted file mode 100644
index 9087f31..0000000
--- a/data/src/main/scala/io/prediction/data/webhooks/FormConnector.scala
+++ /dev/null
@@ -1,32 +0,0 @@
-/** Copyright 2015 TappingStone, Inc.
-  *
-  * Licensed under the Apache License, Version 2.0 (the "License");
-  * you may not use this file except in compliance with the License.
-  * You may obtain a copy of the License at
-  *
-  *     http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-package io.prediction.data.webhooks
-
-import org.json4s.JObject
-
-/** Connector for Webhooks connection with Form submission data format
-  */
-private[prediction] trait FormConnector {
-
-  // TODO: support conversion to multiple events?
-
-  /** Convert from original Form submission data to Event JObject
-    * @param data Map of key-value pairs in String type received through webhooks
-    * @return Event JObject
-   */
-  def toEventJson(data: Map[String, String]): JObject
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/data/src/main/scala/io/prediction/data/webhooks/JsonConnector.scala
----------------------------------------------------------------------
diff --git a/data/src/main/scala/io/prediction/data/webhooks/JsonConnector.scala b/data/src/main/scala/io/prediction/data/webhooks/JsonConnector.scala
deleted file mode 100644
index e0e80fe..0000000
--- a/data/src/main/scala/io/prediction/data/webhooks/JsonConnector.scala
+++ /dev/null
@@ -1,31 +0,0 @@
-/** Copyright 2015 TappingStone, Inc.
-  *
-  * Licensed under the Apache License, Version 2.0 (the "License");
-  * you may not use this file except in compliance with the License.
-  * You may obtain a copy of the License at
-  *
-  *     http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-package io.prediction.data.webhooks
-
-import org.json4s.JObject
-
-/** Connector for Webhooks connection */
-private[prediction] trait JsonConnector {
-
-  // TODO: support conversion to multiple events?
-
-  /** Convert from original JObject to Event JObject
-    * @param data original JObject received through webhooks
-    * @return Event JObject
-   */
-  def toEventJson(data: JObject): JObject
-
-}

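To make the contract above concrete, here is a minimal hypothetical implementation (not part of this commit). The incoming field names "action", "uid" and "ts" are invented for this sketch, and the object is placed under the io.prediction package so it can see the private[prediction] trait:

    package io.prediction.data.webhooks.sketch

    import io.prediction.data.webhooks.JsonConnector

    import org.json4s._
    import org.json4s.JsonDSL._

    // Maps one incoming JSON payload onto the Event API JSON fields.
    private[prediction] object MinimalJsonConnector extends JsonConnector {
      implicit val formats: Formats = DefaultFormats

      override def toEventJson(data: JObject): JObject = {
        ("event" -> (data \ "action").extract[String]) ~
        ("entityType" -> "user") ~
        ("entityId" -> (data \ "uid").extract[String]) ~
        ("eventTime" -> (data \ "ts").extract[String])
      }
    }

A connector like this is then passed to ConnectorUtil.toEvent, which round-trips the produced JSON through EventJson4sSupport.APISerializer to build the Event object.
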
http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/data/src/main/scala/io/prediction/data/webhooks/exampleform/ExampleFormConnector.scala
----------------------------------------------------------------------
diff --git a/data/src/main/scala/io/prediction/data/webhooks/exampleform/ExampleFormConnector.scala b/data/src/main/scala/io/prediction/data/webhooks/exampleform/ExampleFormConnector.scala
deleted file mode 100644
index f19e009..0000000
--- a/data/src/main/scala/io/prediction/data/webhooks/exampleform/ExampleFormConnector.scala
+++ /dev/null
@@ -1,123 +0,0 @@
-/** Copyright 2015 TappingStone, Inc.
-  *
-  * Licensed under the Apache License, Version 2.0 (the "License");
-  * you may not use this file except in compliance with the License.
-  * You may obtain a copy of the License at
-  *
-  *     http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-package io.prediction.data.webhooks.exampleform
-
-import io.prediction.data.webhooks.FormConnector
-import io.prediction.data.webhooks.ConnectorException
-
-import org.json4s.JObject
-
-
-/** Example FormConnector with the following types of webhook form data input:
-  *
-  * UserAction
-  *
-  *   "type"="userAction"
-  *   "userId"="as34smg4",
-  *   "event"="do_something",
-  *   "context[ip]"="24.5.68.47", // optional
-  *   "context[prop1]"="2.345", // optional
-  *   "context[prop2]"="value1" // optional
-  *   "anotherProperty1"="100",
-  *   "anotherProperty2"="optional1", // optional
-  *   "timestamp"="2015-01-02T00:30:12.984Z"
-  *
-  * UserActionItem
-  *
-  *   "type"="userActionItem"
-  *   "userId"="as34smg4",
-  *   "event"="do_something_on",
-  *   "itemId"="kfjd312bc",
-  *   "context[ip]"="1.23.4.56",
-  *   "context[prop1]"="2.345",
-  *   "context[prop2]"="value1",
-  *   "anotherPropertyA"="4.567", // optional
-  *   "anotherPropertyB"="false", // optional
-  *   "timestamp"="2015-01-15T04:20:23.567Z"
-  *
-  */
-private[prediction] object ExampleFormConnector extends FormConnector {
-
-  override
-  def toEventJson(data: Map[String, String]): JObject = {
-    val json = try {
-      data.get("type") match {
-        case Some("userAction") => userActionToEventJson(data)
-        case Some("userActionItem") => userActionItemToEventJson(data)
-        case Some(x) => throw new ConnectorException(
-          s"Cannot convert unknown type ${x} to event JSON")
-        case None => throw new ConnectorException(
-          s"The field 'type' is required.")
-      }
-    } catch {
-      case e: ConnectorException => throw e
-      case e: Exception => throw new ConnectorException(
-        s"Cannot convert ${data} to event JSON. ${e.getMessage()}", e)
-    }
-    json
-  }
-
-  def userActionToEventJson(data: Map[String, String]): JObject = {
-    import org.json4s.JsonDSL._
-
-    // two level optional data
-    val context = if (data.exists(_._1.startsWith("context["))) {
-      Some(
-        ("ip" -> data.get("context[ip]")) ~
-        ("prop1" -> data.get("context[prop1]").map(_.toDouble)) ~
-        ("prop2" -> data.get("context[prop2]"))
-      )
-    } else {
-      None
-    }
-
-    val json =
-      ("event" -> data("event")) ~
-      ("entityType" -> "user") ~
-      ("entityId" -> data("userId")) ~
-      ("eventTime" -> data("timestamp")) ~
-      ("properties" -> (
-        ("context" -> context) ~
-        ("anotherProperty1" -> data("anotherProperty1").toInt) ~
-        ("anotherProperty2" -> data.get("anotherProperty2"))
-      ))
-    json
-  }
-
-
-  def userActionItemToEventJson(data: Map[String, String]): JObject = {
-    import org.json4s.JsonDSL._
-
-    val json =
-      ("event" -> data("event")) ~
-      ("entityType" -> "user") ~
-      ("entityId" -> data("userId")) ~
-      ("targetEntityType" -> "item") ~
-      ("targetEntityId" -> data("itemId")) ~
-      ("eventTime" -> data("timestamp")) ~
-      ("properties" -> (
-        ("context" -> (
-          ("ip" -> data("context[ip]")) ~
-          ("prop1" -> data("context[prop1]").toDouble) ~
-          ("prop2" -> data("context[prop2]"))
-        )) ~
-        ("anotherPropertyA" -> data.get("anotherPropertyA").map(_.toDouble)) ~
-        ("anotherPropertyB" -> data.get("anotherPropertyB").map(_.toBoolean))
-      ))
-    json
-  }
-
-}

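A quick illustrative call (not part of this commit; it must live under the io.prediction package because the connector is private[prediction]). The values mirror the "userAction" sample data in the class comment above:

    val formData = Map(
      "type" -> "userAction",
      "userId" -> "as34smg4",
      "event" -> "do_something",
      "anotherProperty1" -> "100",
      "timestamp" -> "2015-01-02T00:30:12.984Z")
    // Produces Event API JSON with entityType "user" and entityId "as34smg4".
    val eventJson = ExampleFormConnector.toEventJson(formData)
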
http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/data/src/main/scala/io/prediction/data/webhooks/examplejson/ExampleJsonConnector.scala
----------------------------------------------------------------------
diff --git a/data/src/main/scala/io/prediction/data/webhooks/examplejson/ExampleJsonConnector.scala b/data/src/main/scala/io/prediction/data/webhooks/examplejson/ExampleJsonConnector.scala
deleted file mode 100644
index 4d4b991..0000000
--- a/data/src/main/scala/io/prediction/data/webhooks/examplejson/ExampleJsonConnector.scala
+++ /dev/null
@@ -1,153 +0,0 @@
-/** Copyright 2015 TappingStone, Inc.
-  *
-  * Licensed under the Apache License, Version 2.0 (the "License");
-  * you may not use this file except in compliance with the License.
-  * You may obtain a copy of the License at
-  *
-  *     http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-package io.prediction.data.webhooks.examplejson
-
-import io.prediction.data.webhooks.JsonConnector
-import io.prediction.data.webhooks.ConnectorException
-
-import org.json4s.Formats
-import org.json4s.DefaultFormats
-import org.json4s.JObject
-
-/** Example JsonConnector with the following types of webhooks JSON input:
-  *
-  * UserAction
-  *
-  * {
-  *   "type": "userAction"
-  *   "userId": "as34smg4",
-  *   "event": "do_something",
-  *   "context": {
-  *     "ip": "24.5.68.47",
-  *     "prop1": 2.345,
-  *     "prop2": "value1"
-  *   },
-  *   "anotherProperty1": 100,
-  *   "anotherProperty2": "optional1",
-  *   "timestamp": "2015-01-02T00:30:12.984Z"
-  * }
-  *
-  * UserActionItem
-  *
-  * {
-  *   "type": "userActionItem"
-  *   "userId": "as34smg4",
-  *   "event": "do_something_on",
-  *   "itemId": "kfjd312bc",
-  *   "context": {
-  *     "ip": "1.23.4.56",
-  *     "prop1": 2.345,
-  *     "prop2": "value1"
-  *   },
-  *   "anotherPropertyA": 4.567,
-  *   "anotherPropertyB": false,
-  *   "timestamp": "2015-01-15T04:20:23.567Z"
-  * }
-  */
-private[prediction] object ExampleJsonConnector extends JsonConnector {
-
-  implicit val json4sFormats: Formats = DefaultFormats
-
-  override def toEventJson(data: JObject): JObject = {
-    val common = try {
-      data.extract[Common]
-    } catch {
-      case e: Exception => throw new ConnectorException(
-        s"Cannot extract Common field from ${data}. ${e.getMessage()}", e)
-    }
-
-    val json = try {
-      common.`type` match {
-        case "userAction" =>
-          toEventJson(common = common, userAction = data.extract[UserAction])
-        case "userActionItem" =>
-          toEventJson(common = common, userActionItem = data.extract[UserActionItem])
-        case x: String =>
-          throw new ConnectorException(
-            s"Cannot convert unknown type '${x}' to Event JSON.")
-      }
-    } catch {
-      case e: ConnectorException => throw e
-      case e: Exception => throw new ConnectorException(
-        s"Cannot convert ${data} to eventJson. ${e.getMessage()}", e)
-    }
-
-    json
-  }
-
-  def toEventJson(common: Common, userAction: UserAction): JObject = {
-    import org.json4s.JsonDSL._
-
-    // map to EventAPI JSON
-    val json =
-      ("event" -> userAction.event) ~
-        ("entityType" -> "user") ~
-        ("entityId" -> userAction.userId) ~
-        ("eventTime" -> userAction.timestamp) ~
-        ("properties" -> (
-          ("context" -> userAction.context) ~
-            ("anotherProperty1" -> userAction.anotherProperty1) ~
-            ("anotherProperty2" -> userAction.anotherProperty2)
-          ))
-    json
-  }
-
-  def toEventJson(common: Common, userActionItem: UserActionItem): JObject = {
-    import org.json4s.JsonDSL._
-
-    // map to EventAPI JSON
-    val json =
-      ("event" -> userActionItem.event) ~
-        ("entityType" -> "user") ~
-        ("entityId" -> userActionItem.userId) ~
-        ("targetEntityType" -> "item") ~
-        ("targetEntityId" -> userActionItem.itemId) ~
-        ("eventTime" -> userActionItem.timestamp) ~
-        ("properties" -> (
-          ("context" -> userActionItem.context) ~
-            ("anotherPropertyA" -> userActionItem.anotherPropertyA) ~
-            ("anotherPropertyB" -> userActionItem.anotherPropertyB)
-          ))
-    json
-  }
-
-  // Common required fields
-  case class Common(
-    `type`: String
-  )
-
-  // User Actions fields
-  case class UserAction (
-    userId: String,
-    event: String,
-    context: Option[JObject],
-    anotherProperty1: Int,
-    anotherProperty2: Option[String],
-    timestamp: String
-  )
-
-  // UserActionItem fields
-  case class UserActionItem (
-    userId: String,
-    event: String,
-    itemId: String,
-    context: JObject,
-    anotherPropertyA: Option[Double],
-    anotherPropertyB: Option[Boolean],
-    timestamp: String
-  )
-
-}

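Similarly, an illustrative call for the JSON variant (not part of this commit; again assuming code under the io.prediction package), using the "userAction" sample payload from the class comment:

    import org.json4s.JObject
    import org.json4s.native.JsonMethods.parse

    val payload = parse(
      """{"type": "userAction", "userId": "as34smg4",
          "event": "do_something", "anotherProperty1": 100,
          "timestamp": "2015-01-02T00:30:12.984Z"}""").asInstanceOf[JObject]
    val eventJson = ExampleJsonConnector.toEventJson(payload)
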
http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/data/src/main/scala/io/prediction/data/webhooks/mailchimp/MailChimpConnector.scala
----------------------------------------------------------------------
diff --git a/data/src/main/scala/io/prediction/data/webhooks/mailchimp/MailChimpConnector.scala b/data/src/main/scala/io/prediction/data/webhooks/mailchimp/MailChimpConnector.scala
deleted file mode 100644
index b2793a0..0000000
--- a/data/src/main/scala/io/prediction/data/webhooks/mailchimp/MailChimpConnector.scala
+++ /dev/null
@@ -1,305 +0,0 @@
-/** Copyright 2015 TappingStone, Inc.
-  *
-  * Licensed under the Apache License, Version 2.0 (the "License");
-  * you may not use this file except in compliance with the License.
-  * You may obtain a copy of the License at
-  *
-  *     http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-
-package io.prediction.data.webhooks.mailchimp
-
-import io.prediction.data.webhooks.FormConnector
-import io.prediction.data.webhooks.ConnectorException
-import io.prediction.data.storage.EventValidation
-import io.prediction.data.Utils
-
-import org.json4s.JObject
-
-import org.joda.time.DateTime
-import org.joda.time.format.DateTimeFormat
-
-private[prediction] object MailChimpConnector extends FormConnector {
-
-  override
-  def toEventJson(data: Map[String, String]): JObject = {
-
-    val json = data.get("type") match {
-      case Some("subscribe") => subscribeToEventJson(data)
-      // UNSUBSCRIBE
-      case Some("unsubscribe") => unsubscribeToEventJson(data)
-      // PROFILE UPDATES
-      case Some("profile") => profileToEventJson(data)
-      // EMAIL UPDATE
-      case Some("upemail") => upemailToEventJson(data)
-      // CLEANED EMAILS
-      case Some("cleaned") => cleanedToEventJson(data)
-      // CAMPAIGN SENDING STATUS
-      case Some("campaign") => campaignToEventJson(data)
-      // invalid type
-      case Some(x) => throw new ConnectorException(
-        s"Cannot convert unknown MailChimp data type ${x} to event JSON")
-      case None => throw new ConnectorException(
-        s"The field 'type' is required for MailChimp data.")
-    }
-    json
-  }
-
-
-  val mailChimpDateTimeFormat = DateTimeFormat.forPattern("yyyy-MM-dd HH:mm:ss")
-    .withZone(EventValidation.defaultTimeZone)
-
-  def parseMailChimpDateTime(s: String): DateTime = {
-    mailChimpDateTimeFormat.parseDateTime(s)
-  }
-
-  def subscribeToEventJson(data: Map[String, String]): JObject = {
-
-    import org.json4s.JsonDSL._
-
-    /*
-    "type": "subscribe",
-    "fired_at": "2009-03-26 21:35:57",
-    "data[id]": "8a25ff1d98",
-    "data[list_id]": "a6b5da1054",
-    "data[email]": "api@mailchimp.com",
-    "data[email_type]": "html",
-    "data[merges][EMAIL]": "api@mailchimp.com",
-    "data[merges][FNAME]": "MailChimp",
-    "data[merges][LNAME]": "API",
-    "data[merges][INTERESTS]": "Group1,Group2",
-    "data[ip_opt]": "10.20.10.30",
-    "data[ip_signup]": "10.20.10.30"
-    */
-
-    // convert to ISO8601 format
-    val eventTime = Utils.dateTimeToString(parseMailChimpDateTime(data("fired_at")))
-
-    // TODO: handle optional fields
-    val json =
-      ("event" -> "subscribe") ~
-      ("entityType" -> "user") ~
-      ("entityId" -> data("data[id]")) ~
-      ("targetEntityType" -> "list") ~
-      ("targetEntityId" -> data("data[list_id]")) ~
-      ("eventTime" -> eventTime) ~
-      ("properties" -> (
-        ("email" -> data("data[email]")) ~
-        ("email_type" -> data("data[email_type]")) ~
-        ("merges" -> (
-          ("EMAIL" -> data("data[merges][EMAIL]")) ~
-          ("FNAME" -> data("data[merges][FNAME]"))) ~
-          ("LNAME" -> data("data[merges][LNAME]")) ~
-          ("INTERESTS" -> data.get("data[merges][INTERESTS]"))
-        )) ~
-        ("ip_opt" -> data("data[ip_opt]")) ~
-        ("ip_signup" -> data("data[ip_signup]")
-      ))
-
-    json
-
-  }
-
-  def unsubscribeToEventJson(data: Map[String, String]): JObject = {
-
-    import org.json4s.JsonDSL._
-
-    /*
-    "action" will either be "unsub" or "delete".
-    The reason will be "manual" unless caused by a spam complaint - then it will be "abuse"
-
-    "type": "unsubscribe",
-    "fired_at": "2009-03-26 21:40:57",
-    "data[action]": "unsub",
-    "data[reason]": "manual",
-    "data[id]": "8a25ff1d98",
-    "data[list_id]": "a6b5da1054",
-    "data[email]": "api+unsub@mailchimp.com",
-    "data[email_type]": "html",
-    "data[merges][EMAIL]": "api+unsub@mailchimp.com",
-    "data[merges][FNAME]": "MailChimp",
-    "data[merges][LNAME]": "API",
-    "data[merges][INTERESTS]": "Group1,Group2",
-    "data[ip_opt]": "10.20.10.30",
-    "data[campaign_id]": "cb398d21d2",
-    */
-
-    // convert to ISO8601 format
-    val eventTime = Utils.dateTimeToString(parseMailChimpDateTime(data("fired_at")))
-
-    val json =
-      ("event" -> "unsubscribe") ~
-      ("entityType" -> "user") ~
-      ("entityId" -> data("data[id]")) ~
-      ("targetEntityType" -> "list") ~
-      ("targetEntityId" -> data("data[list_id]")) ~
-      ("eventTime" -> eventTime) ~
-      ("properties" -> (
-        ("action" -> data("data[action]")) ~
-        ("reason" -> data("data[reason]")) ~
-        ("email" -> data("data[email]")) ~
-        ("email_type" -> data("data[email_type]")) ~
-        ("merges" -> (
-          ("EMAIL" -> data("data[merges][EMAIL]")) ~
-          ("FNAME" -> data("data[merges][FNAME]"))) ~
-          ("LNAME" -> data("data[merges][LNAME]")) ~
-          ("INTERESTS" -> data.get("data[merges][INTERESTS]"))
-        )) ~
-        ("ip_opt" -> data("data[ip_opt]")) ~
-        ("campaign_id" -> data("data[campaign_id]")
-      ))
-
-    json
-
-  }
-
-  def profileToEventJson(data: Map[String, String]): JObject = {
-
-    import org.json4s.JsonDSL._
-
-    /*
-    "type": "profile",
-    "fired_at": "2009-03-26 21:31:21",
-    "data[id]": "8a25ff1d98",
-    "data[list_id]": "a6b5da1054",
-    "data[email]": "api@mailchimp.com",
-    "data[email_type]": "html",
-    "data[merges][EMAIL]": "api@mailchimp.com",
-    "data[merges][FNAME]": "MailChimp",
-    "data[merges][LNAME]": "API",
-    "data[merges][INTERESTS]": "Group1,Group2", \\OPTIONAL
-    "data[ip_opt]": "10.20.10.30"
-    */
-
-    // convert to ISO8601 format
-    val eventTime = Utils.dateTimeToString(parseMailChimpDateTime(data("fired_at")))
-
-    val json =
-      ("event" -> "profile") ~
-      ("entityType" -> "user") ~
-      ("entityId" -> data("data[id]")) ~
-      ("targetEntityType" -> "list") ~
-      ("targetEntityId" -> data("data[list_id]")) ~
-      ("eventTime" -> eventTime) ~
-      ("properties" -> (
-        ("email" -> data("data[email]")) ~
-        ("email_type" -> data("data[email_type]")) ~
-        ("merges" -> (
-          ("EMAIL" -> data("data[merges][EMAIL]")) ~
-          ("FNAME" -> data("data[merges][FNAME]"))) ~
-          ("LNAME" -> data("data[merges][LNAME]")) ~
-          ("INTERESTS" -> data.get("data[merges][INTERESTS]"))
-        )) ~
-        ("ip_opt" -> data("data[ip_opt]")
-      ))
-
-    json
-
-  }
-
-  def upemailToEventJson(data: Map[String, String]): JObject = {
-
-    import org.json4s.JsonDSL._
-
-    /*
-    "type": "upemail",
-    "fired_at": "2009-03-26 22:15:09",
-    "data[list_id]": "a6b5da1054",
-    "data[new_id]": "51da8c3259",
-    "data[new_email]": "api+new@mailchimp.com",
-    "data[old_email]": "api+old@mailchimp.com"
-    */
-
-    // convert to ISO8601 format
-    val eventTime = Utils.dateTimeToString(parseMailChimpDateTime(data("fired_at")))
-
-    val json =
-      ("event" -> "upemail") ~
-      ("entityType" -> "user") ~
-      ("entityId" -> data("data[new_id]")) ~
-      ("targetEntityType" -> "list") ~
-      ("targetEntityId" -> data("data[list_id]")) ~
-      ("eventTime" -> eventTime) ~
-      ("properties" -> (
-        ("new_email" -> data("data[new_email]")) ~
-        ("old_email" -> data("data[old_email]"))
-      ))
-
-    json
-
-  }
-
-  def cleanedToEventJson(data: Map[String, String]): JObject = {
-
-    import org.json4s.JsonDSL._
-
-    /*
-    Reason will be one of "hard" (for hard bounces) or "abuse"
-    "type": "cleaned",
-    "fired_at": "2009-03-26 22:01:00",
-    "data[list_id]": "a6b5da1054",
-    "data[campaign_id]": "4fjk2ma9xd",
-    "data[reason]": "hard",
-    "data[email]": "api+cleaned@mailchimp.com"
-    */
-
-    // convert to ISO8601 format
-    val eventTime = Utils.dateTimeToString(parseMailChimpDateTime(data("fired_at")))
-
-    val json =
-      ("event" -> "cleaned") ~
-      ("entityType" -> "list") ~
-      ("entityId" -> data("data[list_id]")) ~
-      ("eventTime" -> eventTime) ~
-      ("properties" -> (
-        ("campaignId" -> data("data[campaign_id]")) ~
-        ("reason" -> data("data[reason]")) ~
-        ("email" -> data("data[email]"))
-      ))
-
-    json
-
-  }
-
-  def campaignToEventJson(data: Map[String, String]): JObject = {
-
-    import org.json4s.JsonDSL._
-
-    /*
-    "type": "campaign",
-    "fired_at": "2009-03-26 21:31:21",
-    "data[id]": "5aa2102003",
-    "data[subject]": "Test Campaign Subject",
-    "data[status]": "sent",
-    "data[reason]": "",
-    "data[list_id]": "a6b5da1054"
-    */
-
-    // convert to ISO8601 format
-    val eventTime = Utils.dateTimeToString(parseMailChimpDateTime(data("fired_at")))
-
-    val json =
-      ("event" -> "campaign") ~
-      ("entityType" -> "campaign") ~
-      ("entityId" -> data("data[id]")) ~
-      ("targetEntityType" -> "list") ~
-      ("targetEntityId" -> data("data[list_id]")) ~
-      ("eventTime" -> eventTime) ~
-      ("properties" -> (
-        ("subject" -> data("data[subject]")) ~
-        ("status" -> data("data[status]")) ~
-        ("reason" -> data("data[reason]"))
-      ))
-
-    json
-
-  }
-
-}

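MailChimp posts "fired_at" as "yyyy-MM-dd HH:mm:ss" with no zone, which the connector above normalizes to strict ISO8601 via parseMailChimpDateTime and Utils.dateTimeToString. A standalone sketch of that conversion (UTC is assumed here purely for the sketch; the connector itself uses EventValidation.defaultTimeZone):

    import org.joda.time.DateTimeZone
    import org.joda.time.format.{DateTimeFormat, ISODateTimeFormat}

    object MailChimpTimeSketch {
      private val input = DateTimeFormat.forPattern("yyyy-MM-dd HH:mm:ss")
        .withZone(DateTimeZone.UTC)  // assumption: UTC stands in for the default zone
      private val output = ISODateTimeFormat.dateTime()

      def toIso(firedAt: String): String =
        output.print(input.parseDateTime(firedAt))

      def main(args: Array[String]): Unit =
        println(toIso("2009-03-26 21:35:57"))  // 2009-03-26T21:35:57.000Z
    }
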
http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/data/src/main/scala/io/prediction/data/webhooks/segmentio/SegmentIOConnector.scala
----------------------------------------------------------------------
diff --git a/data/src/main/scala/io/prediction/data/webhooks/segmentio/SegmentIOConnector.scala b/data/src/main/scala/io/prediction/data/webhooks/segmentio/SegmentIOConnector.scala
deleted file mode 100644
index 318043c..0000000
--- a/data/src/main/scala/io/prediction/data/webhooks/segmentio/SegmentIOConnector.scala
+++ /dev/null
@@ -1,306 +0,0 @@
-/** Copyright 2015 TappingStone, Inc.
-  *
-  * Licensed under the Apache License, Version 2.0 (the "License");
-  * you may not use this file except in compliance with the License.
-  * You may obtain a copy of the License at
-  *
-  *     http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-package io.prediction.data.webhooks.segmentio
-
-import io.prediction.data.webhooks.{ConnectorException, JsonConnector}
-import org.json4s._
-
-private[prediction] object SegmentIOConnector extends JsonConnector {
-
-  // private lazy val supportedAPI = Vector("2", "2.0", "2.0.0")
-
-  implicit val json4sFormats: Formats = DefaultFormats
-
-  override
-  def toEventJson(data: JObject): JObject = {
-    try {
-      val version: String = data.values("version").toString
-/*
-      if (!supportedAPI.contains(version)) {
-        throw new ConnectorException(
-          s"Supported segment.io API versions: [2]. got [$version]"
-        )
-      }
-*/
-    } catch { case _: Throwable ⇒
-      throw new ConnectorException(s"Failed to get segment.io API version.")
-    }
-
-    val common = try {
-      data.extract[Common]
-    } catch {
-      case e: Throwable ⇒ throw new ConnectorException(
-        s"Cannot extract Common field from $data. ${e.getMessage}", e
-      )
-    }
-
-    try {
-      common.`type` match {
-        case "identify" \u21d2
-          toEventJson(
-            common = common,
-            identify = data.extract[Events.Identify]
-          )
-
-        case "track" \u21d2
-          toEventJson(
-            common = common,
-            track = data.extract[Events.Track]
-          )
-
-        case "alias" \u21d2
-          toEventJson(
-            common = common,
-            alias = data.extract[Events.Alias]
-          )
-
-        case "page" \u21d2
-          toEventJson(
-            common = common,
-            page = data.extract[Events.Page]
-          )
-
-        case "screen" \u21d2
-          toEventJson(
-            common = common,
-            screen = data.extract[Events.Screen]
-          )
-
-        case "group" \u21d2
-          toEventJson(
-            common = common,
-            group = data.extract[Events.Group]
-          )
-
-        case _ ⇒
-          throw new ConnectorException(
-            s"Cannot convert unknown type ${common.`type`} to event JSON."
-          )
-      }
-    } catch {
-      case e: ConnectorException => throw e
-      case e: Exception =>
-        throw new ConnectorException(
-          s"Cannot convert $data to event JSON. ${e.getMessage}", e
-        )
-    }
-  }
-
-  def toEventJson(common: Common, identify: Events.Identify): JObject = {
-    import org.json4s.JsonDSL._
-    val eventProperties = "traits" → identify.traits
-    toJson(common, eventProperties)
-  }
-
-  def toEventJson(common: Common, track: Events.Track): JObject = {
-    import org.json4s.JsonDSL._
-    val eventProperties =
-      ("properties" \u2192 track.properties) ~
-      ("event" \u2192 track.event)
-    toJson(common, eventProperties)
-  }
-
-  def toEventJson(common: Common, alias: Events.Alias): JObject = {
-    import org.json4s.JsonDSL._
-    toJson(common, "previous_id" \u2192 alias.previous_id)
-  }
-
-  def toEventJson(common: Common, screen: Events.Screen): JObject = {
-    import org.json4s.JsonDSL._
-    val eventProperties =
-      ("name" \u2192 screen.name) ~
-      ("properties" \u2192 screen.properties)
-    toJson(common, eventProperties)
-  }
-
-  def toEventJson(common: Common, page: Events.Page): JObject = {
-    import org.json4s.JsonDSL._
-    val eventProperties =
-      ("name" \u2192 page.name) ~
-      ("properties" \u2192 page.properties)
-    toJson(common, eventProperties)
-  }
-
-  def toEventJson(common: Common, group: Events.Group): JObject = {
-    import org.json4s.JsonDSL._
-    val eventProperties =
-      ("group_id" \u2192 group.group_id) ~
-      ("traits" \u2192 group.traits)
-    toJson(common, eventProperties)
-  }
-
-  private def toJson(common: Common, props: JObject): JsonAST.JObject = {
-    val commonFields = commonToJson(common)
-    JObject(("properties" \u2192 properties(common, props)) :: commonFields.obj)
-  }
-
-  private def properties(common: Common, eventProps: JObject): JObject = {
-    import org.json4s.JsonDSL._
-    common.context map { context ⇒
-      try {
-        ("context" → Extraction.decompose(context)) ~ eventProps
-      } catch {
-        case e: Throwable ⇒
-          throw new ConnectorException(
-            s"Cannot convert $context to event JSON. ${e.getMessage}", e
-          )
-      }
-    } getOrElse eventProps
-  }
-
-  private def commonToJson(common: Common): JObject =
-    commonToJson(common, common.`type`)
-
-  private def commonToJson(common: Common, typ: String): JObject = {
-    import org.json4s.JsonDSL._
-      common.user_id.orElse(common.anonymous_id) match {
-        case Some(userId) ⇒
-          ("event" → typ) ~
-            ("entityType" → "user") ~
-            ("entityId" → userId) ~
-            ("eventTime" → common.timestamp)
-
-        case None ⇒
-          throw new ConnectorException(
-            "There was no `userId` or `anonymousId` in the common fields."
-          )
-      }
-  }
-}
-
-object Events {
-
-  private[prediction] case class Track(
-    event: String,
-    properties: Option[JObject] = None
-  )
-
-  private[prediction] case class Alias(previous_id: String, user_id: String)
-
-  private[prediction] case class Group(
-    group_id: String,
-    traits: Option[JObject] = None
-  )
-
-  private[prediction] case class Screen(
-    name: Option[String] = None,
-    properties: Option[JObject] = None
-  )
-
-  private[prediction] case class Page(
-    name: Option[String] = None,
-    properties: Option[JObject] = None
-  )
-
-  private[prediction] case class Identify(
-    user_id: String,
-    traits: Option[JObject]
-  )
-
-}
-
-object Common {
-
-  private[prediction] case class Integrations(
-    All: Boolean = false,
-    Mixpanel: Boolean = false,
-    Marketo: Boolean = false,
-    Salesforse: Boolean = false
-  )
-
-  private[prediction] case class Context(
-    ip: String,
-    library: Library,
-    user_agent: String,
-    app: Option[App] = None,
-    campaign: Option[Campaign] = None,
-    device: Option[Device] = None,
-    network: Option[Network] = None,
-    location: Option[Location] = None,
-    os: Option[OS] = None,
-    referrer: Option[Referrer] = None,
-    screen: Option[Screen] = None,
-    timezone: Option[String] = None
-  )
-
-  private[prediction] case class Screen(width: Int, height: Int, density: Int)
-
-  private[prediction] case class Referrer(id: String, `type`: String)
-
-  private[prediction] case class OS(name: String, version: String)
-
-  private[prediction] case class Location(
-    city: Option[String] = None,
-    country: Option[String] = None,
-    latitude: Option[Double] = None,
-    longitude: Option[Double] = None,
-    speed: Option[Int] = None
-  )
-
-  case class Page(
-    path: String,
-    referrer: String,
-    search: String,
-    title: String,
-    url: String
-  )
-
-  private[prediction] case class Network(
-    bluetooth: Option[Boolean] = None,
-    carrier: Option[String] = None,
-    cellular: Option[Boolean] = None,
-    wifi: Option[Boolean] = None
-  )
-
-  private[prediction] case class Library(name: String, version: String)
-
-  private[prediction] case class Device(
-    id: Option[String] = None,
-    advertising_id: Option[String] = None,
-    ad_tracking_enabled: Option[Boolean] = None,
-    manufacturer: Option[String] = None,
-    model: Option[String] = None,
-    name: Option[String] = None,
-    `type`: Option[String] = None,
-    token: Option[String] = None
-  )
-
-  private[prediction] case class Campaign(
-    name: Option[String] = None,
-    source: Option[String] = None,
-    medium: Option[String] = None,
-    term: Option[String] = None,
-    content: Option[String] = None
-  )
-
-  private[prediction] case class App(
-    name: Option[String] = None,
-    version: Option[String] = None,
-    build: Option[String] = None
-  )
-
-}
-
-private[prediction] case class Common(
-  `type`: String,
-  sent_at: String,
-  timestamp: String,
-  version: String,
-  anonymous_id: Option[String] = None,
-  user_id: Option[String] = None,
-  context: Option[Common.Context] = None,
-  integrations: Option[Common.Integrations] = None
-)

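An illustrative end-to-end call (not part of this commit): a minimal Segment "track" payload carrying the fields Common requires, converted by the connector above. All values are made up:

    import org.json4s.JObject
    import org.json4s.native.JsonMethods.parse

    val payload = parse(
      """{"version": "2", "type": "track",
          "sent_at": "2015-01-02T00:30:12.984Z",
          "timestamp": "2015-01-02T00:30:12.984Z",
          "user_id": "as34smg4", "event": "do_something"}"""
    ).asInstanceOf[JObject]
    // Yields {"event": "track", "entityType": "user",
    //         "entityId": "as34smg4", ...} with the tracked
    //         "do_something" event nested under "properties".
    val eventJson = SegmentIOConnector.toEventJson(payload)
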
http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/data/src/main/scala/org/apache/predictionio/data/Utils.scala
----------------------------------------------------------------------
diff --git a/data/src/main/scala/org/apache/predictionio/data/Utils.scala b/data/src/main/scala/org/apache/predictionio/data/Utils.scala
new file mode 100644
index 0000000..db8c7a2
--- /dev/null
+++ b/data/src/main/scala/org/apache/predictionio/data/Utils.scala
@@ -0,0 +1,50 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package org.apache.predictionio.data
+
+import org.joda.time.DateTime
+import org.joda.time.format.ISODateTimeFormat
+
+import java.lang.IllegalArgumentException
+
+private[prediction] object Utils {
+
+  // use dateTime() for strict ISO8601 format
+  val dateTimeFormatter = ISODateTimeFormat.dateTime().withOffsetParsed()
+
+  val dateTimeNoMillisFormatter =
+    ISODateTimeFormat.dateTimeNoMillis().withOffsetParsed()
+
+  def stringToDateTime(dt: String): DateTime = {
+    // We accept two formats.
+    // 1. "yyyy-MM-dd'T'HH:mm:ss.SSSZZ"
+    // 2. "yyyy-MM-dd'T'HH:mm:ssZZ"
+    // The first one also takes milliseconds into account.
+    try {
+      // formatting for "yyyy-MM-dd'T'HH:mm:ss.SSSZZ"
+      dateTimeFormatter.parseDateTime(dt)
+    } catch {
+      case e: IllegalArgumentException => {
+        // handle when the datetime string doesn't specify milliseconds.
+        dateTimeNoMillisFormatter.parseDateTime(dt)
+      }
+    }
+  }
+
+  def dateTimeToString(dt: DateTime): String = dateTimeFormatter.print(dt)
+    // dt.toString
+
+}

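A quick illustration of the two accepted formats (the object name is hypothetical and not part of this commit); the fallback mirrors stringToDateTime above:

    import org.joda.time.DateTime
    import org.joda.time.format.ISODateTimeFormat

    object Iso8601Sketch {
      private val withMillis = ISODateTimeFormat.dateTime().withOffsetParsed()
      private val noMillis = ISODateTimeFormat.dateTimeNoMillis().withOffsetParsed()

      def parse(dt: String): DateTime =
        try {
          withMillis.parseDateTime(dt)   // "yyyy-MM-dd'T'HH:mm:ss.SSSZZ"
        } catch {
          case _: IllegalArgumentException =>
            noMillis.parseDateTime(dt)   // "yyyy-MM-dd'T'HH:mm:ssZZ"
        }

      def main(args: Array[String]): Unit = {
        println(parse("2015-01-02T00:30:12.984Z"))  // with milliseconds
        println(parse("2015-01-02T00:30:12Z"))      // without milliseconds
      }
    }
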
http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/data/src/main/scala/org/apache/predictionio/data/api/Common.scala
----------------------------------------------------------------------
diff --git a/data/src/main/scala/org/apache/predictionio/data/api/Common.scala b/data/src/main/scala/org/apache/predictionio/data/api/Common.scala
new file mode 100644
index 0000000..c380daa
--- /dev/null
+++ b/data/src/main/scala/org/apache/predictionio/data/api/Common.scala
@@ -0,0 +1,80 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package org.apache.predictionio.data.api
+
+import org.apache.predictionio.data.webhooks.ConnectorException
+import org.apache.predictionio.data.storage.StorageException
+
+import spray.routing._
+import spray.routing.Directives._
+import spray.routing.Rejection
+import spray.http.StatusCodes
+import spray.http.StatusCode
+import spray.httpx.Json4sSupport
+
+import org.json4s.Formats
+import org.json4s.DefaultFormats
+
+object Common {
+
+  object Json4sProtocol extends Json4sSupport {
+    implicit def json4sFormats: Formats = DefaultFormats
+  }
+
+  import Json4sProtocol._
+
+  val rejectionHandler = RejectionHandler {
+    case MalformedRequestContentRejection(msg, _) :: _ =>
+      complete(StatusCodes.BadRequest, Map("message" -> msg))
+    case MissingQueryParamRejection(msg) :: _ =>
+      complete(StatusCodes.NotFound,
+        Map("message" -> s"missing required query parameter ${msg}."))
+    case AuthenticationFailedRejection(cause, challengeHeaders) :: _ => {
+      val msg = cause match {
+        case AuthenticationFailedRejection.CredentialsRejected =>
+          "Invalid accessKey."
+        case AuthenticationFailedRejection.CredentialsMissing =>
+          "Missing accessKey."
+      }
+      complete(StatusCodes.Unauthorized, challengeHeaders, Map("message" -> msg))
+    }
+    case ChannelRejection(msg) :: _ =>
+      complete(StatusCodes.Unauthorized, Map("message" -> msg))
+    case NonExistentAppRejection(msg) :: _ =>
+      complete(StatusCodes.Unauthorized, Map("message" -> msg))
+  }
+
+  val exceptionHandler = ExceptionHandler {
+    case e: ConnectorException => {
+      val msg = s"${e.getMessage()}"
+      complete(StatusCodes.BadRequest, Map("message" -> msg))
+    }
+    case e: StorageException => {
+      val msg = s"${e.getMessage()}"
+      complete(StatusCodes.InternalServerError, Map("message" -> msg))
+    }
+    case e: Exception => {
+      val msg = s"${e.getMessage()}"
+      complete(StatusCodes.InternalServerError, Map("message" -> msg))
+    }
+  }
+}
+
+/** invalid channel */
+case class ChannelRejection(msg: String) extends Rejection
+
+/** the app doesn't exist */
+case class NonExistentAppRejection(msg: String) extends Rejection

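Hypothetical wiring, not part of this commit: how a spray route might apply the shared handlers above so that connector errors map to 400 and storage errors to 500 (the trait and path names are invented for this sketch):

    import spray.routing._

    trait SketchService extends HttpService {
      // Rejections and exceptions raised inside the inner route are
      // translated by Common's handlers into JSON error responses.
      val guardedRoute: Route =
        handleRejections(Common.rejectionHandler) {
          handleExceptions(Common.exceptionHandler) {
            path("sketch") {
              post {
                complete("ok")
              }
            }
          }
        }
    }
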
http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/data/src/main/scala/org/apache/predictionio/data/api/EventInfo.scala
----------------------------------------------------------------------
diff --git a/data/src/main/scala/org/apache/predictionio/data/api/EventInfo.scala b/data/src/main/scala/org/apache/predictionio/data/api/EventInfo.scala
new file mode 100644
index 0000000..e25234f
--- /dev/null
+++ b/data/src/main/scala/org/apache/predictionio/data/api/EventInfo.scala
@@ -0,0 +1,24 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package org.apache.predictionio.data.api
+
+import org.apache.predictionio.data.storage.Event
+
+case class EventInfo(
+  appId: Int,
+  channelId: Option[Int],
+  event: Event)
+



[04/34] incubator-predictionio git commit: rename all except examples

Posted by do...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/tools/src/main/scala/io/prediction/tools/console/App.scala
----------------------------------------------------------------------
diff --git a/tools/src/main/scala/io/prediction/tools/console/App.scala b/tools/src/main/scala/io/prediction/tools/console/App.scala
deleted file mode 100644
index 2056f9d..0000000
--- a/tools/src/main/scala/io/prediction/tools/console/App.scala
+++ /dev/null
@@ -1,537 +0,0 @@
-/** Copyright 2015 TappingStone, Inc.
-  *
-  * Licensed under the Apache License, Version 2.0 (the "License");
-  * you may not use this file except in compliance with the License.
-  * You may obtain a copy of the License at
-  *
-  *     http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-package io.prediction.tools.console
-
-import io.prediction.data.storage
-
-import grizzled.slf4j.Logging
-
-case class AppArgs(
-  id: Option[Int] = None,
-  name: String = "",
-  channel: String = "",
-  dataDeleteChannel: Option[String] = None,
-  all: Boolean = false,
-  force: Boolean = false,
-  description: Option[String] = None)
-
-object App extends Logging {
-  def create(ca: ConsoleArgs): Int = {
-    val apps = storage.Storage.getMetaDataApps()
-    // get the client at the beginning so that we exit right away if it cannot be accessed
-    val events = storage.Storage.getLEvents()
-    apps.getByName(ca.app.name) map { app =>
-      error(s"App ${ca.app.name} already exists. Aborting.")
-      1
-    } getOrElse {
-      ca.app.id.map { id =>
-        apps.get(id) map { app =>
-          error(
-            s"App ID ${id} already exists and maps to the app '${app.name}'. " +
-            "Aborting.")
-          return 1
-        }
-      }
-      val appid = apps.insert(storage.App(
-        id = ca.app.id.getOrElse(0),
-        name = ca.app.name,
-        description = ca.app.description))
-      appid map { id =>
-        val dbInit = events.init(id)
-        val r = if (dbInit) {
-          info(s"Initialized Event Store for this app ID: ${id}.")
-          val accessKeys = storage.Storage.getMetaDataAccessKeys
-          val accessKey = accessKeys.insert(storage.AccessKey(
-            key = ca.accessKey.accessKey,
-            appid = id,
-            events = Seq()))
-          accessKey map { k =>
-            info("Created new app:")
-            info(s"      Name: ${ca.app.name}")
-            info(s"        ID: ${id}")
-            info(s"Access Key: ${k}")
-            0
-          } getOrElse {
-            error(s"Unable to create new access key.")
-            1
-          }
-        } else {
-          error(s"Unable to initialize Event Store for this app ID: ${id}.")
-          // revert back the meta data change
-          try {
-            apps.delete(id)
-            0
-          } catch {
-            case e: Exception =>
-              error(s"Failed to revert back the App meta-data change.", e)
-              error(s"The app ${ca.app.name} CANNOT be used!")
-              error(s"Please run 'pio app delete ${ca.app.name}' " +
-                "to delete this app!")
-              1
-          }
-        }
-        events.close()
-        r
-      } getOrElse {
-        error(s"Unable to create new app.")
-        1
-      }
-    }
-  }
-
-  def list(ca: ConsoleArgs): Int = {
-    val apps = storage.Storage.getMetaDataApps.getAll().sortBy(_.name)
-    val accessKeys = storage.Storage.getMetaDataAccessKeys
-    val title = "Name"
-    val ak = "Access Key"
-    info(f"$title%20s |   ID | $ak%64s | Allowed Event(s)")
-    apps foreach { app =>
-      val keys = accessKeys.getByAppid(app.id)
-      keys foreach { k =>
-        val events =
-          if (k.events.size > 0) k.events.sorted.mkString(",") else "(all)"
-        info(f"${app.name}%20s | ${app.id}%4d | ${k.key}%64s | $events%s")
-      }
-    }
-    info(s"Finished listing ${apps.size} app(s).")
-    0
-  }
-
-  def show(ca: ConsoleArgs): Int = {
-    val apps = storage.Storage.getMetaDataApps
-    val accessKeys = storage.Storage.getMetaDataAccessKeys
-    val channels = storage.Storage.getMetaDataChannels
-    apps.getByName(ca.app.name) map { app =>
-      info(s"    App Name: ${app.name}")
-      info(s"      App ID: ${app.id}")
-      info(s" Description: ${app.description.getOrElse("")}")
-      val keys = accessKeys.getByAppid(app.id)
-
-      var firstKey = true
-      keys foreach { k =>
-        val events =
-          if (k.events.size > 0) k.events.sorted.mkString(",") else "(all)"
-        if (firstKey) {
-          info(f"  Access Key: ${k.key}%s | ${events}%s")
-          firstKey = false
-        } else {
-          info(f"              ${k.key}%s | ${events}%s")
-        }
-      }
-
-      val chans = channels.getByAppid(app.id)
-      var firstChan = true
-      val titleName = "Channel Name"
-      val titleID = "Channel ID"
-      chans.foreach { ch =>
-        if (firstChan) {
-          info(f"    Channels: ${titleName}%16s | ${titleID}%10s ")
-          firstChan = false
-        }
-        info(f"              ${ch.name}%16s | ${ch.id}%10s")
-      }
-      0
-    } getOrElse {
-      error(s"App ${ca.app.name} does not exist. Aborting.")
-      1
-    }
-  }
-
-  def delete(ca: ConsoleArgs): Int = {
-    val apps = storage.Storage.getMetaDataApps
-    val accesskeys = storage.Storage.getMetaDataAccessKeys
-    val channels = storage.Storage.getMetaDataChannels
-    val events = storage.Storage.getLEvents()
-    val status = apps.getByName(ca.app.name) map { app =>
-      info(s"The following app (including all channels) will be deleted. Are you sure?")
-      info(s"    App Name: ${app.name}")
-      info(s"      App ID: ${app.id}")
-      info(s" Description: ${app.description.getOrElse("")}")
-      val chans = channels.getByAppid(app.id)
-      var firstChan = true
-      val titleName = "Channel Name"
-      val titleID = "Channel ID"
-      chans.foreach { ch =>
-        if (firstChan) {
-          info(f"    Channels: ${titleName}%16s | ${titleID}%10s ")
-          firstChan = false
-        }
-        info(f"              ${ch.name}%16s | ${ch.id}%10s")
-      }
-
-      val choice = if(ca.app.force) "YES" else readLine("Enter 'YES' to proceed: ")
-      choice match {
-        case "YES" => {
-          // delete channels
-          val delChannelStatus: Seq[Int] = chans.map { ch =>
-            if (events.remove(app.id, Some(ch.id))) {
-              info(s"Removed Event Store of the channel ID: ${ch.id}")
-              try {
-                channels.delete(ch.id)
-                info(s"Deleted channel ${ch.name}")
-                0
-              } catch {
-                case e: Exception =>
-                  error(s"Error deleting channel ${ch.name}.", e)
-                  1
-              }
-            } else {
-              error(s"Error removing Event Store of the channel ID: ${ch.id}.")
-              return 1
-            }
-          }
-
-          if (delChannelStatus.exists(_ != 0)) {
-            error("Error occurred while deleting channels. Aborting.")
-            return 1
-          }
-
-          try {
-            events.remove(app.id)
-            info(s"Removed Event Store for this app ID: ${app.id}")
-          } catch {
-            case e: Exception =>
-              error(s"Error removing Event Store for this app. Aborting.", e)
-              return 1
-          }
-
-          accesskeys.getByAppid(app.id) foreach { key =>
-            try {
-              accesskeys.delete(key.key)
-              info(s"Removed access key ${key.key}")
-            } catch {
-              case e: Exception =>
-                error(s"Error removing access key ${key.key}. Aborting.", e)
-                return 1
-            }
-          }
-
-          try {
-            apps.delete(app.id)
-            info(s"Deleted app ${app.name}.")
-          } catch {
-            case e: Exception =>
-              error(s"Error deleting app ${app.name}. Aborting.", e)
-              return 1
-          }
-
-          info("Done.")
-          0
-        }
-        case _ =>
-          info("Aborted.")
-          0
-      }
-    } getOrElse {
-      error(s"App ${ca.app.name} does not exist. Aborting.")
-      1
-    }
-    events.close()
-    status
-  }
-
-  def dataDelete(ca: ConsoleArgs): Int = {
-    if (ca.app.all) {
-      dataDeleteAll(ca)
-    } else {
-      dataDeleteOne(ca)
-    }
-  }
-
-  def dataDeleteOne(ca: ConsoleArgs): Int = {
-    val apps = storage.Storage.getMetaDataApps
-    val channels = storage.Storage.getMetaDataChannels
-    apps.getByName(ca.app.name) map { app =>
-
-      val channelId = ca.app.dataDeleteChannel.map { ch =>
-        val channelMap = channels.getByAppid(app.id).map(c => (c.name, c.id)).toMap
-        if (!channelMap.contains(ch)) {
-          error(s"Unable to delete data for channel.")
-          error(s"Channel ${ch} doesn't exist.")
-          return 1
-        }
-
-        channelMap(ch)
-      }
-
-      if (channelId.isDefined) {
-        info(s"Data of the following channel will be deleted. Are you sure?")
-        info(s"Channel Name: ${ca.app.dataDeleteChannel.get}")
-        info(s"  Channel ID: ${channelId.get}")
-        info(s"    App Name: ${app.name}")
-        info(s"      App ID: ${app.id}")
-        info(s" Description: ${app.description}")
-      } else {
-        info(s"Data of the following app (default channel only) will be deleted. Are you sure?")
-        info(s"    App Name: ${app.name}")
-        info(s"      App ID: ${app.id}")
-        info(s" Description: ${app.description}")
-      }
-
-      val choice = if(ca.app.force) "YES" else readLine("Enter 'YES' to proceed: ")
-
-      choice match {
-        case "YES" => {
-          val events = storage.Storage.getLEvents()
-          // remove table
-          val r1 = if (events.remove(app.id, channelId)) {
-            if (channelId.isDefined) {
-              info(s"Removed Event Store for this channel ID: ${channelId.get}")
-            } else {
-              info(s"Removed Event Store for this app ID: ${app.id}")
-            }
-            0
-          } else {
-            if (channelId.isDefined) {
-              error(s"Error removing Event Store for this channel.")
-            } else {
-              error(s"Error removing Event Store for this app.")
-            }
-            1
-          }
-          // re-create table
-          val dbInit = events.init(app.id, channelId)
-          val r2 = if (dbInit) {
-            if (channelId.isDefined) {
-              info(s"Initialized Event Store for this channel ID: ${channelId.get}.")
-            } else {
-              info(s"Initialized Event Store for this app ID: ${app.id}.")
-            }
-            0
-          } else {
-            if (channelId.isDefined) {
-              error(s"Unable to initialize Event Store for this channel ID:" +
-                s" ${channelId.get}.")
-            } else {
-              error(s"Unable to initialize Event Store for this appId:" +
-                s" ${app.id}.")
-            }
-            1
-          }
-          events.close()
-          info("Done.")
-          r1 + r2
-        }
-        case _ =>
-          info("Aborted.")
-          0
-      }
-    } getOrElse {
-      error(s"App ${ca.app.name} does not exist. Aborting.")
-      1
-    }
-  }
-
-  def dataDeleteAll(ca: ConsoleArgs): Int = {
-    val apps = storage.Storage.getMetaDataApps
-    val channels = storage.Storage.getMetaDataChannels
-    val events = storage.Storage.getLEvents()
-    val status = apps.getByName(ca.app.name) map { app =>
-      info(s"All data of the app (including default and all channels) will be deleted." +
-        " Are you sure?")
-      info(s"    App Name: ${app.name}")
-      info(s"      App ID: ${app.id}")
-      info(s" Description: ${app.description}")
-      val chans = channels.getByAppid(app.id)
-      var firstChan = true
-      val titleName = "Channel Name"
-      val titleID = "Channel ID"
-      chans.foreach { ch =>
-        if (firstChan) {
-          info(f"    Channels: ${titleName}%16s | ${titleID}%10s ")
-          firstChan = false
-        }
-        info(f"              ${ch.name}%16s | ${ch.id}%10s")
-      }
-
-      val choice = if (ca.app.force) "YES" else readLine("Enter 'YES' to proceed: ")
-      choice match {
-        case "YES" => {
-          // delete channels
-          val delChannelStatus: Seq[Int] = chans.map { ch =>
-            val r1 = if (events.remove(app.id, Some(ch.id))) {
-              info(s"Removed Event Store of the channel ID: ${ch.id}")
-              0
-            } else {
-              error(s"Error removing Event Store of the channel ID: ${ch.id}.")
-              1
-            }
-            // re-create table
-            val dbInit = events.init(app.id, Some(ch.id))
-            val r2 = if (dbInit) {
-              info(s"Initialized Event Store of the channel ID: ${ch.id}")
-              0
-            } else {
-              error(s"Unable to initialize Event Store of the channel ID: ${ch.id}.")
-              1
-            }
-            r1 + r2
-          }
-
-          if (delChannelStatus.filter(_ != 0).isEmpty) {
-            val r1 = if (events.remove(app.id)) {
-              info(s"Removed Event Store for this app ID: ${app.id}")
-              0
-            } else {
-              error(s"Error removing Event Store for this app.")
-              1
-            }
-
-            val dbInit = events.init(app.id)
-            val r2 = if (dbInit) {
-              info(s"Initialized Event Store for this app ID: ${app.id}.")
-              0
-            } else {
-              error(s"Unable to initialize Event Store for this appId: ${app.id}.")
-              1
-            }
-            info("Done.")
-            r1 + r2
-          } else 1
-        }
-        case _ =>
-          info("Aborted.")
-          0
-      }
-    } getOrElse {
-      error(s"App ${ca.app.name} does not exist. Aborting.")
-      1
-    }
-    events.close()
-    status
-  }
-
-  def channelNew(ca: ConsoleArgs): Int = {
-    val apps = storage.Storage.getMetaDataApps
-    val channels = storage.Storage.getMetaDataChannels
-    val events = storage.Storage.getLEvents()
-    val newChannel = ca.app.channel
-    val status = apps.getByName(ca.app.name) map { app =>
-      val channelMap = channels.getByAppid(app.id).map(c => (c.name, c.id)).toMap
-      if (channelMap.contains(newChannel)) {
-        error(s"Unable to create new channel.")
-        error(s"Channel ${newChannel} already exists.")
-        1
-      } else if (!storage.Channel.isValidName(newChannel)) {
-        error(s"Unable to create new channel.")
-        error(s"The channel name ${newChannel} is invalid.")
-        error(s"${storage.Channel.nameConstraint}")
-        1
-      } else {
-
-        val channelId = channels.insert(storage.Channel(
-          id = 0, // new id will be assigned
-          appid = app.id,
-          name = newChannel
-        ))
-        channelId.map { chanId =>
-          info(s"Updated Channel meta-data.")
-          // initialize storage
-          val dbInit = events.init(app.id, Some(chanId))
-          if (dbInit) {
-            info(s"Initialized Event Store for the channel: ${newChannel}.")
-            info(s"Created new channel:")
-            info(s"    Channel Name: ${newChannel}")
-            info(s"      Channel ID: ${chanId}")
-            info(s"          App ID: ${app.id}")
-            0
-          } else {
-            error(s"Unable to create new channel.")
-            error(s"Failed to initalize Event Store.")
-            // reverted back the meta data
-            try {
-              channels.delete(chanId)
-              0
-            } catch {
-              case e: Exception =>
-                error(s"Failed to revert back the Channel meta-data change.", e)
-                error(s"The channel ${newChannel} CANNOT be used!")
-                error(s"Please run 'pio app channel-delete ${app.name} ${newChannel}' " +
-                  "to delete this channel!")
-                1
-            }
-          }
-        }.getOrElse {
-          error(s"Unable to create new channel.")
-          error(s"Failed to update Channel meta-data.")
-          1
-        }
-      }
-    } getOrElse {
-      error(s"App ${ca.app.name} does not exist. Aborting.")
-      1
-    }
-    events.close()
-    status
-  }
-
-  def channelDelete(ca: ConsoleArgs): Int = {
-    val apps = storage.Storage.getMetaDataApps
-    val channels = storage.Storage.getMetaDataChannels
-    val events = storage.Storage.getLEvents()
-    val deleteChannel = ca.app.channel
-    val status = apps.getByName(ca.app.name) map { app =>
-      val channelMap = channels.getByAppid(app.id).map(c => (c.name, c.id)).toMap
-      if (!channelMap.contains(deleteChannel)) {
-        error(s"Unable to delete channel.")
-        error(s"Channel ${deleteChannel} doesn't exist.")
-        1
-      } else {
-        info(s"The following channel will be deleted. Are you sure?")
-        info(s"    Channel Name: ${deleteChannel}")
-        info(s"      Channel ID: ${channelMap(deleteChannel)}")
-        info(s"        App Name: ${app.name}")
-        info(s"          App ID: ${app.id}")
-        val choice = if (ca.app.force) "YES" else readLine("Enter 'YES' to proceed: ")
-        choice match {
-          case "YES" => {
-            // NOTE: remove storage first before remove meta data (in case remove storage failed)
-            val dbRemoved = events.remove(app.id, Some(channelMap(deleteChannel)))
-            if (dbRemoved) {
-              info(s"Removed Event Store for this channel: ${deleteChannel}")
-              try {
-                channels.delete(channelMap(deleteChannel))
-                info(s"Deleted channel: ${deleteChannel}.")
-                0
-              } catch {
-                case e: Exception =>
-                  error(s"Unable to delete channel.", e)
-                  error(s"Failed to update Channel meta-data.")
-                  error(s"The channel ${deleteChannel} CANNOT be used!")
-                  error(s"Please run 'pio app channel-delete ${app.name} ${deleteChannel}' " +
-                    "to delete this channel again!")
-                  1
-              }
-            } else {
-              error(s"Unable to delete channel.")
-              error(s"Error removing Event Store for this channel.")
-              1
-            }
-          }
-          case _ =>
-            info("Aborted.")
-            0
-        }
-      }
-    } getOrElse {
-      error(s"App ${ca.app.name} does not exist. Aborting.")
-      1
-    }
-    events.close()
-    status
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/tools/src/main/scala/io/prediction/tools/console/Console.scala
----------------------------------------------------------------------
diff --git a/tools/src/main/scala/io/prediction/tools/console/Console.scala b/tools/src/main/scala/io/prediction/tools/console/Console.scala
deleted file mode 100644
index 81e2d7a..0000000
--- a/tools/src/main/scala/io/prediction/tools/console/Console.scala
+++ /dev/null
@@ -1,1277 +0,0 @@
-/** Copyright 2015 TappingStone, Inc.
-  *
-  * Licensed under the Apache License, Version 2.0 (the "License");
-  * you may not use this file except in compliance with the License.
-  * You may obtain a copy of the License at
-  *
-  *     http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-package io.prediction.tools.console
-
-import java.io.File
-import java.net.URI
-
-import grizzled.slf4j.Logging
-import io.prediction.controller.Utils
-import io.prediction.core.BuildInfo
-import io.prediction.data.api.EventServer
-import io.prediction.data.api.EventServerConfig
-import io.prediction.data.storage
-import io.prediction.data.storage.EngineManifest
-import io.prediction.data.storage.EngineManifestSerializer
-import io.prediction.data.storage.hbase.upgrade.Upgrade_0_8_3
-import io.prediction.tools.RegisterEngine
-import io.prediction.tools.RunServer
-import io.prediction.tools.RunWorkflow
-import io.prediction.tools.admin.AdminServer
-import io.prediction.tools.admin.AdminServerConfig
-import io.prediction.tools.dashboard.Dashboard
-import io.prediction.tools.dashboard.DashboardConfig
-import io.prediction.workflow.JsonExtractorOption
-import io.prediction.workflow.JsonExtractorOption.JsonExtractorOption
-import io.prediction.workflow.WorkflowUtils
-import org.apache.commons.io.FileUtils
-import org.json4s._
-import org.json4s.native.JsonMethods._
-import org.json4s.native.Serialization.read
-import org.json4s.native.Serialization.write
-import semverfi._
-
-import scala.collection.JavaConversions._
-import scala.io.Source
-import scala.sys.process._
-import scala.util.Random
-import scalaj.http.Http
-
-case class ConsoleArgs(
-  common: CommonArgs = CommonArgs(),
-  build: BuildArgs = BuildArgs(),
-  app: AppArgs = AppArgs(),
-  accessKey: AccessKeyArgs = AccessKeyArgs(),
-  deploy: DeployArgs = DeployArgs(),
-  eventServer: EventServerArgs = EventServerArgs(),
-  adminServer: AdminServerArgs = AdminServerArgs(),
-  dashboard: DashboardArgs = DashboardArgs(),
-  upgrade: UpgradeArgs = UpgradeArgs(),
-  template: TemplateArgs = TemplateArgs(),
-  export: ExportArgs = ExportArgs(),
-  imprt: ImportArgs = ImportArgs(),
-  commands: Seq[String] = Seq(),
-  metricsClass: Option[String] = None,
-  metricsParamsJsonPath: Option[String] = None,
-  paramsPath: String = "params",
-  engineInstanceId: Option[String] = None,
-  mainClass: Option[String] = None)
-
-case class CommonArgs(
-  batch: String = "",
-  sparkPassThrough: Seq[String] = Seq(),
-  driverPassThrough: Seq[String] = Seq(),
-  pioHome: Option[String] = None,
-  sparkHome: Option[String] = None,
-  engineId: Option[String] = None,
-  engineVersion: Option[String] = None,
-  engineFactory: Option[String] = None,
-  engineParamsKey: Option[String] = None,
-  evaluation: Option[String] = None,
-  engineParamsGenerator: Option[String] = None,
-  variantJson: File = new File("engine.json"),
-  manifestJson: File = new File("manifest.json"),
-  stopAfterRead: Boolean = false,
-  stopAfterPrepare: Boolean = false,
-  skipSanityCheck: Boolean = false,
-  verbose: Boolean = false,
-  verbosity: Int = 0,
-  sparkKryo: Boolean = false,
-  scratchUri: Option[URI] = None,
-  jsonExtractor: JsonExtractorOption = JsonExtractorOption.Both)
-
-case class BuildArgs(
-  sbt: Option[File] = None,
-  sbtExtra: Option[String] = None,
-  sbtAssemblyPackageDependency: Boolean = true,
-  sbtClean: Boolean = false,
-  uberJar: Boolean = false,
-  forceGeneratePIOSbt: Boolean = false)
-
-case class DeployArgs(
-  ip: String = "0.0.0.0",
-  port: Int = 8000,
-  logUrl: Option[String] = None,
-  logPrefix: Option[String] = None)
-
-case class EventServerArgs(
-  enabled: Boolean = false,
-  ip: String = "0.0.0.0",
-  port: Int = 7070,
-  stats: Boolean = false)
-
-case class AdminServerArgs(
-ip: String = "127.0.0.1",
-port: Int = 7071)
-
-case class DashboardArgs(
-  ip: String = "127.0.0.1",
-  port: Int = 9000)
-
-case class UpgradeArgs(
-  from: String = "0.0.0",
-  to: String = "0.0.0",
-  oldAppId: Int = 0,
-  newAppId: Int = 0
-)
-
-object Console extends Logging {
-  def main(args: Array[String]): Unit = {
-    val parser = new scopt.OptionParser[ConsoleArgs]("pio") {
-      override def showUsageOnError: Boolean = false
-      head("PredictionIO Command Line Interface Console", BuildInfo.version)
-      help("")
-      note("Note that it is possible to supply pass-through arguments at\n" +
-        "the end of the command by using a '--' separator, e.g.\n\n" +
-        "pio train --params-path params -- --master spark://mycluster:7077\n" +
-        "\nIn the example above, the '--master' argument will be passed to\n" +
-        "underlying spark-submit command. Please refer to the usage section\n" +
-        "for each command for more information.\n\n" +
-        "The following options are common to all commands:\n")
-      opt[String]("pio-home") action { (x, c) =>
-        c.copy(common = c.common.copy(pioHome = Some(x)))
-      } text("Root directory of a PredictionIO installation.\n" +
-        "        Specify this if automatic discovery fail.")
-      opt[String]("spark-home") action { (x, c) =>
-        c.copy(common = c.common.copy(sparkHome = Some(x)))
-      } text("Root directory of an Apache Spark installation.\n" +
-        "        If not specified, will try to use the SPARK_HOME\n" +
-        "        environmental variable. If this fails as well, default to\n" +
-        "        current directory.")
-      opt[String]("engine-id") abbr("ei") action { (x, c) =>
-        c.copy(common = c.common.copy(engineId = Some(x)))
-      } text("Specify an engine ID. Usually used by distributed deployment.")
-      opt[String]("engine-version") abbr("ev") action { (x, c) =>
-        c.copy(common = c.common.copy(engineVersion = Some(x)))
-      } text("Specify an engine version. Usually used by distributed " +
-        "deployment.")
-      opt[File]("variant") abbr("v") action { (x, c) =>
-        c.copy(common = c.common.copy(variantJson = x))
-      }
-      opt[File]("manifest") abbr("m") action { (x, c) =>
-        c.copy(common = c.common.copy(manifestJson = x))
-      }
-      opt[File]("sbt") action { (x, c) =>
-        c.copy(build = c.build.copy(sbt = Some(x)))
-      } validate { x =>
-        if (x.exists) {
-          success
-        } else {
-          failure(s"${x.getCanonicalPath} does not exist.")
-        }
-      } text("Path to sbt. Default: sbt")
-      opt[Unit]("verbose") action { (x, c) =>
-        c.copy(common = c.common.copy(verbose = true))
-      }
-      opt[Unit]("spark-kryo") abbr("sk") action { (x, c) =>
-        c.copy(common = c.common.copy(sparkKryo = true))
-      }
-      opt[String]("scratch-uri") action { (x, c) =>
-        c.copy(common = c.common.copy(scratchUri = Some(new URI(x))))
-      }
-      note("")
-      cmd("version").
-        text("Displays the version of this command line console.").
-        action { (_, c) =>
-          c.copy(commands = c.commands :+ "version")
-        }
-      note("")
-      cmd("help").action { (_, c) =>
-        c.copy(commands = c.commands :+ "help")
-      } children(
-        arg[String]("<command>") optional()
-          action { (x, c) =>
-            c.copy(commands = c.commands :+ x)
-          }
-        )
-      note("")
-      cmd("build").
-        text("Build an engine at the current directory.").
-        action { (_, c) =>
-          c.copy(commands = c.commands :+ "build")
-        } children(
-          opt[String]("sbt-extra") action { (x, c) =>
-            c.copy(build = c.build.copy(sbtExtra = Some(x)))
-          } text("Extra command to pass to SBT when it builds your engine."),
-          opt[Unit]("clean") action { (x, c) =>
-            c.copy(build = c.build.copy(sbtClean = true))
-          } text("Clean build."),
-          opt[Unit]("no-asm") action { (x, c) =>
-            c.copy(build = c.build.copy(sbtAssemblyPackageDependency = false))
-          } text("Skip building external dependencies assembly."),
-          opt[Unit]("uber-jar") action { (x, c) =>
-            c.copy(build = c.build.copy(uberJar = true))
-          },
-          opt[Unit]("generate-pio-sbt") action { (x, c) =>
-            c.copy(build = c.build.copy(forceGeneratePIOSbt = true))
-          }
-        )
-      note("")
-      cmd("unregister").
-        text("Unregister an engine at the current directory.").
-        action { (_, c) =>
-          c.copy(commands = c.commands :+ "unregister")
-        }
-      note("")
-      cmd("train").
-        text("Kick off a training using an engine. This will produce an\n" +
-          "engine instance. This command will pass all pass-through\n" +
-          "arguments to its underlying spark-submit command.").
-        action { (_, c) =>
-          c.copy(commands = c.commands :+ "train")
-        } children(
-          opt[String]("batch") action { (x, c) =>
-            c.copy(common = c.common.copy(batch = x))
-          } text("Batch label of the run."),
-          opt[String]("params-path") action { (x, c) =>
-            c.copy(paramsPath = x)
-          } text("Directory to lookup parameters JSON files. Default: params"),
-          opt[String]("metrics-params") abbr("mp") action { (x, c) =>
-            c.copy(metricsParamsJsonPath = Some(x))
-          } text("Metrics parameters JSON file. Will try to use\n" +
-            "        metrics.json in the base path."),
-          opt[Unit]("skip-sanity-check") abbr("ssc") action { (x, c) =>
-            c.copy(common = c.common.copy(skipSanityCheck = true))
-          },
-          opt[Unit]("stop-after-read") abbr("sar") action { (x, c) =>
-            c.copy(common = c.common.copy(stopAfterRead = true))
-          },
-          opt[Unit]("stop-after-prepare") abbr("sap") action { (x, c) =>
-            c.copy(common = c.common.copy(stopAfterPrepare = true))
-          },
-          opt[Unit]("uber-jar") action { (x, c) =>
-            c.copy(build = c.build.copy(uberJar = true))
-          },
-          opt[Int]("verbosity") action { (x, c) =>
-            c.copy(common = c.common.copy(verbosity = x))
-          },
-          opt[String]("engine-factory") action { (x, c) =>
-            c.copy(common = c.common.copy(engineFactory = Some(x)))
-          },
-          opt[String]("engine-params-key") action { (x, c) =>
-            c.copy(common = c.common.copy(engineParamsKey = Some(x)))
-          },
-          opt[String]("json-extractor") action { (x, c) =>
-            c.copy(common = c.common.copy(jsonExtractor = JsonExtractorOption.withName(x)))
-          } validate { x =>
-              if (JsonExtractorOption.values.map(_.toString).contains(x)) {
-                success
-              } else {
-                val validOptions = JsonExtractorOption.values.mkString("|")
-                failure(s"$x is not a valid json-extractor option [$validOptions]")
-              }
-          }
-        )
-      note("")
-      cmd("eval").
-        text("Kick off an evaluation using an engine. This will produce an\n" +
-          "engine instance. This command will pass all pass-through\n" +
-          "arguments to its underlying spark-submit command.").
-        action { (_, c) =>
-          c.copy(commands = c.commands :+ "eval")
-        } children(
-          arg[String]("<evaluation-class>") action { (x, c) =>
-            c.copy(common = c.common.copy(evaluation = Some(x)))
-          },
-          arg[String]("[<engine-parameters-generator-class>]") optional() action { (x, c) =>
-            c.copy(common = c.common.copy(engineParamsGenerator = Some(x)))
-          } text("Optional engine parameters generator class, overriding the first argument"),
-          opt[String]("batch") action { (x, c) =>
-            c.copy(common = c.common.copy(batch = x))
-          } text("Batch label of the run."),
-          opt[String]("json-extractor") action { (x, c) =>
-            c.copy(common = c.common.copy(jsonExtractor = JsonExtractorOption.withName(x)))
-          } validate { x =>
-            if (JsonExtractorOption.values.map(_.toString).contains(x)) {
-              success
-            } else {
-              val validOptions = JsonExtractorOption.values.mkString("|")
-              failure(s"$x is not a valid json-extractor option [$validOptions]")
-            }
-          }
-        )
-      note("")
-      cmd("deploy").
-        text("Deploy an engine instance as a prediction server. This\n" +
-          "command will pass all pass-through arguments to its underlying\n" +
-          "spark-submit command.").
-        action { (_, c) =>
-          c.copy(commands = c.commands :+ "deploy")
-        } children(
-          opt[String]("batch") action { (x, c) =>
-            c.copy(common = c.common.copy(batch = x))
-          } text("Batch label of the deployment."),
-          opt[String]("engine-instance-id") action { (x, c) =>
-            c.copy(engineInstanceId = Some(x))
-          } text("Engine instance ID."),
-          opt[String]("ip") action { (x, c) =>
-            c.copy(deploy = c.deploy.copy(ip = x))
-          },
-          opt[Int]("port") action { (x, c) =>
-            c.copy(deploy = c.deploy.copy(port = x))
-          } text("Port to bind to. Default: 8000"),
-          opt[Unit]("feedback") action { (_, c) =>
-            c.copy(eventServer = c.eventServer.copy(enabled = true))
-          } text("Enable feedback loop to event server."),
-          opt[String]("event-server-ip") action { (x, c) =>
-            c.copy(eventServer = c.eventServer.copy(ip = x))
-          },
-          opt[Int]("event-server-port") action { (x, c) =>
-            c.copy(eventServer = c.eventServer.copy(port = x))
-          } text("Event server port. Default: 7070"),
-          opt[Int]("admin-server-port") action { (x, c) =>
-            c.copy(adminServer = c.adminServer.copy(port = x))
-          } text("Admin server port. Default: 7071"),
-          opt[String]("admin-server-port") action { (x, c) =>
-          c.copy(adminServer = c.adminServer.copy(ip = x))
-          } text("Admin server IP. Default: localhost"),
-          opt[String]("accesskey") action { (x, c) =>
-            c.copy(accessKey = c.accessKey.copy(accessKey = x))
-          } text("Access key of the App where feedback data will be stored."),
-          opt[Unit]("uber-jar") action { (x, c) =>
-            c.copy(build = c.build.copy(uberJar = true))
-          },
-          opt[String]("log-url") action { (x, c) =>
-            c.copy(deploy = c.deploy.copy(logUrl = Some(x)))
-          },
-          opt[String]("log-prefix") action { (x, c) =>
-            c.copy(deploy = c.deploy.copy(logPrefix = Some(x)))
-          },
-          opt[String]("json-extractor") action { (x, c) =>
-            c.copy(common = c.common.copy(jsonExtractor = JsonExtractorOption.withName(x)))
-          } validate { x =>
-            if (JsonExtractorOption.values.map(_.toString).contains(x)) {
-              success
-            } else {
-              val validOptions = JsonExtractorOption.values.mkString("|")
-              failure(s"$x is not a valid json-extractor option [$validOptions]")
-            }
-          }
-        )
-      note("")
-      cmd("undeploy").
-        text("Undeploy an engine instance as a prediction server.").
-        action { (_, c) =>
-          c.copy(commands = c.commands :+ "undeploy")
-        } children(
-          opt[String]("ip") action { (x, c) =>
-            c.copy(deploy = c.deploy.copy(ip = x))
-          },
-          opt[Int]("port") action { (x, c) =>
-            c.copy(deploy = c.deploy.copy(port = x))
-          } text("Port to unbind from. Default: 8000")
-        )
-      note("")
-      cmd("dashboard").
-        text("Launch a dashboard at the specific IP and port.").
-        action { (_, c) =>
-          c.copy(commands = c.commands :+ "dashboard")
-        } children(
-          opt[String]("ip") action { (x, c) =>
-            c.copy(dashboard = c.dashboard.copy(ip = x))
-          },
-          opt[Int]("port") action { (x, c) =>
-            c.copy(dashboard = c.dashboard.copy(port = x))
-          } text("Port to bind to. Default: 9000")
-        )
-      note("")
-      cmd("eventserver").
-        text("Launch an Event Server at the specific IP and port.").
-        action { (_, c) =>
-          c.copy(commands = c.commands :+ "eventserver")
-        } children(
-          opt[String]("ip") action { (x, c) =>
-            c.copy(eventServer = c.eventServer.copy(ip = x))
-          },
-          opt[Int]("port") action { (x, c) =>
-            c.copy(eventServer = c.eventServer.copy(port = x))
-          } text("Port to bind to. Default: 7070"),
-          opt[Unit]("stats") action { (x, c) =>
-            c.copy(eventServer = c.eventServer.copy(stats = true))
-          }
-        )
-      cmd("adminserver").
-        text("Launch an Admin Server at the specific IP and port.").
-        action { (_, c) =>
-        c.copy(commands = c.commands :+ "adminserver")
-      } children(
-        opt[String]("ip") action { (x, c) =>
-          c.copy(adminServer = c.adminServer.copy(ip = x))
-        } text("IP to bind to. Default: localhost"),
-        opt[Int]("port") action { (x, c) =>
-          c.copy(adminServer = c.adminServer.copy(port = x))
-        } text("Port to bind to. Default: 7071")
-        )
-      note("")
-      cmd("run").
-        text("Launch a driver program. This command will pass all\n" +
-          "pass-through arguments to its underlying spark-submit command.\n" +
-          "In addition, it also supports a second level of pass-through\n" +
-          "arguments to the driver program, e.g.\n" +
-          "pio run -- --master spark://localhost:7077 -- --driver-arg foo").
-        action { (_, c) =>
-          c.copy(commands = c.commands :+ "run")
-        } children(
-          arg[String]("<main class>") action { (x, c) =>
-            c.copy(mainClass = Some(x))
-          } text("Main class name of the driver program."),
-          opt[String]("sbt-extra") action { (x, c) =>
-            c.copy(build = c.build.copy(sbtExtra = Some(x)))
-          } text("Extra command to pass to SBT when it builds your engine."),
-          opt[Unit]("clean") action { (x, c) =>
-            c.copy(build = c.build.copy(sbtClean = true))
-          } text("Clean build."),
-          opt[Unit]("no-asm") action { (x, c) =>
-            c.copy(build = c.build.copy(sbtAssemblyPackageDependency = false))
-          } text("Skip building external dependencies assembly.")
-        )
-      note("")
-      cmd("status").
-        text("Displays status information about the PredictionIO system.").
-        action { (_, c) =>
-          c.copy(commands = c.commands :+ "status")
-        }
-      note("")
-      cmd("upgrade").
-        text("Upgrade tool").
-        action { (_, c) =>
-          c.copy(commands = c.commands :+ "upgrade")
-        } children(
-          arg[String]("<from version>") action { (x, c) =>
-            c.copy(upgrade = c.upgrade.copy(from = x))
-          } text("The version upgraded from."),
-          arg[String]("<to version>") action { (x, c) =>
-            c.copy(upgrade = c.upgrade.copy(to = x))
-          } text("The version upgraded to."),
-          arg[Int]("<old App ID>") action { (x, c) =>
-            c.copy(upgrade = c.upgrade.copy(oldAppId = x))
-          } text("Old App ID."),
-          arg[Int]("<new App ID>") action { (x, c) =>
-            c.copy(upgrade = c.upgrade.copy(newAppId = x))
-          } text("New App ID.")
-        )
-      note("")
-      cmd("app").
-        text("Manage apps.\n").
-        action { (_, c) =>
-          c.copy(commands = c.commands :+ "app")
-        } children(
-          cmd("new").
-            text("Create a new app key to app ID mapping.").
-            action { (_, c) =>
-              c.copy(commands = c.commands :+ "new")
-            } children(
-              opt[Int]("id") action { (x, c) =>
-                c.copy(app = c.app.copy(id = Some(x)))
-              },
-              opt[String]("description") action { (x, c) =>
-                c.copy(app = c.app.copy(description = Some(x)))
-              },
-              opt[String]("access-key") action { (x, c) =>
-                c.copy(accessKey = c.accessKey.copy(accessKey = x))
-              },
-              arg[String]("<name>") action { (x, c) =>
-                c.copy(app = c.app.copy(name = x))
-              }
-            ),
-          note(""),
-          cmd("list").
-            text("List all apps.").
-            action { (_, c) =>
-              c.copy(commands = c.commands :+ "list")
-            },
-          note(""),
-          cmd("show").
-            text("Show details of an app.").
-            action { (_, c) =>
-              c.copy(commands = c.commands :+ "show")
-            } children (
-              arg[String]("<name>") action { (x, c) =>
-                c.copy(app = c.app.copy(name = x))
-              } text("Name of the app to be shown.")
-            ),
-          note(""),
-          cmd("delete").
-            text("Delete an app.").
-            action { (_, c) =>
-              c.copy(commands = c.commands :+ "delete")
-            } children(
-              arg[String]("<name>") action { (x, c) =>
-                c.copy(app = c.app.copy(name = x))
-              } text("Name of the app to be deleted."),
-              opt[Unit]("force") abbr("f") action { (x, c) =>
-                c.copy(app = c.app.copy(force = true))
-              } text("Delete an app without prompting for confirmation")
-            ),
-          note(""),
-          cmd("data-delete").
-            text("Delete data of an app").
-            action { (_, c) =>
-              c.copy(commands = c.commands :+ "data-delete")
-            } children(
-              arg[String]("<name>") action { (x, c) =>
-                c.copy(app = c.app.copy(name = x))
-              } text("Name of the app whose data to be deleted."),
-              opt[String]("channel") action { (x, c) =>
-                c.copy(app = c.app.copy(dataDeleteChannel = Some(x)))
-              } text("Name of channel whose data to be deleted."),
-              opt[Unit]("all") action { (x, c) =>
-                c.copy(app = c.app.copy(all = true))
-              } text("Delete data of all channels including default"),
-              opt[Unit]("force") abbr("f") action { (x, c) =>
-                c.copy(app = c.app.copy(force = true))
-              } text("Delete data of an app without prompting for confirmation")
-            ),
-          note(""),
-          cmd("channel-new").
-            text("Create a new channel for the app.").
-            action { (_, c) =>
-              c.copy(commands = c.commands :+ "channel-new")
-            } children (
-              arg[String]("<name>") action { (x, c) =>
-                c.copy(app = c.app.copy(name = x))
-              } text("App name."),
-              arg[String]("<channel>") action { (x, c) =>
-                c.copy(app = c.app.copy(channel = x))
-              } text ("Channel name to be created.")
-            ),
-          note(""),
-          cmd("channel-delete").
-            text("Delete a channel of the app.").
-            action { (_, c) =>
-              c.copy(commands = c.commands :+ "channel-delete")
-            } children (
-              arg[String]("<name>") action { (x, c) =>
-                c.copy(app = c.app.copy(name = x))
-              } text("App name."),
-              arg[String]("<channel>") action { (x, c) =>
-                c.copy(app = c.app.copy(channel = x))
-              } text ("Channel name to be deleted."),
-              opt[Unit]("force") abbr("f") action { (x, c) =>
-                c.copy(app = c.app.copy(force = true))
-              } text("Delete a channel of the app without prompting for confirmation")
-            )
-        )
-      note("")
-      cmd("accesskey").
-        text("Manage app access keys.\n").
-        action { (_, c) =>
-          c.copy(commands = c.commands :+ "accesskey")
-        } children(
-          cmd("new").
-            text("Add allowed event(s) to an access key.").
-            action { (_, c) =>
-              c.copy(commands = c.commands :+ "new")
-            } children(
-              opt[String]("key") action { (x, c) =>
-                c.copy(accessKey = c.accessKey.copy(accessKey = x))
-              },
-              arg[String]("<app name>") action { (x, c) =>
-                c.copy(app = c.app.copy(name = x))
-              },
-              arg[String]("[<event1> <event2> ...]") unbounded() optional()
-                action { (x, c) =>
-                  c.copy(accessKey = c.accessKey.copy(
-                    events = c.accessKey.events :+ x))
-                }
-            ),
-          cmd("list").
-            text("List all access keys of an app.").
-            action { (_, c) =>
-              c.copy(commands = c.commands :+ "list")
-            } children(
-              arg[String]("<app name>") optional() action { (x, c) =>
-                c.copy(app = c.app.copy(name = x))
-              } text("App name.")
-            ),
-          note(""),
-          cmd("delete").
-            text("Delete an access key.").
-            action { (_, c) =>
-              c.copy(commands = c.commands :+ "delete")
-            } children(
-              arg[String]("<access key>") action { (x, c) =>
-                c.copy(accessKey = c.accessKey.copy(accessKey = x))
-              } text("The access key to be deleted.")
-            )
-        )
-      cmd("template").
-        action { (_, c) =>
-          c.copy(commands = c.commands :+ "template")
-        } children(
-          cmd("get").
-            action { (_, c) =>
-              c.copy(commands = c.commands :+ "get")
-            } children(
-              arg[String]("<template ID>") required() action { (x, c) =>
-                c.copy(template = c.template.copy(repository = x))
-              },
-              arg[String]("<new engine directory>") action { (x, c) =>
-                c.copy(template = c.template.copy(directory = x))
-              },
-              opt[String]("version") action { (x, c) =>
-                c.copy(template = c.template.copy(version = Some(x)))
-              },
-              opt[String]("name") action { (x, c) =>
-                c.copy(template = c.template.copy(name = Some(x)))
-              },
-              opt[String]("package") action { (x, c) =>
-                c.copy(template = c.template.copy(packageName = Some(x)))
-              },
-              opt[String]("email") action { (x, c) =>
-                c.copy(template = c.template.copy(email = Some(x)))
-              }
-            ),
-          cmd("list").
-            action { (_, c) =>
-              c.copy(commands = c.commands :+ "list")
-            }
-        )
-      cmd("export").
-        action { (_, c) =>
-          c.copy(commands = c.commands :+ "export")
-        } children(
-          opt[Int]("appid") required() action { (x, c) =>
-            c.copy(export = c.export.copy(appId = x))
-          },
-          opt[String]("output") required() action { (x, c) =>
-            c.copy(export = c.export.copy(outputPath = x))
-          },
-          opt[String]("format") action { (x, c) =>
-            c.copy(export = c.export.copy(format = x))
-          },
-          opt[String]("channel") action { (x, c) =>
-            c.copy(export = c.export.copy(channel = Some(x)))
-          }
-        )
-      cmd("import").
-        action { (_, c) =>
-          c.copy(commands = c.commands :+ "import")
-        } children(
-          opt[Int]("appid") required() action { (x, c) =>
-            c.copy(imprt = c.imprt.copy(appId = x))
-          },
-          opt[String]("input") required() action { (x, c) =>
-            c.copy(imprt = c.imprt.copy(inputPath = x))
-          },
-          opt[String]("channel") action { (x, c) =>
-            c.copy(imprt = c.imprt.copy(channel = Some(x)))
-          }
-        )
-    }
-
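-    // Split the raw arguments at the first "--" into console arguments and
-    // pass-through arguments, then split the pass-through arguments at a
-    // second "--" into spark-submit arguments and driver program arguments,
-    // e.g. pio run Foo -- --master spark://localhost:7077 -- --driver-arg foo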
-    val separatorIndex = args.indexWhere(_ == "--")
-    val (consoleArgs, theRest) =
-      if (separatorIndex == -1) {
-        (args, Array[String]())
-      } else {
-        args.splitAt(separatorIndex)
-      }
-    val allPassThroughArgs = theRest.drop(1)
-    val secondSepIdx = allPassThroughArgs.indexWhere(_ == "--")
-    val (sparkPassThroughArgs, driverPassThroughArgs) =
-      if (secondSepIdx == -1) {
-        (allPassThroughArgs, Array[String]())
-      } else {
-        val t = allPassThroughArgs.splitAt(secondSepIdx)
-        (t._1, t._2.drop(1))
-      }
-
-    parser.parse(consoleArgs, ConsoleArgs()) map { pca =>
-      val ca = pca.copy(common = pca.common.copy(
-        sparkPassThrough = sparkPassThroughArgs,
-        driverPassThrough = driverPassThroughArgs))
-      WorkflowUtils.modifyLogging(ca.common.verbose)
-      val rv: Int = ca.commands match {
-        case Seq("") =>
-          System.err.println(help())
-          1
-        case Seq("version") =>
-          version(ca)
-          0
-        case Seq("build") =>
-          regenerateManifestJson(ca.common.manifestJson)
-          build(ca)
-        case Seq("unregister") =>
-          unregister(ca)
-          0
-        case Seq("train") =>
-          regenerateManifestJson(ca.common.manifestJson)
-          train(ca)
-        case Seq("eval") =>
-          regenerateManifestJson(ca.common.manifestJson)
-          train(ca)
-        case Seq("deploy") =>
-          deploy(ca)
-        case Seq("undeploy") =>
-          undeploy(ca)
-        case Seq("dashboard") =>
-          dashboard(ca)
-          0
-        case Seq("eventserver") =>
-          eventserver(ca)
-          0
-        case Seq("adminserver") =>
-          adminserver(ca)
-          0
-        case Seq("run") =>
-          generateManifestJson(ca.common.manifestJson)
-          run(ca)
-        case Seq("status") =>
-          status(ca)
-        case Seq("upgrade") =>
-          upgrade(ca)
-          0
-        case Seq("app", "new") =>
-          App.create(ca)
-        case Seq("app", "list") =>
-          App.list(ca)
-        case Seq("app", "show") =>
-          App.show(ca)
-        case Seq("app", "delete") =>
-          App.delete(ca)
-        case Seq("app", "data-delete") =>
-          App.dataDelete(ca)
-        case Seq("app", "channel-new") =>
-          App.channelNew(ca)
-        case Seq("app", "channel-delete") =>
-          App.channelDelete(ca)
-        case Seq("accesskey", "new") =>
-          AccessKey.create(ca)
-        case Seq("accesskey", "list") =>
-          AccessKey.list(ca)
-        case Seq("accesskey", "delete") =>
-          AccessKey.delete(ca)
-        case Seq("template", "get") =>
-          Template.get(ca)
-        case Seq("template", "list") =>
-          Template.list(ca)
-        case Seq("export") =>
-          Export.eventsToFile(ca)
-        case Seq("import") =>
-          Import.fileToEvents(ca)
-        case _ =>
-          System.err.println(help(ca.commands))
-          1
-      }
-      sys.exit(rv)
-    } getOrElse {
-      val command = args.toSeq.filterNot(_.startsWith("--")).head
-      System.err.println(help(Seq(command)))
-      sys.exit(1)
-    }
-  }
-
-  def help(commands: Seq[String] = Seq()): String = {
-    if (commands.isEmpty) {
-      mainHelp
-    } else {
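-      // Multi-word commands are joined with '-' to form the lookup key into
-      // the help text map below, e.g. "help deploy" looks up "deploy".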
-      val stripped =
-        (if (commands.head == "help") commands.drop(1) else commands).
-          mkString("-")
-      helpText.getOrElse(stripped, s"Help is unavailable for ${stripped}.")
-    }
-  }
-
-  val mainHelp = txt.main().toString
-
-  val helpText = Map(
-    "" -> mainHelp,
-    "status" -> txt.status().toString,
-    "upgrade" -> txt.upgrade().toString,
-    "version" -> txt.version().toString,
-    "template" -> txt.template().toString,
-    "build" -> txt.build().toString,
-    "train" -> txt.train().toString,
-    "deploy" -> txt.deploy().toString,
-    "eventserver" -> txt.eventserver().toString,
-    "adminserver" -> txt.adminserver().toString,
-    "app" -> txt.app().toString,
-    "accesskey" -> txt.accesskey().toString,
-    "import" -> txt.imprt().toString,
-    "export" -> txt.export().toString,
-    "run" -> txt.run().toString,
-    "eval" -> txt.eval().toString,
-    "dashboard" -> txt.dashboard().toString)
-
-  def version(ca: ConsoleArgs): Unit = println(BuildInfo.version)
-
-  def build(ca: ConsoleArgs): Int = {
-    Template.verifyTemplateMinVersion(new File("template.json"))
-    compile(ca)
-    info("Looking for an engine...")
-    val jarFiles = jarFilesForScala
-    if (jarFiles.isEmpty) {
-      error("No engine found. Your build might have failed. Aborting.")
-      return 1
-    }
-    jarFiles foreach { f => info(s"Found ${f.getName}")}
-    RegisterEngine.registerEngine(
-      ca.common.manifestJson,
-      jarFiles,
-      false)
-    info("Your engine is ready for training.")
-    0
-  }
-
-  def unregister(ca: ConsoleArgs): Unit = {
-    RegisterEngine.unregisterEngine(ca.common.manifestJson)
-  }
-
-  def train(ca: ConsoleArgs): Int = {
-    Template.verifyTemplateMinVersion(new File("template.json"))
-    withRegisteredManifest(
-      ca.common.manifestJson,
-      ca.common.engineId,
-      ca.common.engineVersion) { em =>
-      RunWorkflow.newRunWorkflow(ca, em)
-    }
-  }
-
-  def deploy(ca: ConsoleArgs): Int = {
-    Template.verifyTemplateMinVersion(new File("template.json"))
-    withRegisteredManifest(
-      ca.common.manifestJson,
-      ca.common.engineId,
-      ca.common.engineVersion) { em =>
-      val variantJson = parse(Source.fromFile(ca.common.variantJson).mkString)
-      val variantId = variantJson \ "id" match {
-        case JString(s) => s
-        case _ =>
-          error("Unable to read engine variant ID from " +
-            s"${ca.common.variantJson.getCanonicalPath}. Aborting.")
-          return 1
-      }
-      val engineInstances = storage.Storage.getMetaDataEngineInstances
-      val engineInstance = ca.engineInstanceId map { eid =>
-        engineInstances.get(eid)
-      } getOrElse {
-        engineInstances.getLatestCompleted(em.id, em.version, variantId)
-      }
-      engineInstance map { r =>
-        RunServer.newRunServer(ca, em, r.id)
-      } getOrElse {
-        ca.engineInstanceId map { eid =>
-          error(
-            s"Invalid engine instance ID ${ca.engineInstanceId}. Aborting.")
-        } getOrElse {
-          error(
-            s"No valid engine instance found for engine ${em.id} " +
-              s"${em.version}.\nTry running 'train' before 'deploy'. Aborting.")
-        }
-        1
-      }
-    }
-  }
-
-  def dashboard(ca: ConsoleArgs): Unit = {
-    info(s"Creating dashboard at ${ca.dashboard.ip}:${ca.dashboard.port}")
-    Dashboard.createDashboard(DashboardConfig(
-      ip = ca.dashboard.ip,
-      port = ca.dashboard.port))
-  }
-
-  def eventserver(ca: ConsoleArgs): Unit = {
-    info(
-      s"Creating Event Server at ${ca.eventServer.ip}:${ca.eventServer.port}")
-    EventServer.createEventServer(EventServerConfig(
-      ip = ca.eventServer.ip,
-      port = ca.eventServer.port,
-      stats = ca.eventServer.stats))
-  }
-
-  def adminserver(ca: ConsoleArgs): Unit = {
-    info(
-      s"Creating Admin Server at ${ca.adminServer.ip}:${ca.adminServer.port}")
-    AdminServer.createAdminServer(AdminServerConfig(
-      ip = ca.adminServer.ip,
-      port = ca.adminServer.port
-    ))
-  }
-
-  def undeploy(ca: ConsoleArgs): Int = {
-    val serverUrl = s"http://${ca.deploy.ip}:${ca.deploy.port}"
-    info(
-      s"Undeploying any existing engine instance at ${serverUrl}")
-    try {
-      val code = Http(s"${serverUrl}/stop").asString.code
-      code match {
-        case 200 => 0
-        case 404 =>
-          error(s"Another process is using ${serverUrl}. Unable to undeploy.")
-          1
-        case _ =>
-          error(s"Another process is using ${serverUrl}, or an existing " +
-            s"engine server is not responding properly (HTTP ${code}). " +
-            "Unable to undeploy.")
-            1
-      }
-    } catch {
-      case e: java.net.ConnectException =>
-        warn(s"Nothing at ${serverUrl}")
-        0
-      case _: Throwable =>
-        error("Another process might be occupying " +
-          s"${ca.deploy.ip}:${ca.deploy.port}. Unable to undeploy.")
-        1
-    }
-  }
-
-  def compile(ca: ConsoleArgs): Unit = {
-    // Generate pio.sbt with pioVersion only if project/pio-build.sbt exists, or when forced.
-    if (new File("project", "pio-build.sbt").exists || ca.build.forceGeneratePIOSbt) {
-      FileUtils.writeLines(
-        new File("pio.sbt"),
-        Seq(
-          "// Generated automatically by pio build.",
-          "// Changes in this file will be overridden.",
-          "",
-          "pioVersion := \"" + BuildInfo.version + "\""))
-    }
-    implicit val formats = Utils.json4sDefaultFormats
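-    // Use the engine factory from engine.json for the upgrade check when
-    // available; otherwise fall back to a generic check.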
-    try {
-      val engineFactory =
-        (parse(Source.fromFile("engine.json").mkString) \ "engineFactory").
-          extract[String]
-      WorkflowUtils.checkUpgrade("build", engineFactory)
-    } catch {
-      case e: Throwable => WorkflowUtils.checkUpgrade("build")
-    }
-    val sbt = detectSbt(ca)
-    info(s"Using command '${sbt}' at the current working directory to build.")
-    info("If the path above is incorrect, this process will fail.")
-    val asm =
-      if (ca.build.sbtAssemblyPackageDependency) {
-        " assemblyPackageDependency"
-      } else {
-        ""
-      }
-    val clean = if (ca.build.sbtClean) " clean" else ""
-    val buildCmd = s"${sbt} ${ca.build.sbtExtra.getOrElse("")}${clean} " +
-      (if (ca.build.uberJar) "assembly" else s"package${asm}")
-    val core = new File(s"pio-assembly-${BuildInfo.version}.jar")
-    if (ca.build.uberJar) {
-      info(s"Uber JAR enabled. Putting ${core.getName} in lib.")
-      val dst = new File("lib")
-      dst.mkdir()
-      FileUtils.copyFileToDirectory(
-        coreAssembly(ca.common.pioHome.get),
-        dst,
-        true)
-    } else {
-      if (new File("engine.json").exists()) {
-        info(s"Uber JAR disabled. Making sure lib/${core.getName} is absent.")
-        new File("lib", core.getName).delete()
-      } else {
-        info("Uber JAR disabled, but current working directory does not look " +
-          s"like an engine project directory. Please delete lib/${core.getName} manually.")
-      }
-    }
-    info(s"Going to run: ${buildCmd}")
-    try {
-      val r =
-        if (ca.common.verbose) {
-          buildCmd.!(ProcessLogger(line => info(line), line => error(line)))
-        } else {
-          buildCmd.!(ProcessLogger(
-            line => outputSbtError(line),
-            line => outputSbtError(line)))
-        }
-      if (r != 0) {
-        error(s"Return code of previous step is ${r}. Aborting.")
-        sys.exit(1)
-      }
-      info("Build finished successfully.")
-    } catch {
-      case e: java.io.IOException =>
-        error(s"${e.getMessage}")
-        sys.exit(1)
-    }
-  }
-
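-  // Forward only sbt output lines carrying an [error] marker to the logger.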
-  private def outputSbtError(line: String): Unit = {
-    """\[.*error.*\]""".r findFirstIn line foreach { _ => error(line) }
-  }
-
-  def run(ca: ConsoleArgs): Int = {
-    compile(ca)
-
-    val extraFiles = WorkflowUtils.thirdPartyConfFiles
-
-    val jarFiles = jarFilesForScala
-    jarFiles foreach { f => info(s"Found JAR: ${f.getName}") }
-    val allJarFiles = jarFiles.map(_.getCanonicalPath)
-    val cmd = s"${getSparkHome(ca.common.sparkHome)}/bin/spark-submit --jars " +
-      s"${allJarFiles.mkString(",")} " +
-      (if (extraFiles.size > 0) {
-        s"--files ${extraFiles.mkString(",")} "
-      } else {
-        ""
-      }) +
-      "--class " +
-      s"${ca.mainClass.get} ${ca.common.sparkPassThrough.mkString(" ")} " +
-      coreAssembly(ca.common.pioHome.get) + " " +
-      ca.common.driverPassThrough.mkString(" ")
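-    // Forward all PIO_* environment variables to YARN containers via
-    // SPARK_YARN_USER_ENV.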
-    val proc = Process(
-      cmd,
-      None,
-      "SPARK_YARN_USER_ENV" -> sys.env.filter(kv => kv._1.startsWith("PIO_")).
-        map(kv => s"${kv._1}=${kv._2}").mkString(","))
-    info(s"Submission command: ${cmd}")
-    val r = proc.!
-    if (r != 0) {
-      error(s"Return code of previous step is ${r}. Aborting.")
-      return 1
-    }
-    r
-  }
-
-  def status(ca: ConsoleArgs): Int = {
-    info("Inspecting PredictionIO...")
-    ca.common.pioHome map { pioHome =>
-      info(s"PredictionIO ${BuildInfo.version} is installed at $pioHome")
-    } getOrElse {
-      error("Unable to locate PredictionIO installation. Aborting.")
-      return 1
-    }
-    info("Inspecting Apache Spark...")
-    val sparkHome = getSparkHome(ca.common.sparkHome)
-    if (new File(s"$sparkHome/bin/spark-submit").exists) {
-      info(s"Apache Spark is installed at $sparkHome")
-      val sparkMinVersion = "1.3.0"
-      val sparkReleaseFile = new File(s"$sparkHome/RELEASE")
-      if (sparkReleaseFile.exists) {
-        val sparkReleaseStrings =
-          Source.fromFile(sparkReleaseFile).mkString.split(' ')
-        if (sparkReleaseStrings.length < 2) {
-          warn(stripMarginAndNewlines(
-            s"""|Apache Spark version information cannot be found (RELEASE file
-                |is empty). This is a known issue for certain vendors (e.g.
-                |Cloudera). Please make sure you are using a version of at least
-                |$sparkMinVersion."""))
-        } else {
-          val sparkReleaseVersion = sparkReleaseStrings(1)
-          val parsedMinVersion = Version.apply(sparkMinVersion)
-          val parsedCurrentVersion = Version.apply(sparkReleaseVersion)
-          if (parsedCurrentVersion >= parsedMinVersion) {
-            info(stripMarginAndNewlines(
-              s"""|Apache Spark $sparkReleaseVersion detected (meets minimum
-                  |requirement of $sparkMinVersion)"""))
-          } else {
-            error(stripMarginAndNewlines(
-              s"""|Apache Spark $sparkReleaseVersion detected (does not meet
-                  |minimum requirement. Aborting."""))
-          }
-        }
-      } else {
-        warn(stripMarginAndNewlines(
-          s"""|Apache Spark version information cannot be found. If you are
-              |using a developmental tree, please make sure you are using a
-              |version of at least $sparkMinVersion."""))
-      }
-    } else {
-      error("Unable to locate a proper Apache Spark installation. Aborting.")
-      return 1
-    }
-    info("Inspecting storage backend connections...")
-    try {
-      storage.Storage.verifyAllDataObjects()
-    } catch {
-      case e: Throwable =>
-        error("Unable to connect to all storage backends successfully. The " +
-          "following shows the error message from the storage backend.")
-        error(s"${e.getMessage} (${e.getClass.getName})", e)
-        error("Dumping configuration of initialized storage backend sources. " +
-          "Please make sure they are correct.")
-        storage.Storage.config.get("sources") map { src =>
-          src foreach { case (s, p) =>
-            error(s"Source Name: $s; Type: ${p.getOrElse("type", "(error)")}; " +
-              s"Configuration: ${p.getOrElse("config", "(error)")}")
-          }
-        } getOrElse {
-          error("No properly configured storage backend sources.")
-        }
-        return 1
-    }
-    info("(sleeping 5 seconds for all messages to show up...)")
-    Thread.sleep(5000)
-    info("Your system is all ready to go.")
-    0
-  }
-
-  def upgrade(ca: ConsoleArgs): Unit = {
-    (ca.upgrade.from, ca.upgrade.to) match {
-      case ("0.8.2", "0.8.3") => {
-        Upgrade_0_8_3.runMain(ca.upgrade.oldAppId, ca.upgrade.newAppId)
-      }
-      case _ =>
-        println(s"Upgrade from version ${ca.upgrade.from} to ${ca.upgrade.to}"
-          + s" is not supported.")
-    }
-  }
-
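-  // A RELEASE file marks a binary distribution, which keeps the core
-  // assembly under lib/; source trees keep it under assembly/.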
-  def coreAssembly(pioHome: String): File = {
-    val core = s"pio-assembly-${BuildInfo.version}.jar"
-    val coreDir =
-      if (new File(pioHome + File.separator + "RELEASE").exists) {
-        new File(pioHome + File.separator + "lib")
-      } else {
-        new File(pioHome + File.separator + "assembly")
-      }
-    val coreFile = new File(coreDir, core)
-    if (coreFile.exists) {
-      coreFile
-    } else {
-      error(s"PredictionIO Core Assembly (${coreFile.getCanonicalPath}) does " +
-        "not exist. Aborting.")
-      sys.exit(1)
-    }
-  }
-
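-  // Marks auto-generated manifests so they can be told apart from
-  // hand-written ones.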
-  val manifestAutogenTag = "pio-autogen-manifest"
-
-  def regenerateManifestJson(json: File): Unit = {
-    val cwd = sys.props("user.dir")
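-    // SHA-1 of the working directory acts as a location fingerprint: a
-    // mismatch with the stored version means the project has moved.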
-    val ha = java.security.MessageDigest.getInstance("SHA-1").
-      digest(cwd.getBytes).map("%02x".format(_)).mkString
-    if (json.exists) {
-      val em = readManifestJson(json)
-      if (em.description == Some(manifestAutogenTag) && ha != em.version) {
-        warn("This engine project directory contains an auto-generated " +
-          "manifest that has been copied/moved from another location. ")
-        warn("Regenerating the manifest to reflect the updated location. " +
-          "This will dissociate with all previous engine instances.")
-        generateManifestJson(json)
-      } else {
-        info(s"Using existing engine manifest JSON at ${json.getCanonicalPath}")
-      }
-    } else {
-      generateManifestJson(json)
-    }
-  }
-
-  def generateManifestJson(json: File): Unit = {
-    val cwd = sys.props("user.dir")
-    implicit val formats = Utils.json4sDefaultFormats +
-      new EngineManifestSerializer
-    val rand = Random.alphanumeric.take(32).mkString
-    val ha = java.security.MessageDigest.getInstance("SHA-1").
-      digest(cwd.getBytes).map("%02x".format(_)).mkString
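-    // A random ID plus the directory hash (stored as the version) uniquely
-    // identify this engine project at its current location.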
-    val em = EngineManifest(
-      id = rand,
-      version = ha,
-      name = new File(cwd).getName,
-      description = Some(manifestAutogenTag),
-      files = Seq(),
-      engineFactory = "")
-    try {
-      FileUtils.writeStringToFile(json, write(em), "ISO-8859-1")
-    } catch {
-      case e: java.io.IOException =>
-        error(s"Cannot generate ${json} automatically (${e.getMessage}). " +
-          "Aborting.")
-        sys.exit(1)
-    }
-  }
-
-  def readManifestJson(json: File): EngineManifest = {
-    implicit val formats = Utils.json4sDefaultFormats +
-      new EngineManifestSerializer
-    try {
-      read[EngineManifest](Source.fromFile(json).mkString)
-    } catch {
-      case e: java.io.FileNotFoundException =>
-        error(s"${json.getCanonicalPath} does not exist. Aborting.")
-        sys.exit(1)
-      case e: MappingException =>
-        error(s"${json.getCanonicalPath} has invalid content: " +
-          e.getMessage)
-        sys.exit(1)
-    }
-  }
-
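-  // Looks up the engine manifest in the meta data store and runs op with it.
-  // Explicit engine ID and version arguments override the values from the
-  // manifest JSON.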
-  def withRegisteredManifest(
-      json: File,
-      engineId: Option[String],
-      engineVersion: Option[String])(
-      op: EngineManifest => Int): Int = {
-    val ej = readManifestJson(json)
-    val id = engineId getOrElse ej.id
-    val version = engineVersion getOrElse ej.version
-    storage.Storage.getMetaDataEngineManifests.get(id, version) map {
-      op
-    } getOrElse {
-      error(s"Engine ${id} ${version} cannot be found in the system.")
-      error("Possible reasons:")
-      error("- the engine is not yet built by the 'build' command;")
-      error("- the meta data store is offline.")
-      1
-    }
-  }
-
-  def jarFilesAt(path: File): Array[File] = recursiveListFiles(path) filter {
-    _.getName.toLowerCase.endsWith(".jar")
-  }
-
-  def jarFilesForScala: Array[File] = {
-    val libFiles = jarFilesForScalaFilter(jarFilesAt(new File("lib")))
-    val targetFiles = jarFilesForScalaFilter(jarFilesAt(new File("target" +
-      File.separator + s"scala-${scalaVersionNoPatch}")))
-    // Use libFiles if target is empty.
-    if (targetFiles.size > 0) targetFiles else libFiles
-  }
-
-  def jarFilesForScalaFilter(jars: Array[File]): Array[File] =
-    jars.filterNot { f =>
-      f.getName.toLowerCase.endsWith("-javadoc.jar") ||
-      f.getName.toLowerCase.endsWith("-sources.jar")
-    }
-
-  def recursiveListFiles(f: File): Array[File] = {
-    Option(f.listFiles) map { these =>
-      these ++ these.filter(_.isDirectory).flatMap(recursiveListFiles)
-    } getOrElse Array[File]()
-  }
-
-  def getSparkHome(sparkHome: Option[String]): String = {
-    sparkHome getOrElse {
-      sys.env.getOrElse("SPARK_HOME", ".")
-    }
-  }
-
-  def versionNoPatch(fullVersion: String): String = {
-    val v = """^(\d+\.\d+)""".r
-    val versionNoPatch = for {
-      v(np) <- v findFirstIn fullVersion
-    } yield np
-    versionNoPatch.getOrElse(fullVersion)
-  }
-
-  def scalaVersionNoPatch: String = versionNoPatch(BuildInfo.scalaVersion)
-
-  def detectSbt(ca: ConsoleArgs): String = {
-    ca.build.sbt map {
-      _.getCanonicalPath
-    } getOrElse {
-      val f = new File(Seq(ca.common.pioHome.get, "sbt", "sbt").mkString(
-        File.separator))
-      if (f.exists) f.getCanonicalPath else "sbt"
-    }
-  }
-
-  def stripMarginAndNewlines(string: String): String =
-    string.stripMargin.replaceAll("\n", " ")
-}

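For reference, the manifest logic above ties an auto-generated manifest to its
directory by SHA-1-hashing the working directory path and storing the hex
digest as the manifest's version; a project that is copied or moved hashes
differently and triggers regeneration. A minimal standalone sketch of that
fingerprint (the object name is illustrative, not from the original source):

    import java.security.MessageDigest

    object DirFingerprint {
      // Hex-encoded SHA-1 of the current working directory path, the same
      // value generateManifestJson above writes as the manifest version.
      def apply(): String =
        MessageDigest.getInstance("SHA-1")
          .digest(sys.props("user.dir").getBytes)
          .map("%02x".format(_))
          .mkString
    }
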
http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/tools/src/main/scala/io/prediction/tools/console/Export.scala
----------------------------------------------------------------------
diff --git a/tools/src/main/scala/io/prediction/tools/console/Export.scala b/tools/src/main/scala/io/prediction/tools/console/Export.scala
deleted file mode 100644
index 55540cf..0000000
--- a/tools/src/main/scala/io/prediction/tools/console/Export.scala
+++ /dev/null
@@ -1,42 +0,0 @@
-/** Copyright 2015 TappingStone, Inc.
-  *
-  * Licensed under the Apache License, Version 2.0 (the "License");
-  * you may not use this file except in compliance with the License.
-  * You may obtain a copy of the License at
-  *
-  *     http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-package io.prediction.tools.console
-
-import io.prediction.tools.Runner
-
-case class ExportArgs(
-  appId: Int = 0,
-  channel: Option[String] = None,
-  outputPath: String = "",
-  format: String = "json")
-
-object Export {
-  def eventsToFile(ca: ConsoleArgs): Int = {
-    val channelArg = ca.export.channel
-      .map(ch => Seq("--channel", ch)).getOrElse(Nil)
-    Runner.runOnSpark(
-      "io.prediction.tools.export.EventsToFile",
-      Seq(
-        "--appid",
-        ca.export.appId.toString,
-        "--output",
-        ca.export.outputPath,
-        "--format",
-        ca.export.format) ++ channelArg,
-      ca,
-      Nil)
-  }
-}

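Worked through, eventsToFile above assembles its driver arguments mechanically
from ExportArgs before handing them to Runner.runOnSpark. A small sketch of the
same assembly in isolation (ExportArgs repeated from the file above; the demo
object name and sample values are hypothetical):

    case class ExportArgs(
      appId: Int = 0,
      channel: Option[String] = None,
      outputPath: String = "",
      format: String = "json")

    object ExportArgsDemo extends App {
      val ea = ExportArgs(appId = 1, outputPath = "/tmp/events")
      val channelArg = ea.channel.map(ch => Seq("--channel", ch)).getOrElse(Nil)
      // Argument list passed to io.prediction.tools.export.EventsToFile:
      val sparkArgs = Seq(
        "--appid", ea.appId.toString,
        "--output", ea.outputPath,
        "--format", ea.format) ++ channelArg
      println(sparkArgs)
      // List(--appid, 1, --output, /tmp/events, --format, json)
    }
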
http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/tools/src/main/scala/io/prediction/tools/console/Import.scala
----------------------------------------------------------------------
diff --git a/tools/src/main/scala/io/prediction/tools/console/Import.scala b/tools/src/main/scala/io/prediction/tools/console/Import.scala
deleted file mode 100644
index 18a6437..0000000
--- a/tools/src/main/scala/io/prediction/tools/console/Import.scala
+++ /dev/null
@@ -1,39 +0,0 @@
-/** Copyright 2015 TappingStone, Inc.
-  *
-  * Licensed under the Apache License, Version 2.0 (the "License");
-  * you may not use this file except in compliance with the License.
-  * You may obtain a copy of the License at
-  *
-  *     http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-package io.prediction.tools.console
-
-import io.prediction.tools.Runner
-
-case class ImportArgs(
-  appId: Int = 0,
-  channel: Option[String] = None,
-  inputPath: String = "")
-
-object Import {
-  def fileToEvents(ca: ConsoleArgs): Int = {
-    val channelArg = ca.imprt.channel
-      .map(ch => Seq("--channel", ch)).getOrElse(Nil)
-    Runner.runOnSpark(
-      "io.prediction.tools.imprt.FileToEvents",
-      Seq(
-        "--appid",
-        ca.imprt.appId.toString,
-        "--input",
-        ca.imprt.inputPath) ++ channelArg,
-      ca,
-      Nil)
-  }
-}

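The matching console invocation mirrors the flags fileToEvents above forwards
to io.prediction.tools.imprt.FileToEvents, for example (values hypothetical):

    pio import --appid 1 --input data/events.json

with --channel <name> appended only when a channel is given, exactly as
channelArg does above.
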
http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/tools/src/main/scala/io/prediction/tools/console/Template.scala
----------------------------------------------------------------------
diff --git a/tools/src/main/scala/io/prediction/tools/console/Template.scala b/tools/src/main/scala/io/prediction/tools/console/Template.scala
deleted file mode 100644
index a0b4376..0000000
--- a/tools/src/main/scala/io/prediction/tools/console/Template.scala
+++ /dev/null
@@ -1,429 +0,0 @@
-/** Copyright 2015 TappingStone, Inc.
-  *
-  * Licensed under the Apache License, Version 2.0 (the "License");
-  * you may not use this file except in compliance with the License.
-  * You may obtain a copy of the License at
-  *
-  *     http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-package io.prediction.tools.console
-
-import java.io.BufferedInputStream
-import java.io.BufferedOutputStream
-import java.io.File
-import java.io.FileInputStream
-import java.io.FileOutputStream
-import java.net.ConnectException
-import java.net.URI
-import java.util.zip.ZipInputStream
-
-import grizzled.slf4j.Logging
-import io.prediction.controller.Utils
-import io.prediction.core.BuildInfo
-import org.apache.commons.io.FileUtils
-import org.json4s._
-import org.json4s.native.JsonMethods._
-import org.json4s.native.Serialization.read
-import org.json4s.native.Serialization.write
-import semverfi._
-
-import scala.io.Source
-import scala.sys.process._
-import scalaj.http._
-
-case class TemplateArgs(
-  directory: String = "",
-  repository: String = "",
-  version: Option[String] = None,
-  name: Option[String] = None,
-  packageName: Option[String] = None,
-  email: Option[String] = None)
-
-case class GitHubTag(
-  name: String,
-  zipball_url: String,
-  tarball_url: String,
-  commit: GitHubCommit)
-
-case class GitHubCommit(
-  sha: String,
-  url: String)
-
-case class GitHubCache(
-  headers: Map[String, String],
-  body: String)
-
-case class TemplateEntry(
-  repo: String)
-
-case class TemplateMetaData(
-  pioVersionMin: Option[String] = None)
-
-object Template extends Logging {
-  implicit val formats = Utils.json4sDefaultFormats
-
-  def templateMetaData(templateJson: File): TemplateMetaData = {
-    if (!templateJson.exists) {
-      warn(s"$templateJson does not exist. Template metadata will not be available. " +
-        "(This is safe to ignore if you are not working on a template.)")
-      TemplateMetaData()
-    } else {
-      val jsonString = Source.fromFile(templateJson)(scala.io.Codec.ISO8859).mkString
-      val json = try {
-        parse(jsonString)
-      } catch {
-        case e: org.json4s.ParserUtil.ParseException =>
-          warn(s"$templateJson cannot be parsed. Template metadata will not be available.")
-          return TemplateMetaData()
-      }
-      val pioVersionMin = json \ "pio" \ "version" \ "min"
-      pioVersionMin match {
-        case JString(s) => TemplateMetaData(pioVersionMin = Some(s))
-        case _ => TemplateMetaData()
-      }
-    }
-  }
-
-  /** Creates a wrapper that provides the functionality of scalaj.http.Http()
-    * with automatic handling of proxy settings. Proxy settings are taken
-    * first from the global "git" configuration, then from the system
-    * properties "http.proxyHost" and "http.proxyPort".
-    *
-    * @param url URL to connect to
-    * @return an HttpRequest with a proxy configured when one is found
-    */
-  def httpOptionalProxy(url: String): HttpRequest = {
-    val gitProxy = try {
-      Some(Process("git config --global http.proxy").lines.toList(0))
-    } catch {
-      case e: Throwable => None
-    }
-
-    val (host, port) = gitProxy map { p =>
-      val proxyUri = new URI(p)
-      (Option(proxyUri.getHost),
-        if (proxyUri.getPort == -1) None else Some(proxyUri.getPort))
-    } getOrElse {
-      (sys.props.get("http.proxyHost"),
-        sys.props.get("http.proxyPort").map { p =>
-          try {
-            Some(p.toInt)
-          } catch {
-            case e: NumberFormatException => None
-          }
-        } getOrElse None)
-    }
-
-    (host, port) match {
-      case (Some(h), Some(p)) => Http(url).proxy(h, p)
-      case _ => Http(url)
-    }
-  }
-
-  def getGitHubRepos(
-      repos: Seq[String],
-      apiType: String,
-      repoFilename: String): Map[String, GitHubCache] = {
-    val reposCache = try {
-      val cache =
-        Source.fromFile(repoFilename)(scala.io.Codec.ISO8859).mkString
-      read[Map[String, GitHubCache]](cache)
-    } catch {
-      case e: Throwable => Map[String, GitHubCache]()
-    }
-    val newReposCache = reposCache ++ (try {
-      repos.map { repo =>
-        val url = s"https://api.github.com/repos/$repo/$apiType"
-        val http = httpOptionalProxy(url)
-        val response = reposCache.get(repo).map { cache =>
-          cache.headers.get("ETag").map { etag =>
-            http.header("If-None-Match", etag).asString
-          } getOrElse {
-            http.asString
-          }
-        } getOrElse {
-          http.asString
-        }
-
-        val body = if (response.code == 304) {
-          reposCache(repo).body
-        } else {
-          response.body
-        }
-
-        repo -> GitHubCache(headers = response.headers, body = body)
-      }.toMap
-    } catch {
-      case e: ConnectException =>
-        githubConnectErrorMessage(e)
-        Map()
-    })
-    FileUtils.writeStringToFile(
-      new File(repoFilename),
-      write(newReposCache),
-      "ISO-8859-1")
-    newReposCache
-  }
-
-  def sub(repo: String, name: String, email: String, org: String): Unit = {
-    val data = Map(
-      "repo" -> repo,
-      "name" -> name,
-      "email" -> email,
-      "org" -> org)
-    try {
-      httpOptionalProxy("https://update.prediction.io/templates.subscribe").
-        postData("json=" + write(data)).asString
-    } catch {
-      case e: Throwable => error("Unable to subscribe.")
-    }
-  }
-
-  def meta(repo: String, name: String, org: String): Unit = {
-    try {
-      httpOptionalProxy(
-        s"https://meta.prediction.io/templates/$repo/$org/$name").asString
-    } catch {
-      case e: Throwable => debug("Template metadata unavailable.")
-    }
-  }
-
-  def list(ca: ConsoleArgs): Int = {
-    val templatesUrl = "https://templates.prediction.io/index.json"
-    try {
-      val templatesJson = Source.fromURL(templatesUrl).mkString("")
-      val templates = read[List[TemplateEntry]](templatesJson)
-      println("The following is a list of template IDs registered on " +
-        "PredictionIO Template Gallery:")
-      println()
-      templates.sortBy(_.repo.toLowerCase).foreach { template =>
-        println(template.repo)
-      }
-      println()
-      println("Notice that it is possible use any GitHub repository as your " +
-        "engine template ID (e.g. YourOrg/YourTemplate).")
-      0
-    } catch {
-      case e: Throwable =>
-        error(s"Unable to list templates from $templatesUrl " +
-          s"(${e.getMessage}). Aborting.")
-        1
-    }
-  }
-
-  def githubConnectErrorMessage(e: ConnectException): Unit = {
-    error(s"Unable to connect to GitHub (Reason: ${e.getMessage}). " +
-      "Please check your network configuration and proxy settings.")
-  }
-
-  def get(ca: ConsoleArgs): Int = {
-    val repos =
-      getGitHubRepos(Seq(ca.template.repository), "tags", ".templates-cache")
-
-    repos.get(ca.template.repository).map { repo =>
-      try {
-        read[List[GitHubTag]](repo.body)
-      } catch {
-        case e: MappingException =>
-          error(s"Either ${ca.template.repository} is not a valid GitHub " +
-            "repository, or it does not have any tag. Aborting.")
-          return 1
-      }
-    } getOrElse {
-      error(s"Failed to retrieve ${ca.template.repository}. Aborting.")
-      return 1
-    }
-
-    val name = ca.template.name getOrElse {
-      try {
-        Process("git config --global user.name").lines.toList(0)
-      } catch {
-        case e: Throwable =>
-          readLine("Please enter author's name: ")
-      }
-    }
-
-    val organization = ca.template.packageName getOrElse {
-      readLine(
-        "Please enter the template's Scala package name (e.g. com.mycompany): ")
-    }
-
-    val email = ca.template.email getOrElse {
-      try {
-        Process("git config --global user.email").lines.toList(0)
-      } catch {
-        case e: Throwable =>
-          readLine("Please enter author's e-mail address: ")
-      }
-    }
-
-    println(s"Author's name:         $name")
-    println(s"Author's e-mail:       $email")
-    println(s"Author's organization: $organization")
-
-    var subscribe = readLine("Would you like to be informed about new bug " +
-      "fixes and security updates of this template? (Y/n) ")
-    var valid = false
-
-    do {
-      subscribe match {
-        case "" | "Y" | "y" =>
-          sub(ca.template.repository, name, email, organization)
-          valid = true
-        case "n" | "N" =>
-          meta(ca.template.repository, name, organization)
-          valid = true
-        case _ =>
-          println("Please answer 'y' or 'n'")
-          subscribe = readLine("(Y/n)? ")
-      }
-    } while (!valid)
-
-    val repo = repos(ca.template.repository)
-
-    println(s"Retrieving ${ca.template.repository}")
-    val tags = read[List[GitHubTag]](repo.body)
-    println(s"There are ${tags.size} tags")
-
-    if (tags.size == 0) {
-      println(s"${ca.template.repository} does not have any tag. Aborting.")
-      return 1
-    }
-
-    val tag = ca.template.version.map { v =>
-      tags.find(_.name == v).getOrElse {
-        println(s"${ca.template.repository} does not have tag $v. Aborting.")
-        return 1
-      }
-    } getOrElse tags.head
-
-    println(s"Using tag ${tag.name}")
-    val url =
-      s"https://github.com/${ca.template.repository}/archive/${tag.name}.zip"
-    println(s"Going to download $url")
-    val trial = try {
-      httpOptionalProxy(url).asBytes
-    } catch {
-      case e: ConnectException =>
-        githubConnectErrorMessage(e)
-        return 1
-    }
-    val finalTrial = try {
-      trial.location.map { loc =>
-        println(s"Redirecting to $loc")
-        httpOptionalProxy(loc).asBytes
-      } getOrElse trial
-    } catch {
-      case e: ConnectException =>
-        githubConnectErrorMessage(e)
-        return 1
-    }
-    val zipFilename =
-      s"${ca.template.repository.replace('/', '-')}-${tag.name}.zip"
-    FileUtils.writeByteArrayToFile(
-      new File(zipFilename),
-      finalTrial.body)
-    val zis = new ZipInputStream(
-      new BufferedInputStream(new FileInputStream(zipFilename)))
-    val bufferSize = 4096
-    val filesToModify = collection.mutable.ListBuffer[String]()
-    var ze = zis.getNextEntry
-    while (ze != null) {
-      val filenameSegments = ze.getName.split(File.separatorChar)
-      val destFilename = (ca.template.directory +: filenameSegments.tail).
-        mkString(File.separator)
-      if (ze.isDirectory) {
-        new File(destFilename).mkdirs
-      } else {
-        val os = new BufferedOutputStream(
-          new FileOutputStream(destFilename),
-          bufferSize)
-        val data = Array.ofDim[Byte](bufferSize)
-        var count = zis.read(data, 0, bufferSize)
-        while (count != -1) {
-          os.write(data, 0, count)
-          count = zis.read(data, 0, bufferSize)
-        }
-        os.flush()
-        os.close()
-
-        val nameOnly = new File(destFilename).getName
-
-        if (organization != "" &&
-          (nameOnly.endsWith(".scala") ||
-            nameOnly == "build.sbt" ||
-            nameOnly == "engine.json")) {
-          filesToModify += destFilename
-        }
-      }
-      ze = zis.getNextEntry
-    }
-    zis.close()
-    new File(zipFilename).delete
-
-    val engineJsonFile =
-      new File(ca.template.directory, "engine.json")
-
-    val engineJson = try {
-      Some(parse(Source.fromFile(engineJsonFile).mkString))
-    } catch {
-      case e: java.io.IOException =>
-        error("Unable to read engine.json. Skipping automatic package " +
-          "name replacement.")
-        None
-      case e: MappingException =>
-        error("Unable to parse engine.json. Skipping automatic package " +
-          "name replacement.")
-        None
-    }
-
-    val engineFactory = engineJson.map { ej =>
-      (ej \ "engineFactory").extractOpt[String]
-    } getOrElse None
-
-    engineFactory.map { ef =>
-      val pkgName = ef.split('.').dropRight(1).mkString(".")
-      println(s"Replacing $pkgName with $organization...")
-
-      filesToModify.foreach { ftm =>
-        println(s"Processing $ftm...")
-        val fileContent = Source.fromFile(ftm).getLines()
-        val processedLines =
-          fileContent.map(_.replaceAllLiterally(pkgName, organization))
-        FileUtils.writeStringToFile(
-          new File(ftm),
-          processedLines.mkString("\n"))
-      }
-    } getOrElse {
-      error("engineFactory is not found in engine.json. Skipping automatic " +
-        "package name replacement.")
-    }
-
-    verifyTemplateMinVersion(new File(ca.template.directory, "template.json"))
-
-    println(s"Engine template ${ca.template.repository} is now ready at " +
-      ca.template.directory)
-
-    0
-  }
-
-  def verifyTemplateMinVersion(templateJsonFile: File): Unit = {
-    val metadata = templateMetaData(templateJsonFile)
-
-    metadata.pioVersionMin.foreach { pvm =>
-      if (Version(BuildInfo.version) < Version(pvm)) {
-        error(s"This engine template requires at least PredictionIO $pvm. " +
-          s"The template may not work with PredictionIO ${BuildInfo.version}.")
-        sys.exit(1)
-      }
-    }
-  }
-
-}

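One detail of getGitHubRepos above worth noting is its conditional GET: the
ETag cached from a previous response is replayed as If-None-Match, and an HTTP
304 reply reuses the cached body instead of re-downloading. A minimal sketch of
that pattern (assuming, as the code above does, a scalaj-http version whose
response headers form a Map[String, String]; the object and method names are
illustrative):

    import scalaj.http._

    object ConditionalGet {
      // cache maps URL -> (etag, body) from previous fetches
      def cachedGet(
          url: String,
          cache: Map[String, (String, String)])
        : (String, Map[String, (String, String)]) = {
        val request = cache.get(url).map { case (etag, _) =>
          Http(url).header("If-None-Match", etag) // server may answer 304
        } getOrElse Http(url)
        val response = request.asString
        val body =
          if (response.code == 304) cache(url)._2 // not modified: reuse cache
          else response.body
        val etag = response.headers.getOrElse("ETag", "")
        (body, cache + (url -> (etag, body)))
      }
    }
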

[27/34] incubator-predictionio git commit: change all to org.apache.predictionio except docs

Posted by do...@apache.org.
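
The change in this commit is purely mechanical: every io.prediction reference
in sources and build files becomes org.apache.predictionio, as the diffs below
show. A hedged sketch of one way to script such a sweep (illustrative only;
whatever tool was actually used is not recorded here):

    import java.nio.file.{Files, Paths}
    import scala.collection.JavaConverters._

    object RenamePackages {
      def main(args: Array[String]): Unit = {
        val root = Paths.get(args.headOption.getOrElse("."))
        Files.walk(root).iterator.asScala
          .filter { p =>
            val name = p.toString
            name.endsWith(".scala") || name.endsWith(".sbt")
          }
          .foreach { p =>
            val src = new String(Files.readAllBytes(p), "UTF-8")
            val out =
              src.replaceAllLiterally("io.prediction", "org.apache.predictionio")
            if (out != src) Files.write(p, out.getBytes("UTF-8"))
          }
      }
    }
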
http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/scala-parallel-ecommercerecommendation/train-with-rate-event/src/main/scala/DataSource.scala
----------------------------------------------------------------------
diff --git a/examples/scala-parallel-ecommercerecommendation/train-with-rate-event/src/main/scala/DataSource.scala b/examples/scala-parallel-ecommercerecommendation/train-with-rate-event/src/main/scala/DataSource.scala
index c15c2ba..abccb67 100644
--- a/examples/scala-parallel-ecommercerecommendation/train-with-rate-event/src/main/scala/DataSource.scala
+++ b/examples/scala-parallel-ecommercerecommendation/train-with-rate-event/src/main/scala/DataSource.scala
@@ -1,11 +1,11 @@
 package org.template.ecommercerecommendation
 
-import io.prediction.controller.PDataSource
-import io.prediction.controller.EmptyEvaluationInfo
-import io.prediction.controller.EmptyActualResult
-import io.prediction.controller.Params
-import io.prediction.data.storage.Event
-import io.prediction.data.storage.Storage
+import org.apache.predictionio.controller.PDataSource
+import org.apache.predictionio.controller.EmptyEvaluationInfo
+import org.apache.predictionio.controller.EmptyActualResult
+import org.apache.predictionio.controller.Params
+import org.apache.predictionio.data.storage.Event
+import org.apache.predictionio.data.storage.Storage
 
 import org.apache.spark.SparkContext
 import org.apache.spark.SparkContext._

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/scala-parallel-ecommercerecommendation/train-with-rate-event/src/main/scala/Engine.scala
----------------------------------------------------------------------
diff --git a/examples/scala-parallel-ecommercerecommendation/train-with-rate-event/src/main/scala/Engine.scala b/examples/scala-parallel-ecommercerecommendation/train-with-rate-event/src/main/scala/Engine.scala
index 1b453de..cc2e015 100644
--- a/examples/scala-parallel-ecommercerecommendation/train-with-rate-event/src/main/scala/Engine.scala
+++ b/examples/scala-parallel-ecommercerecommendation/train-with-rate-event/src/main/scala/Engine.scala
@@ -1,7 +1,7 @@
 package org.template.ecommercerecommendation
 
-import io.prediction.controller.IEngineFactory
-import io.prediction.controller.Engine
+import org.apache.predictionio.controller.IEngineFactory
+import org.apache.predictionio.controller.Engine
 
 case class Query(
   user: String,

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/scala-parallel-ecommercerecommendation/train-with-rate-event/src/main/scala/Preparator.scala
----------------------------------------------------------------------
diff --git a/examples/scala-parallel-ecommercerecommendation/train-with-rate-event/src/main/scala/Preparator.scala b/examples/scala-parallel-ecommercerecommendation/train-with-rate-event/src/main/scala/Preparator.scala
index e575a08..82ec3ec 100644
--- a/examples/scala-parallel-ecommercerecommendation/train-with-rate-event/src/main/scala/Preparator.scala
+++ b/examples/scala-parallel-ecommercerecommendation/train-with-rate-event/src/main/scala/Preparator.scala
@@ -1,6 +1,6 @@
 package org.template.ecommercerecommendation
 
-import io.prediction.controller.PPreparator
+import org.apache.predictionio.controller.PPreparator
 
 import org.apache.spark.SparkContext
 import org.apache.spark.SparkContext._

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/scala-parallel-ecommercerecommendation/train-with-rate-event/src/main/scala/Serving.scala
----------------------------------------------------------------------
diff --git a/examples/scala-parallel-ecommercerecommendation/train-with-rate-event/src/main/scala/Serving.scala b/examples/scala-parallel-ecommercerecommendation/train-with-rate-event/src/main/scala/Serving.scala
index 21cf2df..4cc20da 100644
--- a/examples/scala-parallel-ecommercerecommendation/train-with-rate-event/src/main/scala/Serving.scala
+++ b/examples/scala-parallel-ecommercerecommendation/train-with-rate-event/src/main/scala/Serving.scala
@@ -1,6 +1,6 @@
 package org.template.ecommercerecommendation
 
-import io.prediction.controller.LServing
+import org.apache.predictionio.controller.LServing
 
 class Serving
   extends LServing[Query, PredictedResult] {

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/scala-parallel-ecommercerecommendation/weighted-items/build.sbt
----------------------------------------------------------------------
diff --git a/examples/scala-parallel-ecommercerecommendation/weighted-items/build.sbt b/examples/scala-parallel-ecommercerecommendation/weighted-items/build.sbt
index f4d98ee..650c7c3 100644
--- a/examples/scala-parallel-ecommercerecommendation/weighted-items/build.sbt
+++ b/examples/scala-parallel-ecommercerecommendation/weighted-items/build.sbt
@@ -4,9 +4,9 @@ assemblySettings
 
 name := "template-scala-parallel-ecommercerecommendation"
 
-organization := "io.prediction"
+organization := "org.apache.predictionio"
 
 libraryDependencies ++= Seq(
-  "io.prediction"    %% "core"          % pioVersion.value  % "provided",
+  "org.apache.predictionio"    %% "core"          % pioVersion.value  % "provided",
   "org.apache.spark" %% "spark-core"    % "1.2.0" % "provided",
   "org.apache.spark" %% "spark-mllib"   % "1.2.0" % "provided")

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/scala-parallel-ecommercerecommendation/weighted-items/project/pio-build.sbt
----------------------------------------------------------------------
diff --git a/examples/scala-parallel-ecommercerecommendation/weighted-items/project/pio-build.sbt b/examples/scala-parallel-ecommercerecommendation/weighted-items/project/pio-build.sbt
index 8346a96..9aed0ee 100644
--- a/examples/scala-parallel-ecommercerecommendation/weighted-items/project/pio-build.sbt
+++ b/examples/scala-parallel-ecommercerecommendation/weighted-items/project/pio-build.sbt
@@ -1 +1 @@
-addSbtPlugin("io.prediction" % "pio-build" % "0.9.0")
+addSbtPlugin("org.apache.predictionio" % "pio-build" % "0.9.0")

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/scala-parallel-ecommercerecommendation/weighted-items/src/main/scala/ALSAlgorithm.scala
----------------------------------------------------------------------
diff --git a/examples/scala-parallel-ecommercerecommendation/weighted-items/src/main/scala/ALSAlgorithm.scala b/examples/scala-parallel-ecommercerecommendation/weighted-items/src/main/scala/ALSAlgorithm.scala
index 019937b..cb546a9 100644
--- a/examples/scala-parallel-ecommercerecommendation/weighted-items/src/main/scala/ALSAlgorithm.scala
+++ b/examples/scala-parallel-ecommercerecommendation/weighted-items/src/main/scala/ALSAlgorithm.scala
@@ -1,10 +1,10 @@
 package org.template.ecommercerecommendation
 
-import io.prediction.controller.P2LAlgorithm
-import io.prediction.controller.Params
-import io.prediction.data.storage.BiMap
-import io.prediction.data.storage.Event
-import io.prediction.data.storage.Storage
+import org.apache.predictionio.controller.P2LAlgorithm
+import org.apache.predictionio.controller.Params
+import org.apache.predictionio.data.storage.BiMap
+import org.apache.predictionio.data.storage.Event
+import org.apache.predictionio.data.storage.Storage
 
 import org.apache.spark.SparkContext
 import org.apache.spark.SparkContext._

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/scala-parallel-ecommercerecommendation/weighted-items/src/main/scala/DataSource.scala
----------------------------------------------------------------------
diff --git a/examples/scala-parallel-ecommercerecommendation/weighted-items/src/main/scala/DataSource.scala b/examples/scala-parallel-ecommercerecommendation/weighted-items/src/main/scala/DataSource.scala
index c102b72..a5e78a5 100644
--- a/examples/scala-parallel-ecommercerecommendation/weighted-items/src/main/scala/DataSource.scala
+++ b/examples/scala-parallel-ecommercerecommendation/weighted-items/src/main/scala/DataSource.scala
@@ -1,11 +1,11 @@
 package org.template.ecommercerecommendation
 
-import io.prediction.controller.PDataSource
-import io.prediction.controller.EmptyEvaluationInfo
-import io.prediction.controller.EmptyActualResult
-import io.prediction.controller.Params
-import io.prediction.data.storage.Event
-import io.prediction.data.storage.Storage
+import org.apache.predictionio.controller.PDataSource
+import org.apache.predictionio.controller.EmptyEvaluationInfo
+import org.apache.predictionio.controller.EmptyActualResult
+import org.apache.predictionio.controller.Params
+import org.apache.predictionio.data.storage.Event
+import org.apache.predictionio.data.storage.Storage
 
 import org.apache.spark.SparkContext
 import org.apache.spark.SparkContext._

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/scala-parallel-ecommercerecommendation/weighted-items/src/main/scala/Engine.scala
----------------------------------------------------------------------
diff --git a/examples/scala-parallel-ecommercerecommendation/weighted-items/src/main/scala/Engine.scala b/examples/scala-parallel-ecommercerecommendation/weighted-items/src/main/scala/Engine.scala
index 1b453de..cc2e015 100644
--- a/examples/scala-parallel-ecommercerecommendation/weighted-items/src/main/scala/Engine.scala
+++ b/examples/scala-parallel-ecommercerecommendation/weighted-items/src/main/scala/Engine.scala
@@ -1,7 +1,7 @@
 package org.template.ecommercerecommendation
 
-import io.prediction.controller.IEngineFactory
-import io.prediction.controller.Engine
+import org.apache.predictionio.controller.IEngineFactory
+import org.apache.predictionio.controller.Engine
 
 case class Query(
   user: String,

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/scala-parallel-ecommercerecommendation/weighted-items/src/main/scala/Preparator.scala
----------------------------------------------------------------------
diff --git a/examples/scala-parallel-ecommercerecommendation/weighted-items/src/main/scala/Preparator.scala b/examples/scala-parallel-ecommercerecommendation/weighted-items/src/main/scala/Preparator.scala
index 4dd45cf..51258ba 100644
--- a/examples/scala-parallel-ecommercerecommendation/weighted-items/src/main/scala/Preparator.scala
+++ b/examples/scala-parallel-ecommercerecommendation/weighted-items/src/main/scala/Preparator.scala
@@ -1,6 +1,6 @@
 package org.template.ecommercerecommendation
 
-import io.prediction.controller.PPreparator
+import org.apache.predictionio.controller.PPreparator
 
 import org.apache.spark.SparkContext
 import org.apache.spark.SparkContext._

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/scala-parallel-ecommercerecommendation/weighted-items/src/main/scala/Serving.scala
----------------------------------------------------------------------
diff --git a/examples/scala-parallel-ecommercerecommendation/weighted-items/src/main/scala/Serving.scala b/examples/scala-parallel-ecommercerecommendation/weighted-items/src/main/scala/Serving.scala
index 21cf2df..4cc20da 100644
--- a/examples/scala-parallel-ecommercerecommendation/weighted-items/src/main/scala/Serving.scala
+++ b/examples/scala-parallel-ecommercerecommendation/weighted-items/src/main/scala/Serving.scala
@@ -1,6 +1,6 @@
 package org.template.ecommercerecommendation
 
-import io.prediction.controller.LServing
+import org.apache.predictionio.controller.LServing
 
 class Serving
   extends LServing[Query, PredictedResult] {

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/scala-parallel-recommendation/custom-prepartor/build.sbt
----------------------------------------------------------------------
diff --git a/examples/scala-parallel-recommendation/custom-prepartor/build.sbt b/examples/scala-parallel-recommendation/custom-prepartor/build.sbt
index b3f4bd4..191f575 100644
--- a/examples/scala-parallel-recommendation/custom-prepartor/build.sbt
+++ b/examples/scala-parallel-recommendation/custom-prepartor/build.sbt
@@ -4,9 +4,9 @@ assemblySettings
 
 name := "template-scala-parallel-recommendation-custom-preparator"
 
-organization := "io.prediction"
+organization := "org.apache.predictionio"
 
 libraryDependencies ++= Seq(
-  "io.prediction"    %% "core"          % pioVersion.value % "provided",
+  "org.apache.predictionio"    %% "core"          % pioVersion.value % "provided",
   "org.apache.spark" %% "spark-core"    % "1.2.0" % "provided",
   "org.apache.spark" %% "spark-mllib"   % "1.2.0" % "provided")

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/scala-parallel-recommendation/custom-prepartor/project/pio-build.sbt
----------------------------------------------------------------------
diff --git a/examples/scala-parallel-recommendation/custom-prepartor/project/pio-build.sbt b/examples/scala-parallel-recommendation/custom-prepartor/project/pio-build.sbt
index 8346a96..9aed0ee 100644
--- a/examples/scala-parallel-recommendation/custom-prepartor/project/pio-build.sbt
+++ b/examples/scala-parallel-recommendation/custom-prepartor/project/pio-build.sbt
@@ -1 +1 @@
-addSbtPlugin("io.prediction" % "pio-build" % "0.9.0")
+addSbtPlugin("org.apache.predictionio" % "pio-build" % "0.9.0")

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/scala-parallel-recommendation/custom-prepartor/src/main/scala/ALSAlgorithm.scala
----------------------------------------------------------------------
diff --git a/examples/scala-parallel-recommendation/custom-prepartor/src/main/scala/ALSAlgorithm.scala b/examples/scala-parallel-recommendation/custom-prepartor/src/main/scala/ALSAlgorithm.scala
index 0c0db22..6256e9b 100644
--- a/examples/scala-parallel-recommendation/custom-prepartor/src/main/scala/ALSAlgorithm.scala
+++ b/examples/scala-parallel-recommendation/custom-prepartor/src/main/scala/ALSAlgorithm.scala
@@ -1,8 +1,8 @@
 package org.template.recommendation
 
-import io.prediction.controller.PAlgorithm
-import io.prediction.controller.Params
-import io.prediction.data.storage.BiMap
+import org.apache.predictionio.controller.PAlgorithm
+import org.apache.predictionio.controller.Params
+import org.apache.predictionio.data.storage.BiMap
 
 import org.apache.spark.SparkContext
 import org.apache.spark.SparkContext._

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/scala-parallel-recommendation/custom-prepartor/src/main/scala/ALSModel.scala
----------------------------------------------------------------------
diff --git a/examples/scala-parallel-recommendation/custom-prepartor/src/main/scala/ALSModel.scala b/examples/scala-parallel-recommendation/custom-prepartor/src/main/scala/ALSModel.scala
index f0c7b7b..243c1d1 100644
--- a/examples/scala-parallel-recommendation/custom-prepartor/src/main/scala/ALSModel.scala
+++ b/examples/scala-parallel-recommendation/custom-prepartor/src/main/scala/ALSModel.scala
@@ -5,9 +5,9 @@ package org.apache.spark.mllib.recommendation
 
 import org.template.recommendation.ALSAlgorithmParams
 
-import io.prediction.controller.IPersistentModel
-import io.prediction.controller.IPersistentModelLoader
-import io.prediction.data.storage.BiMap
+import org.apache.predictionio.controller.IPersistentModel
+import org.apache.predictionio.controller.IPersistentModelLoader
+import org.apache.predictionio.data.storage.BiMap
 
 import org.apache.spark.SparkContext
 import org.apache.spark.SparkContext._

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/scala-parallel-recommendation/custom-prepartor/src/main/scala/DataSource.scala
----------------------------------------------------------------------
diff --git a/examples/scala-parallel-recommendation/custom-prepartor/src/main/scala/DataSource.scala b/examples/scala-parallel-recommendation/custom-prepartor/src/main/scala/DataSource.scala
index bad619c..c730d96 100644
--- a/examples/scala-parallel-recommendation/custom-prepartor/src/main/scala/DataSource.scala
+++ b/examples/scala-parallel-recommendation/custom-prepartor/src/main/scala/DataSource.scala
@@ -1,11 +1,11 @@
 package org.template.recommendation
 
-import io.prediction.controller.PDataSource
-import io.prediction.controller.EmptyEvaluationInfo
-import io.prediction.controller.EmptyActualResult
-import io.prediction.controller.Params
-import io.prediction.data.storage.Event
-import io.prediction.data.storage.Storage
+import org.apache.predictionio.controller.PDataSource
+import org.apache.predictionio.controller.EmptyEvaluationInfo
+import org.apache.predictionio.controller.EmptyActualResult
+import org.apache.predictionio.controller.Params
+import org.apache.predictionio.data.storage.Event
+import org.apache.predictionio.data.storage.Storage
 
 import org.apache.spark.SparkContext
 import org.apache.spark.SparkContext._

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/scala-parallel-recommendation/custom-prepartor/src/main/scala/Engine.scala
----------------------------------------------------------------------
diff --git a/examples/scala-parallel-recommendation/custom-prepartor/src/main/scala/Engine.scala b/examples/scala-parallel-recommendation/custom-prepartor/src/main/scala/Engine.scala
index edb7767..4702fe3 100644
--- a/examples/scala-parallel-recommendation/custom-prepartor/src/main/scala/Engine.scala
+++ b/examples/scala-parallel-recommendation/custom-prepartor/src/main/scala/Engine.scala
@@ -1,7 +1,7 @@
 package org.template.recommendation
 
-import io.prediction.controller.IEngineFactory
-import io.prediction.controller.Engine
+import org.apache.predictionio.controller.IEngineFactory
+import org.apache.predictionio.controller.Engine
 
 case class Query(
   user: String,

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/scala-parallel-recommendation/custom-prepartor/src/main/scala/Preparator.scala
----------------------------------------------------------------------
diff --git a/examples/scala-parallel-recommendation/custom-prepartor/src/main/scala/Preparator.scala b/examples/scala-parallel-recommendation/custom-prepartor/src/main/scala/Preparator.scala
index 5e83fd4..e23a6d7 100644
--- a/examples/scala-parallel-recommendation/custom-prepartor/src/main/scala/Preparator.scala
+++ b/examples/scala-parallel-recommendation/custom-prepartor/src/main/scala/Preparator.scala
@@ -1,13 +1,13 @@
 package org.template.recommendation
 
-import io.prediction.controller.PPreparator
+import org.apache.predictionio.controller.PPreparator
 
 import org.apache.spark.SparkContext
 import org.apache.spark.SparkContext._
 import org.apache.spark.rdd.RDD
 
 import scala.io.Source // ADDED
-import io.prediction.controller.Params // ADDED
+import org.apache.predictionio.controller.Params // ADDED
 
 // ADDED CustomPreparatorParams case class
 case class CustomPreparatorParams(

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/scala-parallel-recommendation/custom-prepartor/src/main/scala/Serving.scala
----------------------------------------------------------------------
diff --git a/examples/scala-parallel-recommendation/custom-prepartor/src/main/scala/Serving.scala b/examples/scala-parallel-recommendation/custom-prepartor/src/main/scala/Serving.scala
index 132755e..ad9058d 100644
--- a/examples/scala-parallel-recommendation/custom-prepartor/src/main/scala/Serving.scala
+++ b/examples/scala-parallel-recommendation/custom-prepartor/src/main/scala/Serving.scala
@@ -1,6 +1,6 @@
 package org.template.recommendation
 
-import io.prediction.controller.LServing
+import org.apache.predictionio.controller.LServing
 
 class Serving
   extends LServing[Query, PredictedResult] {

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/scala-parallel-recommendation/custom-query/build.sbt
----------------------------------------------------------------------
diff --git a/examples/scala-parallel-recommendation/custom-query/build.sbt b/examples/scala-parallel-recommendation/custom-query/build.sbt
index 5ba1880..0177e29 100644
--- a/examples/scala-parallel-recommendation/custom-query/build.sbt
+++ b/examples/scala-parallel-recommendation/custom-query/build.sbt
@@ -4,11 +4,11 @@ assemblySettings
 
 name := "template-scala-parallel-recommendation-custom-query"
 
-organization := "io.prediction"
+organization := "org.apache.predictionio"
 
 def provided  (deps: ModuleID*): Seq[ModuleID] = deps map (_ % "provided")
 
 libraryDependencies ++= provided(
-  "io.prediction"    %% "core"          % "0.8.6",
+  "org.apache.predictionio"    %% "core"          % "0.8.6",
   "org.apache.spark" %% "spark-core"    % "1.2.0",
   "org.apache.spark" %% "spark-mllib"   % "1.2.0")

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/scala-parallel-recommendation/custom-query/data/build.sbt
----------------------------------------------------------------------
diff --git a/examples/scala-parallel-recommendation/custom-query/data/build.sbt b/examples/scala-parallel-recommendation/custom-query/data/build.sbt
index a4b18c9..d4cf9b5 100644
--- a/examples/scala-parallel-recommendation/custom-query/data/build.sbt
+++ b/examples/scala-parallel-recommendation/custom-query/data/build.sbt
@@ -5,4 +5,4 @@ organization := "org.template.recommendation"
 def provided  (deps: ModuleID*): Seq[ModuleID] = deps map (_ % "provided")
 
 libraryDependencies ++= provided(
-  "io.prediction" % "client" % "0.8.3" withSources() withJavadoc())
+  "org.apache.predictionio" % "client" % "0.8.3" withSources() withJavadoc())

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/scala-parallel-recommendation/custom-query/data/src/main/scala/org/template/recommendation/ImportDataScript.scala
----------------------------------------------------------------------
diff --git a/examples/scala-parallel-recommendation/custom-query/data/src/main/scala/org/template/recommendation/ImportDataScript.scala b/examples/scala-parallel-recommendation/custom-query/data/src/main/scala/org/template/recommendation/ImportDataScript.scala
index 04abc2d..0278571 100644
--- a/examples/scala-parallel-recommendation/custom-query/data/src/main/scala/org/template/recommendation/ImportDataScript.scala
+++ b/examples/scala-parallel-recommendation/custom-query/data/src/main/scala/org/template/recommendation/ImportDataScript.scala
@@ -1,6 +1,6 @@
 package org.template.recommendation
 
-import io.prediction.{Event, EventClient
+import org.apache.predictionio.{Event, EventClient
 }
 import scala.collection.JavaConverters._
 

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/scala-parallel-recommendation/custom-query/src/main/scala/ALSAlgorithm.scala
----------------------------------------------------------------------
diff --git a/examples/scala-parallel-recommendation/custom-query/src/main/scala/ALSAlgorithm.scala b/examples/scala-parallel-recommendation/custom-query/src/main/scala/ALSAlgorithm.scala
index 3a2e903..748c310 100644
--- a/examples/scala-parallel-recommendation/custom-query/src/main/scala/ALSAlgorithm.scala
+++ b/examples/scala-parallel-recommendation/custom-query/src/main/scala/ALSAlgorithm.scala
@@ -1,8 +1,8 @@
 package org.template.recommendation
 
-import io.prediction.controller.PAlgorithm
-import io.prediction.controller.Params
-import io.prediction.data.storage.BiMap
+import org.apache.predictionio.controller.PAlgorithm
+import org.apache.predictionio.controller.Params
+import org.apache.predictionio.data.storage.BiMap
 
 import org.apache.spark.SparkContext._
 import org.apache.spark.mllib.recommendation.ALS

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/scala-parallel-recommendation/custom-query/src/main/scala/ALSModel.scala
----------------------------------------------------------------------
diff --git a/examples/scala-parallel-recommendation/custom-query/src/main/scala/ALSModel.scala b/examples/scala-parallel-recommendation/custom-query/src/main/scala/ALSModel.scala
index 8e6201a..d5a2adb 100644
--- a/examples/scala-parallel-recommendation/custom-query/src/main/scala/ALSModel.scala
+++ b/examples/scala-parallel-recommendation/custom-query/src/main/scala/ALSModel.scala
@@ -1,8 +1,8 @@
 package org.template.recommendation
 
-import io.prediction.controller.IPersistentModel
-import io.prediction.controller.IPersistentModelLoader
-import io.prediction.data.storage.BiMap
+import org.apache.predictionio.controller.IPersistentModel
+import org.apache.predictionio.controller.IPersistentModelLoader
+import org.apache.predictionio.data.storage.BiMap
 
 import org.apache.spark.SparkContext
 import org.apache.spark.rdd.RDD

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/scala-parallel-recommendation/custom-query/src/main/scala/DataSource.scala
----------------------------------------------------------------------
diff --git a/examples/scala-parallel-recommendation/custom-query/src/main/scala/DataSource.scala b/examples/scala-parallel-recommendation/custom-query/src/main/scala/DataSource.scala
index 0221e25..afb7cdf 100644
--- a/examples/scala-parallel-recommendation/custom-query/src/main/scala/DataSource.scala
+++ b/examples/scala-parallel-recommendation/custom-query/src/main/scala/DataSource.scala
@@ -1,10 +1,10 @@
 package org.template.recommendation
 
-import io.prediction.controller.PDataSource
-import io.prediction.controller.EmptyEvaluationInfo
-import io.prediction.controller.EmptyActualResult
-import io.prediction.controller.Params
-import io.prediction.data.storage.{DataMap, Event, Storage}
+import org.apache.predictionio.controller.PDataSource
+import org.apache.predictionio.controller.EmptyEvaluationInfo
+import org.apache.predictionio.controller.EmptyActualResult
+import org.apache.predictionio.controller.Params
+import org.apache.predictionio.data.storage.{DataMap, Event, Storage}
 
 import org.apache.spark.SparkContext
 import org.apache.spark.rdd.RDD

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/scala-parallel-recommendation/custom-query/src/main/scala/Engine.scala
----------------------------------------------------------------------
diff --git a/examples/scala-parallel-recommendation/custom-query/src/main/scala/Engine.scala b/examples/scala-parallel-recommendation/custom-query/src/main/scala/Engine.scala
index 8982d94..751dc9b 100644
--- a/examples/scala-parallel-recommendation/custom-query/src/main/scala/Engine.scala
+++ b/examples/scala-parallel-recommendation/custom-query/src/main/scala/Engine.scala
@@ -1,7 +1,7 @@
 package org.template.recommendation
 
-import io.prediction.controller.IEngineFactory
-import io.prediction.controller.Engine
+import org.apache.predictionio.controller.IEngineFactory
+import org.apache.predictionio.controller.Engine
 
 case class Query(user: String, num: Int, creationYear: Option[Int] = None)
 

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/scala-parallel-recommendation/custom-query/src/main/scala/Preparator.scala
----------------------------------------------------------------------
diff --git a/examples/scala-parallel-recommendation/custom-query/src/main/scala/Preparator.scala b/examples/scala-parallel-recommendation/custom-query/src/main/scala/Preparator.scala
index 02a9ce5..3625b95 100644
--- a/examples/scala-parallel-recommendation/custom-query/src/main/scala/Preparator.scala
+++ b/examples/scala-parallel-recommendation/custom-query/src/main/scala/Preparator.scala
@@ -1,6 +1,6 @@
 package org.template.recommendation
 
-import io.prediction.controller.PPreparator
+import org.apache.predictionio.controller.PPreparator
 
 import org.apache.spark.SparkContext
 import org.apache.spark.rdd.RDD

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/scala-parallel-recommendation/custom-query/src/main/scala/Serving.scala
----------------------------------------------------------------------
diff --git a/examples/scala-parallel-recommendation/custom-query/src/main/scala/Serving.scala b/examples/scala-parallel-recommendation/custom-query/src/main/scala/Serving.scala
index 6b3df76..19f591a 100644
--- a/examples/scala-parallel-recommendation/custom-query/src/main/scala/Serving.scala
+++ b/examples/scala-parallel-recommendation/custom-query/src/main/scala/Serving.scala
@@ -1,6 +1,6 @@
 package org.template.recommendation
 
-import io.prediction.controller.LServing
+import org.apache.predictionio.controller.LServing
 
 class Serving extends LServing[Query, PredictedResult] {
 

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/scala-parallel-recommendation/custom-serving/build.sbt
----------------------------------------------------------------------
diff --git a/examples/scala-parallel-recommendation/custom-serving/build.sbt b/examples/scala-parallel-recommendation/custom-serving/build.sbt
index 6e2892e..de3bac1 100644
--- a/examples/scala-parallel-recommendation/custom-serving/build.sbt
+++ b/examples/scala-parallel-recommendation/custom-serving/build.sbt
@@ -4,9 +4,9 @@ assemblySettings
 
 name := "template-scala-parallel-recommendation"
 
-organization := "io.prediction"
+organization := "org.apache.predictionio"
 
 libraryDependencies ++= Seq(
-  "io.prediction"    %% "core"          % pioVersion.value % "provided",
+  "org.apache.predictionio"    %% "core"          % pioVersion.value % "provided",
   "org.apache.spark" %% "spark-core"    % "1.2.0" % "provided",
   "org.apache.spark" %% "spark-mllib"   % "1.2.0" % "provided")

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/scala-parallel-recommendation/custom-serving/project/pio-build.sbt
----------------------------------------------------------------------
diff --git a/examples/scala-parallel-recommendation/custom-serving/project/pio-build.sbt b/examples/scala-parallel-recommendation/custom-serving/project/pio-build.sbt
index 8346a96..9aed0ee 100644
--- a/examples/scala-parallel-recommendation/custom-serving/project/pio-build.sbt
+++ b/examples/scala-parallel-recommendation/custom-serving/project/pio-build.sbt
@@ -1 +1 @@
-addSbtPlugin("io.prediction" % "pio-build" % "0.9.0")
+addSbtPlugin("org.apache.predictionio" % "pio-build" % "0.9.0")

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/scala-parallel-recommendation/custom-serving/src/main/scala/ALSAlgorithm.scala
----------------------------------------------------------------------
diff --git a/examples/scala-parallel-recommendation/custom-serving/src/main/scala/ALSAlgorithm.scala b/examples/scala-parallel-recommendation/custom-serving/src/main/scala/ALSAlgorithm.scala
index 0c0db22..6256e9b 100644
--- a/examples/scala-parallel-recommendation/custom-serving/src/main/scala/ALSAlgorithm.scala
+++ b/examples/scala-parallel-recommendation/custom-serving/src/main/scala/ALSAlgorithm.scala
@@ -1,8 +1,8 @@
 package org.template.recommendation
 
-import io.prediction.controller.PAlgorithm
-import io.prediction.controller.Params
-import io.prediction.data.storage.BiMap
+import org.apache.predictionio.controller.PAlgorithm
+import org.apache.predictionio.controller.Params
+import org.apache.predictionio.data.storage.BiMap
 
 import org.apache.spark.SparkContext
 import org.apache.spark.SparkContext._

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/scala-parallel-recommendation/custom-serving/src/main/scala/ALSModel.scala
----------------------------------------------------------------------
diff --git a/examples/scala-parallel-recommendation/custom-serving/src/main/scala/ALSModel.scala b/examples/scala-parallel-recommendation/custom-serving/src/main/scala/ALSModel.scala
index f0c7b7b..243c1d1 100644
--- a/examples/scala-parallel-recommendation/custom-serving/src/main/scala/ALSModel.scala
+++ b/examples/scala-parallel-recommendation/custom-serving/src/main/scala/ALSModel.scala
@@ -5,9 +5,9 @@ package org.apache.spark.mllib.recommendation
 
 import org.template.recommendation.ALSAlgorithmParams
 
-import io.prediction.controller.IPersistentModel
-import io.prediction.controller.IPersistentModelLoader
-import io.prediction.data.storage.BiMap
+import org.apache.predictionio.controller.IPersistentModel
+import org.apache.predictionio.controller.IPersistentModelLoader
+import org.apache.predictionio.data.storage.BiMap
 
 import org.apache.spark.SparkContext
 import org.apache.spark.SparkContext._

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/scala-parallel-recommendation/custom-serving/src/main/scala/DataSource.scala
----------------------------------------------------------------------
diff --git a/examples/scala-parallel-recommendation/custom-serving/src/main/scala/DataSource.scala b/examples/scala-parallel-recommendation/custom-serving/src/main/scala/DataSource.scala
index bad619c..c730d96 100644
--- a/examples/scala-parallel-recommendation/custom-serving/src/main/scala/DataSource.scala
+++ b/examples/scala-parallel-recommendation/custom-serving/src/main/scala/DataSource.scala
@@ -1,11 +1,11 @@
 package org.template.recommendation
 
-import io.prediction.controller.PDataSource
-import io.prediction.controller.EmptyEvaluationInfo
-import io.prediction.controller.EmptyActualResult
-import io.prediction.controller.Params
-import io.prediction.data.storage.Event
-import io.prediction.data.storage.Storage
+import org.apache.predictionio.controller.PDataSource
+import org.apache.predictionio.controller.EmptyEvaluationInfo
+import org.apache.predictionio.controller.EmptyActualResult
+import org.apache.predictionio.controller.Params
+import org.apache.predictionio.data.storage.Event
+import org.apache.predictionio.data.storage.Storage
 
 import org.apache.spark.SparkContext
 import org.apache.spark.SparkContext._

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/scala-parallel-recommendation/custom-serving/src/main/scala/Engine.scala
----------------------------------------------------------------------
diff --git a/examples/scala-parallel-recommendation/custom-serving/src/main/scala/Engine.scala b/examples/scala-parallel-recommendation/custom-serving/src/main/scala/Engine.scala
index edb7767..4702fe3 100644
--- a/examples/scala-parallel-recommendation/custom-serving/src/main/scala/Engine.scala
+++ b/examples/scala-parallel-recommendation/custom-serving/src/main/scala/Engine.scala
@@ -1,7 +1,7 @@
 package org.template.recommendation
 
-import io.prediction.controller.IEngineFactory
-import io.prediction.controller.Engine
+import org.apache.predictionio.controller.IEngineFactory
+import org.apache.predictionio.controller.Engine
 
 case class Query(
   user: String,

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/scala-parallel-recommendation/custom-serving/src/main/scala/Preparator.scala
----------------------------------------------------------------------
diff --git a/examples/scala-parallel-recommendation/custom-serving/src/main/scala/Preparator.scala b/examples/scala-parallel-recommendation/custom-serving/src/main/scala/Preparator.scala
index 7b6c7c9..df0f721 100644
--- a/examples/scala-parallel-recommendation/custom-serving/src/main/scala/Preparator.scala
+++ b/examples/scala-parallel-recommendation/custom-serving/src/main/scala/Preparator.scala
@@ -1,6 +1,6 @@
 package org.template.recommendation
 
-import io.prediction.controller.PPreparator
+import org.apache.predictionio.controller.PPreparator
 
 import org.apache.spark.SparkContext
 import org.apache.spark.SparkContext._

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/scala-parallel-recommendation/custom-serving/src/main/scala/Serving.scala
----------------------------------------------------------------------
diff --git a/examples/scala-parallel-recommendation/custom-serving/src/main/scala/Serving.scala b/examples/scala-parallel-recommendation/custom-serving/src/main/scala/Serving.scala
index 99cd180..42c1b21 100644
--- a/examples/scala-parallel-recommendation/custom-serving/src/main/scala/Serving.scala
+++ b/examples/scala-parallel-recommendation/custom-serving/src/main/scala/Serving.scala
@@ -1,10 +1,10 @@
 package org.template.recommendation
 
-import io.prediction.controller.LServing
+import org.apache.predictionio.controller.LServing
 
 import scala.io.Source
 
-import io.prediction.controller.Params  // ADDED
+import org.apache.predictionio.controller.Params  // ADDED
 
 // ADDED ServingParams to specify the blacklisting file location.
 case class ServingParams(filepath: String) extends Params

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/scala-parallel-recommendation/filter-by-category/build.sbt
----------------------------------------------------------------------
diff --git a/examples/scala-parallel-recommendation/filter-by-category/build.sbt b/examples/scala-parallel-recommendation/filter-by-category/build.sbt
index 55e7f2f..de3bac1 100644
--- a/examples/scala-parallel-recommendation/filter-by-category/build.sbt
+++ b/examples/scala-parallel-recommendation/filter-by-category/build.sbt
@@ -4,9 +4,9 @@ assemblySettings
 
 name := "template-scala-parallel-recommendation"
 
-organization := "io.prediction"
+organization := "org.apache.predictionio"
 
 libraryDependencies ++= Seq(
-  "io.prediction"    %% "core"          % pioVersion.value % "provided",
+  "org.apache.predictionio"    %% "core"          % pioVersion.value % "provided",
   "org.apache.spark" %% "spark-core"    % "1.2.0" % "provided",
-  "org.apache.spark" %% "spark-mllib"   % "1.2.0" % "provided")
\ No newline at end of file
+  "org.apache.spark" %% "spark-mllib"   % "1.2.0" % "provided")

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/scala-parallel-recommendation/filter-by-category/project/pio-build.sbt
----------------------------------------------------------------------
diff --git a/examples/scala-parallel-recommendation/filter-by-category/project/pio-build.sbt b/examples/scala-parallel-recommendation/filter-by-category/project/pio-build.sbt
index 8346a96..9aed0ee 100644
--- a/examples/scala-parallel-recommendation/filter-by-category/project/pio-build.sbt
+++ b/examples/scala-parallel-recommendation/filter-by-category/project/pio-build.sbt
@@ -1 +1 @@
-addSbtPlugin("io.prediction" % "pio-build" % "0.9.0")
+addSbtPlugin("org.apache.predictionio" % "pio-build" % "0.9.0")

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/scala-parallel-recommendation/filter-by-category/src/main/scala/ALSAlgorithm.scala
----------------------------------------------------------------------
diff --git a/examples/scala-parallel-recommendation/filter-by-category/src/main/scala/ALSAlgorithm.scala b/examples/scala-parallel-recommendation/filter-by-category/src/main/scala/ALSAlgorithm.scala
index b3107e2..72a3bc5 100644
--- a/examples/scala-parallel-recommendation/filter-by-category/src/main/scala/ALSAlgorithm.scala
+++ b/examples/scala-parallel-recommendation/filter-by-category/src/main/scala/ALSAlgorithm.scala
@@ -1,8 +1,8 @@
 package org.template.recommendation
 
-import io.prediction.controller.PAlgorithm
-import io.prediction.controller.Params
-import io.prediction.data.storage.BiMap
+import org.apache.predictionio.controller.PAlgorithm
+import org.apache.predictionio.controller.Params
+import org.apache.predictionio.data.storage.BiMap
 
 import org.apache.spark.SparkContext
 import org.apache.spark.SparkContext._

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/scala-parallel-recommendation/filter-by-category/src/main/scala/ALSModel.scala
----------------------------------------------------------------------
diff --git a/examples/scala-parallel-recommendation/filter-by-category/src/main/scala/ALSModel.scala b/examples/scala-parallel-recommendation/filter-by-category/src/main/scala/ALSModel.scala
index 56d4f57..26bbe17 100644
--- a/examples/scala-parallel-recommendation/filter-by-category/src/main/scala/ALSModel.scala
+++ b/examples/scala-parallel-recommendation/filter-by-category/src/main/scala/ALSModel.scala
@@ -6,9 +6,9 @@ package org.apache.spark.mllib.recommendation
 import org.jblas.DoubleMatrix
 import org.template.recommendation.ALSAlgorithmParams
 
-import io.prediction.controller.IPersistentModel
-import io.prediction.controller.IPersistentModelLoader
-import io.prediction.data.storage.BiMap
+import org.apache.predictionio.controller.IPersistentModel
+import org.apache.predictionio.controller.IPersistentModelLoader
+import org.apache.predictionio.data.storage.BiMap
 
 import org.apache.spark.SparkContext
 import org.apache.spark.SparkContext._

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/scala-parallel-recommendation/filter-by-category/src/main/scala/DataSource.scala
----------------------------------------------------------------------
diff --git a/examples/scala-parallel-recommendation/filter-by-category/src/main/scala/DataSource.scala b/examples/scala-parallel-recommendation/filter-by-category/src/main/scala/DataSource.scala
index b5ff72a..75a4dfd 100644
--- a/examples/scala-parallel-recommendation/filter-by-category/src/main/scala/DataSource.scala
+++ b/examples/scala-parallel-recommendation/filter-by-category/src/main/scala/DataSource.scala
@@ -1,11 +1,11 @@
 package org.template.recommendation
 
-import io.prediction.controller.PDataSource
-import io.prediction.controller.EmptyEvaluationInfo
-import io.prediction.controller.EmptyActualResult
-import io.prediction.controller.Params
-import io.prediction.data.storage.Event
-import io.prediction.data.storage.Storage
+import org.apache.predictionio.controller.PDataSource
+import org.apache.predictionio.controller.EmptyEvaluationInfo
+import org.apache.predictionio.controller.EmptyActualResult
+import org.apache.predictionio.controller.Params
+import org.apache.predictionio.data.storage.Event
+import org.apache.predictionio.data.storage.Storage
 
 import org.apache.spark.SparkContext
 import org.apache.spark.SparkContext._

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/scala-parallel-recommendation/filter-by-category/src/main/scala/Engine.scala
----------------------------------------------------------------------
diff --git a/examples/scala-parallel-recommendation/filter-by-category/src/main/scala/Engine.scala b/examples/scala-parallel-recommendation/filter-by-category/src/main/scala/Engine.scala
index c922d05..96f5a1c 100644
--- a/examples/scala-parallel-recommendation/filter-by-category/src/main/scala/Engine.scala
+++ b/examples/scala-parallel-recommendation/filter-by-category/src/main/scala/Engine.scala
@@ -1,7 +1,7 @@
 package org.template.recommendation
 
-import io.prediction.controller.IEngineFactory
-import io.prediction.controller.Engine
+import org.apache.predictionio.controller.IEngineFactory
+import org.apache.predictionio.controller.Engine
 
 case class Query(
   user: String,

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/scala-parallel-recommendation/filter-by-category/src/main/scala/Preparator.scala
----------------------------------------------------------------------
diff --git a/examples/scala-parallel-recommendation/filter-by-category/src/main/scala/Preparator.scala b/examples/scala-parallel-recommendation/filter-by-category/src/main/scala/Preparator.scala
index ca6efd8..4a0bb79 100644
--- a/examples/scala-parallel-recommendation/filter-by-category/src/main/scala/Preparator.scala
+++ b/examples/scala-parallel-recommendation/filter-by-category/src/main/scala/Preparator.scala
@@ -1,6 +1,6 @@
 package org.template.recommendation
 
-import io.prediction.controller.PPreparator
+import org.apache.predictionio.controller.PPreparator
 
 import org.apache.spark.SparkContext
 import org.apache.spark.SparkContext._

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/scala-parallel-recommendation/filter-by-category/src/main/scala/Serving.scala
----------------------------------------------------------------------
diff --git a/examples/scala-parallel-recommendation/filter-by-category/src/main/scala/Serving.scala b/examples/scala-parallel-recommendation/filter-by-category/src/main/scala/Serving.scala
index c86fe29..319b7dc 100644
--- a/examples/scala-parallel-recommendation/filter-by-category/src/main/scala/Serving.scala
+++ b/examples/scala-parallel-recommendation/filter-by-category/src/main/scala/Serving.scala
@@ -1,6 +1,6 @@
 package org.template.recommendation
 
-import io.prediction.controller.LServing
+import org.apache.predictionio.controller.LServing
 
 class Serving
   extends LServing[Query, PredictedResult] {

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/scala-parallel-similarproduct/add-and-return-item-properties/build.sbt
----------------------------------------------------------------------
diff --git a/examples/scala-parallel-similarproduct/add-and-return-item-properties/build.sbt b/examples/scala-parallel-similarproduct/add-and-return-item-properties/build.sbt
index 558c1e7..62b1b9b 100644
--- a/examples/scala-parallel-similarproduct/add-and-return-item-properties/build.sbt
+++ b/examples/scala-parallel-similarproduct/add-and-return-item-properties/build.sbt
@@ -4,9 +4,9 @@ assemblySettings
 
 name := "template-scala-parallel-similarproduct"
 
-organization := "io.prediction"
+organization := "org.apache.predictionio"
 
 libraryDependencies ++= Seq(
-  "io.prediction"    %% "core"          % pioVersion.value % "provided",
+  "org.apache.predictionio"    %% "core"          % pioVersion.value % "provided",
   "org.apache.spark" %% "spark-core"    % "1.2.0" % "provided",
-  "org.apache.spark" %% "spark-mllib"   % "1.2.0" % "provided")
\ No newline at end of file
+  "org.apache.spark" %% "spark-mllib"   % "1.2.0" % "provided")

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/scala-parallel-similarproduct/add-and-return-item-properties/project/pio-build.sbt
----------------------------------------------------------------------
diff --git a/examples/scala-parallel-similarproduct/add-and-return-item-properties/project/pio-build.sbt b/examples/scala-parallel-similarproduct/add-and-return-item-properties/project/pio-build.sbt
index 8346a96..9aed0ee 100644
--- a/examples/scala-parallel-similarproduct/add-and-return-item-properties/project/pio-build.sbt
+++ b/examples/scala-parallel-similarproduct/add-and-return-item-properties/project/pio-build.sbt
@@ -1 +1 @@
-addSbtPlugin("io.prediction" % "pio-build" % "0.9.0")
+addSbtPlugin("org.apache.predictionio" % "pio-build" % "0.9.0")

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/scala-parallel-similarproduct/add-and-return-item-properties/src/main/scala/ALSAlgorithm.scala
----------------------------------------------------------------------
diff --git a/examples/scala-parallel-similarproduct/add-and-return-item-properties/src/main/scala/ALSAlgorithm.scala b/examples/scala-parallel-similarproduct/add-and-return-item-properties/src/main/scala/ALSAlgorithm.scala
index 1effe07..1041abc 100644
--- a/examples/scala-parallel-similarproduct/add-and-return-item-properties/src/main/scala/ALSAlgorithm.scala
+++ b/examples/scala-parallel-similarproduct/add-and-return-item-properties/src/main/scala/ALSAlgorithm.scala
@@ -1,8 +1,8 @@
 package org.template.similarproduct
 
-import io.prediction.controller.P2LAlgorithm
-import io.prediction.controller.Params
-import io.prediction.data.storage.BiMap
+import org.apache.predictionio.controller.P2LAlgorithm
+import org.apache.predictionio.controller.Params
+import org.apache.predictionio.data.storage.BiMap
 
 import org.apache.spark.SparkContext
 import org.apache.spark.SparkContext._

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/scala-parallel-similarproduct/add-and-return-item-properties/src/main/scala/DataSource.scala
----------------------------------------------------------------------
diff --git a/examples/scala-parallel-similarproduct/add-and-return-item-properties/src/main/scala/DataSource.scala b/examples/scala-parallel-similarproduct/add-and-return-item-properties/src/main/scala/DataSource.scala
index 748c132..4c65aec 100644
--- a/examples/scala-parallel-similarproduct/add-and-return-item-properties/src/main/scala/DataSource.scala
+++ b/examples/scala-parallel-similarproduct/add-and-return-item-properties/src/main/scala/DataSource.scala
@@ -1,11 +1,11 @@
 package org.template.similarproduct
 
-import io.prediction.controller.PDataSource
-import io.prediction.controller.EmptyEvaluationInfo
-import io.prediction.controller.EmptyActualResult
-import io.prediction.controller.Params
-import io.prediction.data.storage.Event
-import io.prediction.data.storage.Storage
+import org.apache.predictionio.controller.PDataSource
+import org.apache.predictionio.controller.EmptyEvaluationInfo
+import org.apache.predictionio.controller.EmptyActualResult
+import org.apache.predictionio.controller.Params
+import org.apache.predictionio.data.storage.Event
+import org.apache.predictionio.data.storage.Storage
 
 import org.apache.spark.SparkContext
 import org.apache.spark.SparkContext._

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/scala-parallel-similarproduct/add-and-return-item-properties/src/main/scala/Engine.scala
----------------------------------------------------------------------
diff --git a/examples/scala-parallel-similarproduct/add-and-return-item-properties/src/main/scala/Engine.scala b/examples/scala-parallel-similarproduct/add-and-return-item-properties/src/main/scala/Engine.scala
index 518467b..276f696 100644
--- a/examples/scala-parallel-similarproduct/add-and-return-item-properties/src/main/scala/Engine.scala
+++ b/examples/scala-parallel-similarproduct/add-and-return-item-properties/src/main/scala/Engine.scala
@@ -1,7 +1,7 @@
 package org.template.similarproduct
 
-import io.prediction.controller.IEngineFactory
-import io.prediction.controller.Engine
+import org.apache.predictionio.controller.IEngineFactory
+import org.apache.predictionio.controller.Engine
 
 case class Query(
   items: List[String],

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/scala-parallel-similarproduct/add-and-return-item-properties/src/main/scala/Preparator.scala
----------------------------------------------------------------------
diff --git a/examples/scala-parallel-similarproduct/add-and-return-item-properties/src/main/scala/Preparator.scala b/examples/scala-parallel-similarproduct/add-and-return-item-properties/src/main/scala/Preparator.scala
index 6408909..17f621e 100644
--- a/examples/scala-parallel-similarproduct/add-and-return-item-properties/src/main/scala/Preparator.scala
+++ b/examples/scala-parallel-similarproduct/add-and-return-item-properties/src/main/scala/Preparator.scala
@@ -1,6 +1,6 @@
 package org.template.similarproduct
 
-import io.prediction.controller.PPreparator
+import org.apache.predictionio.controller.PPreparator
 
 import org.apache.spark.SparkContext
 import org.apache.spark.SparkContext._

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/scala-parallel-similarproduct/add-and-return-item-properties/src/main/scala/Serving.scala
----------------------------------------------------------------------
diff --git a/examples/scala-parallel-similarproduct/add-and-return-item-properties/src/main/scala/Serving.scala b/examples/scala-parallel-similarproduct/add-and-return-item-properties/src/main/scala/Serving.scala
index a6b75a4..4b940fe 100644
--- a/examples/scala-parallel-similarproduct/add-and-return-item-properties/src/main/scala/Serving.scala
+++ b/examples/scala-parallel-similarproduct/add-and-return-item-properties/src/main/scala/Serving.scala
@@ -1,6 +1,6 @@
 package org.template.similarproduct
 
-import io.prediction.controller.LServing
+import org.apache.predictionio.controller.LServing
 
 class Serving
   extends LServing[Query, PredictedResult] {

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/scala-parallel-similarproduct/add-rateevent/build.sbt
----------------------------------------------------------------------
diff --git a/examples/scala-parallel-similarproduct/add-rateevent/build.sbt b/examples/scala-parallel-similarproduct/add-rateevent/build.sbt
index a52f698..62b1b9b 100644
--- a/examples/scala-parallel-similarproduct/add-rateevent/build.sbt
+++ b/examples/scala-parallel-similarproduct/add-rateevent/build.sbt
@@ -4,9 +4,9 @@ assemblySettings
 
 name := "template-scala-parallel-similarproduct"
 
-organization := "io.prediction"
+organization := "org.apache.predictionio"
 
 libraryDependencies ++= Seq(
-  "io.prediction"    %% "core"          % pioVersion.value % "provided",
+  "org.apache.predictionio"    %% "core"          % pioVersion.value % "provided",
   "org.apache.spark" %% "spark-core"    % "1.2.0" % "provided",
   "org.apache.spark" %% "spark-mllib"   % "1.2.0" % "provided")

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/scala-parallel-similarproduct/add-rateevent/project/pio-build.sbt
----------------------------------------------------------------------
diff --git a/examples/scala-parallel-similarproduct/add-rateevent/project/pio-build.sbt b/examples/scala-parallel-similarproduct/add-rateevent/project/pio-build.sbt
index 8346a96..9aed0ee 100644
--- a/examples/scala-parallel-similarproduct/add-rateevent/project/pio-build.sbt
+++ b/examples/scala-parallel-similarproduct/add-rateevent/project/pio-build.sbt
@@ -1 +1 @@
-addSbtPlugin("io.prediction" % "pio-build" % "0.9.0")
+addSbtPlugin("org.apache.predictionio" % "pio-build" % "0.9.0")

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/scala-parallel-similarproduct/add-rateevent/src/main/scala/ALSAlgorithm.scala
----------------------------------------------------------------------
diff --git a/examples/scala-parallel-similarproduct/add-rateevent/src/main/scala/ALSAlgorithm.scala b/examples/scala-parallel-similarproduct/add-rateevent/src/main/scala/ALSAlgorithm.scala
index 45afed1..04f27f5 100644
--- a/examples/scala-parallel-similarproduct/add-rateevent/src/main/scala/ALSAlgorithm.scala
+++ b/examples/scala-parallel-similarproduct/add-rateevent/src/main/scala/ALSAlgorithm.scala
@@ -1,8 +1,8 @@
 package org.template.similarproduct
 
-import io.prediction.controller.P2LAlgorithm
-import io.prediction.controller.Params
-import io.prediction.data.storage.BiMap
+import org.apache.predictionio.controller.P2LAlgorithm
+import org.apache.predictionio.controller.Params
+import org.apache.predictionio.data.storage.BiMap
 
 import org.apache.spark.SparkContext
 import org.apache.spark.SparkContext._

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/scala-parallel-similarproduct/add-rateevent/src/main/scala/DataSource.scala
----------------------------------------------------------------------
diff --git a/examples/scala-parallel-similarproduct/add-rateevent/src/main/scala/DataSource.scala b/examples/scala-parallel-similarproduct/add-rateevent/src/main/scala/DataSource.scala
index 3578552..39de636 100644
--- a/examples/scala-parallel-similarproduct/add-rateevent/src/main/scala/DataSource.scala
+++ b/examples/scala-parallel-similarproduct/add-rateevent/src/main/scala/DataSource.scala
@@ -1,11 +1,11 @@
 package org.template.similarproduct
 
-import io.prediction.controller.PDataSource
-import io.prediction.controller.EmptyEvaluationInfo
-import io.prediction.controller.EmptyActualResult
-import io.prediction.controller.Params
-import io.prediction.data.storage.Event
-import io.prediction.data.storage.Storage
+import org.apache.predictionio.controller.PDataSource
+import org.apache.predictionio.controller.EmptyEvaluationInfo
+import org.apache.predictionio.controller.EmptyActualResult
+import org.apache.predictionio.controller.Params
+import org.apache.predictionio.data.storage.Event
+import org.apache.predictionio.data.storage.Storage
 
 import org.apache.spark.SparkContext
 import org.apache.spark.SparkContext._

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/scala-parallel-similarproduct/add-rateevent/src/main/scala/Engine.scala
----------------------------------------------------------------------
diff --git a/examples/scala-parallel-similarproduct/add-rateevent/src/main/scala/Engine.scala b/examples/scala-parallel-similarproduct/add-rateevent/src/main/scala/Engine.scala
index 8d594fc..7975530 100644
--- a/examples/scala-parallel-similarproduct/add-rateevent/src/main/scala/Engine.scala
+++ b/examples/scala-parallel-similarproduct/add-rateevent/src/main/scala/Engine.scala
@@ -1,7 +1,7 @@
 package org.template.similarproduct
 
-import io.prediction.controller.IEngineFactory
-import io.prediction.controller.Engine
+import org.apache.predictionio.controller.IEngineFactory
+import org.apache.predictionio.controller.Engine
 
 case class Query(
   items: List[String],

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/scala-parallel-similarproduct/add-rateevent/src/main/scala/Preparator.scala
----------------------------------------------------------------------
diff --git a/examples/scala-parallel-similarproduct/add-rateevent/src/main/scala/Preparator.scala b/examples/scala-parallel-similarproduct/add-rateevent/src/main/scala/Preparator.scala
index 1103919..f59606c 100644
--- a/examples/scala-parallel-similarproduct/add-rateevent/src/main/scala/Preparator.scala
+++ b/examples/scala-parallel-similarproduct/add-rateevent/src/main/scala/Preparator.scala
@@ -1,6 +1,6 @@
 package org.template.similarproduct
 
-import io.prediction.controller.PPreparator
+import org.apache.predictionio.controller.PPreparator
 
 import org.apache.spark.SparkContext
 import org.apache.spark.SparkContext._

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/scala-parallel-similarproduct/add-rateevent/src/main/scala/Serving.scala
----------------------------------------------------------------------
diff --git a/examples/scala-parallel-similarproduct/add-rateevent/src/main/scala/Serving.scala b/examples/scala-parallel-similarproduct/add-rateevent/src/main/scala/Serving.scala
index 7a7f124..8543796 100644
--- a/examples/scala-parallel-similarproduct/add-rateevent/src/main/scala/Serving.scala
+++ b/examples/scala-parallel-similarproduct/add-rateevent/src/main/scala/Serving.scala
@@ -1,6 +1,6 @@
 package org.template.similarproduct
 
-import io.prediction.controller.LServing
+import org.apache.predictionio.controller.LServing
 
 class Serving
   extends LServing[Query, PredictedResult] {

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/scala-parallel-similarproduct/filterbyyear/build.sbt
----------------------------------------------------------------------
diff --git a/examples/scala-parallel-similarproduct/filterbyyear/build.sbt b/examples/scala-parallel-similarproduct/filterbyyear/build.sbt
index 00e9d6f..f23adb6 100644
--- a/examples/scala-parallel-similarproduct/filterbyyear/build.sbt
+++ b/examples/scala-parallel-similarproduct/filterbyyear/build.sbt
@@ -4,9 +4,9 @@ assemblySettings
 
 name := "template-scala-parallel-similarproduct"
 
-organization := "io.prediction"
+organization := "org.apache.predictionio"
 
 libraryDependencies ++= Seq(
-  "io.prediction"    %% "core"          % "0.8.6" % "provided",
+  "org.apache.predictionio"    %% "core"          % "0.8.6" % "provided",
   "org.apache.spark" %% "spark-core"    % "1.2.0" % "provided",
-  "org.apache.spark" %% "spark-mllib"   % "1.2.0" % "provided")
\ No newline at end of file
+  "org.apache.spark" %% "spark-mllib"   % "1.2.0" % "provided")

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/scala-parallel-similarproduct/filterbyyear/src/main/scala/ALSAlgorithm.scala
----------------------------------------------------------------------
diff --git a/examples/scala-parallel-similarproduct/filterbyyear/src/main/scala/ALSAlgorithm.scala b/examples/scala-parallel-similarproduct/filterbyyear/src/main/scala/ALSAlgorithm.scala
index 6462552..3fd6f9c 100644
--- a/examples/scala-parallel-similarproduct/filterbyyear/src/main/scala/ALSAlgorithm.scala
+++ b/examples/scala-parallel-similarproduct/filterbyyear/src/main/scala/ALSAlgorithm.scala
@@ -1,8 +1,8 @@
 package com.test
 
-import io.prediction.controller.P2LAlgorithm
-import io.prediction.controller.Params
-import io.prediction.data.storage.BiMap
+import org.apache.predictionio.controller.P2LAlgorithm
+import org.apache.predictionio.controller.Params
+import org.apache.predictionio.data.storage.BiMap
 
 import org.apache.spark.SparkContext
 import org.apache.spark.SparkContext._

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/scala-parallel-similarproduct/filterbyyear/src/main/scala/DataSource.scala
----------------------------------------------------------------------
diff --git a/examples/scala-parallel-similarproduct/filterbyyear/src/main/scala/DataSource.scala b/examples/scala-parallel-similarproduct/filterbyyear/src/main/scala/DataSource.scala
index a957579..5328de7 100644
--- a/examples/scala-parallel-similarproduct/filterbyyear/src/main/scala/DataSource.scala
+++ b/examples/scala-parallel-similarproduct/filterbyyear/src/main/scala/DataSource.scala
@@ -1,11 +1,11 @@
 package com.test
 
-import io.prediction.controller.PDataSource
-import io.prediction.controller.EmptyEvaluationInfo
-import io.prediction.controller.EmptyActualResult
-import io.prediction.controller.Params
-import io.prediction.data.storage.Event
-import io.prediction.data.storage.Storage
+import org.apache.predictionio.controller.PDataSource
+import org.apache.predictionio.controller.EmptyEvaluationInfo
+import org.apache.predictionio.controller.EmptyActualResult
+import org.apache.predictionio.controller.Params
+import org.apache.predictionio.data.storage.Event
+import org.apache.predictionio.data.storage.Storage
 
 import org.apache.spark.SparkContext
 import org.apache.spark.SparkContext._

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/scala-parallel-similarproduct/filterbyyear/src/main/scala/Engine.scala
----------------------------------------------------------------------
diff --git a/examples/scala-parallel-similarproduct/filterbyyear/src/main/scala/Engine.scala b/examples/scala-parallel-similarproduct/filterbyyear/src/main/scala/Engine.scala
index d6717a6..be8a997 100644
--- a/examples/scala-parallel-similarproduct/filterbyyear/src/main/scala/Engine.scala
+++ b/examples/scala-parallel-similarproduct/filterbyyear/src/main/scala/Engine.scala
@@ -1,7 +1,7 @@
 package com.test
 
-import io.prediction.controller.IEngineFactory
-import io.prediction.controller.Engine
+import org.apache.predictionio.controller.IEngineFactory
+import org.apache.predictionio.controller.Engine
 
 case class Query(
   items: List[String],

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/scala-parallel-similarproduct/filterbyyear/src/main/scala/Preparator.scala
----------------------------------------------------------------------
diff --git a/examples/scala-parallel-similarproduct/filterbyyear/src/main/scala/Preparator.scala b/examples/scala-parallel-similarproduct/filterbyyear/src/main/scala/Preparator.scala
index 088d71e..d525ac6 100644
--- a/examples/scala-parallel-similarproduct/filterbyyear/src/main/scala/Preparator.scala
+++ b/examples/scala-parallel-similarproduct/filterbyyear/src/main/scala/Preparator.scala
@@ -1,6 +1,6 @@
 package com.test
 
-import io.prediction.controller.PPreparator
+import org.apache.predictionio.controller.PPreparator
 
 import org.apache.spark.SparkContext
 import org.apache.spark.SparkContext._

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/scala-parallel-similarproduct/filterbyyear/src/main/scala/Serving.scala
----------------------------------------------------------------------
diff --git a/examples/scala-parallel-similarproduct/filterbyyear/src/main/scala/Serving.scala b/examples/scala-parallel-similarproduct/filterbyyear/src/main/scala/Serving.scala
index 2e6922d..1b0f4d0 100644
--- a/examples/scala-parallel-similarproduct/filterbyyear/src/main/scala/Serving.scala
+++ b/examples/scala-parallel-similarproduct/filterbyyear/src/main/scala/Serving.scala
@@ -1,6 +1,6 @@
 package com.test
 
-import io.prediction.controller.LServing
+import org.apache.predictionio.controller.LServing
 
 class Serving
   extends LServing[Query, PredictedResult] {

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/scala-parallel-similarproduct/multi/build.sbt
----------------------------------------------------------------------
diff --git a/examples/scala-parallel-similarproduct/multi/build.sbt b/examples/scala-parallel-similarproduct/multi/build.sbt
index 14afc11..ea02365 100644
--- a/examples/scala-parallel-similarproduct/multi/build.sbt
+++ b/examples/scala-parallel-similarproduct/multi/build.sbt
@@ -4,9 +4,9 @@ assemblySettings
 
 name := "template-scala-parallel-similarproduct-multi"
 
-organization := "io.prediction"
+organization := "org.apache.predictionio"
 
 libraryDependencies ++= Seq(
-  "io.prediction"    %% "core"          % pioVersion.value % "provided",
+  "org.apache.predictionio"    %% "core"          % pioVersion.value % "provided",
   "org.apache.spark" %% "spark-core"    % "1.2.0" % "provided",
   "org.apache.spark" %% "spark-mllib"   % "1.2.0" % "provided")

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/scala-parallel-similarproduct/multi/project/pio-build.sbt
----------------------------------------------------------------------
diff --git a/examples/scala-parallel-similarproduct/multi/project/pio-build.sbt b/examples/scala-parallel-similarproduct/multi/project/pio-build.sbt
index 8346a96..9aed0ee 100644
--- a/examples/scala-parallel-similarproduct/multi/project/pio-build.sbt
+++ b/examples/scala-parallel-similarproduct/multi/project/pio-build.sbt
@@ -1 +1 @@
-addSbtPlugin("io.prediction" % "pio-build" % "0.9.0")
+addSbtPlugin("org.apache.predictionio" % "pio-build" % "0.9.0")

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/scala-parallel-similarproduct/multi/src/main/scala/ALSAlgorithm.scala
----------------------------------------------------------------------
diff --git a/examples/scala-parallel-similarproduct/multi/src/main/scala/ALSAlgorithm.scala b/examples/scala-parallel-similarproduct/multi/src/main/scala/ALSAlgorithm.scala
index 15f75f5..7bd5bc8 100644
--- a/examples/scala-parallel-similarproduct/multi/src/main/scala/ALSAlgorithm.scala
+++ b/examples/scala-parallel-similarproduct/multi/src/main/scala/ALSAlgorithm.scala
@@ -1,10 +1,10 @@
 package org.template.similarproduct
 
-import io.prediction.controller.PAlgorithm
-import io.prediction.controller.Params
-import io.prediction.controller.IPersistentModel
-import io.prediction.controller.IPersistentModelLoader
-import io.prediction.data.storage.BiMap
+import org.apache.predictionio.controller.PAlgorithm
+import org.apache.predictionio.controller.Params
+import org.apache.predictionio.controller.IPersistentModel
+import org.apache.predictionio.controller.IPersistentModelLoader
+import org.apache.predictionio.data.storage.BiMap
 
 import org.apache.spark.SparkContext
 import org.apache.spark.SparkContext._

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/scala-parallel-similarproduct/multi/src/main/scala/DataSource.scala
----------------------------------------------------------------------
diff --git a/examples/scala-parallel-similarproduct/multi/src/main/scala/DataSource.scala b/examples/scala-parallel-similarproduct/multi/src/main/scala/DataSource.scala
index 7567c83..a9f63c1 100644
--- a/examples/scala-parallel-similarproduct/multi/src/main/scala/DataSource.scala
+++ b/examples/scala-parallel-similarproduct/multi/src/main/scala/DataSource.scala
@@ -1,11 +1,11 @@
 package org.template.similarproduct
 
-import io.prediction.controller.PDataSource
-import io.prediction.controller.EmptyEvaluationInfo
-import io.prediction.controller.EmptyActualResult
-import io.prediction.controller.Params
-import io.prediction.data.storage.Event
-import io.prediction.data.storage.Storage
+import org.apache.predictionio.controller.PDataSource
+import org.apache.predictionio.controller.EmptyEvaluationInfo
+import org.apache.predictionio.controller.EmptyActualResult
+import org.apache.predictionio.controller.Params
+import org.apache.predictionio.data.storage.Event
+import org.apache.predictionio.data.storage.Storage
 
 import org.apache.spark.SparkContext
 import org.apache.spark.SparkContext._

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/scala-parallel-similarproduct/multi/src/main/scala/Engine.scala
----------------------------------------------------------------------
diff --git a/examples/scala-parallel-similarproduct/multi/src/main/scala/Engine.scala b/examples/scala-parallel-similarproduct/multi/src/main/scala/Engine.scala
index 51a708c..b5fd57c 100644
--- a/examples/scala-parallel-similarproduct/multi/src/main/scala/Engine.scala
+++ b/examples/scala-parallel-similarproduct/multi/src/main/scala/Engine.scala
@@ -1,7 +1,7 @@
 package org.template.similarproduct
 
-import io.prediction.controller.IEngineFactory
-import io.prediction.controller.Engine
+import org.apache.predictionio.controller.IEngineFactory
+import org.apache.predictionio.controller.Engine
 
 case class Query(
   items: List[String],

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/scala-parallel-similarproduct/multi/src/main/scala/LikeAlgorithm.scala
----------------------------------------------------------------------
diff --git a/examples/scala-parallel-similarproduct/multi/src/main/scala/LikeAlgorithm.scala b/examples/scala-parallel-similarproduct/multi/src/main/scala/LikeAlgorithm.scala
index fa4435b..f841187 100644
--- a/examples/scala-parallel-similarproduct/multi/src/main/scala/LikeAlgorithm.scala
+++ b/examples/scala-parallel-similarproduct/multi/src/main/scala/LikeAlgorithm.scala
@@ -1,6 +1,6 @@
 package org.template.similarproduct
 
-import io.prediction.data.storage.BiMap
+import org.apache.predictionio.data.storage.BiMap
 
 import org.apache.spark.SparkContext
 import org.apache.spark.SparkContext._

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/scala-parallel-similarproduct/multi/src/main/scala/Preparator.scala
----------------------------------------------------------------------
diff --git a/examples/scala-parallel-similarproduct/multi/src/main/scala/Preparator.scala b/examples/scala-parallel-similarproduct/multi/src/main/scala/Preparator.scala
index 5a20ae8..e723a9e 100644
--- a/examples/scala-parallel-similarproduct/multi/src/main/scala/Preparator.scala
+++ b/examples/scala-parallel-similarproduct/multi/src/main/scala/Preparator.scala
@@ -1,6 +1,6 @@
 package org.template.similarproduct
 
-import io.prediction.controller.PPreparator
+import org.apache.predictionio.controller.PPreparator
 
 import org.apache.spark.SparkContext
 import org.apache.spark.SparkContext._

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/scala-parallel-similarproduct/multi/src/main/scala/Serving.scala
----------------------------------------------------------------------
diff --git a/examples/scala-parallel-similarproduct/multi/src/main/scala/Serving.scala b/examples/scala-parallel-similarproduct/multi/src/main/scala/Serving.scala
index d3823a5..d9b7045 100644
--- a/examples/scala-parallel-similarproduct/multi/src/main/scala/Serving.scala
+++ b/examples/scala-parallel-similarproduct/multi/src/main/scala/Serving.scala
@@ -1,6 +1,6 @@
 package org.template.similarproduct
 
-import io.prediction.controller.LServing
+import org.apache.predictionio.controller.LServing
 
 import breeze.stats.mean
 import breeze.stats.meanAndVariance

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/scala-parallel-similarproduct/no-set-user/build.sbt
----------------------------------------------------------------------
diff --git a/examples/scala-parallel-similarproduct/no-set-user/build.sbt b/examples/scala-parallel-similarproduct/no-set-user/build.sbt
index a52f698..62b1b9b 100644
--- a/examples/scala-parallel-similarproduct/no-set-user/build.sbt
+++ b/examples/scala-parallel-similarproduct/no-set-user/build.sbt
@@ -4,9 +4,9 @@ assemblySettings
 
 name := "template-scala-parallel-similarproduct"
 
-organization := "io.prediction"
+organization := "org.apache.predictionio"
 
 libraryDependencies ++= Seq(
-  "io.prediction"    %% "core"          % pioVersion.value % "provided",
+  "org.apache.predictionio"    %% "core"          % pioVersion.value % "provided",
   "org.apache.spark" %% "spark-core"    % "1.2.0" % "provided",
   "org.apache.spark" %% "spark-mllib"   % "1.2.0" % "provided")

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/scala-parallel-similarproduct/no-set-user/project/pio-build.sbt
----------------------------------------------------------------------
diff --git a/examples/scala-parallel-similarproduct/no-set-user/project/pio-build.sbt b/examples/scala-parallel-similarproduct/no-set-user/project/pio-build.sbt
index 8346a96..9aed0ee 100644
--- a/examples/scala-parallel-similarproduct/no-set-user/project/pio-build.sbt
+++ b/examples/scala-parallel-similarproduct/no-set-user/project/pio-build.sbt
@@ -1 +1 @@
-addSbtPlugin("io.prediction" % "pio-build" % "0.9.0")
+addSbtPlugin("org.apache.predictionio" % "pio-build" % "0.9.0")

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/scala-parallel-similarproduct/no-set-user/src/main/scala/ALSAlgorithm.scala
----------------------------------------------------------------------
diff --git a/examples/scala-parallel-similarproduct/no-set-user/src/main/scala/ALSAlgorithm.scala b/examples/scala-parallel-similarproduct/no-set-user/src/main/scala/ALSAlgorithm.scala
index 799a4bc..b08fee9 100644
--- a/examples/scala-parallel-similarproduct/no-set-user/src/main/scala/ALSAlgorithm.scala
+++ b/examples/scala-parallel-similarproduct/no-set-user/src/main/scala/ALSAlgorithm.scala
@@ -1,8 +1,8 @@
 package org.template.similarproduct
 
-import io.prediction.controller.P2LAlgorithm
-import io.prediction.controller.Params
-import io.prediction.data.storage.BiMap
+import org.apache.predictionio.controller.P2LAlgorithm
+import org.apache.predictionio.controller.Params
+import org.apache.predictionio.data.storage.BiMap
 
 import org.apache.spark.SparkContext
 import org.apache.spark.SparkContext._

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/scala-parallel-similarproduct/no-set-user/src/main/scala/DataSource.scala
----------------------------------------------------------------------
diff --git a/examples/scala-parallel-similarproduct/no-set-user/src/main/scala/DataSource.scala b/examples/scala-parallel-similarproduct/no-set-user/src/main/scala/DataSource.scala
index b24bc00..77ba891 100644
--- a/examples/scala-parallel-similarproduct/no-set-user/src/main/scala/DataSource.scala
+++ b/examples/scala-parallel-similarproduct/no-set-user/src/main/scala/DataSource.scala
@@ -1,11 +1,11 @@
 package org.template.similarproduct
 
-import io.prediction.controller.PDataSource
-import io.prediction.controller.EmptyEvaluationInfo
-import io.prediction.controller.EmptyActualResult
-import io.prediction.controller.Params
-import io.prediction.data.storage.Event
-import io.prediction.data.storage.Storage
+import org.apache.predictionio.controller.PDataSource
+import org.apache.predictionio.controller.EmptyEvaluationInfo
+import org.apache.predictionio.controller.EmptyActualResult
+import org.apache.predictionio.controller.Params
+import org.apache.predictionio.data.storage.Event
+import org.apache.predictionio.data.storage.Storage
 
 import org.apache.spark.SparkContext
 import org.apache.spark.SparkContext._

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/scala-parallel-similarproduct/no-set-user/src/main/scala/Engine.scala
----------------------------------------------------------------------
diff --git a/examples/scala-parallel-similarproduct/no-set-user/src/main/scala/Engine.scala b/examples/scala-parallel-similarproduct/no-set-user/src/main/scala/Engine.scala
index 8d594fc..7975530 100644
--- a/examples/scala-parallel-similarproduct/no-set-user/src/main/scala/Engine.scala
+++ b/examples/scala-parallel-similarproduct/no-set-user/src/main/scala/Engine.scala
@@ -1,7 +1,7 @@
 package org.template.similarproduct
 
-import io.prediction.controller.IEngineFactory
-import io.prediction.controller.Engine
+import org.apache.predictionio.controller.IEngineFactory
+import org.apache.predictionio.controller.Engine
 
 case class Query(
   items: List[String],

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/scala-parallel-similarproduct/no-set-user/src/main/scala/Preparator.scala
----------------------------------------------------------------------
diff --git a/examples/scala-parallel-similarproduct/no-set-user/src/main/scala/Preparator.scala b/examples/scala-parallel-similarproduct/no-set-user/src/main/scala/Preparator.scala
index 83d8e84..80a73f5 100644
--- a/examples/scala-parallel-similarproduct/no-set-user/src/main/scala/Preparator.scala
+++ b/examples/scala-parallel-similarproduct/no-set-user/src/main/scala/Preparator.scala
@@ -1,6 +1,6 @@
 package org.template.similarproduct
 
-import io.prediction.controller.PPreparator
+import org.apache.predictionio.controller.PPreparator
 
 import org.apache.spark.SparkContext
 import org.apache.spark.SparkContext._

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/scala-parallel-similarproduct/no-set-user/src/main/scala/Serving.scala
----------------------------------------------------------------------
diff --git a/examples/scala-parallel-similarproduct/no-set-user/src/main/scala/Serving.scala b/examples/scala-parallel-similarproduct/no-set-user/src/main/scala/Serving.scala
index e7dadab..0e1057c 100644
--- a/examples/scala-parallel-similarproduct/no-set-user/src/main/scala/Serving.scala
+++ b/examples/scala-parallel-similarproduct/no-set-user/src/main/scala/Serving.scala
@@ -1,6 +1,6 @@
 package org.template.similarproduct
 
-import io.prediction.controller.LServing
+import org.apache.predictionio.controller.LServing
 
 class Serving
   extends LServing[Query, PredictedResult] {

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/scala-parallel-similarproduct/recommended-user/build.sbt
----------------------------------------------------------------------
diff --git a/examples/scala-parallel-similarproduct/recommended-user/build.sbt b/examples/scala-parallel-similarproduct/recommended-user/build.sbt
index 9ba8a52..9d6dfe4 100644
--- a/examples/scala-parallel-similarproduct/recommended-user/build.sbt
+++ b/examples/scala-parallel-similarproduct/recommended-user/build.sbt
@@ -2,9 +2,9 @@ assemblySettings
 
 name := "template-scala-parallel-recommendeduser"
 
-organization := "io.prediction"
+organization := "org.apache.predictionio"
 
 libraryDependencies ++= Seq(
-  "io.prediction"    %% "core"          % pioVersion.value % "provided",
+  "org.apache.predictionio"    %% "core"          % pioVersion.value % "provided",
   "org.apache.spark" %% "spark-core"    % "1.2.0" % "provided",
   "org.apache.spark" %% "spark-mllib"   % "1.2.0" % "provided")

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/scala-parallel-similarproduct/recommended-user/project/pio-build.sbt
----------------------------------------------------------------------
diff --git a/examples/scala-parallel-similarproduct/recommended-user/project/pio-build.sbt b/examples/scala-parallel-similarproduct/recommended-user/project/pio-build.sbt
index 8346a96..9aed0ee 100644
--- a/examples/scala-parallel-similarproduct/recommended-user/project/pio-build.sbt
+++ b/examples/scala-parallel-similarproduct/recommended-user/project/pio-build.sbt
@@ -1 +1 @@
-addSbtPlugin("io.prediction" % "pio-build" % "0.9.0")
+addSbtPlugin("org.apache.predictionio" % "pio-build" % "0.9.0")

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/scala-parallel-similarproduct/recommended-user/src/main/scala/ALSAlgorithm.scala
----------------------------------------------------------------------
diff --git a/examples/scala-parallel-similarproduct/recommended-user/src/main/scala/ALSAlgorithm.scala b/examples/scala-parallel-similarproduct/recommended-user/src/main/scala/ALSAlgorithm.scala
index c355bb6..fe6f126 100644
--- a/examples/scala-parallel-similarproduct/recommended-user/src/main/scala/ALSAlgorithm.scala
+++ b/examples/scala-parallel-similarproduct/recommended-user/src/main/scala/ALSAlgorithm.scala
@@ -1,8 +1,8 @@
 package org.template.recommendeduser
 
 import grizzled.slf4j.Logger
-import io.prediction.controller.{P2LAlgorithm, Params}
-import io.prediction.data.storage.BiMap
+import org.apache.predictionio.controller.{P2LAlgorithm, Params}
+import org.apache.predictionio.data.storage.BiMap
 import org.apache.spark.SparkContext
 import org.apache.spark.SparkContext._
 import org.apache.spark.mllib.recommendation.{ALS, Rating => MLlibRating}

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/scala-parallel-similarproduct/recommended-user/src/main/scala/DataSource.scala
----------------------------------------------------------------------
diff --git a/examples/scala-parallel-similarproduct/recommended-user/src/main/scala/DataSource.scala b/examples/scala-parallel-similarproduct/recommended-user/src/main/scala/DataSource.scala
index 768d79b..a82aefe 100644
--- a/examples/scala-parallel-similarproduct/recommended-user/src/main/scala/DataSource.scala
+++ b/examples/scala-parallel-similarproduct/recommended-user/src/main/scala/DataSource.scala
@@ -1,8 +1,8 @@
 package org.template.recommendeduser
 
 import grizzled.slf4j.Logger
-import io.prediction.controller.{EmptyActualResult, EmptyEvaluationInfo, PDataSource, Params}
-import io.prediction.data.storage.Storage
+import org.apache.predictionio.controller.{EmptyActualResult, EmptyEvaluationInfo, PDataSource, Params}
+import org.apache.predictionio.data.storage.Storage
 import org.apache.spark.SparkContext
 import org.apache.spark.rdd.RDD
 

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/scala-parallel-similarproduct/recommended-user/src/main/scala/Engine.scala
----------------------------------------------------------------------
diff --git a/examples/scala-parallel-similarproduct/recommended-user/src/main/scala/Engine.scala b/examples/scala-parallel-similarproduct/recommended-user/src/main/scala/Engine.scala
index 8122789..86a913f 100644
--- a/examples/scala-parallel-similarproduct/recommended-user/src/main/scala/Engine.scala
+++ b/examples/scala-parallel-similarproduct/recommended-user/src/main/scala/Engine.scala
@@ -1,7 +1,7 @@
 package org.template.recommendeduser
 
-import io.prediction.controller.IEngineFactory
-import io.prediction.controller.Engine
+import org.apache.predictionio.controller.IEngineFactory
+import org.apache.predictionio.controller.Engine
 
 case class Query(
   users: List[String],

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/scala-parallel-similarproduct/recommended-user/src/main/scala/Preparator.scala
----------------------------------------------------------------------
diff --git a/examples/scala-parallel-similarproduct/recommended-user/src/main/scala/Preparator.scala b/examples/scala-parallel-similarproduct/recommended-user/src/main/scala/Preparator.scala
index 26a4147..507d3b3 100644
--- a/examples/scala-parallel-similarproduct/recommended-user/src/main/scala/Preparator.scala
+++ b/examples/scala-parallel-similarproduct/recommended-user/src/main/scala/Preparator.scala
@@ -1,6 +1,6 @@
 package org.template.recommendeduser
 
-import io.prediction.controller.PPreparator
+import org.apache.predictionio.controller.PPreparator
 import org.apache.spark.SparkContext
 import org.apache.spark.rdd.RDD
 

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/scala-parallel-similarproduct/recommended-user/src/main/scala/Serving.scala
----------------------------------------------------------------------
diff --git a/examples/scala-parallel-similarproduct/recommended-user/src/main/scala/Serving.scala b/examples/scala-parallel-similarproduct/recommended-user/src/main/scala/Serving.scala
index 89cd696..eb7f940 100644
--- a/examples/scala-parallel-similarproduct/recommended-user/src/main/scala/Serving.scala
+++ b/examples/scala-parallel-similarproduct/recommended-user/src/main/scala/Serving.scala
@@ -1,6 +1,6 @@
 package org.template.recommendeduser
 
-import io.prediction.controller.LServing
+import org.apache.predictionio.controller.LServing
 
 class Serving
   extends LServing[Query, PredictedResult] {

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/tools/src/main/scala/org/apache/predictionio/tools/admin/README.md
----------------------------------------------------------------------
diff --git a/tools/src/main/scala/org/apache/predictionio/tools/admin/README.md b/tools/src/main/scala/org/apache/predictionio/tools/admin/README.md
index 475a3de..273eea4 100644
--- a/tools/src/main/scala/org/apache/predictionio/tools/admin/README.md
+++ b/tools/src/main/scala/org/apache/predictionio/tools/admin/README.md
@@ -9,7 +9,7 @@ $ sbt/sbt "tools/compile"
 $ set -a
 $ source conf/pio-env.sh
 $ set +a
-$ sbt/sbt "tools/run-main io.prediction.tools.admin.AdminRun"
+$ sbt/sbt "tools/run-main org.apache.predictionio.tools.admin.AdminRun"
 ```
 
 ### Unit test (Very minimal)
@@ -18,7 +18,7 @@ $ sbt/sbt "tools/run-main io.prediction.tools.admin.AdminRun"
 $ set -a
 $ source conf/pio-env.sh
 $ set +a
-$ sbt/sbt "tools/test-only io.prediction.tools.admin.AdminAPISpec"
+$ sbt/sbt "tools/test-only org.apache.predictionio.tools.admin.AdminAPISpec"
 ```
 
 ### Start with pio command adminserver

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/tools/src/main/twirl/org/apache/predictionio/tools/templates/scala/buildSbt.scala.txt
----------------------------------------------------------------------
diff --git a/tools/src/main/twirl/org/apache/predictionio/tools/templates/scala/buildSbt.scala.txt b/tools/src/main/twirl/org/apache/predictionio/tools/templates/scala/buildSbt.scala.txt
index 9343010..c0a16bd 100644
--- a/tools/src/main/twirl/org/apache/predictionio/tools/templates/scala/buildSbt.scala.txt
+++ b/tools/src/main/twirl/org/apache/predictionio/tools/templates/scala/buildSbt.scala.txt
@@ -10,5 +10,5 @@ organization := "myorg"
 version := "0.0.1-SNAPSHOT"
 
 libraryDependencies ++= Seq(
-  "io.prediction"    %% "core"          % "@{pioVersion}" % "provided",
+  "org.apache.predictionio"    %% "core"          % "@{pioVersion}" % "provided",
   "org.apache.spark" %% "spark-core"    % "@{sparkVersion}" % "provided")

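At engine-creation time the pio tool substitutes concrete versions for the @{pioVersion} and @{sparkVersion} placeholders above. With illustrative values (say 0.10.0-incubating and 1.6.3; the actual versions depend on the installation), the rendered fragment would read:

    libraryDependencies ++= Seq(
      "org.apache.predictionio" %% "core"       % "0.10.0-incubating" % "provided",
      "org.apache.spark"        %% "spark-core" % "1.6.3" % "provided")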

[12/34] incubator-predictionio git commit: rename all except examples

Posted by do...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/data/src/main/scala/org/apache/predictionio/data/api/EventServer.scala
----------------------------------------------------------------------
diff --git a/data/src/main/scala/org/apache/predictionio/data/api/EventServer.scala b/data/src/main/scala/org/apache/predictionio/data/api/EventServer.scala
new file mode 100644
index 0000000..7174ec8
--- /dev/null
+++ b/data/src/main/scala/org/apache/predictionio/data/api/EventServer.scala
@@ -0,0 +1,640 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package org.apache.predictionio.data.api
+
+import akka.event.Logging
+import sun.misc.BASE64Decoder
+
+import java.util.concurrent.TimeUnit
+
+import akka.actor._
+import akka.io.IO
+import akka.pattern.ask
+import akka.util.Timeout
+import org.apache.predictionio.data.Utils
+import org.apache.predictionio.data.storage.AccessKeys
+import org.apache.predictionio.data.storage.Channels
+import org.apache.predictionio.data.storage.DateTimeJson4sSupport
+import org.apache.predictionio.data.storage.Event
+import org.apache.predictionio.data.storage.EventJson4sSupport
+import org.apache.predictionio.data.storage.BatchEventsJson4sSupport
+import org.apache.predictionio.data.storage.LEvents
+import org.apache.predictionio.data.storage.Storage
+import org.json4s.DefaultFormats
+import org.json4s.Formats
+import org.json4s.JObject
+import org.json4s.native.JsonMethods.parse
+import spray.can.Http
+import spray.http.FormData
+import spray.http.MediaTypes
+import spray.http.StatusCodes
+import spray.httpx.Json4sSupport
+import spray.routing._
+import spray.routing.authentication.Authentication
+
+import scala.concurrent.ExecutionContext
+import scala.concurrent.Future
+import scala.util.{Try, Success, Failure}
+
+class EventServiceActor(
+    val eventClient: LEvents,
+    val accessKeysClient: AccessKeys,
+    val channelsClient: Channels,
+    val config: EventServerConfig) extends HttpServiceActor {
+
+  object Json4sProtocol extends Json4sSupport {
+    implicit def json4sFormats: Formats = DefaultFormats +
+      new EventJson4sSupport.APISerializer +
+      new BatchEventsJson4sSupport.APISerializer +
+      // NOTE: don't use Json4s JodaTimeSerializers since it has issues,
+      // some formats are not converted, or the timezone is not correct
+      new DateTimeJson4sSupport.Serializer
+  }
+
+
+  val MaxNumberOfEventsPerBatchRequest = 50
+
+  val logger = Logging(context.system, this)
+
+  // we use the enclosing ActorContext's or ActorSystem's dispatcher for our
+  // Futures
+  implicit def executionContext: ExecutionContext = context.dispatcher
+
+  implicit val timeout = Timeout(5, TimeUnit.SECONDS)
+
+  val rejectionHandler = Common.rejectionHandler
+
+  val jsonPath = """(.+)\.json$""".r
+  val formPath = """(.+)\.form$""".r
+
+  val pluginContext = EventServerPluginContext(logger)
+
+  private lazy val base64Decoder = new BASE64Decoder
+
+  case class AuthData(appId: Int, channelId: Option[Int], events: Seq[String])
+
+  /* with accessKey in query/header, return appId if successful */
+  def withAccessKey: RequestContext => Future[Authentication[AuthData]] = {
+    ctx: RequestContext =>
+      val accessKeyParamOpt = ctx.request.uri.query.get("accessKey")
+      val channelParamOpt = ctx.request.uri.query.get("channel")
+      Future {
+        // with accessKey in query, return appId if successful
+        accessKeyParamOpt.map { accessKeyParam =>
+          accessKeysClient.get(accessKeyParam).map { k =>
+            channelParamOpt.map { ch =>
+              val channelMap =
+                channelsClient.getByAppid(k.appid)
+                .map(c => (c.name, c.id)).toMap
+              if (channelMap.contains(ch)) {
+                Right(AuthData(k.appid, Some(channelMap(ch)), k.events))
+              } else {
+                Left(ChannelRejection(s"Invalid channel '$ch'."))
+              }
+            }.getOrElse{
+              Right(AuthData(k.appid, None, k.events))
+            }
+          }.getOrElse(FailedAuth)
+        }.getOrElse {
+          // with accessKey in header, return appId if successful
+          ctx.request.headers.find(_.name == "Authorization").map { authHeader ⇒
+            authHeader.value.split("Basic ") match {
+              case Array(_, value) ⇒
+                val appAccessKey =
+                  new String(base64Decoder.decodeBuffer(value)).trim.split(":")(0)
+                accessKeysClient.get(appAccessKey) match {
+                  case Some(k) ⇒ Right(AuthData(k.appid, None, k.events))
+                  case None ⇒ FailedAuth
+                }
+
+              case _ ⇒ FailedAuth
+            }
+          }.getOrElse(MissedAuth)
+        }
+      }
+  }
+
+  private val FailedAuth = Left(
+    AuthenticationFailedRejection(
+      AuthenticationFailedRejection.CredentialsRejected, List()
+    )
+  )
+
+  private val MissedAuth = Left(
+    AuthenticationFailedRejection(
+      AuthenticationFailedRejection.CredentialsMissing, List()
+    )
+  )
+
+  lazy val statsActorRef = actorRefFactory.actorSelection("/user/StatsActor")
+  lazy val pluginsActorRef = actorRefFactory.actorSelection("/user/PluginsActor")
+
+  val route: Route =
+    pathSingleSlash {
+      import Json4sProtocol._
+
+      get {
+        respondWithMediaType(MediaTypes.`application/json`) {
+          complete(Map("status" -> "alive"))
+        }
+      }
+    } ~
+    path("plugins.json") {
+      import Json4sProtocol._
+      get {
+        respondWithMediaType(MediaTypes.`application/json`) {
+          complete {
+            Map("plugins" -> Map(
+              "inputblockers" -> pluginContext.inputBlockers.map { case (n, p) =>
+                n -> Map(
+                  "name" -> p.pluginName,
+                  "description" -> p.pluginDescription,
+                  "class" -> p.getClass.getName)
+              },
+              "inputsniffers" -> pluginContext.inputSniffers.map { case (n, p) =>
+                n -> Map(
+                  "name" -> p.pluginName,
+                  "description" -> p.pluginDescription,
+                  "class" -> p.getClass.getName)
+              }
+            ))
+          }
+        }
+      }
+    } ~
+    path("plugins" / Segments) { segments =>
+      get {
+        handleExceptions(Common.exceptionHandler) {
+          authenticate(withAccessKey) { authData =>
+            respondWithMediaType(MediaTypes.`application/json`) {
+              complete {
+                val pluginArgs = segments.drop(2)
+                val pluginType = segments(0)
+                val pluginName = segments(1)
+                pluginType match {
+                  case EventServerPlugin.inputBlocker =>
+                    pluginContext.inputBlockers(pluginName).handleREST(
+                      authData.appId,
+                      authData.channelId,
+                      pluginArgs)
+                  case EventServerPlugin.inputSniffer =>
+                    pluginsActorRef ? PluginsActor.HandleREST(
+                      appId = authData.appId,
+                      channelId = authData.channelId,
+                      pluginName = pluginName,
+                      pluginArgs = pluginArgs) map {
+                      _.asInstanceOf[String]
+                    }
+                }
+              }
+            }
+          }
+        }
+      }
+    } ~
+    path("events" / jsonPath ) { eventId =>
+
+      import Json4sProtocol._
+
+      get {
+        handleExceptions(Common.exceptionHandler) {
+          handleRejections(rejectionHandler) {
+            authenticate(withAccessKey) { authData =>
+              val appId = authData.appId
+              val channelId = authData.channelId
+              respondWithMediaType(MediaTypes.`application/json`) {
+                complete {
+                  logger.debug(s"GET event ${eventId}.")
+                  val data = eventClient.futureGet(eventId, appId, channelId).map { eventOpt =>
+                    eventOpt.map( event =>
+                      (StatusCodes.OK, event)
+                    ).getOrElse(
+                      (StatusCodes.NotFound, Map("message" -> "Not Found"))
+                    )
+                  }
+                  data
+                }
+              }
+            }
+          }
+        }
+      } ~
+      delete {
+        handleExceptions(Common.exceptionHandler) {
+          handleRejections(rejectionHandler) {
+            authenticate(withAccessKey) { authData =>
+              val appId = authData.appId
+              val channelId = authData.channelId
+              respondWithMediaType(MediaTypes.`application/json`) {
+                complete {
+                  logger.debug(s"DELETE event ${eventId}.")
+                  val data = eventClient.futureDelete(eventId, appId, channelId).map { found =>
+                    if (found) {
+                      (StatusCodes.OK, Map("message" -> "Found"))
+                    } else {
+                      (StatusCodes.NotFound, Map("message" -> "Not Found"))
+                    }
+                  }
+                  data
+                }
+              }
+            }
+          }
+        }
+      }
+    } ~
+    path("events.json") {
+
+      import Json4sProtocol._
+
+      post {
+        handleExceptions(Common.exceptionHandler) {
+          handleRejections(rejectionHandler) {
+            authenticate(withAccessKey) { authData =>
+              val appId = authData.appId
+              val channelId = authData.channelId
+              val events = authData.events
+              entity(as[Event]) { event =>
+                complete {
+                  if (events.isEmpty || authData.events.contains(event.event)) {
+                    pluginContext.inputBlockers.values.foreach(
+                      _.process(EventInfo(
+                        appId = appId,
+                        channelId = channelId,
+                        event = event), pluginContext))
+                    val data = eventClient.futureInsert(event, appId, channelId).map { id =>
+                      pluginsActorRef ! EventInfo(
+                        appId = appId,
+                        channelId = channelId,
+                        event = event)
+                      val result = (StatusCodes.Created, Map("eventId" -> s"${id}"))
+                      if (config.stats) {
+                        statsActorRef ! Bookkeeping(appId, result._1, event)
+                      }
+                      result
+                    }
+                    data
+                  } else {
+                    (StatusCodes.Forbidden,
+                      Map("message" -> s"${event.event} events are not allowed"))
+                  }
+                }
+              }
+            }
+          }
+        }
+      } ~
+      get {
+        handleExceptions(Common.exceptionHandler) {
+          handleRejections(rejectionHandler) {
+            authenticate(withAccessKey) { authData =>
+              val appId = authData.appId
+              val channelId = authData.channelId
+              parameters(
+                'startTime.as[Option[String]],
+                'untilTime.as[Option[String]],
+                'entityType.as[Option[String]],
+                'entityId.as[Option[String]],
+                'event.as[Option[String]],
+                'targetEntityType.as[Option[String]],
+                'targetEntityId.as[Option[String]],
+                'limit.as[Option[Int]],
+                'reversed.as[Option[Boolean]]) {
+                (startTimeStr, untilTimeStr, entityType, entityId,
+                  eventName,  // only support one event name
+                  targetEntityType, targetEntityId,
+                  limit, reversed) =>
+                respondWithMediaType(MediaTypes.`application/json`) {
+                  complete {
+                    logger.debug(
+                      s"GET events of appId=${appId} " +
+                      s"st=${startTimeStr} ut=${untilTimeStr} " +
+                      s"et=${entityType} eid=${entityId} " +
+                      s"li=${limit} rev=${reversed} ")
+
+                    require(!((reversed == Some(true))
+                      && (entityType.isEmpty || entityId.isEmpty)),
+                      "the parameter reversed can only be used with" +
+                      " both entityType and entityId specified.")
+
+                    val parseTime = Future {
+                      val startTime = startTimeStr.map(Utils.stringToDateTime(_))
+                      val untilTime = untilTimeStr.map(Utils.stringToDateTime(_))
+                      (startTime, untilTime)
+                    }
+
+
+                    parseTime.flatMap { case (startTime, untilTime) =>
+                      val data = eventClient.futureFind(
+                        appId = appId,
+                        channelId = channelId,
+                        startTime = startTime,
+                        untilTime = untilTime,
+                        entityType = entityType,
+                        entityId = entityId,
+                        eventNames = eventName.map(List(_)),
+                        targetEntityType = targetEntityType.map(Some(_)),
+                        targetEntityId = targetEntityId.map(Some(_)),
+                        limit = limit.orElse(Some(20)),
+                        reversed = reversed)
+                        .map { eventIter =>
+                          if (eventIter.hasNext) {
+                            (StatusCodes.OK, eventIter.toArray)
+                          } else {
+                            (StatusCodes.NotFound,
+                              Map("message" -> "Not Found"))
+                          }
+                        }
+                      data
+                    }.recover {
+                      case e: Exception =>
+                        (StatusCodes.BadRequest, Map("message" -> s"${e}"))
+                    }
+                  }
+                }
+              }
+            }
+          }
+        }
+      }
+    } ~
+    path("batch" / "events.json") {
+
+      import Json4sProtocol._
+
+      post {
+        handleExceptions(Common.exceptionHandler) {
+          handleRejections(rejectionHandler) {
+            authenticate(withAccessKey) { authData =>
+              val appId = authData.appId
+              val channelId = authData.channelId
+              val allowedEvents = authData.events
+              val handleEvent: PartialFunction[Try[Event], Future[Map[String, Any]]] = {
+                case Success(event) => {
+                  if (allowedEvents.isEmpty || allowedEvents.contains(event.event)) {
+                    pluginContext.inputBlockers.values.foreach(
+                      _.process(EventInfo(
+                        appId = appId,
+                        channelId = channelId,
+                        event = event), pluginContext))
+                    val data = eventClient.futureInsert(event, appId, channelId).map { id =>
+                      pluginsActorRef ! EventInfo(
+                        appId = appId,
+                        channelId = channelId,
+                        event = event)
+                      val status = StatusCodes.Created
+                      val result = Map(
+                        "status" -> status.intValue,
+                        "eventId" -> s"${id}")
+                      if (config.stats) {
+                        statsActorRef ! Bookkeeping(appId, status, event)
+                      }
+                      result
+                    }.recover { case exception =>
+                      Map(
+                        "status" -> StatusCodes.InternalServerError.intValue,
+                        "message" -> s"${exception.getMessage()}")
+                    }
+                    data
+                  } else {
+                    Future.successful(Map(
+                      "status" -> StatusCodes.Forbidden.intValue,
+                      "message" -> s"${event.event} events are not allowed"))
+                  }
+                }
+                case Failure(exception) => {
+                  Future.successful(Map(
+                    "status" -> StatusCodes.BadRequest.intValue,
+                    "message" -> s"${exception.getMessage()}"))
+                }
+              }
+
+              entity(as[Seq[Try[Event]]]) { events =>
+                complete {
+                  if (events.length <= MaxNumberOfEventsPerBatchRequest) {
+                    Future.traverse(events)(handleEvent)
+                  } else {
+                    (StatusCodes.BadRequest,
+                      Map("message" -> (s"A batch request must contain at most " +
+                        s"${MaxNumberOfEventsPerBatchRequest} events")))
+                  }
+                }
+              }
+            }
+          }
+        }
+      }
+    } ~
+    path("stats.json") {
+
+      import Json4sProtocol._
+
+      get {
+        handleExceptions(Common.exceptionHandler) {
+          handleRejections(rejectionHandler) {
+            authenticate(withAccessKey) { authData =>
+              val appId = authData.appId
+              respondWithMediaType(MediaTypes.`application/json`) {
+                if (config.stats) {
+                  complete {
+                    statsActorRef ? GetStats(appId) map {
+                      _.asInstanceOf[Map[String, StatsSnapshot]]
+                    }
+                  }
+                } else {
+                  complete(
+                    StatusCodes.NotFound,
+                    parse("""{"message": "To see stats, launch Event Server """ +
+                      """with --stats argument."}"""))
+                }
+              }
+            }
+          }
+        }
+      }  // stats.json get
+    } ~
+    path("webhooks" / jsonPath ) { web =>
+      import Json4sProtocol._
+
+      post {
+        handleExceptions(Common.exceptionHandler) {
+          handleRejections(rejectionHandler) {
+            authenticate(withAccessKey) { authData =>
+              val appId = authData.appId
+              val channelId = authData.channelId
+              respondWithMediaType(MediaTypes.`application/json`) {
+                entity(as[JObject]) { jObj =>
+                  complete {
+                    Webhooks.postJson(
+                      appId = appId,
+                      channelId = channelId,
+                      web = web,
+                      data = jObj,
+                      eventClient = eventClient,
+                      log = logger,
+                      stats = config.stats,
+                      statsActorRef = statsActorRef)
+                  }
+                }
+              }
+            }
+          }
+        }
+      } ~
+      get {
+        handleExceptions(Common.exceptionHandler) {
+          handleRejections(rejectionHandler) {
+            authenticate(withAccessKey) { authData =>
+              val appId = authData.appId
+              val channelId = authData.channelId
+              respondWithMediaType(MediaTypes.`application/json`) {
+                complete {
+                  Webhooks.getJson(
+                    appId = appId,
+                    channelId = channelId,
+                    web = web,
+                    log = logger)
+                }
+              }
+            }
+          }
+        }
+      }
+    } ~
+    path("webhooks" / formPath ) { web =>
+      post {
+        handleExceptions(Common.exceptionHandler) {
+          handleRejections(rejectionHandler) {
+            authenticate(withAccessKey) { authData =>
+              val appId = authData.appId
+              val channelId = authData.channelId
+              respondWithMediaType(MediaTypes.`application/json`) {
+                entity(as[FormData]){ formData =>
+                  // logger.debug(formData.toString)
+                  complete {
+                    // respond with JSON
+                    import Json4sProtocol._
+
+                    Webhooks.postForm(
+                      appId = appId,
+                      channelId = channelId,
+                      web = web,
+                      data = formData,
+                      eventClient = eventClient,
+                      log = logger,
+                      stats = config.stats,
+                      statsActorRef = statsActorRef)
+                  }
+                }
+              }
+            }
+          }
+        }
+      } ~
+      get {
+        handleExceptions(Common.exceptionHandler) {
+          handleRejections(rejectionHandler) {
+            authenticate(withAccessKey) { authData =>
+              val appId = authData.appId
+              val channelId = authData.channelId
+              respondWithMediaType(MediaTypes.`application/json`) {
+                complete {
+                  // respond with JSON
+                  import Json4sProtocol._
+
+                  Webhooks.getForm(
+                    appId = appId,
+                    channelId = channelId,
+                    web = web,
+                    log = logger)
+                }
+              }
+            }
+          }
+        }
+      }
+
+    }
+
+  def receive: Actor.Receive = runRoute(route)
+}
+
+
+
+/* message */
+case class StartServer(host: String, port: Int)
+
+class EventServerActor(
+    val eventClient: LEvents,
+    val accessKeysClient: AccessKeys,
+    val channelsClient: Channels,
+    val config: EventServerConfig) extends Actor with ActorLogging {
+  val child = context.actorOf(
+    Props(classOf[EventServiceActor],
+      eventClient,
+      accessKeysClient,
+      channelsClient,
+      config),
+    "EventServiceActor")
+  implicit val system = context.system
+
+  def receive: Actor.Receive = {
+    case StartServer(host, portNum) => {
+      IO(Http) ! Http.Bind(child, interface = host, port = portNum)
+    }
+    case m: Http.Bound => log.info("Bound received. EventServer is ready.")
+    case m: Http.CommandFailed => log.error("Command failed.")
+    case _ => log.error("Unknown message.")
+  }
+}
+
+case class EventServerConfig(
+  ip: String = "localhost",
+  port: Int = 7070,
+  plugins: String = "plugins",
+  stats: Boolean = false)
+
+object EventServer {
+  def createEventServer(config: EventServerConfig): Unit = {
+    implicit val system = ActorSystem("EventServerSystem")
+
+    val eventClient = Storage.getLEvents()
+    val accessKeysClient = Storage.getMetaDataAccessKeys()
+    val channelsClient = Storage.getMetaDataChannels()
+
+    val serverActor = system.actorOf(
+      Props(
+        classOf[EventServerActor],
+        eventClient,
+        accessKeysClient,
+        channelsClient,
+        config),
+      "EventServerActor"
+    )
+    if (config.stats) system.actorOf(Props[StatsActor], "StatsActor")
+    system.actorOf(Props[PluginsActor], "PluginsActor")
+    serverActor ! StartServer(config.ip, config.port)
+    system.awaitTermination()
+  }
+}
+
+object Run {
+  def main(args: Array[String]) {
+    EventServer.createEventServer(EventServerConfig(
+      ip = "0.0.0.0",
+      port = 7070))
+  }
+}
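
For reference, besides the Run entry point above, the server can be embedded
with a non-default configuration. A minimal sketch using only the
EventServerConfig fields defined above (enabling stats here is illustrative):

    import org.apache.predictionio.data.api.{EventServer, EventServerConfig}

    object CustomRun {
      def main(args: Array[String]): Unit = {
        // Bind to all interfaces on the default port and collect per-hour
        // request statistics (served at /stats.json).
        EventServer.createEventServer(EventServerConfig(
          ip = "0.0.0.0",
          port = 7070,
          stats = true))
      }
    }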

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/data/src/main/scala/org/apache/predictionio/data/api/EventServerPlugin.scala
----------------------------------------------------------------------
diff --git a/data/src/main/scala/org/apache/predictionio/data/api/EventServerPlugin.scala b/data/src/main/scala/org/apache/predictionio/data/api/EventServerPlugin.scala
new file mode 100644
index 0000000..c4918c2
--- /dev/null
+++ b/data/src/main/scala/org/apache/predictionio/data/api/EventServerPlugin.scala
@@ -0,0 +1,33 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package org.apache.predictionio.data.api
+
+trait EventServerPlugin {
+  val pluginName: String
+  val pluginDescription: String
+  val pluginType: String
+
+  def start(context: EventServerPluginContext): Unit
+
+  def process(eventInfo: EventInfo, context: EventServerPluginContext)
+
+  def handleREST(appId: Int, channelId: Option[Int], arguments: Seq[String]): String
+}
+
+object EventServerPlugin {
+  val inputBlocker = "inputblocker"
+  val inputSniffer = "inputsniffer"
+}
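
A plugin implements the trait above and declares its type using one of the two
constants. A hypothetical input sniffer that only logs incoming events might
look like this sketch (class name and package are illustrative):

    package com.example

    import org.apache.predictionio.data.api.{EventInfo, EventServerPlugin,
      EventServerPluginContext}

    class LoggingSniffer extends EventServerPlugin {
      val pluginName = "loggingsniffer"
      val pluginDescription = "Logs every event reaching the event server"
      val pluginType = EventServerPlugin.inputSniffer

      def start(context: EventServerPluginContext): Unit = {}

      // Called for every event; here we just log the app ID and event name.
      def process(eventInfo: EventInfo, context: EventServerPluginContext): Unit =
        context.log.info(s"appId=${eventInfo.appId} event=${eventInfo.event.event}")

      // Backs GET /plugins/inputsniffer/loggingsniffer/... requests.
      def handleREST(appId: Int, channelId: Option[Int],
          arguments: Seq[String]): String =
        s"""{"appId": $appId, "arguments": "${arguments.mkString(",")}"}"""
    }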

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/data/src/main/scala/org/apache/predictionio/data/api/EventServerPluginContext.scala
----------------------------------------------------------------------
diff --git a/data/src/main/scala/org/apache/predictionio/data/api/EventServerPluginContext.scala b/data/src/main/scala/org/apache/predictionio/data/api/EventServerPluginContext.scala
new file mode 100644
index 0000000..db5743b
--- /dev/null
+++ b/data/src/main/scala/org/apache/predictionio/data/api/EventServerPluginContext.scala
@@ -0,0 +1,49 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package org.apache.predictionio.data.api
+
+import java.util.ServiceLoader
+
+import akka.event.LoggingAdapter
+import grizzled.slf4j.Logging
+
+import scala.collection.JavaConversions._
+import scala.collection.mutable
+
+class EventServerPluginContext(
+    val plugins: mutable.Map[String, mutable.Map[String, EventServerPlugin]],
+    val log: LoggingAdapter) {
+  def inputBlockers: Map[String, EventServerPlugin] =
+    plugins.getOrElse(EventServerPlugin.inputBlocker, Map()).toMap
+
+  def inputSniffers: Map[String, EventServerPlugin] =
+    plugins.getOrElse(EventServerPlugin.inputSniffer, Map()).toMap
+}
+
+object EventServerPluginContext extends Logging {
+  def apply(log: LoggingAdapter): EventServerPluginContext = {
+    val plugins = mutable.Map[String, mutable.Map[String, EventServerPlugin]](
+      EventServerPlugin.inputBlocker -> mutable.Map(),
+      EventServerPlugin.inputSniffer -> mutable.Map())
+    val serviceLoader = ServiceLoader.load(classOf[EventServerPlugin])
+    serviceLoader foreach { service =>
+      plugins(service.pluginType) += service.pluginName -> service
+    }
+    new EventServerPluginContext(
+      plugins,
+      log)
+  }
+}
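
Implementations are discovered via java.util.ServiceLoader, so a plugin such as
the LoggingSniffer sketch above must be registered in a provider-configuration
file on the classpath, named after the fully qualified trait. For example, a
file at
src/main/resources/META-INF/services/org.apache.predictionio.data.api.EventServerPlugin
containing the single line:

    com.example.LoggingSniffer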

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/data/src/main/scala/org/apache/predictionio/data/api/PluginsActor.scala
----------------------------------------------------------------------
diff --git a/data/src/main/scala/org/apache/predictionio/data/api/PluginsActor.scala b/data/src/main/scala/org/apache/predictionio/data/api/PluginsActor.scala
new file mode 100644
index 0000000..e6c1ae8
--- /dev/null
+++ b/data/src/main/scala/org/apache/predictionio/data/api/PluginsActor.scala
@@ -0,0 +1,52 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package org.apache.predictionio.data.api
+
+import akka.actor.Actor
+import akka.event.Logging
+
+class PluginsActor() extends Actor {
+  implicit val system = context.system
+  val log = Logging(system, this)
+
+  val pluginContext = EventServerPluginContext(log)
+
+  def receive: PartialFunction[Any, Unit] = {
+    case e: EventInfo =>
+      pluginContext.inputSniffers.values.foreach(_.process(e, pluginContext))
+    case h: PluginsActor.HandleREST =>
+      try {
+        sender() ! pluginContext.inputSniffers(h.pluginName).handleREST(
+          h.appId,
+          h.channelId,
+          h.pluginArgs)
+      } catch {
+        case e: Exception =>
+          sender() ! s"""{"message":"${e.getMessage}"}"""
+      }
+    case _ =>
+      log.error("Unknown message sent to Event Server input sniffer plugin host.")
+  }
+}
+
+object PluginsActor {
+  case class HandleREST(
+    pluginName: String,
+    appId: Int,
+    channelId: Option[Int],
+    pluginArgs: Seq[String])
+}
+

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/data/src/main/scala/org/apache/predictionio/data/api/Stats.scala
----------------------------------------------------------------------
diff --git a/data/src/main/scala/org/apache/predictionio/data/api/Stats.scala b/data/src/main/scala/org/apache/predictionio/data/api/Stats.scala
new file mode 100644
index 0000000..231d101
--- /dev/null
+++ b/data/src/main/scala/org/apache/predictionio/data/api/Stats.scala
@@ -0,0 +1,79 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package org.apache.predictionio.data.api
+
+import org.apache.predictionio.data.storage.Event
+
+import spray.http.StatusCode
+
+import scala.collection.mutable.{ HashMap => MHashMap }
+import scala.collection.mutable
+
+import com.github.nscala_time.time.Imports.DateTime
+
+case class EntityTypesEvent(
+  val entityType: String,
+  val targetEntityType: Option[String],
+  val event: String) {
+
+  def this(e: Event) = this(
+    e.entityType,
+    e.targetEntityType,
+    e.event)
+}
+
+case class KV[K, V](key: K, value: V)
+
+case class StatsSnapshot(
+  val startTime: DateTime,
+  val endTime: Option[DateTime],
+  val basic: Seq[KV[EntityTypesEvent, Long]],
+  val statusCode: Seq[KV[StatusCode, Long]]
+)
+
+
+class Stats(val startTime: DateTime) {
+  private[this] var _endTime: Option[DateTime] = None
+  var statusCodeCount = MHashMap[(Int, StatusCode), Long]().withDefaultValue(0L)
+  var eteCount = MHashMap[(Int, EntityTypesEvent), Long]().withDefaultValue(0L)
+
+  def cutoff(endTime: DateTime) {
+    _endTime = Some(endTime)
+  }
+
+  def update(appId: Int, statusCode: StatusCode, event: Event) {
+    statusCodeCount((appId, statusCode)) += 1
+    eteCount((appId, new EntityTypesEvent(event))) += 1
+  }
+
+  def extractByAppId[K, V](appId: Int, m: mutable.Map[(Int, K), V])
+  : Seq[KV[K, V]] = {
+    m
+    .toSeq
+    .flatMap { case (k, v) =>
+      if (k._1 == appId) { Seq(KV(k._2, v)) } else { Seq() }
+    }
+  }
+
+  def get(appId: Int): StatsSnapshot = {
+    StatsSnapshot(
+      startTime,
+      _endTime,
+      extractByAppId(appId, eteCount),
+      extractByAppId(appId, statusCodeCount)
+    )
+  }
+}
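
Stats is a plain in-memory accumulator keyed by app ID. A usage sketch (the
appId and event values are placeholders, and this assumes Event's optional
fields carry defaults, as in the storage API):

    import org.apache.predictionio.data.storage.Event
    import com.github.nscala_time.time.Imports.DateTime
    import spray.http.StatusCodes

    val e = Event(event = "rate", entityType = "user", entityId = "u0")

    val stats = new Stats(DateTime.now)
    stats.update(1, StatusCodes.Created, e)    // record one accepted event for app 1
    val snapshot: StatsSnapshot = stats.get(1) // per-app view of the counters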

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/data/src/main/scala/org/apache/predictionio/data/api/StatsActor.scala
----------------------------------------------------------------------
diff --git a/data/src/main/scala/org/apache/predictionio/data/api/StatsActor.scala b/data/src/main/scala/org/apache/predictionio/data/api/StatsActor.scala
new file mode 100644
index 0000000..a8ed3e7
--- /dev/null
+++ b/data/src/main/scala/org/apache/predictionio/data/api/StatsActor.scala
@@ -0,0 +1,74 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package org.apache.predictionio.data.api
+
+import org.apache.predictionio.data.storage.Event
+
+import spray.http.StatusCode
+
+import akka.actor.Actor
+import akka.event.Logging
+
+import com.github.nscala_time.time.Imports.DateTime
+
+/* message to StatsActor */
+case class Bookkeeping(val appId: Int, statusCode: StatusCode, event: Event)
+
+/* message to StatsActor */
+case class GetStats(val appId: Int)
+
+class StatsActor extends Actor {
+  implicit val system = context.system
+  val log = Logging(system, this)
+
+  def getCurrent: DateTime = {
+    DateTime.now.
+      withMinuteOfHour(0).
+      withSecondOfMinute(0).
+      withMillisOfSecond(0)
+  }
+
+  var longLiveStats = new Stats(DateTime.now)
+  var hourlyStats = new Stats(getCurrent)
+
+  var prevHourlyStats = new Stats(getCurrent.minusHours(1))
+  prevHourlyStats.cutoff(hourlyStats.startTime)
+
+  def bookkeeping(appId: Int, statusCode: StatusCode, event: Event) {
+    val current = getCurrent
+    // If the current hour is different from the stats start time, we create
+    // another stats instance, and move the current to prev.
+    if (current != hourlyStats.startTime) {
+      prevHourlyStats = hourlyStats
+      prevHourlyStats.cutoff(current)
+      hourlyStats = new Stats(current)
+    }
+
+    hourlyStats.update(appId, statusCode, event)
+    longLiveStats.update(appId, statusCode, event)
+  }
+
+  def receive: Actor.Receive = {
+    case Bookkeeping(appId, statusCode, event) =>
+      bookkeeping(appId, statusCode, event)
+    case GetStats(appId) => sender() ! Map(
+      "time" -> DateTime.now,
+      "currentHour" -> hourlyStats.get(appId),
+      "prevHour" -> prevHourlyStats.get(appId),
+      "longLive" -> longLiveStats.get(appId))
+    case _ => log.error("Unknown message.")
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/data/src/main/scala/org/apache/predictionio/data/api/Webhooks.scala
----------------------------------------------------------------------
diff --git a/data/src/main/scala/org/apache/predictionio/data/api/Webhooks.scala b/data/src/main/scala/org/apache/predictionio/data/api/Webhooks.scala
new file mode 100644
index 0000000..04ff78f
--- /dev/null
+++ b/data/src/main/scala/org/apache/predictionio/data/api/Webhooks.scala
@@ -0,0 +1,151 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package org.apache.predictionio.data.api
+
+import org.apache.predictionio.data.webhooks.JsonConnector
+import org.apache.predictionio.data.webhooks.FormConnector
+import org.apache.predictionio.data.webhooks.ConnectorUtil
+import org.apache.predictionio.data.storage.Event
+import org.apache.predictionio.data.storage.EventJson4sSupport
+import org.apache.predictionio.data.storage.LEvents
+
+import spray.routing._
+import spray.routing.Directives._
+import spray.http.StatusCodes
+import spray.http.StatusCode
+import spray.http.FormData
+import spray.httpx.Json4sSupport
+
+import org.json4s.Formats
+import org.json4s.DefaultFormats
+import org.json4s.JObject
+
+import akka.event.LoggingAdapter
+import akka.actor.ActorSelection
+
+import scala.concurrent.{ExecutionContext, Future}
+
+
+private[predictionio] object Webhooks {
+
+  def postJson(
+    appId: Int,
+    channelId: Option[Int],
+    web: String,
+    data: JObject,
+    eventClient: LEvents,
+    log: LoggingAdapter,
+    stats: Boolean,
+    statsActorRef: ActorSelection
+  )(implicit ec: ExecutionContext): Future[(StatusCode, Map[String, String])] = {
+
+    val eventFuture = Future {
+      WebhooksConnectors.json.get(web).map { connector =>
+        ConnectorUtil.toEvent(connector, data)
+      }
+    }
+
+    eventFuture.flatMap { eventOpt =>
+      if (eventOpt.isEmpty) {
+        Future successful {
+          val message = s"webhooks connection for ${web} is not supported."
+          (StatusCodes.NotFound, Map("message" -> message))
+        }
+      } else {
+        val event = eventOpt.get
+        val data = eventClient.futureInsert(event, appId, channelId).map { id =>
+          val result = (StatusCodes.Created, Map("eventId" -> s"${id}"))
+
+          if (stats) {
+            statsActorRef ! Bookkeeping(appId, result._1, event)
+          }
+          result
+        }
+        data
+      }
+    }
+  }
+
+  def getJson(
+    appId: Int,
+    channelId: Option[Int],
+    web: String,
+    log: LoggingAdapter
+  )(implicit ec: ExecutionContext): Future[(StatusCode, Map[String, String])] = {
+    Future {
+      WebhooksConnectors.json.get(web).map { connector =>
+        (StatusCodes.OK, Map("message" -> "Ok"))
+      }.getOrElse {
+        val message = s"webhooks connection for ${web} is not supported."
+        (StatusCodes.NotFound, Map("message" -> message))
+      }
+    }
+  }
+
+  def postForm(
+    appId: Int,
+    channelId: Option[Int],
+    web: String,
+    data: FormData,
+    eventClient: LEvents,
+    log: LoggingAdapter,
+    stats: Boolean,
+    statsActorRef: ActorSelection
+  )(implicit ec: ExecutionContext): Future[(StatusCode, Map[String, String])] = {
+    val eventFuture = Future {
+      WebhooksConnectors.form.get(web).map { connector =>
+        ConnectorUtil.toEvent(connector, data.fields.toMap)
+      }
+    }
+
+    eventFuture.flatMap { eventOpt =>
+      if (eventOpt.isEmpty) {
+        Future {
+          val message = s"webhooks connection for ${web} is not supported."
+          (StatusCodes.NotFound, Map("message" -> message))
+        }
+      } else {
+        val event = eventOpt.get
+        val data = eventClient.futureInsert(event, appId, channelId).map { id =>
+          val result = (StatusCodes.Created, Map("eventId" -> s"${id}"))
+
+          if (stats) {
+            statsActorRef ! Bookkeeping(appId, result._1, event)
+          }
+          result
+        }
+        data
+      }
+    }
+  }
+
+  def getForm(
+    appId: Int,
+    channelId: Option[Int],
+    web: String,
+    log: LoggingAdapter
+  )(implicit ec: ExecutionContext): Future[(StatusCode, Map[String, String])] = {
+    Future {
+      WebhooksConnectors.form.get(web).map { connector =>
+        (StatusCodes.OK, Map("message" -> "Ok"))
+      }.getOrElse {
+        val message = s"webhooks connection for ${web} is not supported."
+        (StatusCodes.NotFound, Map("message" -> message))
+      }
+    }
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/data/src/main/scala/org/apache/predictionio/data/api/WebhooksConnectors.scala
----------------------------------------------------------------------
diff --git a/data/src/main/scala/org/apache/predictionio/data/api/WebhooksConnectors.scala b/data/src/main/scala/org/apache/predictionio/data/api/WebhooksConnectors.scala
new file mode 100644
index 0000000..c2578ee
--- /dev/null
+++ b/data/src/main/scala/org/apache/predictionio/data/api/WebhooksConnectors.scala
@@ -0,0 +1,34 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package org.apache.predictionio.data.api
+
+import org.apache.predictionio.data.webhooks.JsonConnector
+import org.apache.predictionio.data.webhooks.FormConnector
+
+import org.apache.predictionio.data.webhooks.segmentio.SegmentIOConnector
+import org.apache.predictionio.data.webhooks.mailchimp.MailChimpConnector
+
+private[predictionio] object WebhooksConnectors {
+
+  val json: Map[String, JsonConnector] = Map(
+    "segmentio" -> SegmentIOConnector
+  )
+
+  val form: Map[String, FormConnector] = Map(
+    "mailchimp" -> MailChimpConnector
+  )
+
+}
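
These two maps are the single registration point for webhook connectors.
Supporting another service means one more entry; a hypothetical example
(ExampleConnector is not part of this change):

    val form: Map[String, FormConnector] = Map(
      "mailchimp" -> MailChimpConnector,
      "example" -> ExampleConnector  // hypothetical FormConnector implementation
    )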

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/data/src/main/scala/org/apache/predictionio/data/package.scala
----------------------------------------------------------------------
diff --git a/data/src/main/scala/org/apache/predictionio/data/package.scala b/data/src/main/scala/org/apache/predictionio/data/package.scala
new file mode 100644
index 0000000..9284787
--- /dev/null
+++ b/data/src/main/scala/org/apache/predictionio/data/package.scala
@@ -0,0 +1,21 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package org.apache.predictionio
+
+/** Provides data access for PredictionIO and any engines running on top of
+  * PredictionIO
+  */
+package object data {}

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/data/src/main/scala/org/apache/predictionio/data/storage/AccessKeys.scala
----------------------------------------------------------------------
diff --git a/data/src/main/scala/org/apache/predictionio/data/storage/AccessKeys.scala b/data/src/main/scala/org/apache/predictionio/data/storage/AccessKeys.scala
new file mode 100644
index 0000000..3285de9
--- /dev/null
+++ b/data/src/main/scala/org/apache/predictionio/data/storage/AccessKeys.scala
@@ -0,0 +1,71 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package org.apache.predictionio.data.storage
+
+import java.security.SecureRandom
+
+import org.apache.predictionio.annotation.DeveloperApi
+import org.apache.commons.codec.binary.Base64
+
+/** :: DeveloperApi ::
+  * Stores mapping of access keys, app IDs, and lists of allowed event names
+  *
+  * @param key Access key
+  * @param appid App ID
+  * @param events List of allowed events for this particular app key
+  * @group Meta Data
+  */
+@DeveloperApi
+case class AccessKey(
+  key: String,
+  appid: Int,
+  events: Seq[String])
+
+/** :: DeveloperApi ::
+  * Base trait of the [[AccessKey]] data access object
+  *
+  * @group Meta Data
+  */
+@DeveloperApi
+trait AccessKeys {
+  /** Insert a new [[AccessKey]]. If the key field is empty, a key will be
+    * generated.
+    */
+  def insert(k: AccessKey): Option[String]
+
+  /** Get an [[AccessKey]] by key */
+  def get(k: String): Option[AccessKey]
+
+  /** Get all [[AccessKey]]s */
+  def getAll(): Seq[AccessKey]
+
+  /** Get all [[AccessKey]]s for a particular app ID */
+  def getByAppid(appid: Int): Seq[AccessKey]
+
+  /** Update an [[AccessKey]] */
+  def update(k: AccessKey): Unit
+
+  /** Delete an [[AccessKey]] */
+  def delete(k: String): Unit
+
+  /** Default implementation of key generation */
+  def generateKey: String = {
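+    // NOTE: getInstanceStrong may be backed by a blocking entropy source on
+    // some platforms, so key generation can stall until entropy is available.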
+    val sr = SecureRandom.getInstanceStrong
+    val srBytes = Array.fill(48)(0.toByte)
+    sr.nextBytes(srBytes)
+    Base64.encodeBase64URLSafeString(srBytes)
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/data/src/main/scala/org/apache/predictionio/data/storage/Apps.scala
----------------------------------------------------------------------
diff --git a/data/src/main/scala/org/apache/predictionio/data/storage/Apps.scala b/data/src/main/scala/org/apache/predictionio/data/storage/Apps.scala
new file mode 100644
index 0000000..b68e1b6
--- /dev/null
+++ b/data/src/main/scala/org/apache/predictionio/data/storage/Apps.scala
@@ -0,0 +1,58 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package org.apache.predictionio.data.storage
+
+import org.apache.predictionio.annotation.DeveloperApi
+
+/** :: DeveloperApi ::
+  * Stores mapping of app IDs and names
+  *
+  * @param id ID of the app.
+  * @param name Name of the app.
+  * @param description Long description of the app.
+  * @group Meta Data
+  */
+@DeveloperApi
+case class App(
+  id: Int,
+  name: String,
+  description: Option[String])
+
+/** :: DeveloperApi ::
+  * Base trait of the [[App]] data access object
+  *
+  * @group Meta Data
+  */
+@DeveloperApi
+trait Apps {
+  /** Insert a new [[App]]. Returns a generated app ID if the supplied app ID is 0. */
+  def insert(app: App): Option[Int]
+
+  /** Get an [[App]] by app ID */
+  def get(id: Int): Option[App]
+
+  /** Get an [[App]] by app name */
+  def getByName(name: String): Option[App]
+
+  /** Get all [[App]]s */
+  def getAll(): Seq[App]
+
+  /** Update an [[App]] */
+  def update(app: App): Unit
+
+  /** Delete an [[App]] */
+  def delete(id: Int): Unit
+}

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/data/src/main/scala/org/apache/predictionio/data/storage/BiMap.scala
----------------------------------------------------------------------
diff --git a/data/src/main/scala/org/apache/predictionio/data/storage/BiMap.scala b/data/src/main/scala/org/apache/predictionio/data/storage/BiMap.scala
new file mode 100644
index 0000000..ad845b3
--- /dev/null
+++ b/data/src/main/scala/org/apache/predictionio/data/storage/BiMap.scala
@@ -0,0 +1,164 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package org.apache.predictionio.data.storage
+
+import scala.collection.immutable.HashMap
+
+import org.apache.spark.rdd.RDD
+
+/** An immutable bi-directional map. Every key maps to a unique value, and the
+  * [[inverse]] view maps each value back to its key.
+  */
+class BiMap[K, V] private[predictionio] (
+  private val m: Map[K, V],
+  private val i: Option[BiMap[V, K]] = None
+  ) extends Serializable {
+
+  // NOTE: make inverse's inverse point back to current BiMap
+  val inverse: BiMap[V, K] = i.getOrElse {
+    val rev = m.map(_.swap)
+    require((rev.size == m.size),
+      s"Failed to create reversed map. Cannot have duplicated values.")
+    new BiMap(rev, Some(this))
+  }
+
+  def get(k: K): Option[V] = m.get(k)
+
+  def getOrElse(k: K, default: => V): V = m.getOrElse(k, default)
+
+  def contains(k: K): Boolean = m.contains(k)
+
+  def apply(k: K): V = m.apply(k)
+
+  /** Converts to a map.
+    * @return a map of type immutable.Map[K, V]
+    */
+  def toMap: Map[K, V] = m
+
+  /** Converts to a sequence.
+    * @return a sequence containing all elements of this map
+    */
+  def toSeq: Seq[(K, V)] = m.toSeq
+
+  def size: Int = m.size
+
+  def take(n: Int): BiMap[K, V] = BiMap(m.take(n))
+
+  override def toString: String = m.toString
+}
+
+object BiMap {
+
+  def apply[K, V](x: Map[K, V]): BiMap[K, V] = new BiMap(x)
+
+  /** Create a BiMap[String, Long] from a set of String. The Long index starts
+    * from 0.
+    * @param keys a set of String
+    * @return a String to Long BiMap
+    */
+  def stringLong(keys: Set[String]): BiMap[String, Long] = {
+    val hm = HashMap(keys.toSeq.zipWithIndex.map(t => (t._1, t._2.toLong)) : _*)
+    new BiMap(hm)
+  }
+
+  /** Create a BiMap[String, Long] from an array of String.
+    * NOTE: the array cannot contain duplicate elements.
+    * The Long index starts from 0.
+    * @param keys a set of String
+    * @return a String to Long BiMap
+    */
+  def stringLong(keys: Array[String]): BiMap[String, Long] = {
+    val hm = HashMap(keys.zipWithIndex.map(t => (t._1, t._2.toLong)) : _*)
+    new BiMap(hm)
+  }
+
+  /** Create a BiMap[String, Long] from RDD[String]. The Long index starts
+    * from 0.
+    * @param keys RDD of String
+    * @return a String to Long BiMap
+    */
+  def stringLong(keys: RDD[String]): BiMap[String, Long] = {
+    stringLong(keys.distinct.collect)
+  }
+
+  /** Create a BiMap[String, Int] from a set of String. The Int index starts
+    * from 0.
+    * @param keys a set of String
+    * @return a String to Int BiMap
+    */
+  def stringInt(keys: Set[String]): BiMap[String, Int] = {
+    val hm = HashMap(keys.toSeq.zipWithIndex : _*)
+    new BiMap(hm)
+  }
+
+  /** Create a BiMap[String, Int] from an array of String.
+    * NOTE: the array cannot contain duplicate elements.
+    * The Int index starts from 0.
+    * @param keys a set of String
+    * @return a String to Int BiMap
+    */
+  def stringInt(keys: Array[String]): BiMap[String, Int] = {
+    val hm = HashMap(keys.zipWithIndex : _*)
+    new BiMap(hm)
+  }
+
+  /** Create a BiMap[String, Int] from RDD[String]. The Int index starts
+    * from 0.
+    * @param keys RDD of String
+    * @return a String to Int BiMap
+    */
+  def stringInt(keys: RDD[String]): BiMap[String, Int] = {
+    stringInt(keys.distinct.collect)
+  }
+
+  private[this] def stringDoubleImpl(keys: Seq[String])
+  : BiMap[String, Double] = {
+    val ki = keys.zipWithIndex.map(e => (e._1, e._2.toDouble))
+    new BiMap(HashMap(ki : _*))
+  }
+
+  /** Create a BiMap[String, Double] from a set of String. The Double index
+    * starts from 0.
+    * @param keys a set of String
+    * @return a String to Double BiMap
+    */
+  def stringDouble(keys: Set[String]): BiMap[String, Double] = {
+    // val hm = HashMap(keys.toSeq.zipWithIndex.map(_.toDouble) : _*)
+    // new BiMap(hm)
+    stringDoubleImpl(keys.toSeq)
+  }
+
+  /** Create a BiMap[String, Double] from an array of String.
+    * NOTE: the array cannot contain duplicate elements.
+    * The Double index starts from 0.
+    * @param keys a set of String
+    * @return a String to Double BiMap
+    */
+  def stringDouble(keys: Array[String]): BiMap[String, Double] = {
+    // val hm = HashMap(keys.zipWithIndex.mapValues(_.toDouble) : _*)
+    // new BiMap(hm)
+    stringDoubleImpl(keys.toSeq)
+  }
+
+  /** Create a BiMap[String, Double] from RDD[String]. The Double index starts
+    * from 0.
+    * @param keys RDD of String
+    * @return a String to Double BiMap
+    */
+  def stringDouble(keys: RDD[String]): BiMap[String, Double] = {
+    stringDoubleImpl(keys.distinct.collect)
+  }
+}
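
A short usage sketch of the factory methods and the inverse view (keys are
placeholders):

    import org.apache.predictionio.data.storage.BiMap

    val index: BiMap[String, Int] = BiMap.stringInt(Set("a", "b", "c"))
    val i: Int = index("a")              // index assigned to "a"
    val back: String = index.inverse(i)  // "a" again, via the inverse view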

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/data/src/main/scala/org/apache/predictionio/data/storage/Channels.scala
----------------------------------------------------------------------
diff --git a/data/src/main/scala/org/apache/predictionio/data/storage/Channels.scala b/data/src/main/scala/org/apache/predictionio/data/storage/Channels.scala
new file mode 100644
index 0000000..e602e1e
--- /dev/null
+++ b/data/src/main/scala/org/apache/predictionio/data/storage/Channels.scala
@@ -0,0 +1,79 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package org.apache.predictionio.data.storage
+
+import org.apache.predictionio.annotation.DeveloperApi
+
+/** :: DeveloperApi ::
+  * Stores mapping of channel IDs, names and app ID
+  *
+  * @param id ID of the channel
+  * @param name Name of the channel (must be unique within the same app)
+  * @param appid ID of the app which this channel belongs to
+  * @group Meta Data
+  */
+@DeveloperApi
+case class Channel(
+  id: Int,
+  name: String, // must be unique within the same app
+  appid: Int
+) {
+  require(Channel.isValidName(name),
+    "Invalid channel name: ${name}. ${Channel.nameConstraint}")
+}
+
+/** :: DeveloperApi ::
+  * Companion object of [[Channel]]
+  *
+  * @group Meta Data
+  */
+@DeveloperApi
+object Channel {
+  /** Examine whether the supplied channel name is valid. A valid channel name
+    * must consist of 1 to 16 alphanumeric and '-' characters.
+    *
+    * @param s Channel name to examine
+    * @return true if channel name is valid, false otherwise
+    */
+  def isValidName(s: String): Boolean = {
+    // note: update channelNameConstraint if this rule is changed
+    s.matches("^[a-zA-Z0-9-]{1,16}$")
+  }
+
+  /** For consistent error message display */
+  val nameConstraint: String =
+    "Only alphanumeric and - characters are allowed and max length is 16."
+}
+
+/** :: DeveloperApi ::
+  * Base trait of the [[Channel]] data access object
+  *
+  * @group Meta Data
+  */
+@DeveloperApi
+trait Channels {
+  /** Insert a new [[Channel]]. Returns a generated channel ID if original ID is 0. */
+  def insert(channel: Channel): Option[Int]
+
+  /** Get a [[Channel]] by channel ID */
+  def get(id: Int): Option[Channel]
+
+  /** Get all [[Channel]] by app ID */
+  def getByAppid(appid: Int): Seq[Channel]
+
+  /** Delete a [[Channel]] */
+  def delete(id: Int): Unit
+}
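
Illustrative checks against the channel name rule above:

    import org.apache.predictionio.data.storage.Channel

    Channel.isValidName("my-channel-01")  // true
    Channel.isValidName("")               // false: length must be 1 to 16
    Channel.isValidName("has spaces")     // false: only alphanumerics and '-'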

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/data/src/main/scala/org/apache/predictionio/data/storage/DataMap.scala
----------------------------------------------------------------------
diff --git a/data/src/main/scala/org/apache/predictionio/data/storage/DataMap.scala b/data/src/main/scala/org/apache/predictionio/data/storage/DataMap.scala
new file mode 100644
index 0000000..93b6f51
--- /dev/null
+++ b/data/src/main/scala/org/apache/predictionio/data/storage/DataMap.scala
@@ -0,0 +1,241 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package org.apache.predictionio.data.storage
+
+import org.json4s._
+import org.json4s.native.JsonMethods.parse
+
+import scala.collection.GenTraversableOnce
+import scala.collection.JavaConversions
+
+/** Exception class for [[DataMap]]
+  *
+  * @group Event Data
+  */
+case class DataMapException(msg: String, cause: Exception)
+  extends Exception(msg, cause) {
+  def this(msg: String) = this(msg, null)
+}
+
+/** A DataMap stores properties of an event or entity. Internally it is a Map
+  * whose keys are property names and whose values are the corresponding JSON
+  * values. Use the [[get]] method to retrieve the value of a mandatory
+  * property or use [[getOpt]] to retrieve the value of an optional property.
+  *
+  * @param fields Map of property name to JValue
+  * @group Event Data
+  */
+class DataMap (
+  val fields: Map[String, JValue]
+) extends Serializable {
+  @transient lazy implicit private val formats = DefaultFormats +
+    new DateTimeJson4sSupport.Serializer
+
+  /** Check the existence of a required property name. Throw an exception if
+    * it does not exist.
+    *
+    * @param name The property name
+    */
+  def require(name: String): Unit = {
+    if (!fields.contains(name)) {
+      throw new DataMapException(s"The field $name is required.")
+    }
+  }
+
+  /** Check if this DataMap contains a specific property.
+    *
+    * @param name The property name
+    * @return Return true if the property exists, else false.
+    */
+  def contains(name: String): Boolean = {
+    fields.contains(name)
+  }
+
+  /** Get the value of a mandatory property. Exception is thrown if the property
+    * does not exist.
+    *
+    * @tparam T The type of the property value
+    * @param name The property name
+    * @return Return the property value of type T
+    */
+  def get[T: Manifest](name: String): T = {
+    require(name)
+    fields(name) match {
+      case JNull => throw new DataMapException(
+        s"The required field $name cannot be null.")
+      case x: JValue => x.extract[T]
+    }
+  }
+
+  /** Get the value of an optional property. Return None if the property does
+    * not exist.
+    *
+    * @tparam T The type of the property value
+    * @param name The property name
+    * @return Return the property value of type Option[T]
+    */
+  def getOpt[T: Manifest](name: String): Option[T] = {
+    // either the field doesn't exist or its value is null
+    fields.get(name).flatMap(_.extract[Option[T]])
+  }
+
+  /** Get the value of an optional property. Return default value if the
+    * property does not exist.
+    *
+    * @tparam T The type of the property value
+    * @param name The property name
+    * @param default The default property value of type T
+    * @return Return the property value of type T
+    */
+  def getOrElse[T: Manifest](name: String, default: T): T = {
+    getOpt[T](name).getOrElse(default)
+  }
+
+  /** Java-friendly method for getting the value of a property. Return null if the
+    * property does not exist.
+    *
+    * @tparam T The type of the property value
+    * @param name The property name
+    * @param clazz The class of the type of the property value
+    * @return Return the property value of type T
+    */
+  def get[T](name: String, clazz: java.lang.Class[T]): T = {
+    val manifest = new Manifest[T] {
+      override def erasure: Class[_] = clazz
+      override def runtimeClass: Class[_] = clazz
+    }
+
+    fields.get(name) match {
+      case None => null.asInstanceOf[T]
+      case Some(JNull) => null.asInstanceOf[T]
+      case Some(x) => x.extract[T](formats, manifest)
+    }
+  }
+
+  /** Java-friendly method for getting a list of values of a property. Return null if the
+    * property does not exist.
+    *
+    * @param name The property name
+    * @return Return the list of property values
+    */
+  def getStringList(name: String): java.util.List[String] = {
+    fields.get(name) match {
+      case None => null
+      case Some(JNull) => null
+      case Some(x) =>
+        JavaConversions.seqAsJavaList(x.extract[List[String]](formats, manifest[List[String]]))
+    }
+  }
+
+  /** Return a new DataMap with elements containing elements from the left hand
+    * side operand followed by elements from the right hand side operand.
+    *
+    * @param that Right hand side DataMap
+    * @return A new DataMap
+    */
+  def ++ (that: DataMap): DataMap = DataMap(this.fields ++ that.fields)
+
+  /** Creates a new DataMap from this DataMap by removing all elements of
+    * another collection.
+    *
+    * @param that A collection containing the removed property names
+    * @return A new DataMap
+    */
+  def -- (that: GenTraversableOnce[String]): DataMap =
+    DataMap(this.fields -- that)
+
+  /** Tests whether the DataMap is empty.
+    *
+    * @return true if the DataMap is empty, false otherwise.
+    */
+  def isEmpty: Boolean = fields.isEmpty
+
+  /** Collects all property names of this DataMap in a set.
+    *
+    * @return a set containing all property names of this DataMap.
+    */
+  def keySet: Set[String] = this.fields.keySet
+
+  /** Converts this DataMap to a List.
+    *
+    * @return a list of (property name, JSON value) tuples.
+    */
+  def toList(): List[(String, JValue)] = fields.toList
+
+  /** Converts this DataMap to a JObject.
+    *
+    * @return the JObject initialized by this DataMap.
+    */
+  def toJObject(): JObject = JObject(toList())
+
+  /** Converts this DataMap to a case class of type T.
+    *
+    * @return the object of type T.
+    */
+  def extract[T: Manifest]: T = {
+    toJObject().extract[T]
+  }
+
+  override
+  def toString: String = s"DataMap($fields)"
+
+  override
+  def hashCode: Int = 41 + fields.hashCode
+
+  override
+  def equals(other: Any): Boolean = other match {
+    case that: DataMap => that.canEqual(this) && this.fields.equals(that.fields)
+    case _ => false
+  }
+
+  def canEqual(other: Any): Boolean = other.isInstanceOf[DataMap]
+}
+
+/** Companion object of the [[DataMap]] class
+  *
+  * @group Event Data
+  */
+object DataMap {
+  /** Create an empty DataMap
+    * @return an empty DataMap
+    */
+  def apply(): DataMap = new DataMap(Map[String, JValue]())
+
+  /** Create a DataMap from a Map of String to JValue
+    * @param fields a Map of String to JValue
+    * @return a new DataMap initialized by fields
+    */
+  def apply(fields: Map[String, JValue]): DataMap = new DataMap(fields)
+
+  /** Create a DataMap from a JObject
+    * @param jObj JObject
+    * @return a new DataMap initialized by a JObject
+    */
+  def apply(jObj: JObject): DataMap = {
+    if (jObj == null) {
+      apply()
+    } else {
+      new DataMap(jObj.obj.toMap)
+    }
+  }
+
+  /** Create a DataMap from a JSON String
+    * @param js JSON String. eg """{ "a": 1, "b": "foo" }"""
+    * @return a new DataMap initialized by a JSON string
+    */
+  def apply(js: String): DataMap = apply(parse(js).asInstanceOf[JObject])
+
+}

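For context, a minimal usage sketch of the DataMap API above (illustrative
only; the JSON and property names are made up):

    import org.apache.predictionio.data.storage.DataMap

    val dm = DataMap("""{"name": "alice", "age": 29, "nickname": null}""")

    dm.get[String]("name")         // "alice"
    dm.getOrElse[Int]("age", 0)    // 29
    dm.getOpt[String]("nickname")  // None, because the stored value is null
    // dm.get[String]("nickname") would throw DataMapException: a required
    // field may not be null.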
http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/data/src/main/scala/org/apache/predictionio/data/storage/DateTimeJson4sSupport.scala
----------------------------------------------------------------------
diff --git a/data/src/main/scala/org/apache/predictionio/data/storage/DateTimeJson4sSupport.scala b/data/src/main/scala/org/apache/predictionio/data/storage/DateTimeJson4sSupport.scala
new file mode 100644
index 0000000..b3789a4
--- /dev/null
+++ b/data/src/main/scala/org/apache/predictionio/data/storage/DateTimeJson4sSupport.scala
@@ -0,0 +1,47 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package org.apache.predictionio.data.storage
+
+import org.apache.predictionio.annotation.DeveloperApi
+import org.apache.predictionio.data.{Utils => DataUtils}
+import org.joda.time.DateTime
+import org.json4s._
+
+/** :: DeveloperApi ::
+  * JSON4S serializer for Joda-Time
+  *
+  * @group Common
+  */
+@DeveloperApi
+object DateTimeJson4sSupport {
+
+  @transient lazy implicit val formats = DefaultFormats
+
+  /** Serialize DateTime to JValue */
+  def serializeToJValue: PartialFunction[Any, JValue] = {
+    case d: DateTime => JString(DataUtils.dateTimeToString(d))
+  }
+
+  /** Deserialize JValue to DateTime */
+  def deserializeFromJValue: PartialFunction[JValue, DateTime] = {
+    case jv: JValue => DataUtils.stringToDateTime(jv.extract[String])
+  }
+
+  /** Custom JSON4S serializer for Joda-Time */
+  class Serializer extends CustomSerializer[DateTime](format => (
+    deserializeFromJValue, serializeToJValue))
+
+}

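A sketch of how this serializer is registered with json4s (illustrative;
the Record case class is made up):

    import org.apache.predictionio.data.storage.DateTimeJson4sSupport
    import org.joda.time.DateTime
    import org.json4s._
    import org.json4s.native.Serialization

    implicit val formats: Formats =
      DefaultFormats + new DateTimeJson4sSupport.Serializer

    case class Record(createdAt: DateTime)

    val json = Serialization.write(Record(DateTime.now))
    val back = Serialization.read[Record](json)  // DateTime round-trips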
http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/data/src/main/scala/org/apache/predictionio/data/storage/EngineInstances.scala
----------------------------------------------------------------------
diff --git a/data/src/main/scala/org/apache/predictionio/data/storage/EngineInstances.scala b/data/src/main/scala/org/apache/predictionio/data/storage/EngineInstances.scala
new file mode 100644
index 0000000..bc71f3f
--- /dev/null
+++ b/data/src/main/scala/org/apache/predictionio/data/storage/EngineInstances.scala
@@ -0,0 +1,177 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package org.apache.predictionio.data.storage
+
+import com.github.nscala_time.time.Imports._
+import org.apache.predictionio.annotation.DeveloperApi
+import org.json4s._
+
+/** :: DeveloperApi ::
+  * Stores parameters, model, and other information for each engine instance
+  *
+  * @param id Engine instance ID.
+  * @param status Status of the engine instance.
+  * @param startTime Start time of the training/evaluation.
+  * @param endTime End time of the training/evaluation.
+  * @param engineId Engine ID of the instance.
+  * @param engineVersion Engine version of the instance.
+  * @param engineVariant Engine variant ID of the instance.
+  * @param engineFactory Engine factory class for the instance.
+  * @param batch A batch label of the engine instance.
+  * @param env The environment in which the instance was created.
+  * @param sparkConf Custom Spark configuration of the instance.
+  * @param dataSourceParams Data source parameters of the instance.
+  * @param preparatorParams Preparator parameters of the instance.
+  * @param algorithmsParams Algorithms parameters of the instance.
+  * @param servingParams Serving parameters of the instance.
+  * @group Meta Data
+  */
+@DeveloperApi
+case class EngineInstance(
+  id: String,
+  status: String,
+  startTime: DateTime,
+  endTime: DateTime,
+  engineId: String,
+  engineVersion: String,
+  engineVariant: String,
+  engineFactory: String,
+  batch: String,
+  env: Map[String, String],
+  sparkConf: Map[String, String],
+  dataSourceParams: String,
+  preparatorParams: String,
+  algorithmsParams: String,
+  servingParams: String)
+
+/** :: DeveloperApi ::
+  * Base trait of the [[EngineInstance]] data access object
+  *
+  * @group Meta Data
+  */
+@DeveloperApi
+trait EngineInstances {
+  /** Insert a new [[EngineInstance]] */
+  def insert(i: EngineInstance): String
+
+  /** Get an [[EngineInstance]] by ID */
+  def get(id: String): Option[EngineInstance]
+
+  /** Get all [[EngineInstance]]s */
+  def getAll(): Seq[EngineInstance]
+
+  /** Get the instance with the latest training start time that has trained
+    * to completion
+    */
+  def getLatestCompleted(
+      engineId: String,
+      engineVersion: String,
+      engineVariant: String): Option[EngineInstance]
+
+  /** Get all instances that have trained to completion */
+  def getCompleted(
+    engineId: String,
+    engineVersion: String,
+    engineVariant: String): Seq[EngineInstance]
+
+  /** Update an [[EngineInstance]] */
+  def update(i: EngineInstance): Unit
+
+  /** Delete an [[EngineInstance]] */
+  def delete(id: String): Unit
+}
+
+/** :: DeveloperApi ::
+  * JSON4S serializer for [[EngineInstance]]
+  *
+  * @group Meta Data
+  */
+@DeveloperApi
+class EngineInstanceSerializer
+    extends CustomSerializer[EngineInstance](
+  format => ({
+    case JObject(fields) =>
+      implicit val formats = DefaultFormats
+      val seed = EngineInstance(
+          id = "",
+          status = "",
+          startTime = DateTime.now,
+          endTime = DateTime.now,
+          engineId = "",
+          engineVersion = "",
+          engineVariant = "",
+          engineFactory = "",
+          batch = "",
+          env = Map(),
+          sparkConf = Map(),
+          dataSourceParams = "",
+          preparatorParams = "",
+          algorithmsParams = "",
+          servingParams = "")
+      fields.foldLeft(seed) { case (i, field) =>
+        field match {
+          case JField("id", JString(id)) => i.copy(id = id)
+          case JField("status", JString(status)) => i.copy(status = status)
+          case JField("startTime", JString(startTime)) =>
+            i.copy(startTime = Utils.stringToDateTime(startTime))
+          case JField("endTime", JString(endTime)) =>
+            i.copy(endTime = Utils.stringToDateTime(endTime))
+          case JField("engineId", JString(engineId)) =>
+            i.copy(engineId = engineId)
+          case JField("engineVersion", JString(engineVersion)) =>
+            i.copy(engineVersion = engineVersion)
+          case JField("engineVariant", JString(engineVariant)) =>
+            i.copy(engineVariant = engineVariant)
+          case JField("engineFactory", JString(engineFactory)) =>
+            i.copy(engineFactory = engineFactory)
+          case JField("batch", JString(batch)) => i.copy(batch = batch)
+          case JField("env", env) =>
+            i.copy(env = Extraction.extract[Map[String, String]](env))
+          case JField("sparkConf", sparkConf) =>
+            i.copy(sparkConf = Extraction.extract[Map[String, String]](sparkConf))
+          case JField("dataSourceParams", JString(dataSourceParams)) =>
+            i.copy(dataSourceParams = dataSourceParams)
+          case JField("preparatorParams", JString(preparatorParams)) =>
+            i.copy(preparatorParams = preparatorParams)
+          case JField("algorithmsParams", JString(algorithmsParams)) =>
+            i.copy(algorithmsParams = algorithmsParams)
+          case JField("servingParams", JString(servingParams)) =>
+            i.copy(servingParams = servingParams)
+          case _ => i
+        }
+      }
+  },
+  {
+    case i: EngineInstance =>
+      JObject(
+        JField("id", JString(i.id)) ::
+        JField("status", JString(i.status)) ::
+        JField("startTime", JString(i.startTime.toString)) ::
+        JField("endTime", JString(i.endTime.toString)) ::
+        JField("engineId", JString(i.engineId)) ::
+        JField("engineVersion", JString(i.engineVersion)) ::
+        JField("engineVariant", JString(i.engineVariant)) ::
+        JField("engineFactory", JString(i.engineFactory)) ::
+        JField("batch", JString(i.batch)) ::
+        JField("env", Extraction.decompose(i.env)(DefaultFormats)) ::
+        JField("sparkConf", Extraction.decompose(i.sparkConf)(DefaultFormats)) ::
+        JField("dataSourceParams", JString(i.dataSourceParams)) ::
+        JField("preparatorParams", JString(i.preparatorParams)) ::
+        JField("algorithmsParams", JString(i.algorithmsParams)) ::
+        JField("servingParams", JString(i.servingParams)) ::
+        Nil)
+  }
+))

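The serializer slots into json4s formats like any other CustomSerializer.
A sketch (illustrative; engineInstance is assumed to be an EngineInstance
already in scope):

    import org.apache.predictionio.data.storage.{EngineInstance, EngineInstanceSerializer}
    import org.json4s._
    import org.json4s.native.Serialization

    implicit val formats: Formats = DefaultFormats + new EngineInstanceSerializer

    val json = Serialization.write(engineInstance)
    val back = Serialization.read[EngineInstance](json)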
http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/data/src/main/scala/org/apache/predictionio/data/storage/EngineManifests.scala
----------------------------------------------------------------------
diff --git a/data/src/main/scala/org/apache/predictionio/data/storage/EngineManifests.scala b/data/src/main/scala/org/apache/predictionio/data/storage/EngineManifests.scala
new file mode 100644
index 0000000..372a2e7
--- /dev/null
+++ b/data/src/main/scala/org/apache/predictionio/data/storage/EngineManifests.scala
@@ -0,0 +1,117 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package org.apache.predictionio.data.storage
+
+import org.apache.predictionio.annotation.DeveloperApi
+import org.json4s._
+
+/** :: DeveloperApi ::
+  * Provides a way to discover engines by ID and version in a distributed
+  * environment
+  *
+  * @param id Unique identifier of an engine.
+  * @param version Engine version string.
+  * @param name A short and descriptive name for the engine.
+  * @param description A long description of the engine.
+  * @param files Paths to engine files.
+  * @param engineFactory Engine's factory class name.
+  * @group Meta Data
+  */
+@DeveloperApi
+case class EngineManifest(
+  id: String,
+  version: String,
+  name: String,
+  description: Option[String],
+  files: Seq[String],
+  engineFactory: String)
+
+/** :: DeveloperApi ::
+  * Base trait of the [[EngineManifest]] data access object
+  *
+  * @group Meta Data
+  */
+@DeveloperApi
+trait EngineManifests {
+  /** Inserts an [[EngineManifest]] */
+  def insert(engineManifest: EngineManifest): Unit
+
+  /** Get an [[EngineManifest]] by its ID */
+  def get(id: String, version: String): Option[EngineManifest]
+
+  /** Get all [[EngineManifest]]s */
+  def getAll(): Seq[EngineManifest]
+
+  /** Updates an [[EngineManifest]] */
+  def update(engineInfo: EngineManifest, upsert: Boolean = false): Unit
+
+  /** Delete an [[EngineManifest]] by its ID */
+  def delete(id: String, version: String): Unit
+}
+
+/** :: DeveloperApi ::
+  * JSON4S serializer for [[EngineManifest]]
+  *
+  * @group Meta Data
+  */
+@DeveloperApi
+class EngineManifestSerializer
+    extends CustomSerializer[EngineManifest](format => (
+  {
+    case JObject(fields) =>
+      val seed = EngineManifest(
+        id = "",
+        version = "",
+        name = "",
+        description = None,
+        files = Nil,
+        engineFactory = "")
+      fields.foldLeft(seed) { case (enginemanifest, field) =>
+        field match {
+          case JField("id", JString(id)) => enginemanifest.copy(id = id)
+          case JField("version", JString(version)) =>
+            enginemanifest.copy(version = version)
+          case JField("name", JString(name)) => enginemanifest.copy(name = name)
+          case JField("description", JString(description)) =>
+            enginemanifest.copy(description = Some(description))
+          case JField("files", JArray(s)) =>
+            enginemanifest.copy(files = s.map(t =>
+              t match {
+                case JString(file) => file
+                case _ => ""
+              }
+            ))
+          case JField("engineFactory", JString(engineFactory)) =>
+            enginemanifest.copy(engineFactory = engineFactory)
+          case _ => enginemanifest
+        }
+      }
+  },
+  {
+    case enginemanifest: EngineManifest =>
+      JObject(
+        JField("id", JString(enginemanifest.id)) ::
+        JField("version", JString(enginemanifest.version)) ::
+        JField("name", JString(enginemanifest.name)) ::
+        JField("description",
+          enginemanifest.description.map(
+            x => JString(x)).getOrElse(JNothing)) ::
+        JField("files",
+          JArray(enginemanifest.files.map(x => JString(x)).toList)) ::
+        JField("engineFactory", JString(enginemanifest.engineFactory)) ::
+        Nil)
+  }
+))

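A sketch of deserializing an EngineManifest with the serializer above
(the JSON is illustrative):

    import org.apache.predictionio.data.storage.{EngineManifest, EngineManifestSerializer}
    import org.json4s._
    import org.json4s.native.Serialization

    implicit val formats: Formats = DefaultFormats + new EngineManifestSerializer

    val manifest = Serialization.read[EngineManifest](
      """{"id": "abc", "version": "0.1.0", "name": "MyEngine",
         "files": ["file:///tmp/engine.jar"], "engineFactory": "my.EngineFactory"}""")
    // manifest.description is None because the field is absent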
http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/data/src/main/scala/org/apache/predictionio/data/storage/EntityMap.scala
----------------------------------------------------------------------
diff --git a/data/src/main/scala/org/apache/predictionio/data/storage/EntityMap.scala b/data/src/main/scala/org/apache/predictionio/data/storage/EntityMap.scala
new file mode 100644
index 0000000..aa7224c
--- /dev/null
+++ b/data/src/main/scala/org/apache/predictionio/data/storage/EntityMap.scala
@@ -0,0 +1,98 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+package org.apache.predictionio.data.storage
+
+import org.apache.predictionio.annotation.Experimental
+
+import org.apache.spark.SparkContext
+import org.apache.spark.SparkContext._
+import org.apache.spark.rdd.RDD
+
+/**
+ * :: Experimental ::
+ */
+@Experimental
+class EntityIdIxMap(val idToIx: BiMap[String, Long]) extends Serializable {
+
+  val ixToId: BiMap[Long, String] = idToIx.inverse
+
+  def apply(id: String): Long = idToIx(id)
+
+  def apply(ix: Long): String = ixToId(ix)
+
+  def contains(id: String): Boolean = idToIx.contains(id)
+
+  def contains(ix: Long): Boolean = ixToId.contains(ix)
+
+  def get(id: String): Option[Long] = idToIx.get(id)
+
+  def get(ix: Long): Option[String] = ixToId.get(ix)
+
+  def getOrElse(id: String, default: => Long): Long =
+    idToIx.getOrElse(id, default)
+
+  def getOrElse(ix: Long, default: => String): String =
+    ixToId.getOrElse(ix, default)
+
+  def toMap: Map[String, Long] = idToIx.toMap
+
+  def size: Long = idToIx.size
+
+  def take(n: Int): EntityIdIxMap = new EntityIdIxMap(idToIx.take(n))
+
+  override def toString: String = idToIx.toString
+}
+
+/** :: Experimental :: */
+@Experimental
+object EntityIdIxMap {
+  def apply(keys: RDD[String]): EntityIdIxMap = {
+    new EntityIdIxMap(BiMap.stringLong(keys))
+  }
+}
+
+/** :: Experimental :: */
+@Experimental
+class EntityMap[A](val idToData: Map[String, A],
+  override val idToIx: BiMap[String, Long]) extends EntityIdIxMap(idToIx) {
+
+  def this(idToData: Map[String, A]) = this(
+    idToData,
+    BiMap.stringLong(idToData.keySet)
+  )
+
+  def data(id: String): A = idToData(id)
+
+  def data(ix: Long): A = idToData(ixToId(ix))
+
+  def getData(id: String): Option[A] = idToData.get(id)
+
+  def getData(ix: Long): Option[A] = idToData.get(ixToId(ix))
+
+  def getOrElseData(id: String, default: => A): A =
+    getData(id).getOrElse(default)
+
+  def getOrElseData(ix: Long, default: => A): A =
+    getData(ix).getOrElse(default)
+
+  override def take(n: Int): EntityMap[A] = {
+    val newIdToIx = idToIx.take(n)
+    new EntityMap[A](idToData.filterKeys(newIdToIx.contains(_)), newIdToIx)
+  }
+
+  override def toString: String = {
+    s"idToData: ${idToData.toString} " + s"idToix: ${idToIx.toString}"
+  }
+}

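EntityMap pairs each entity ID with a dense Long index, which is handy when
feeding string IDs to algorithms that expect integral IDs. A sketch with
illustrative data:

    import org.apache.predictionio.data.storage.EntityMap

    val users = new EntityMap(Map("u1" -> "Alice", "u2" -> "Bob"))

    val ix: Long = users("u1")  // dense index assigned to "u1"
    val id: String = users(ix)  // back to "u1"
    users.data("u2")            // "Bob"
    users.getData("u3")         // None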
http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/data/src/main/scala/org/apache/predictionio/data/storage/EvaluationInstances.scala
----------------------------------------------------------------------
diff --git a/data/src/main/scala/org/apache/predictionio/data/storage/EvaluationInstances.scala b/data/src/main/scala/org/apache/predictionio/data/storage/EvaluationInstances.scala
new file mode 100644
index 0000000..a58e642
--- /dev/null
+++ b/data/src/main/scala/org/apache/predictionio/data/storage/EvaluationInstances.scala
@@ -0,0 +1,135 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package org.apache.predictionio.data.storage
+
+import com.github.nscala_time.time.Imports._
+import org.apache.predictionio.annotation.DeveloperApi
+import org.json4s._
+
+/** :: DeveloperApi ::
+  * Stores meta information for each evaluation instance.
+  *
+  * @param id Instance ID.
+  * @param status Status of this instance.
+  * @param startTime Start time of this instance.
+  * @param endTime End time of this instance.
+  * @param evaluationClass Evaluation class name of this instance.
+  * @param engineParamsGeneratorClass Engine parameters generator class name of this instance.
+  * @param batch Batch label of this instance.
+  * @param env The environment in which this instance was created.
+  * @param evaluatorResults Results of the evaluator.
+  * @param evaluatorResultsHTML HTML results of the evaluator.
+  * @param evaluatorResultsJSON JSON results of the evaluator.
+  * @group Meta Data
+  */
+@DeveloperApi
+case class EvaluationInstance(
+  id: String = "",
+  status: String = "",
+  startTime: DateTime = DateTime.now,
+  endTime: DateTime = DateTime.now,
+  evaluationClass: String = "",
+  engineParamsGeneratorClass: String = "",
+  batch: String = "",
+  env: Map[String, String] = Map(),
+  sparkConf: Map[String, String] = Map(),
+  evaluatorResults: String = "",
+  evaluatorResultsHTML: String = "",
+  evaluatorResultsJSON: String = "")
+
+/** :: DeveloperApi ::
+  * Base trait of the [[EvaluationInstance]] data access object
+  *
+  * @group Meta Data
+  */
+@DeveloperApi
+trait EvaluationInstances {
+  /** Insert a new [[EvaluationInstance]] */
+  def insert(i: EvaluationInstance): String
+
+  /** Get an [[EvaluationInstance]] by ID */
+  def get(id: String): Option[EvaluationInstance]
+
+  /** Get all [[EvaluationInstance]]s */
+  def getAll: Seq[EvaluationInstance]
+
+  /** Get instances that are produced by evaluation and have run to completion,
+    * reverse sorted by the start time
+    */
+  def getCompleted: Seq[EvaluationInstance]
+
+  /** Update an [[EvaluationInstance]] */
+  def update(i: EvaluationInstance): Unit
+
+  /** Delete an [[EvaluationInstance]] */
+  def delete(id: String): Unit
+}
+
+/** :: DeveloperApi ::
+  * JSON4S serializer for [[EvaluationInstance]]
+  *
+  * @group Meta Data
+  */
+class EvaluationInstanceSerializer extends CustomSerializer[EvaluationInstance](
+  format => ({
+    case JObject(fields) =>
+      implicit val formats = DefaultFormats
+      fields.foldLeft(EvaluationInstance()) { case (i, field) =>
+        field match {
+          case JField("id", JString(id)) => i.copy(id = id)
+          case JField("status", JString(status)) => i.copy(status = status)
+          case JField("startTime", JString(startTime)) =>
+            i.copy(startTime = Utils.stringToDateTime(startTime))
+          case JField("endTime", JString(endTime)) =>
+            i.copy(endTime = Utils.stringToDateTime(endTime))
+          case JField("evaluationClass", JString(evaluationClass)) =>
+            i.copy(evaluationClass = evaluationClass)
+          case JField("engineParamsGeneratorClass", JString(engineParamsGeneratorClass)) =>
+            i.copy(engineParamsGeneratorClass = engineParamsGeneratorClass)
+          case JField("batch", JString(batch)) => i.copy(batch = batch)
+          case JField("env", env) =>
+            i.copy(env = Extraction.extract[Map[String, String]](env))
+          case JField("sparkConf", sparkConf) =>
+            i.copy(sparkConf = Extraction.extract[Map[String, String]](sparkConf))
+          case JField("evaluatorResults", JString(evaluatorResults)) =>
+            i.copy(evaluatorResults = evaluatorResults)
+          case JField("evaluatorResultsHTML", JString(evaluatorResultsHTML)) =>
+            i.copy(evaluatorResultsHTML = evaluatorResultsHTML)
+          case JField("evaluatorResultsJSON", JString(evaluatorResultsJSON)) =>
+            i.copy(evaluatorResultsJSON = evaluatorResultsJSON)
+          case _ => i
+        }
+      }
+  }, {
+    case i: EvaluationInstance =>
+      JObject(
+        JField("id", JString(i.id)) ::
+          JField("status", JString(i.status)) ::
+          JField("startTime", JString(i.startTime.toString)) ::
+          JField("endTime", JString(i.endTime.toString)) ::
+          JField("evaluationClass", JString(i.evaluationClass)) ::
+          JField("engineParamsGeneratorClass", JString(i.engineParamsGeneratorClass)) ::
+          JField("batch", JString(i.batch)) ::
+          JField("env", Extraction.decompose(i.env)(DefaultFormats)) ::
+          JField("sparkConf", Extraction.decompose(i.sparkConf)(DefaultFormats)) ::
+          JField("evaluatorResults", JString(i.evaluatorResults)) ::
+          JField("evaluatorResultsHTML", JString(i.evaluatorResultsHTML)) ::
+          JField("evaluatorResultsJSON", JString(i.evaluatorResultsJSON)) ::
+          Nil
+      )
+  }
+  )
+)

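As with the other meta data serializers, this one is added to the implicit
json4s formats. A sketch (field values are illustrative):

    import org.apache.predictionio.data.storage.{EvaluationInstance, EvaluationInstanceSerializer}
    import org.json4s._
    import org.json4s.native.Serialization

    implicit val formats: Formats = DefaultFormats + new EvaluationInstanceSerializer

    val json = Serialization.write(
      EvaluationInstance(id = "i1", status = "EVALCOMPLETED"))
    val back = Serialization.read[EvaluationInstance](json)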


[23/34] incubator-predictionio git commit: rename all except examples

Posted by do...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/core/src/main/scala/io/prediction/workflow/CreateWorkflow.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/io/prediction/workflow/CreateWorkflow.scala b/core/src/main/scala/io/prediction/workflow/CreateWorkflow.scala
deleted file mode 100644
index af5aa14..0000000
--- a/core/src/main/scala/io/prediction/workflow/CreateWorkflow.scala
+++ /dev/null
@@ -1,274 +0,0 @@
-/** Copyright 2015 TappingStone, Inc.
-  *
-  * Licensed under the Apache License, Version 2.0 (the "License");
-  * you may not use this file except in compliance with the License.
-  * You may obtain a copy of the License at
-  *
-  *     http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-package io.prediction.workflow
-
-import java.net.URI
-
-import com.github.nscala_time.time.Imports._
-import com.google.common.io.ByteStreams
-import grizzled.slf4j.Logging
-import io.prediction.controller.Engine
-import io.prediction.core.BaseEngine
-import io.prediction.data.storage.EngineInstance
-import io.prediction.data.storage.EvaluationInstance
-import io.prediction.data.storage.Storage
-import io.prediction.workflow.JsonExtractorOption.JsonExtractorOption
-import org.apache.hadoop.conf.Configuration
-import org.apache.hadoop.fs.FileSystem
-import org.apache.hadoop.fs.Path
-import org.json4s.JValue
-import org.json4s.JString
-import org.json4s.native.JsonMethods.parse
-
-import scala.language.existentials
-
-object CreateWorkflow extends Logging {
-
-  case class WorkflowConfig(
-    deployMode: String = "",
-    batch: String = "",
-    engineId: String = "",
-    engineVersion: String = "",
-    engineVariant: String = "",
-    engineFactory: String = "",
-    engineParamsKey: String = "",
-    evaluationClass: Option[String] = None,
-    engineParamsGeneratorClass: Option[String] = None,
-    env: Option[String] = None,
-    skipSanityCheck: Boolean = false,
-    stopAfterRead: Boolean = false,
-    stopAfterPrepare: Boolean = false,
-    verbosity: Int = 0,
-    verbose: Boolean = false,
-    debug: Boolean = false,
-    logFile: Option[String] = None,
-    jsonExtractor: JsonExtractorOption = JsonExtractorOption.Both)
-
-  case class AlgorithmParams(name: String, params: JValue)
-
-  private def stringFromFile(filePath: String): String = {
-    try {
-      val uri = new URI(filePath)
-      val fs = FileSystem.get(uri, new Configuration())
-      new String(ByteStreams.toByteArray(fs.open(new Path(uri))).map(_.toChar))
-    } catch {
-      case e: java.io.IOException =>
-        error(s"Error reading from file: ${e.getMessage}. Aborting workflow.")
-        sys.exit(1)
-    }
-  }
-
-  val parser = new scopt.OptionParser[WorkflowConfig]("CreateWorkflow") {
-    override def errorOnUnknownArgument: Boolean = false
-    opt[String]("batch") action { (x, c) =>
-      c.copy(batch = x)
-    } text("Batch label of the workflow run.")
-    opt[String]("engine-id") required() action { (x, c) =>
-      c.copy(engineId = x)
-    } text("Engine's ID.")
-    opt[String]("engine-version") required() action { (x, c) =>
-      c.copy(engineVersion = x)
-    } text("Engine's version.")
-    opt[String]("engine-variant") required() action { (x, c) =>
-      c.copy(engineVariant = x)
-    } text("Engine variant JSON.")
-    opt[String]("evaluation-class") action { (x, c) =>
-      c.copy(evaluationClass = Some(x))
-    } text("Class name of the run's evaluator.")
-    opt[String]("engine-params-generator-class") action { (x, c) =>
-      c.copy(engineParamsGeneratorClass = Some(x))
-    } text("Path to evaluator parameters")
-    opt[String]("env") action { (x, c) =>
-      c.copy(env = Some(x))
-    } text("Comma-separated list of environmental variables (in 'FOO=BAR' " +
-      "format) to pass to the Spark execution environment.")
-    opt[Unit]("verbose") action { (x, c) =>
-      c.copy(verbose = true)
-    } text("Enable verbose output.")
-    opt[Unit]("debug") action { (x, c) =>
-      c.copy(debug = true)
-    } text("Enable debug output.")
-    opt[Unit]("skip-sanity-check") action { (x, c) =>
-      c.copy(skipSanityCheck = true)
-    }
-    opt[Unit]("stop-after-read") action { (x, c) =>
-      c.copy(stopAfterRead = true)
-    }
-    opt[Unit]("stop-after-prepare") action { (x, c) =>
-      c.copy(stopAfterPrepare = true)
-    }
-    opt[String]("deploy-mode") action { (x, c) =>
-      c.copy(deployMode = x)
-    }
-    opt[Int]("verbosity") action { (x, c) =>
-      c.copy(verbosity = x)
-    }
-    opt[String]("engine-factory") action { (x, c) =>
-      c.copy(engineFactory = x)
-    }
-    opt[String]("engine-params-key") action { (x, c) =>
-      c.copy(engineParamsKey = x)
-    }
-    opt[String]("log-file") action { (x, c) =>
-      c.copy(logFile = Some(x))
-    }
-    opt[String]("json-extractor") action { (x, c) =>
-      c.copy(jsonExtractor = JsonExtractorOption.withName(x))
-    }
-  }
-
-  def main(args: Array[String]): Unit = {
-    val wfcOpt = parser.parse(args, WorkflowConfig())
-    if (wfcOpt.isEmpty) {
-      logger.error("WorkflowConfig is empty. Quitting")
-      return
-    }
-
-    val wfc = wfcOpt.get
-
-    WorkflowUtils.modifyLogging(wfc.verbose)
-
-    val evaluation = wfc.evaluationClass.map { ec =>
-      try {
-        WorkflowUtils.getEvaluation(ec, getClass.getClassLoader)._2
-      } catch {
-        case e @ (_: ClassNotFoundException | _: NoSuchMethodException) =>
-          error(s"Unable to obtain evaluation $ec. Aborting workflow.", e)
-          sys.exit(1)
-      }
-    }
-
-    val engineParamsGenerator = wfc.engineParamsGeneratorClass.map { epg =>
-      try {
-        WorkflowUtils.getEngineParamsGenerator(epg, getClass.getClassLoader)._2
-      } catch {
-        case e @ (_: ClassNotFoundException | _: NoSuchMethodException) =>
-          error(s"Unable to obtain engine parameters generator $epg. " +
-            "Aborting workflow.", e)
-          sys.exit(1)
-      }
-    }
-
-    val pioEnvVars = wfc.env.map(e =>
-      e.split(',').flatMap(p =>
-        p.split('=') match {
-          case Array(k, v) => List(k -> v)
-          case _ => Nil
-        }
-      ).toMap
-    ).getOrElse(Map())
-
-    if (evaluation.isEmpty) {
-      val variantJson = parse(stringFromFile(wfc.engineVariant))
-      val engineFactory = if (wfc.engineFactory == "") {
-        variantJson \ "engineFactory" match {
-          case JString(s) => s
-          case _ =>
-            error("Unable to read engine factory class name from " +
-              s"${wfc.engineVariant}. Aborting.")
-            sys.exit(1)
-        }
-      } else wfc.engineFactory
-      val variantId = variantJson \ "id" match {
-        case JString(s) => s
-        case _ =>
-          error("Unable to read engine variant ID from " +
-            s"${wfc.engineVariant}. Aborting.")
-          sys.exit(1)
-      }
-      val (engineLanguage, engineFactoryObj) = try {
-        WorkflowUtils.getEngine(engineFactory, getClass.getClassLoader)
-      } catch {
-        case e @ (_: ClassNotFoundException | _: NoSuchMethodException) =>
-          error(s"Unable to obtain engine: ${e.getMessage}. Aborting workflow.")
-          sys.exit(1)
-      }
-
-      val engine: BaseEngine[_, _, _, _] = engineFactoryObj()
-
-      val customSparkConf = WorkflowUtils.extractSparkConf(variantJson)
-      val workflowParams = WorkflowParams(
-        verbose = wfc.verbosity,
-        skipSanityCheck = wfc.skipSanityCheck,
-        stopAfterRead = wfc.stopAfterRead,
-        stopAfterPrepare = wfc.stopAfterPrepare,
-        sparkEnv = WorkflowParams().sparkEnv ++ customSparkConf)
-
-      // Evaluator Not Specified. Do training.
-      if (!engine.isInstanceOf[Engine[_,_,_,_,_,_]]) {
-        throw new NoSuchMethodException(s"Engine $engine is not trainable")
-      }
-
-      val trainableEngine = engine.asInstanceOf[Engine[_, _, _, _, _, _]]
-
-      val engineParams = if (wfc.engineParamsKey == "") {
-        trainableEngine.jValueToEngineParams(variantJson, wfc.jsonExtractor)
-      } else {
-        engineFactoryObj.engineParams(wfc.engineParamsKey)
-      }
-
-      val engineInstance = EngineInstance(
-        id = "",
-        status = "INIT",
-        startTime = DateTime.now,
-        endTime = DateTime.now,
-        engineId = wfc.engineId,
-        engineVersion = wfc.engineVersion,
-        engineVariant = variantId,
-        engineFactory = engineFactory,
-        batch = wfc.batch,
-        env = pioEnvVars,
-        sparkConf = workflowParams.sparkEnv,
-        dataSourceParams =
-          JsonExtractor.paramToJson(wfc.jsonExtractor, engineParams.dataSourceParams),
-        preparatorParams =
-          JsonExtractor.paramToJson(wfc.jsonExtractor, engineParams.preparatorParams),
-        algorithmsParams =
-          JsonExtractor.paramsToJson(wfc.jsonExtractor, engineParams.algorithmParamsList),
-        servingParams =
-          JsonExtractor.paramToJson(wfc.jsonExtractor, engineParams.servingParams))
-
-      val engineInstanceId = Storage.getMetaDataEngineInstances.insert(
-        engineInstance)
-
-      CoreWorkflow.runTrain(
-        env = pioEnvVars,
-        params = workflowParams,
-        engine = trainableEngine,
-        engineParams = engineParams,
-        engineInstance = engineInstance.copy(id = engineInstanceId))
-    } else {
-      val workflowParams = WorkflowParams(
-        verbose = wfc.verbosity,
-        skipSanityCheck = wfc.skipSanityCheck,
-        stopAfterRead = wfc.stopAfterRead,
-        stopAfterPrepare = wfc.stopAfterPrepare,
-        sparkEnv = WorkflowParams().sparkEnv)
-      val evaluationInstance = EvaluationInstance(
-        evaluationClass = wfc.evaluationClass.get,
-        engineParamsGeneratorClass = wfc.engineParamsGeneratorClass.get,
-        batch = wfc.batch,
-        env = pioEnvVars,
-        sparkConf = workflowParams.sparkEnv
-      )
-      Workflow.runEvaluation(
-        evaluation = evaluation.get,
-        engineParamsGenerator = engineParamsGenerator.get,
-        evaluationInstance = evaluationInstance,
-        params = workflowParams)
-    }
-  }
-}

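CreateWorkflow drives its command line with scopt's immutable-config
pattern. A standalone sketch of that pattern (names are illustrative):

    case class Config(batch: String = "", verbose: Boolean = false)

    val parser = new scopt.OptionParser[Config]("example") {
      opt[String]("batch") action { (x, c) => c.copy(batch = x) } text "Batch label."
      opt[Unit]("verbose") action { (_, c) => c.copy(verbose = true) }
    }

    parser.parse(Seq("--batch", "nightly", "--verbose"), Config()) match {
      case Some(config) => println(config)  // Config(nightly,true)
      case None => sys.exit(1)              // scopt has already reported the error
    }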
http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/core/src/main/scala/io/prediction/workflow/EngineServerPlugin.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/io/prediction/workflow/EngineServerPlugin.scala b/core/src/main/scala/io/prediction/workflow/EngineServerPlugin.scala
deleted file mode 100644
index 5b2649c..0000000
--- a/core/src/main/scala/io/prediction/workflow/EngineServerPlugin.scala
+++ /dev/null
@@ -1,40 +0,0 @@
-/** Copyright 2015 TappingStone, Inc.
-  *
-  * Licensed under the Apache License, Version 2.0 (the "License");
-  * you may not use this file except in compliance with the License.
-  * You may obtain a copy of the License at
-  *
-  *     http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-package io.prediction.workflow
-
-import io.prediction.data.storage.EngineInstance
-import org.json4s._
-
-trait EngineServerPlugin {
-  val pluginName: String
-  val pluginDescription: String
-  val pluginType: String
-
-  def start(context: EngineServerPluginContext): Unit
-
-  def process(
-    engineInstance: EngineInstance,
-    query: JValue,
-    prediction: JValue,
-    context: EngineServerPluginContext): JValue
-
-  def handleREST(arguments: Seq[String]): String
-}
-
-object EngineServerPlugin {
-  val outputBlocker = "outputblocker"
-  val outputSniffer = "outputsniffer"
-}

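A sketch of a minimal output sniffer implementing this trait (illustrative;
such a plugin is discovered through java.util.ServiceLoader, as the plugin
context in the next file shows):

    import io.prediction.data.storage.EngineInstance
    import io.prediction.workflow.{EngineServerPlugin, EngineServerPluginContext}
    import org.json4s._

    class LoggingSniffer extends EngineServerPlugin {
      val pluginName = "loggingsniffer"
      val pluginDescription = "Logs every query/prediction pair"
      val pluginType = EngineServerPlugin.outputSniffer

      def start(context: EngineServerPluginContext): Unit = ()

      def process(
        engineInstance: EngineInstance,
        query: JValue,
        prediction: JValue,
        context: EngineServerPluginContext): JValue = {
        context.log.info(s"query=$query prediction=$prediction")
        prediction  // sniffers pass the prediction through unchanged
      }

      def handleREST(arguments: Seq[String]): String = """{"status": "ok"}"""
    }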
http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/core/src/main/scala/io/prediction/workflow/EngineServerPluginContext.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/io/prediction/workflow/EngineServerPluginContext.scala b/core/src/main/scala/io/prediction/workflow/EngineServerPluginContext.scala
deleted file mode 100644
index eb04c6f..0000000
--- a/core/src/main/scala/io/prediction/workflow/EngineServerPluginContext.scala
+++ /dev/null
@@ -1,88 +0,0 @@
-/** Copyright 2015 TappingStone, Inc.
-  *
-  * Licensed under the Apache License, Version 2.0 (the "License");
-  * you may not use this file except in compliance with the License.
-  * You may obtain a copy of the License at
-  *
-  *     http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-package io.prediction.workflow
-
-import java.net.URI
-import java.util.ServiceLoader
-
-import akka.event.LoggingAdapter
-import com.google.common.io.ByteStreams
-import grizzled.slf4j.Logging
-import org.apache.hadoop.conf.Configuration
-import org.apache.hadoop.fs.FileSystem
-import org.apache.hadoop.fs.Path
-import org.json4s.DefaultFormats
-import org.json4s.Formats
-import org.json4s.JObject
-import org.json4s.JValue
-import org.json4s.native.JsonMethods._
-
-import scala.collection.JavaConversions._
-import scala.collection.mutable
-
-class EngineServerPluginContext(
-    val plugins: mutable.Map[String, mutable.Map[String, EngineServerPlugin]],
-    val pluginParams: mutable.Map[String, JValue],
-    val log: LoggingAdapter) {
-  def outputBlockers: Map[String, EngineServerPlugin] =
-    plugins.getOrElse(EngineServerPlugin.outputBlocker, Map()).toMap
-  def outputSniffers: Map[String, EngineServerPlugin] =
-    plugins.getOrElse(EngineServerPlugin.outputSniffer, Map()).toMap
-}
-
-object EngineServerPluginContext extends Logging {
-  implicit val formats: Formats = DefaultFormats
-
-  def apply(log: LoggingAdapter, engineVariant: String): EngineServerPluginContext = {
-    val plugins = mutable.Map[String, mutable.Map[String, EngineServerPlugin]](
-      EngineServerPlugin.outputBlocker -> mutable.Map(),
-      EngineServerPlugin.outputSniffer -> mutable.Map())
-    val pluginParams = mutable.Map[String, JValue]()
-    val serviceLoader = ServiceLoader.load(classOf[EngineServerPlugin])
-    val variantJson = parse(stringFromFile(engineVariant))
-    (variantJson \ "plugins").extractOpt[JObject].foreach { pluginDefs =>
-      pluginDefs.obj.foreach { pluginParams += _ }
-    }
-    serviceLoader foreach { service =>
-      pluginParams.get(service.pluginName) map { params =>
-        if ((params \ "enabled").extractOrElse(false)) {
-          info(s"Plugin ${service.pluginName} is enabled.")
-          plugins(service.pluginType) += service.pluginName -> service
-        } else {
-          info(s"Plugin ${service.pluginName} is disabled.")
-        }
-      } getOrElse {
-        info(s"Plugin ${service.pluginName} is disabled.")
-      }
-    }
-    new EngineServerPluginContext(
-      plugins,
-      pluginParams,
-      log)
-  }
-
-  private def stringFromFile(filePath: String): String = {
-    try {
-      val uri = new URI(filePath)
-      val fs = FileSystem.get(uri, new Configuration())
-      new String(ByteStreams.toByteArray(fs.open(new Path(uri))).map(_.toChar))
-    } catch {
-      case e: java.io.IOException =>
-        error(s"Error reading from file: ${e.getMessage}. Aborting.")
-        sys.exit(1)
-    }
-  }
-}

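The context reads plugin parameters from a top-level "plugins" object in the
engine variant JSON, and a plugin is enabled only when its parameters contain
"enabled": true. An illustrative engine.json fragment:

    {
      "id": "default",
      "engineFactory": "my.EngineFactory",
      "plugins": {
        "loggingsniffer": { "enabled": true }
      }
    }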
http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/core/src/main/scala/io/prediction/workflow/EngineServerPluginsActor.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/io/prediction/workflow/EngineServerPluginsActor.scala b/core/src/main/scala/io/prediction/workflow/EngineServerPluginsActor.scala
deleted file mode 100644
index a346d8e..0000000
--- a/core/src/main/scala/io/prediction/workflow/EngineServerPluginsActor.scala
+++ /dev/null
@@ -1,46 +0,0 @@
-/** Copyright 2015 TappingStone, Inc.
-  *
-  * Licensed under the Apache License, Version 2.0 (the "License");
-  * you may not use this file except in compliance with the License.
-  * You may obtain a copy of the License at
-  *
-  *     http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-package io.prediction.workflow
-
-import akka.actor.Actor
-import akka.event.Logging
-import io.prediction.data.storage.EngineInstance
-import org.json4s.JValue
-
-class PluginsActor(engineVariant: String) extends Actor {
-  implicit val system = context.system
-  val log = Logging(system, this)
-
-  val pluginContext = EngineServerPluginContext(log, engineVariant)
-
-  def receive: PartialFunction[Any, Unit] = {
-    case (ei: EngineInstance, q: JValue, p: JValue) =>
-      pluginContext.outputSniffers.values.foreach(_.process(ei, q, p, pluginContext))
-    case h: PluginsActor.HandleREST =>
-      try {
-        sender() ! pluginContext.outputSniffers(h.pluginName).handleREST(h.pluginArgs)
-      } catch {
-        case e: Exception =>
-          sender() ! s"""{"message":"${e.getMessage}"}"""
-      }
-    case _ =>
-      log.error("Unknown message sent to the Engine Server output sniffer plugin host.")
-  }
-}
-
-object PluginsActor {
-  case class HandleREST(pluginName: String, pluginArgs: Seq[String])
-}

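A sketch of the two message shapes the actor accepts (illustrative;
pluginsActor, engineInstance, query and prediction are assumed to be in
scope):

    import akka.pattern.ask
    import akka.util.Timeout
    import io.prediction.workflow.PluginsActor
    import scala.concurrent.duration._

    implicit val timeout: Timeout = Timeout(5.seconds)

    // Fire-and-forget: fan the triple out to all output sniffer plugins.
    pluginsActor ! ((engineInstance, query, prediction))

    // Ask: route a REST call to a named plugin; the reply is a JSON string.
    val reply = pluginsActor ? PluginsActor.HandleREST("loggingsniffer", Seq("stats"))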
http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/core/src/main/scala/io/prediction/workflow/EvaluationWorkflow.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/io/prediction/workflow/EvaluationWorkflow.scala b/core/src/main/scala/io/prediction/workflow/EvaluationWorkflow.scala
deleted file mode 100644
index ed70d87..0000000
--- a/core/src/main/scala/io/prediction/workflow/EvaluationWorkflow.scala
+++ /dev/null
@@ -1,42 +0,0 @@
-/** Copyright 2015 TappingStone, Inc.
-  *
-  * Licensed under the Apache License, Version 2.0 (the "License");
-  * you may not use this file except in compliance with the License.
-  * You may obtain a copy of the License at
-  *
-  *     http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-package io.prediction.workflow
-
-import io.prediction.controller.EngineParams
-import io.prediction.controller.Evaluation
-import io.prediction.core.BaseEvaluator
-import io.prediction.core.BaseEvaluatorResult
-import io.prediction.core.BaseEngine
-
-import grizzled.slf4j.Logger
-import org.apache.spark.SparkContext
-
-import scala.language.existentials
-
-object EvaluationWorkflow {
-  @transient lazy val logger = Logger[this.type]
-  def runEvaluation[EI, Q, P, A, R <: BaseEvaluatorResult](
-      sc: SparkContext,
-      evaluation: Evaluation,
-      engine: BaseEngine[EI, Q, P, A],
-      engineParamsList: Seq[EngineParams],
-      evaluator: BaseEvaluator[EI, Q, P, A, R],
-      params: WorkflowParams)
-    : R = {
-    val engineEvalDataSet = engine.batchEval(sc, engineParamsList, params)
-    evaluator.evaluateBase(sc, evaluation, engineEvalDataSet, params)
-  }
-}

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/core/src/main/scala/io/prediction/workflow/FakeWorkflow.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/io/prediction/workflow/FakeWorkflow.scala b/core/src/main/scala/io/prediction/workflow/FakeWorkflow.scala
deleted file mode 100644
index 350a430..0000000
--- a/core/src/main/scala/io/prediction/workflow/FakeWorkflow.scala
+++ /dev/null
@@ -1,106 +0,0 @@
-/** Copyright 2015 TappingStone, Inc.
-  *
-  * Licensed under the Apache License, Version 2.0 (the "License");
-  * you may not use this file except in compliance with the License.
-  * You may obtain a copy of the License at
-  *
-  *     http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-package io.prediction.workflow
-
-import io.prediction.annotation.Experimental
-// FIXME(yipjustin): Remove wildcard import.
-import io.prediction.core._
-import io.prediction.controller._
-
-import grizzled.slf4j.Logger
-import org.apache.spark.SparkContext
-import org.apache.spark.SparkContext._
-import org.apache.spark.rdd.RDD
-
-
-@Experimental
-private[prediction] class FakeEngine
-extends BaseEngine[EmptyParams, EmptyParams, EmptyParams, EmptyParams] {
-  @transient lazy val logger = Logger[this.type]
-
-  def train(
-    sc: SparkContext,
-    engineParams: EngineParams,
-    engineInstanceId: String,
-    params: WorkflowParams): Seq[Any] = {
-    throw new StopAfterReadInterruption()
-  }
-
-  def eval(
-    sc: SparkContext,
-    engineParams: EngineParams,
-    params: WorkflowParams)
-  : Seq[(EmptyParams, RDD[(EmptyParams, EmptyParams, EmptyParams)])] = {
-    return Seq[(EmptyParams, RDD[(EmptyParams, EmptyParams, EmptyParams)])]()
-  }
-}
-
-@Experimental
-private[prediction] class FakeRunner(f: (SparkContext => Unit))
-    extends BaseEvaluator[EmptyParams, EmptyParams, EmptyParams, EmptyParams,
-      FakeEvalResult] {
-  @transient private lazy val logger = Logger[this.type]
-  def evaluateBase(
-    sc: SparkContext,
-    evaluation: Evaluation,
-    engineEvalDataSet:
-        Seq[(EngineParams, Seq[(EmptyParams, RDD[(EmptyParams, EmptyParams, EmptyParams)])])],
-    params: WorkflowParams): FakeEvalResult = {
-    f(sc)
-    FakeEvalResult()
-  }
-}
-
-@Experimental
-private[prediction] case class FakeEvalResult() extends BaseEvaluatorResult {
-  override val noSave: Boolean = true
-}
-
-/** FakeRun allows users to implement a custom function under the exact same
-  * environment as any other PredictionIO workflow.
-  *
-  * Useful for developing new features. One only needs to extend this trait and
-  * implement a function of type (SparkContext => Unit). For example, the code below
-  * can be run with `pio eval HelloWorld`.
-  *
-  * {{{
-  * object HelloWorld extends FakeRun {
-  *   // func defines the function pio runs, must have signature (SparkContext => Unit).
-  *   func = f
-  *
-  *   def f(sc: SparkContext): Unit = {
-  *     val logger = Logger[this.type]
-  *     logger.info("HelloWorld")
-  *   }
-  * }
-  * }}}
-  *
-  */
-@Experimental
-trait FakeRun extends Evaluation with EngineParamsGenerator {
-  private[this] var _runner: FakeRunner = _
-
-  def runner: FakeRunner = _runner
-  def runner_=(r: FakeRunner) {
-    engineEvaluator = (new FakeEngine(), r)
-    engineParamsList = Seq(new EngineParams())
-  }
-
-  def func: (SparkContext => Unit) = { (sc: SparkContext) => Unit }
-  def func_=(f: SparkContext => Unit) {
-    runner = new FakeRunner(f)
-  }
-}

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/core/src/main/scala/io/prediction/workflow/JsonExtractor.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/io/prediction/workflow/JsonExtractor.scala b/core/src/main/scala/io/prediction/workflow/JsonExtractor.scala
deleted file mode 100644
index 7034063..0000000
--- a/core/src/main/scala/io/prediction/workflow/JsonExtractor.scala
+++ /dev/null
@@ -1,164 +0,0 @@
-/** Copyright 2015 TappingStone, Inc.
-  *
-  * Licensed under the Apache License, Version 2.0 (the "License");
-  * you may not use this file except in compliance with the License.
-  * You may obtain a copy of the License at
-  *
-  *     http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-package io.prediction.workflow
-
-import com.google.gson.Gson
-import com.google.gson.GsonBuilder
-import com.google.gson.TypeAdapterFactory
-import io.prediction.controller.EngineParams
-import io.prediction.controller.Params
-import io.prediction.controller.Utils
-import io.prediction.workflow.JsonExtractorOption.JsonExtractorOption
-import org.json4s.Extraction
-import org.json4s.Formats
-import org.json4s.JsonAST.{JArray, JValue}
-import org.json4s.native.JsonMethods.compact
-import org.json4s.native.JsonMethods.pretty
-import org.json4s.native.JsonMethods.parse
-import org.json4s.native.JsonMethods.render
-import org.json4s.reflect.TypeInfo
-
-object JsonExtractor {
-
-  def toJValue(
-    extractorOption: JsonExtractorOption,
-    o: Any,
-    json4sFormats: Formats = Utils.json4sDefaultFormats,
-    gsonTypeAdapterFactories: Seq[TypeAdapterFactory] = Seq.empty[TypeAdapterFactory]): JValue = {
-
-    extractorOption match {
-      case JsonExtractorOption.Both =>
-
-          val json4sResult = Extraction.decompose(o)(json4sFormats)
-          json4sResult.children.size match {
-            case 0 => parse(gson(gsonTypeAdapterFactories).toJson(o))
-            case _ => json4sResult
-          }
-      case JsonExtractorOption.Json4sNative =>
-        Extraction.decompose(o)(json4sFormats)
-      case JsonExtractorOption.Gson =>
-        parse(gson(gsonTypeAdapterFactories).toJson(o))
-    }
-  }
-
-  def extract[T](
-    extractorOption: JsonExtractorOption,
-    json: String,
-    clazz: Class[T],
-    json4sFormats: Formats = Utils.json4sDefaultFormats,
-    gsonTypeAdapterFactories: Seq[TypeAdapterFactory] = Seq.empty[TypeAdapterFactory]): T = {
-
-    extractorOption match {
-      case JsonExtractorOption.Both =>
-        try {
-          extractWithJson4sNative(json, json4sFormats, clazz)
-        } catch {
-          case e: Exception =>
-            extractWithGson(json, clazz, gsonTypeAdapterFactories)
-        }
-      case JsonExtractorOption.Json4sNative =>
-        extractWithJson4sNative(json, json4sFormats, clazz)
-      case JsonExtractorOption.Gson =>
-        extractWithGson(json, clazz, gsonTypeAdapterFactories)
-    }
-  }
-
-  def paramToJson(extractorOption: JsonExtractorOption, param: (String, Params)): String = {
-    // the to-be-replaced JValue must be built by Json4s, otherwise the tuple JValue will be wrong
-    val toBeReplacedJValue =
-      JsonExtractor.toJValue(JsonExtractorOption.Json4sNative, (param._1, null))
-    val paramJValue = JsonExtractor.toJValue(extractorOption, param._2)
-
-    compact(render(toBeReplacedJValue.replace(param._1 :: Nil, paramJValue)))
-  }
-
-  def paramsToJson(extractorOption: JsonExtractorOption, params: Seq[(String, Params)]): String = {
-    compact(render(paramsToJValue(extractorOption, params)))
-  }
-
-  def engineParamsToJson(extractorOption: JsonExtractorOption, params: EngineParams) : String = {
-    compact(render(engineParamsToJValue(extractorOption, params)))
-  }
-
-  def engineParamstoPrettyJson(
-    extractorOption: JsonExtractorOption,
-    params: EngineParams) : String = {
-
-    pretty(render(engineParamsToJValue(extractorOption, params)))
-  }
-
-  private def engineParamsToJValue(extractorOption: JsonExtractorOption, params: EngineParams) = {
-    var jValue = toJValue(JsonExtractorOption.Json4sNative, params)
-
-    val dataSourceParamsJValue = toJValue(extractorOption, params.dataSourceParams._2)
-    jValue = jValue.replace(
-      "dataSourceParams" :: params.dataSourceParams._1 :: Nil,
-      dataSourceParamsJValue)
-
-    val preparatorParamsJValue = toJValue(extractorOption, params.preparatorParams._2)
-    jValue = jValue.replace(
-      "preparatorParams" :: params.preparatorParams._1 :: Nil,
-      preparatorParamsJValue)
-
-    val algorithmParamsJValue = paramsToJValue(extractorOption, params.algorithmParamsList)
-    jValue = jValue.replace("algorithmParamsList" :: Nil, algorithmParamsJValue)
-
-    val servingParamsJValue = toJValue(extractorOption, params.servingParams._2)
-    jValue = jValue.replace("servingParams" :: params.servingParams._1 :: Nil, servingParamsJValue)
-
-    jValue
-  }
-
-  private
-  def paramsToJValue(extractorOption: JsonExtractorOption, params: Seq[(String, Params)]) = {
-    val jValues = params.map { case (name, param) =>
-      // the to-be-replaced JValue must be built by Json4s, otherwise the tuple JValue will be wrong
-      val toBeReplacedJValue =
-        JsonExtractor.toJValue(JsonExtractorOption.Json4sNative, (name, null))
-      val paramJValue = JsonExtractor.toJValue(extractorOption, param)
-
-      toBeReplacedJValue.replace(name :: Nil, paramJValue)
-    }
-
-    JArray(jValues.toList)
-  }
-
-  private def extractWithJson4sNative[T](
-    json: String,
-    formats: Formats,
-    clazz: Class[T]): T = {
-
-    Extraction.extract(parse(json), TypeInfo(clazz, None))(formats).asInstanceOf[T]
-  }
-
-  private def extractWithGson[T](
-    json: String,
-    clazz: Class[T],
-    gsonTypeAdapterFactories: Seq[TypeAdapterFactory]): T = {
-
-    gson(gsonTypeAdapterFactories).fromJson(json, clazz)
-  }
-
-  private def gson(gsonTypeAdapterFactories: Seq[TypeAdapterFactory]): Gson = {
-    val gsonBuilder = new GsonBuilder()
-    gsonTypeAdapterFactories.foreach { typeAdapterFactory =>
-      gsonBuilder.registerTypeAdapterFactory(typeAdapterFactory)
-    }
-
-    gsonBuilder.create()
-  }
-
-}

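A sketch of extracting algorithm parameters with the extractor (the class and
JSON are illustrative): Json4sNative covers Scala case classes, Gson covers
plain Java classes, and Both tries json4s first and falls back to Gson.

    import io.prediction.controller.Params
    import io.prediction.workflow.{JsonExtractor, JsonExtractorOption}

    case class AlgoParams(rank: Int, lambda: Double) extends Params

    val p: AlgoParams = JsonExtractor.extract(
      JsonExtractorOption.Both,
      """{"rank": 10, "lambda": 0.01}""",
      classOf[AlgoParams])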
http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/core/src/main/scala/io/prediction/workflow/JsonExtractorOption.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/io/prediction/workflow/JsonExtractorOption.scala b/core/src/main/scala/io/prediction/workflow/JsonExtractorOption.scala
deleted file mode 100644
index 60272fb..0000000
--- a/core/src/main/scala/io/prediction/workflow/JsonExtractorOption.scala
+++ /dev/null
@@ -1,23 +0,0 @@
-/** Copyright 2015 TappingStone, Inc.
-  *
-  * Licensed under the Apache License, Version 2.0 (the "License");
-  * you may not use this file except in compliance with the License.
-  * You may obtain a copy of the License at
-  *
-  *     http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-package io.prediction.workflow
-
-object JsonExtractorOption extends Enumeration {
-  type JsonExtractorOption = Value
-  val Json4sNative = Value
-  val Gson = Value
-  val Both = Value
-}
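
These options select the JSON backend used to deserialize component
parameters: Json4sNative for Scala case classes, Gson for Java classes, and
Both to use one with a fallback to the other. A minimal sketch via
WorkflowUtils.extractParams (also in this commit); MyParams and MyAlgorithm
are hypothetical:

    import io.prediction.controller.Params
    import io.prediction.workflow.{JsonExtractorOption, WorkflowUtils}

    case class MyParams(rank: Int) extends Params    // hypothetical
    class MyAlgorithm(val ap: MyParams)              // hypothetical component

    // clazz is the component class; the params type is read from its
    // constructor's first argument.
    val p = WorkflowUtils.extractParams(
      json = """{"rank": 10}""",
      clazz = classOf[MyAlgorithm],
      jsonExtractor = JsonExtractorOption.Json4sNative)
    // p == MyParams(10)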

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/core/src/main/scala/io/prediction/workflow/PersistentModelManifest.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/io/prediction/workflow/PersistentModelManifest.scala b/core/src/main/scala/io/prediction/workflow/PersistentModelManifest.scala
deleted file mode 100644
index c1c0a6d..0000000
--- a/core/src/main/scala/io/prediction/workflow/PersistentModelManifest.scala
+++ /dev/null
@@ -1,18 +0,0 @@
-/** Copyright 2015 TappingStone, Inc.
-  *
-  * Licensed under the Apache License, Version 2.0 (the "License");
-  * you may not use this file except in compliance with the License.
-  * You may obtain a copy of the License at
-  *
-  *     http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-package io.prediction.workflow
-
-case class PersistentModelManifest(className: String)

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/core/src/main/scala/io/prediction/workflow/Workflow.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/io/prediction/workflow/Workflow.scala b/core/src/main/scala/io/prediction/workflow/Workflow.scala
deleted file mode 100644
index c0543ab..0000000
--- a/core/src/main/scala/io/prediction/workflow/Workflow.scala
+++ /dev/null
@@ -1,135 +0,0 @@
-/** Copyright 2015 TappingStone, Inc.
-  *
-  * Licensed under the Apache License, Version 2.0 (the "License");
-  * you may not use this file except in compliance with the License.
-  * You may obtain a copy of the License at
-  *
-  *     http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-package io.prediction.workflow
-
-import io.prediction.annotation.Experimental
-import io.prediction.controller.EngineParams
-import io.prediction.controller.EngineParamsGenerator
-import io.prediction.controller.Evaluation
-import io.prediction.core.BaseEngine
-import io.prediction.core.BaseEvaluator
-import io.prediction.core.BaseEvaluatorResult
-import io.prediction.data.storage.EvaluationInstance
-
-/** Collection of workflow creation methods.
-  * @group Workflow
-  */
-object Workflow {
-  // evaluator is already instantiated.
-  // This is an undocumented way of using evaluator. Still experimental.
-  // evaluatorParams is used to write into EngineInstance, will be shown in
-  // dashboard.
-  /*
-  def runEval[EI, Q, P, A, ER <: AnyRef](
-      engine: BaseEngine[EI, Q, P, A],
-      engineParams: EngineParams,
-      evaluator: BaseEvaluator[EI, Q, P, A, ER],
-      evaluatorParams: Params,
-      env: Map[String, String] = WorkflowUtils.pioEnvVars,
-      params: WorkflowParams = WorkflowParams()) {
-
-    implicit lazy val formats = Utils.json4sDefaultFormats +
-      new NameParamsSerializer
-
-    val engineInstance = EngineInstance(
-      id = "",
-      status = "INIT",
-      startTime = DateTime.now,
-      endTime = DateTime.now,
-      engineId = "",
-      engineVersion = "",
-      engineVariant = "",
-      engineFactory = "FIXME",
-      evaluatorClass = evaluator.getClass.getName(),
-      batch = params.batch,
-      env = env,
-      sparkConf = params.sparkEnv,
-      dataSourceParams = write(engineParams.dataSourceParams),
-      preparatorParams = write(engineParams.preparatorParams),
-      algorithmsParams = write(engineParams.algorithmParamsList),
-      servingParams = write(engineParams.servingParams),
-      evaluatorParams = write(evaluatorParams),
-      evaluatorResults = "",
-      evaluatorResultsHTML = "",
-      evaluatorResultsJSON = "")
-
-    CoreWorkflow.runEval(
-      engine = engine,
-      engineParams = engineParams,
-      engineInstance = engineInstance,
-      evaluator = evaluator,
-      evaluatorParams = evaluatorParams,
-      env = env,
-      params = params)
-  }
-  */
-
-  def runEvaluation(
-      evaluation: Evaluation,
-      engineParamsGenerator: EngineParamsGenerator,
-      env: Map[String, String] = WorkflowUtils.pioEnvVars,
-      evaluationInstance: EvaluationInstance = EvaluationInstance(),
-      params: WorkflowParams = WorkflowParams()) {
-    runEvaluationTypeless(
-      evaluation = evaluation,
-      engine = evaluation.engine,
-      engineParamsList = engineParamsGenerator.engineParamsList,
-      evaluationInstance = evaluationInstance,
-      evaluator = evaluation.evaluator,
-      env = env,
-      params = params
-    )
-  }
-
-  def runEvaluationTypeless[
-      EI, Q, P, A, EEI, EQ, EP, EA, ER <: BaseEvaluatorResult](
-      evaluation: Evaluation,
-      engine: BaseEngine[EI, Q, P, A],
-      engineParamsList: Seq[EngineParams],
-      evaluationInstance: EvaluationInstance,
-      evaluator: BaseEvaluator[EEI, EQ, EP, EA, ER],
-      env: Map[String, String] = WorkflowUtils.pioEnvVars,
-      params: WorkflowParams = WorkflowParams()) {
-    runEvaluationViaCoreWorkflow(
-      evaluation = evaluation,
-      engine = engine,
-      engineParamsList = engineParamsList,
-      evaluationInstance = evaluationInstance,
-      evaluator = evaluator.asInstanceOf[BaseEvaluator[EI, Q, P, A, ER]],
-      env = env,
-      params = params)
-  }
-
-  /** :: Experimental :: */
-  @Experimental
-  def runEvaluationViaCoreWorkflow[EI, Q, P, A, R <: BaseEvaluatorResult](
-      evaluation: Evaluation,
-      engine: BaseEngine[EI, Q, P, A],
-      engineParamsList: Seq[EngineParams],
-      evaluationInstance: EvaluationInstance,
-      evaluator: BaseEvaluator[EI, Q, P, A, R],
-      env: Map[String, String] = WorkflowUtils.pioEnvVars,
-      params: WorkflowParams = WorkflowParams()) {
-    CoreWorkflow.runEvaluation(
-      evaluation = evaluation,
-      engine = engine,
-      engineParamsList = engineParamsList,
-      evaluationInstance = evaluationInstance,
-      evaluator = evaluator,
-      env = env,
-      params = params)
-  }
-}
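
A minimal sketch of kicking off the evaluation path above, assuming an engine
project already defines MyEvaluation (an Evaluation) and MyParamsGenerator
(an EngineParamsGenerator); both names are hypothetical:

    import io.prediction.workflow.{Workflow, WorkflowParams}

    Workflow.runEvaluation(
      evaluation = MyEvaluation,
      engineParamsGenerator = MyParamsGenerator,
      params = WorkflowParams(batch = "grid search"))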

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/core/src/main/scala/io/prediction/workflow/WorkflowContext.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/io/prediction/workflow/WorkflowContext.scala b/core/src/main/scala/io/prediction/workflow/WorkflowContext.scala
deleted file mode 100644
index 264c757..0000000
--- a/core/src/main/scala/io/prediction/workflow/WorkflowContext.scala
+++ /dev/null
@@ -1,45 +0,0 @@
-/** Copyright 2015 TappingStone, Inc.
-  *
-  * Licensed under the Apache License, Version 2.0 (the "License");
-  * you may not use this file except in compliance with the License.
-  * You may obtain a copy of the License at
-  *
-  *     http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-package io.prediction.workflow
-
-import grizzled.slf4j.Logging
-import org.apache.spark.SparkContext
-import org.apache.spark.SparkConf
-
-import scala.language.existentials
-
-// FIXME: move to better location.
-object WorkflowContext extends Logging {
-  def apply(
-      batch: String = "",
-      executorEnv: Map[String, String] = Map(),
-      sparkEnv: Map[String, String] = Map(),
-      mode: String = ""
-    ): SparkContext = {
-    val conf = new SparkConf()
-    val prefix = if (mode == "") "PredictionIO" else s"PredictionIO ${mode}"
-    conf.setAppName(s"${prefix}: ${batch}")
-    debug(s"Executor environment received: ${executorEnv}")
-    executorEnv.map(kv => conf.setExecutorEnv(kv._1, kv._2))
-    debug(s"SparkConf executor environment: ${conf.getExecutorEnv}")
-    debug(s"Application environment received: ${sparkEnv}")
-    conf.setAll(sparkEnv)
-    val sparkConfString = conf.getAll.toSeq
-    debug(s"SparkConf environment: $sparkConfString")
-    new SparkContext(conf)
-  }
-}
-
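
A minimal sketch of how this builds a SparkContext; spark.master is passed
through sparkEnv here only because nothing else supplies it outside
spark-submit:

    import io.prediction.workflow.WorkflowContext

    val sc = WorkflowContext(
      batch = "my batch",
      executorEnv = Map("FOO" -> "bar"),
      sparkEnv = Map("spark.master" -> "local[2]"),
      mode = "Training")
    // The resulting app name is "PredictionIO Training: my batch".
    sc.stop()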

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/core/src/main/scala/io/prediction/workflow/WorkflowParams.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/io/prediction/workflow/WorkflowParams.scala b/core/src/main/scala/io/prediction/workflow/WorkflowParams.scala
deleted file mode 100644
index 88ec54e..0000000
--- a/core/src/main/scala/io/prediction/workflow/WorkflowParams.scala
+++ /dev/null
@@ -1,42 +0,0 @@
-/** Copyright 2015 TappingStone, Inc.
-  *
-  * Licensed under the Apache License, Version 2.0 (the "License");
-  * you may not use this file except in compliance with the License.
-  * You may obtain a copy of the License at
-  *
-  *     http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-package io.prediction.workflow
-
-/** Workflow parameters.
-  *
-  * @param batch Batch label of the run.
-  * @param verbose Verbosity level.
-  * @param saveModel Controls whether trained models are persisted.
-  * @param sparkEnv Spark properties that will be set in SparkConf.setAll().
-  * @param skipSanityCheck Skips all data sanity check.
-  * @param stopAfterRead Stops workflow after reading from data source.
-  * @param stopAfterPrepare Stops workflow after data preparation.
-  * @group Workflow
-  */
-case class WorkflowParams(
-  batch: String = "",
-  verbose: Int = 2,
-  saveModel: Boolean = true,
-  sparkEnv: Map[String, String] =
-    Map[String, String]("spark.executor.extraClassPath" -> "."),
-  skipSanityCheck: Boolean = false,
-  stopAfterRead: Boolean = false,
-  stopAfterPrepare: Boolean = false) {
-  // Temporary workaround for WorkflowParamsBuilder for Java. It doesn't support
-  // custom spark environment yet.
-  def this(batch: String, verbose: Int, saveModel: Boolean)
-  = this(batch, verbose, saveModel, Map[String, String]())
-}
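
Since WorkflowParams is a plain case class, runs are usually tweaked with
copy. A minimal sketch of a dry run that stops after the data source read and
skips model persistence:

    import io.prediction.workflow.WorkflowParams

    val dryRun = WorkflowParams().copy(
      batch = "smoke test",
      saveModel = false,
      stopAfterRead = true)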

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/core/src/main/scala/io/prediction/workflow/WorkflowUtils.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/io/prediction/workflow/WorkflowUtils.scala b/core/src/main/scala/io/prediction/workflow/WorkflowUtils.scala
deleted file mode 100644
index d93b9eb..0000000
--- a/core/src/main/scala/io/prediction/workflow/WorkflowUtils.scala
+++ /dev/null
@@ -1,419 +0,0 @@
-/** Copyright 2015 TappingStone, Inc.
-  *
-  * Licensed under the Apache License, Version 2.0 (the "License");
-  * you may not use this file except in compliance with the License.
-  * You may obtain a copy of the License at
-  *
-  *     http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-package io.prediction.workflow
-
-import java.io.File
-import java.io.FileNotFoundException
-
-import io.prediction.controller.EmptyParams
-import io.prediction.controller.EngineFactory
-import io.prediction.controller.EngineParamsGenerator
-import io.prediction.controller.Evaluation
-import io.prediction.controller.Params
-import io.prediction.controller.PersistentModelLoader
-import io.prediction.controller.Utils
-import io.prediction.core.BuildInfo
-
-import com.google.gson.Gson
-import com.google.gson.JsonSyntaxException
-import grizzled.slf4j.Logging
-import io.prediction.workflow.JsonExtractorOption.JsonExtractorOption
-import org.apache.log4j.Level
-import org.apache.log4j.LogManager
-import org.apache.spark.SparkContext
-import org.apache.spark.api.java.JavaRDDLike
-import org.apache.spark.rdd.RDD
-import org.json4s.JsonAST.JValue
-import org.json4s.MappingException
-import org.json4s._
-import org.json4s.native.JsonMethods._
-
-import scala.io.Source
-import scala.language.existentials
-import scala.reflect.runtime.universe
-
-/** Collection of reusable workflow related utilities. */
-object WorkflowUtils extends Logging {
-  @transient private lazy val gson = new Gson
-
-  /** Obtains an Engine object in Scala, or instantiates an Engine in Java.
-    *
-    * @param engine Engine factory name.
-    * @param cl A Java ClassLoader to look for engine-related classes.
-    *
-    * @throws ClassNotFoundException
-    *         Thrown when engine factory class does not exist.
-    * @throws NoSuchMethodException
-    *         Thrown when engine factory's apply() method is not implemented.
-    */
-  def getEngine(engine: String, cl: ClassLoader): (EngineLanguage.Value, EngineFactory) = {
-    val runtimeMirror = universe.runtimeMirror(cl)
-    val engineModule = runtimeMirror.staticModule(engine)
-    val engineObject = runtimeMirror.reflectModule(engineModule)
-    try {
-      (
-        EngineLanguage.Scala,
-        engineObject.instance.asInstanceOf[EngineFactory]
-      )
-    } catch {
-      case e @ (_: NoSuchFieldException | _: ClassNotFoundException) => try {
-        (
-          EngineLanguage.Java,
-          Class.forName(engine).newInstance.asInstanceOf[EngineFactory]
-        )
-      }
-    }
-  }
-
-  def getEngineParamsGenerator(epg: String, cl: ClassLoader):
-    (EngineLanguage.Value, EngineParamsGenerator) = {
-    val runtimeMirror = universe.runtimeMirror(cl)
-    val epgModule = runtimeMirror.staticModule(epg)
-    val epgObject = runtimeMirror.reflectModule(epgModule)
-    try {
-      (
-        EngineLanguage.Scala,
-        epgObject.instance.asInstanceOf[EngineParamsGenerator]
-      )
-    } catch {
-      case e @ (_: NoSuchFieldException | _: ClassNotFoundException) => try {
-        (
-          EngineLanguage.Java,
-          Class.forName(epg).newInstance.asInstanceOf[EngineParamsGenerator]
-        )
-      }
-    }
-  }
-
-  def getEvaluation(evaluation: String, cl: ClassLoader): (EngineLanguage.Value, Evaluation) = {
-    val runtimeMirror = universe.runtimeMirror(cl)
-    val evaluationModule = runtimeMirror.staticModule(evaluation)
-    val evaluationObject = runtimeMirror.reflectModule(evaluationModule)
-    try {
-      (
-        EngineLanguage.Scala,
-        evaluationObject.instance.asInstanceOf[Evaluation]
-      )
-    } catch {
-      case e @ (_: NoSuchFieldException | _: ClassNotFoundException) => try {
-        (
-          EngineLanguage.Java,
-          Class.forName(evaluation).newInstance.asInstanceOf[Evaluation]
-        )
-      }
-    }
-  }
-
-  /** Converts a JSON document to an instance of Params.
-    *
-    * @param language Engine's programming language.
-    * @param json JSON document.
-    * @param clazz Class of the component that is going to receive the resulting
-    *              Params instance as a constructor argument.
-    * @param jsonExtractor JSON extractor option.
-    * @param formats JSON4S serializers for deserialization.
-    *
-    * @throws MappingException Thrown when JSON4S fails to perform conversion.
-    * @throws JsonSyntaxException Thrown when GSON fails to perform conversion.
-    */
-  def extractParams(
-      language: EngineLanguage.Value = EngineLanguage.Scala,
-      json: String,
-      clazz: Class[_],
-      jsonExtractor: JsonExtractorOption,
-      formats: Formats = Utils.json4sDefaultFormats): Params = {
-    implicit val f = formats
-    val pClass = clazz.getConstructors.head.getParameterTypes
-    if (pClass.size == 0) {
-      if (json != "") {
-        warn(s"Non-empty parameters supplied to ${clazz.getName}, but its " +
-          "constructor does not accept any arguments. Stubbing with empty " +
-          "parameters.")
-      }
-      EmptyParams()
-    } else {
-      val apClass = pClass.head
-      try {
-        JsonExtractor.extract(jsonExtractor, json, apClass, f).asInstanceOf[Params]
-      } catch {
-        case e@(_: MappingException | _: JsonSyntaxException) =>
-          error(
-            s"Unable to extract parameters for ${apClass.getName} from " +
-              s"JSON string: $json. Aborting workflow.",
-            e)
-          throw e
-      }
-    }
-  }
-
-  def getParamsFromJsonByFieldAndClass(
-      variantJson: JValue,
-      field: String,
-      classMap: Map[String, Class[_]],
-      engineLanguage: EngineLanguage.Value,
-      jsonExtractor: JsonExtractorOption): (String, Params) = {
-    variantJson findField {
-      case JField(f, _) => f == field
-      case _ => false
-    } map { jv =>
-      implicit lazy val formats = Utils.json4sDefaultFormats + new NameParamsSerializer
-      val np: NameParams = try {
-        jv._2.extract[NameParams]
-      } catch {
-        case e: Exception =>
-          error(s"Unable to extract $field name and params $jv")
-          throw e
-      }
-      val extractedParams = np.params.map { p =>
-        try {
-          if (!classMap.contains(np.name)) {
-            error(s"Unable to find $field class with name '${np.name}'" +
-              " defined in Engine.")
-            sys.exit(1)
-          }
-          WorkflowUtils.extractParams(
-            engineLanguage,
-            compact(render(p)),
-            classMap(np.name),
-            jsonExtractor,
-            formats)
-        } catch {
-          case e: Exception =>
-            error(s"Unable to extract $field params $p")
-            throw e
-        }
-      }.getOrElse(EmptyParams())
-
-      (np.name, extractedParams)
-    } getOrElse("", EmptyParams())
-  }
-
-  /** Grab environment variables that start with 'PIO_'. */
-  def pioEnvVars: Map[String, String] =
-    sys.env.filter(kv => kv._1.startsWith("PIO_"))
-
-  /** Converts Java (non-Scala) objects to a JSON4S JValue.
-    *
-    * @param params The Java object to be converted.
-    */
-  def javaObjectToJValue(params: AnyRef): JValue = parse(gson.toJson(params))
-
-  private[prediction] def checkUpgrade(
-      component: String = "core",
-      engine: String = ""): Unit = {
-    val runner = new Thread(new UpgradeCheckRunner(component, engine))
-    runner.start()
-  }
-
-  // Extract debug string by recursively traversing the data.
-  def debugString[D](data: D): String = {
-    val s: String = data match {
-      case rdd: RDD[_] => {
-        debugString(rdd.collect())
-      }
-      case javaRdd: JavaRDDLike[_, _] => {
-        debugString(javaRdd.collect())
-      }
-      case array: Array[_] => {
-        "[" + array.map(debugString).mkString(",") + "]"
-      }
-      case d: AnyRef => {
-        d.toString
-      }
-      case null => "null"
-    }
-    s
-  }
-
-  /** Detect third party software configuration files to be submitted as
-    * extras to Apache Spark. This makes sure all executors receive the same
-    * configuration.
-    */
-  def thirdPartyConfFiles: Seq[String] = {
-    val thirdPartyFiles = Map(
-      "PIO_CONF_DIR" -> "log4j.properties",
-      "ES_CONF_DIR" -> "elasticsearch.yml",
-      "HADOOP_CONF_DIR" -> "core-site.xml",
-      "HBASE_CONF_DIR" -> "hbase-site.xml")
-
-    thirdPartyFiles.keys.toSeq.map { k: String =>
-      sys.env.get(k) map { x =>
-        val p = Seq(x, thirdPartyFiles(k)).mkString(File.separator)
-        if (new File(p).exists) Seq(p) else Seq[String]()
-      } getOrElse Seq[String]()
-    }.flatten
-  }
-
-  def thirdPartyClasspaths: Seq[String] = {
-    val thirdPartyPaths = Seq(
-      "PIO_CONF_DIR",
-      "ES_CONF_DIR",
-      "POSTGRES_JDBC_DRIVER",
-      "MYSQL_JDBC_DRIVER",
-      "HADOOP_CONF_DIR",
-      "HBASE_CONF_DIR")
-    thirdPartyPaths.map(p =>
-      sys.env.get(p).map(Seq(_)).getOrElse(Seq[String]())
-    ).flatten
-  }
-
-  def modifyLogging(verbose: Boolean): Unit = {
-    val rootLoggerLevel = if (verbose) Level.TRACE else Level.INFO
-    val chattyLoggerLevel = if (verbose) Level.INFO else Level.WARN
-
-    LogManager.getRootLogger.setLevel(rootLoggerLevel)
-
-    LogManager.getLogger("org.elasticsearch").setLevel(chattyLoggerLevel)
-    LogManager.getLogger("org.apache.hadoop").setLevel(chattyLoggerLevel)
-    LogManager.getLogger("org.apache.spark").setLevel(chattyLoggerLevel)
-    LogManager.getLogger("org.eclipse.jetty").setLevel(chattyLoggerLevel)
-    LogManager.getLogger("akka").setLevel(chattyLoggerLevel)
-  }
-
-  def extractNameParams(jv: JValue): NameParams = {
-    implicit val formats = Utils.json4sDefaultFormats
-    val nameOpt = (jv \ "name").extract[Option[String]]
-    val paramsOpt = (jv \ "params").extract[Option[JValue]]
-
-    if (nameOpt.isEmpty && paramsOpt.isEmpty) {
-      error("Unable to find 'name' or 'params' fields in" +
-        s" ${compact(render(jv))}.\n" +
-        "Since 0.8.4, the 'params' field is required in engine.json" +
-        " in order to specify parameters for DataSource, Preparator or" +
-        " Serving.\n" +
-        "Please go to https://docs.prediction.io/resources/upgrade/" +
-        " for detailed instruction of how to change engine.json.")
-      sys.exit(1)
-    }
-
-    if (nameOpt.isEmpty) {
-      info(s"No 'name' is found. Default empty String will be used.")
-    }
-
-    if (paramsOpt.isEmpty) {
-      info(s"No 'params' is found. Default EmptyParams will be used.")
-    }
-
-    NameParams(
-      name = nameOpt.getOrElse(""),
-      params = paramsOpt
-    )
-  }
-
-  def extractSparkConf(root: JValue): List[(String, String)] = {
-    def flatten(jv: JValue): List[(List[String], String)] = {
-      jv match {
-        case JObject(fields) =>
-          for ((namePrefix, childJV) <- fields;
-               (name, value) <- flatten(childJV))
-          yield (namePrefix :: name) -> value
-        case JArray(_) => {
-          error("Arrays are not allowed in the sparkConf section of engine.js.")
-          sys.exit(1)
-        }
-        case JNothing => List()
-        case _ => List(List() -> jv.values.toString)
-      }
-    }
-
-    flatten(root \ "sparkConf").map(x =>
-      (x._1.reduce((a, b) => s"$a.$b"), x._2))
-  }
-}
-
-case class NameParams(name: String, params: Option[JValue])
-
-class NameParamsSerializer extends CustomSerializer[NameParams](format => ( {
-  case jv: JValue => WorkflowUtils.extractNameParams(jv)
-}, {
-  case x: NameParams =>
-    JObject(JField("name", JString(x.name)) ::
-      JField("params", x.params.getOrElse(JNothing)) :: Nil)
-}
-  ))
-
-/** Collection of reusable workflow related utilities that touch on Apache
-  * Spark. They are separated to avoid compilation problems with certain code.
-  */
-object SparkWorkflowUtils extends Logging {
-  def getPersistentModel[AP <: Params, M](
-      pmm: PersistentModelManifest,
-      runId: String,
-      params: AP,
-      sc: Option[SparkContext],
-      cl: ClassLoader): M = {
-    val runtimeMirror = universe.runtimeMirror(cl)
-    val pmmModule = runtimeMirror.staticModule(pmm.className)
-    val pmmObject = runtimeMirror.reflectModule(pmmModule)
-    try {
-      pmmObject.instance.asInstanceOf[PersistentModelLoader[AP, M]](
-        runId,
-        params,
-        sc)
-    } catch {
-      case e @ (_: NoSuchFieldException | _: ClassNotFoundException) => try {
-        val loadMethod = Class.forName(pmm.className).getMethod(
-          "load",
-          classOf[String],
-          classOf[Params],
-          classOf[SparkContext])
-        loadMethod.invoke(null, runId, params, sc.orNull).asInstanceOf[M]
-      } catch {
-        case e: ClassNotFoundException =>
-          error(s"Model class ${pmm.className} cannot be found.")
-          throw e
-        case e: NoSuchMethodException =>
-          error(
-            "The load(String, Params, SparkContext) method cannot be found.")
-          throw e
-      }
-    }
-  }
-}
-
-class UpgradeCheckRunner(
-    val component: String,
-    val engine: String) extends Runnable with Logging {
-  val version = BuildInfo.version
-  val versionsHost = "https://direct.prediction.io/"
-
-  def run(): Unit = {
-    val url = if (engine == "") {
-      s"$versionsHost$version/$component.json"
-    } else {
-      s"$versionsHost$version/$component/$engine.json"
-    }
-    try {
-      val upgradeData = Source.fromURL(url)
-    } catch {
-      case e: FileNotFoundException =>
-        debug(s"Update metainfo not found. $url")
-      case e: java.net.UnknownHostException =>
-        debug(s"${e.getClass.getName}: {e.getMessage}")
-    }
-    // TODO: Implement upgrade logic
-  }
-}
-
-class WorkflowInterruption() extends Exception
-
-case class StopAfterReadInterruption() extends WorkflowInterruption
-
-case class StopAfterPrepareInterruption() extends WorkflowInterruption
-
-object EngineLanguage extends Enumeration {
-  val Scala, Java = Value
-}
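
Of the utilities above, extractSparkConf flattens the nested sparkConf
section of engine.json into dotted Spark property names. A minimal sketch:

    import io.prediction.workflow.WorkflowUtils
    import org.json4s.native.JsonMethods.parse

    val variantJson = parse(
      """{"sparkConf": {"spark": {"executor": {"memory": "4g"}}}}""")
    val conf = WorkflowUtils.extractSparkConf(variantJson)
    // conf == List(("spark.executor.memory", "4g"))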

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/core/src/main/scala/org/apache/predictionio/controller/CustomQuerySerializer.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/predictionio/controller/CustomQuerySerializer.scala b/core/src/main/scala/org/apache/predictionio/controller/CustomQuerySerializer.scala
new file mode 100644
index 0000000..2fa5551
--- /dev/null
+++ b/core/src/main/scala/org/apache/predictionio/controller/CustomQuerySerializer.scala
@@ -0,0 +1,37 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package org.apache.predictionio.controller
+
+import org.apache.predictionio.core.BaseQuerySerializer
+
+/** If your query class cannot be automatically serialized/deserialized to/from
+  * JSON, define a trait that extends this trait and overrides the
+  * `querySerializer` member with your
+  * [[https://github.com/json4s/json4s#serializing-non-supported-types custom JSON4S serializer]].
+  * Algorithm and serving classes using your query class would only need to mix
+  * in the trait to enable the custom serializer.
+  *
+  * @group Helper
+  */
+trait CustomQuerySerializer extends BaseQuerySerializer
+
+/** DEPRECATED. Use [[CustomQuerySerializer]] instead.
+  *
+  * @group Helper
+  */
+@deprecated("Use CustomQuerySerializer instead.", "0.9.2")
+trait WithQuerySerializer extends CustomQuerySerializer
+
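
A minimal sketch of the pattern the scaladoc describes, assuming
BaseQuerySerializer exposes querySerializer as a JSON4S Formats; Query and
its JSON shape are hypothetical:

    import org.apache.predictionio.controller.{CustomQuerySerializer, Utils}
    import org.json4s._

    case class Query(ids: Set[Int])  // hypothetical query class

    // Maps {"ids": [1, 2]} <-> Query(Set(1, 2)).
    class QuerySerializer extends CustomSerializer[Query](format => (
      { case jv: JValue =>
          implicit val formats = DefaultFormats
          Query((jv \ "ids").extract[Set[Int]]) },
      { case q: Query =>
          JObject(JField("ids", JArray(q.ids.toList.map(i => JInt(i)))) :: Nil) }
    ))

    // Mix this into algorithm and serving classes that use Query.
    trait MyQuerySerializer extends CustomQuerySerializer {
      @transient override lazy val querySerializer =
        Utils.json4sDefaultFormats + new QuerySerializer
    }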

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/core/src/main/scala/org/apache/predictionio/controller/Deployment.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/predictionio/controller/Deployment.scala b/core/src/main/scala/org/apache/predictionio/controller/Deployment.scala
new file mode 100644
index 0000000..76fe0b3
--- /dev/null
+++ b/core/src/main/scala/org/apache/predictionio/controller/Deployment.scala
@@ -0,0 +1,56 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package org.apache.predictionio.controller
+
+import org.apache.predictionio.core.BaseEngine
+
+import scala.language.implicitConversions
+
+/** Defines a deployment that contains an [[Engine]]
+  *
+  * @group Engine
+  */
+trait Deployment extends EngineFactory {
+  protected[this] var _engine: BaseEngine[_, _, _, _] = _
+  protected[this] var engineSet: Boolean = false
+
+  /** Returns the [[Engine]] of this [[Deployment]] */
+  def apply(): BaseEngine[_, _, _, _] = {
+    assert(engineSet, "Engine not set")
+    _engine
+  }
+
+  /** Returns the [[Engine]] contained in this [[Deployment]]. */
+  private[predictionio]
+  def engine: BaseEngine[_, _, _, _] = {
+    assert(engineSet, "Engine not set")
+    _engine
+  }
+
+  /** Sets the [[Engine]] for this [[Deployment]]
+    *
+    * @param engine An implementation of [[Engine]]
+    * @tparam EI Evaluation information class
+    * @tparam Q Query class
+    * @tparam P Predicted result class
+    * @tparam A Actual result class
+    */
+  def engine_=[EI, Q, P, A](engine: BaseEngine[EI, Q, P, A]) {
+    assert(!engineSet, "Engine can be set at most once")
+    _engine = engine
+    engineSet = true
+  }
+}
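
A minimal sketch of a Deployment, where MyEngineFactory is a hypothetical
EngineFactory from an engine project:

    import org.apache.predictionio.controller.Deployment

    object MyDeployment extends Deployment {
      engine = MyEngineFactory()
    }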


http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/data/src/main/scala/org/apache/predictionio/data/storage/Event.scala
----------------------------------------------------------------------
diff --git a/data/src/main/scala/org/apache/predictionio/data/storage/Event.scala b/data/src/main/scala/org/apache/predictionio/data/storage/Event.scala
new file mode 100644
index 0000000..6169a02
--- /dev/null
+++ b/data/src/main/scala/org/apache/predictionio/data/storage/Event.scala
@@ -0,0 +1,164 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package org.apache.predictionio.data.storage
+
+import org.apache.predictionio.annotation.DeveloperApi
+import org.joda.time.DateTime
+import org.joda.time.DateTimeZone
+
+/** Each event in the Event Store can be represented by fields in this case
+  * class.
+  *
+  * @param eventId Unique ID of this event.
+  * @param event Name of this event.
+  * @param entityType Type of the entity associated with this event.
+  * @param entityId ID of the entity associated with this event.
+  * @param targetEntityType Type of the target entity associated with this
+  *                         event.
+  * @param targetEntityId ID of the target entity associated with this event.
+  * @param properties Properties associated with this event.
+  * @param eventTime Time when this event happened.
+  * @param tags Tags of this event.
+  * @param prId PredictedResultId of this event.
+  * @param creationTime Time when this event was created in the system.
+  * @group Event Data
+  */
+case class Event(
+  val eventId: Option[String] = None,
+  val event: String,
+  val entityType: String,
+  val entityId: String,
+  val targetEntityType: Option[String] = None,
+  val targetEntityId: Option[String] = None,
+  val properties: DataMap = DataMap(), // default empty
+  val eventTime: DateTime = DateTime.now,
+  val tags: Seq[String] = Nil,
+  val prId: Option[String] = None,
+  val creationTime: DateTime = DateTime.now
+) {
+  override def toString(): String = {
+    s"Event(id=$eventId,event=$event,eType=$entityType,eId=$entityId," +
+    s"tType=$targetEntityType,tId=$targetEntityId,p=$properties,t=$eventTime," +
+    s"tags=$tags,pKey=$prId,ct=$creationTime)"
+  }
+}
+
+/** :: DeveloperApi ::
+  * Utilities for validating [[Event]]s
+  *
+  * @group Event Data
+  */
+@DeveloperApi
+object EventValidation {
+  /** Default time zone is set to UTC */
+  val defaultTimeZone = DateTimeZone.UTC
+
+  /** Checks whether an event name contains a reserved prefix
+    *
+    * @param name Event name
+    * @return true if event name starts with \$ or pio_, false otherwise
+    */
+  def isReservedPrefix(name: String): Boolean = name.startsWith("$") ||
+    name.startsWith("pio_")
+
+  /** PredictionIO reserves some single entity event names. They are currently
+    * \$set, \$unset, and \$delete.
+    */
+  val specialEvents = Set("$set", "$unset", "$delete")
+
+  /** Checks whether an event name is a special PredictionIO event name
+    *
+    * @param name Event name
+    * @return true if the name is a special event, false otherwise
+    */
+  def isSpecialEvents(name: String): Boolean = specialEvents.contains(name)
+
+  /** Validate an [[Event]], throwing exceptions when the candidate violates any
+    * of the following:
+    *
+    *  - event name must not be empty
+    *  - entityType must not be empty
+    *  - entityId must not be empty
+    *  - targetEntityType must not be Some of an empty string
+    *  - targetEntityId must not be Some of an empty string
+    *  - targetEntityType and targetEntityId must be both Some or None
+    *  - properties must not be empty when event is \$unset
+    *  - event name must be a special event if it has a reserved prefix
+    *  - targetEntityType and targetEntityId must be None if the event name has
+    *    a reserved prefix
+    *  - entityType must be a built-in entity type if entityType has a
+    *    reserved prefix
+    *  - targetEntityType must be a built-in entity type if targetEntityType is
+    *    Some and has a reserved prefix
+    *
+    * @param e Event to be validated
+    */
+  def validate(e: Event): Unit = {
+
+    require(!e.event.isEmpty, "event must not be empty.")
+    require(!e.entityType.isEmpty, "entityType must not be empty string.")
+    require(!e.entityId.isEmpty, "entityId must not be empty string.")
+    require(e.targetEntityType.map(!_.isEmpty).getOrElse(true),
+      "targetEntityType must not be empty string")
+    require(e.targetEntityId.map(!_.isEmpty).getOrElse(true),
+      "targetEntityId must not be empty string.")
+    require(!((e.targetEntityType != None) && (e.targetEntityId == None)),
+      "targetEntityType and targetEntityId must be specified together.")
+    require(!((e.targetEntityType == None) && (e.targetEntityId != None)),
+      "targetEntityType and targetEntityId must be specified together.")
+    require(!((e.event == "$unset") && e.properties.isEmpty),
+      "properties cannot be empty for $unset event")
+    require(!isReservedPrefix(e.event) || isSpecialEvents(e.event),
+      s"${e.event} is not a supported reserved event name.")
+    require(!isSpecialEvents(e.event) ||
+      ((e.targetEntityType == None) && (e.targetEntityId == None)),
+      s"Reserved event ${e.event} cannot have targetEntity")
+    require(!isReservedPrefix(e.entityType) ||
+      isBuiltinEntityTypes(e.entityType),
+      s"The entityType ${e.entityType} is not allowed. " +
+        s"'pio_' is a reserved name prefix.")
+    require(e.targetEntityType.map{ t =>
+      (!isReservedPrefix(t) || isBuiltinEntityTypes(t))}.getOrElse(true),
+      s"The targetEntityType ${e.targetEntityType.get} is not allowed. " +
+        s"'pio_' is a reserved name prefix.")
+    validateProperties(e)
+  }
+
+  /** Defines built-in entity types. The current built-in type is pio_pr. */
+  val builtinEntityTypes: Set[String] = Set("pio_pr")
+
+  /** Defines built-in properties. This is currently empty. */
+  val builtinProperties: Set[String] = Set()
+
+  /** Checks whether an entity type is a built-in entity type */
+  def isBuiltinEntityTypes(name: String): Boolean = builtinEntityTypes.contains(name)
+
+  /** Validate event properties, throwing exceptions when the candidate violates
+    * any of the following:
+    *
+    *  - property name must not contain a reserved prefix
+    *
+    * @param e Event to be validated
+    */
+  def validateProperties(e: Event): Unit = {
+    e.properties.keySet.foreach { k =>
+      require(!isReservedPrefix(k) || builtinProperties.contains(k),
+        s"The property ${k} is not allowed. " +
+          s"'pio_' is a reserved name prefix.")
+    }
+  }
+
+}
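
A minimal sketch of constructing and validating a special event, assuming
DataMap offers a JSON-string apply:

    import org.apache.predictionio.data.storage.{DataMap, Event, EventValidation}

    val e = Event(
      event = "$set",
      entityType = "user",
      entityId = "u0",
      properties = DataMap("""{"plan": "premium"}"""))

    // The require() checks throw IllegalArgumentException on bad input.
    EventValidation.validate(e)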

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/data/src/main/scala/org/apache/predictionio/data/storage/EventJson4sSupport.scala
----------------------------------------------------------------------
diff --git a/data/src/main/scala/org/apache/predictionio/data/storage/EventJson4sSupport.scala b/data/src/main/scala/org/apache/predictionio/data/storage/EventJson4sSupport.scala
new file mode 100644
index 0000000..7d4fce3
--- /dev/null
+++ b/data/src/main/scala/org/apache/predictionio/data/storage/EventJson4sSupport.scala
@@ -0,0 +1,236 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package org.apache.predictionio.data.storage
+
+import org.apache.predictionio.annotation.DeveloperApi
+import org.apache.predictionio.data.{Utils => DataUtils}
+import org.joda.time.DateTime
+import org.json4s._
+import scala.util.{Try, Success, Failure}
+
+/** :: DeveloperApi ::
+  * Support library for dealing with [[Event]] and JSON4S
+  *
+  * @group Event Data
+  */
+@DeveloperApi
+object EventJson4sSupport {
+  /** This is set to org.json4s.DefaultFormats. Do not use JSON4S to serialize
+    * or deserialize Joda-Time DateTime because it has some issues with timezone
+    * (as of version 3.2.10)
+    */
+  implicit val formats = DefaultFormats
+
+  /** :: DeveloperApi ::
+    * Convert JSON from Event Server to [[Event]]
+    *
+    * @return deserialization routine used by [[APISerializer]]
+    */
+  @DeveloperApi
+  def readJson: PartialFunction[JValue, Event] = {
+    case JObject(x) => {
+      val fields = new DataMap(x.toMap)
+      // use get() if required in json
+      // use getOpt() if not required in json
+      try {
+        val event = fields.get[String]("event")
+        val entityType = fields.get[String]("entityType")
+        val entityId = fields.get[String]("entityId")
+        val targetEntityType = fields.getOpt[String]("targetEntityType")
+        val targetEntityId = fields.getOpt[String]("targetEntityId")
+        val properties = fields.getOrElse[Map[String, JValue]](
+          "properties", Map())
+        // default currentTime expressed as UTC timezone
+        lazy val currentTime = DateTime.now(EventValidation.defaultTimeZone)
+        val eventTime = fields.getOpt[String]("eventTime")
+          .map{ s =>
+            try {
+              DataUtils.stringToDateTime(s)
+            } catch {
+              case _: Exception =>
+                throw new MappingException(s"Fail to extract eventTime ${s}")
+            }
+          }.getOrElse(currentTime)
+
+        // disable tags from API for now.
+        val tags = List()
+      // val tags = fields.getOpt[Seq[String]]("tags").getOrElse(List())
+
+        val prId = fields.getOpt[String]("prId")
+
+        // don't allow user set creationTime from API for now.
+        val creationTime = currentTime
+      // val creationTime = fields.getOpt[String]("creationTime")
+      //   .map{ s =>
+      //     try {
+      //       DataUtils.stringToDateTime(s)
+      //     } catch {
+      //       case _: Exception =>
+      //         throw new MappingException(s"Fail to extract creationTime ${s}")
+      //     }
+      //   }.getOrElse(currentTime)
+
+
+        val newEvent = Event(
+          event = event,
+          entityType = entityType,
+          entityId = entityId,
+          targetEntityType = targetEntityType,
+          targetEntityId = targetEntityId,
+          properties = DataMap(properties),
+          eventTime = eventTime,
+          prId = prId,
+          creationTime = creationTime
+        )
+        EventValidation.validate(newEvent)
+        newEvent
+      } catch {
+        case e: Exception => throw new MappingException(e.toString, e)
+      }
+    }
+  }
+
+  /** :: DeveloperApi ::
+    * Convert [[Event]] to JSON for use by the Event Server
+    *
+    * @return serialization routine used by [[APISerializer]]
+    */
+  @DeveloperApi
+  def writeJson: PartialFunction[Any, JValue] = {
+    case d: Event => {
+      JObject(
+        JField("eventId",
+          d.eventId.map( eid => JString(eid)).getOrElse(JNothing)) ::
+        JField("event", JString(d.event)) ::
+        JField("entityType", JString(d.entityType)) ::
+        JField("entityId", JString(d.entityId)) ::
+        JField("targetEntityType",
+          d.targetEntityType.map(JString(_)).getOrElse(JNothing)) ::
+        JField("targetEntityId",
+          d.targetEntityId.map(JString(_)).getOrElse(JNothing)) ::
+        JField("properties", d.properties.toJObject) ::
+        JField("eventTime", JString(DataUtils.dateTimeToString(d.eventTime))) ::
+        // disable tags from API for now
+        // JField("tags", JArray(d.tags.toList.map(JString(_)))) ::
+        // disable tags from API for now
+        JField("prId",
+          d.prId.map(JString(_)).getOrElse(JNothing)) ::
+        // don't show creationTime for now
+        JField("creationTime",
+          JString(DataUtils.dateTimeToString(d.creationTime))) ::
+        Nil)
+    }
+  }
+
+  /** :: DeveloperApi ::
+    * Convert JSON4S JValue to [[Event]]
+    *
+    * @return deserialization routine used by [[DBSerializer]]
+    */
+  @DeveloperApi
+  def deserializeFromJValue: PartialFunction[JValue, Event] = {
+    case jv: JValue => {
+      val event = (jv \ "event").extract[String]
+      val entityType = (jv \ "entityType").extract[String]
+      val entityId = (jv \ "entityId").extract[String]
+      val targetEntityType = (jv \ "targetEntityType").extract[Option[String]]
+      val targetEntityId = (jv \ "targetEntityId").extract[Option[String]]
+      val properties = (jv \ "properties").extract[JObject]
+      val eventTime = DataUtils.stringToDateTime(
+        (jv \ "eventTime").extract[String])
+      val tags = (jv \ "tags").extract[Seq[String]]
+      val prId = (jv \ "prId").extract[Option[String]]
+      val creationTime = DataUtils.stringToDateTime(
+        (jv \ "creationTime").extract[String])
+      Event(
+        event = event,
+        entityType = entityType,
+        entityId = entityId,
+        targetEntityType = targetEntityType,
+        targetEntityId = targetEntityId,
+        properties = DataMap(properties),
+        eventTime = eventTime,
+        tags = tags,
+        prId = prId,
+        creationTime = creationTime)
+    }
+  }
+
+  /** :: DeveloperApi ::
+    * Convert [[Event]] to JSON4S JValue
+    *
+    * @return serialization routine used by [[DBSerializer]]
+    */
+  @DeveloperApi
+  def serializeToJValue: PartialFunction[Any, JValue] = {
+    case d: Event => {
+      JObject(
+        JField("event", JString(d.event)) ::
+        JField("entityType", JString(d.entityType)) ::
+        JField("entityId", JString(d.entityId)) ::
+        JField("targetEntityType",
+          d.targetEntityType.map(JString(_)).getOrElse(JNothing)) ::
+        JField("targetEntityId",
+          d.targetEntityId.map(JString(_)).getOrElse(JNothing)) ::
+        JField("properties", d.properties.toJObject) ::
+        JField("eventTime", JString(DataUtils.dateTimeToString(d.eventTime))) ::
+        JField("tags", JArray(d.tags.toList.map(JString(_)))) ::
+        JField("prId",
+          d.prId.map(JString(_)).getOrElse(JNothing)) ::
+        JField("creationTime",
+          JString(DataUtils.dateTimeToString(d.creationTime))) ::
+        Nil)
+    }
+  }
+
+  /** :: DeveloperApi ::
+    * Custom JSON4S serializer for [[Event]] intended to be used by database
+    * access, or anywhere that demands serdes of [[Event]] to/from JSON4S JValue
+    */
+  @DeveloperApi
+  class DBSerializer extends CustomSerializer[Event](format => (
+    deserializeFromJValue, serializeToJValue))
+
+  /** :: DeveloperApi ::
+    * Custom JSON4S serializer for [[Event]] intended to be used by the Event
+    * Server, or anywhere that demands serdes of [[Event]] to/from JSON
+    */
+  @DeveloperApi
+  class APISerializer extends CustomSerializer[Event](format => (
+    readJson, writeJson))
+}
+
+
+@DeveloperApi
+object BatchEventsJson4sSupport {
+  implicit val formats = DefaultFormats
+
+  @DeveloperApi
+  def readJson: PartialFunction[JValue, Seq[Try[Event]]] = {
+    case JArray(events) => {
+      events.map { event =>
+        try {
+          Success(EventJson4sSupport.readJson(event))
+        } catch {
+          case e: Exception => Failure(e)
+        }
+      }
+    }
+  }
+
+  @DeveloperApi
+  class APISerializer extends CustomSerializer[Seq[Try[Event]]](format => (readJson, Map.empty))
+}
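
A minimal sketch of deserializing Event Server JSON through APISerializer:

    import org.apache.predictionio.data.storage.{Event, EventJson4sSupport}
    import org.json4s.DefaultFormats
    import org.json4s.native.JsonMethods.parse

    implicit val formats = DefaultFormats + new EventJson4sSupport.APISerializer

    val e = parse(
      """{"event": "rate", "entityType": "user", "entityId": "u0",
        | "targetEntityType": "item", "targetEntityId": "i0",
        | "properties": {"rating": 4}}""".stripMargin).extract[Event]
    // eventTime defaults to "now" in UTC because the JSON omits it.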

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/data/src/main/scala/org/apache/predictionio/data/storage/LEventAggregator.scala
----------------------------------------------------------------------
diff --git a/data/src/main/scala/org/apache/predictionio/data/storage/LEventAggregator.scala b/data/src/main/scala/org/apache/predictionio/data/storage/LEventAggregator.scala
new file mode 100644
index 0000000..6836c6d
--- /dev/null
+++ b/data/src/main/scala/org/apache/predictionio/data/storage/LEventAggregator.scala
@@ -0,0 +1,145 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package org.apache.predictionio.data.storage
+
+import org.apache.predictionio.annotation.DeveloperApi
+import org.joda.time.DateTime
+
+/** :: DeveloperApi ::
+  * Provides aggregation support of [[Event]]s to [[LEvents]]. Engine developers
+  * should use [[org.apache.predictionio.data.store.LEventStore]] instead of using this
+  * directly.
+  *
+  * @group Event Data
+  */
+@DeveloperApi
+object LEventAggregator {
+  /** :: DeveloperApi ::
+    * Aggregate all properties grouped by entity ID given an iterator of
+    * [[Event]]s with the latest property values from all [[Event]]s, and their
+    * first and last updated time
+    *
+    * @param events An iterator of [[Event]]s whose properties will be aggregated
+    * @return A map of entity ID to [[PropertyMap]]
+    */
+  @DeveloperApi
+  def aggregateProperties(events: Iterator[Event]): Map[String, PropertyMap] = {
+    events.toList
+      .groupBy(_.entityId)
+      .mapValues(_.sortBy(_.eventTime.getMillis)
+        .foldLeft[Prop](Prop())(propAggregator))
+      .filter{ case (k, v) => v.dm.isDefined }
+      .mapValues{ v =>
+        require(v.firstUpdated.isDefined,
+          "Unexpected Error: firstUpdated cannot be None.")
+        require(v.lastUpdated.isDefined,
+          "Unexpected Error: lastUpdated cannot be None.")
+
+        PropertyMap(
+          fields = v.dm.get.fields,
+          firstUpdated = v.firstUpdated.get,
+          lastUpdated = v.lastUpdated.get
+        )
+      }
+  }
+
+  /** :: DeveloperApi ::
+    * Aggregate all properties given an iterator of [[Event]]s with the latest
+    * property values from all [[Event]]s, and their first and last updated time
+    *
+    * @param events An iterator of [[Event]]s whose properties will be aggregated
+    * @return An optional [[PropertyMap]]
+    */
+  @DeveloperApi
+  def aggregatePropertiesSingle(events: Iterator[Event])
+  : Option[PropertyMap] = {
+    val prop = events.toList
+      .sortBy(_.eventTime.getMillis)
+      .foldLeft[Prop](Prop())(propAggregator)
+
+    prop.dm.map{ d =>
+      require(prop.firstUpdated.isDefined,
+        "Unexpected Error: firstUpdated cannot be None.")
+      require(prop.lastUpdated.isDefined,
+        "Unexpected Error: lastUpdated cannot be None.")
+
+      PropertyMap(
+        fields = d.fields,
+        firstUpdated = prop.firstUpdated.get,
+        lastUpdated = prop.lastUpdated.get
+      )
+    }
+  }
+
+  /** Event names that control aggregation: \$set, \$unset, and \$delete */
+  val eventNames = List("$set", "$unset", "$delete")
+
+  private
+  def dataMapAggregator: ((Option[DataMap], Event) => Option[DataMap]) = {
+    (p, e) => {
+      e.event match {
+        case "$set" => {
+          if (p == None) {
+            Some(e.properties)
+          } else {
+            p.map(_ ++ e.properties)
+          }
+        }
+        case "$unset" => {
+          if (p == None) {
+            None
+          } else {
+            p.map(_ -- e.properties.keySet)
+          }
+        }
+        case "$delete" => None
+        case _ => p // do nothing for others
+      }
+    }
+  }
+
+  private
+  def propAggregator: ((Prop, Event) => Prop) = {
+    (p, e) => {
+      e.event match {
+        case "$set" | "$unset" | "$delete" => {
+          Prop(
+            dm = dataMapAggregator(p.dm, e),
+            firstUpdated = p.firstUpdated.map { t =>
+              first(t, e.eventTime)
+            }.orElse(Some(e.eventTime)),
+            lastUpdated = p.lastUpdated.map { t =>
+              last(t, e.eventTime)
+            }.orElse(Some(e.eventTime))
+          )
+        }
+        case _ => p // do nothing for others
+      }
+    }
+  }
+
+  private
+  def first(a: DateTime, b: DateTime): DateTime = if (b.isBefore(a)) b else a
+
+  private
+  def last(a: DateTime, b: DateTime): DateTime = if (b.isAfter(a)) b else a
+
+  private case class Prop(
+    dm: Option[DataMap] = None,
+    firstUpdated: Option[DateTime] = None,
+    lastUpdated: Option[DateTime] = None
+  )
+}
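
A minimal sketch of the $set/$unset folding implemented above; the later
$unset removes only the keys it names (DataMap's JSON-string apply is
assumed):

    import org.apache.predictionio.data.storage.{DataMap, Event, LEventAggregator}
    import org.joda.time.DateTime

    val t = new DateTime(2015, 1, 1, 0, 0)
    val props = LEventAggregator.aggregatePropertiesSingle(Iterator(
      Event(event = "$set", entityType = "user", entityId = "u0",
        properties = DataMap("""{"a": 1, "b": 2}"""), eventTime = t),
      Event(event = "$unset", entityType = "user", entityId = "u0",
        properties = DataMap("""{"b": 0}"""), eventTime = t.plusDays(1))))
    // props now holds only "a" -> 1; firstUpdated == t,
    // lastUpdated == t.plusDays(1).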

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/data/src/main/scala/org/apache/predictionio/data/storage/LEvents.scala
----------------------------------------------------------------------
diff --git a/data/src/main/scala/org/apache/predictionio/data/storage/LEvents.scala b/data/src/main/scala/org/apache/predictionio/data/storage/LEvents.scala
new file mode 100644
index 0000000..d6e753c
--- /dev/null
+++ b/data/src/main/scala/org/apache/predictionio/data/storage/LEvents.scala
@@ -0,0 +1,489 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package org.apache.predictionio.data.storage
+
+import org.apache.predictionio.annotation.DeveloperApi
+import org.apache.predictionio.annotation.Experimental
+
+import scala.concurrent.Future
+import scala.concurrent.Await
+import scala.concurrent.duration.Duration
+import scala.concurrent.ExecutionContext
+import scala.concurrent.TimeoutException
+
+import org.joda.time.DateTime
+
+/** :: DeveloperApi ::
+  * Base trait of a data access object that directly returns [[Event]] without
+  * going through Spark's parallelization. Engine developers should use
+  * [[org.apache.predictionio.data.store.LEventStore]] instead of using this directly.
+  *
+  * @group Event Data
+  */
+@DeveloperApi
+trait LEvents {
+  /** Default timeout of 1 minute for asynchronous operations */
+  val defaultTimeout = Duration(60, "seconds")
+
+  /** :: DeveloperApi ::
+    * Initialize Event Store for an app ID and optionally a channel ID.
+    * This routine is to be called when an app is first created.
+    *
+    * @param appId App ID
+    * @param channelId Optional channel ID
+    * @return true if initialization was successful; false otherwise.
+    */
+  @DeveloperApi
+  def init(appId: Int, channelId: Option[Int] = None): Boolean
+
+  /** :: DeveloperApi ::
+    * Remove Event Store for an app ID and optional channel ID.
+    *
+    * @param appId App ID
+    * @param channelId Optional channel ID
+    * @return true if removal was successful; false otherwise.
+    */
+  @DeveloperApi
+  def remove(appId: Int, channelId: Option[Int] = None): Boolean
+
+  /** :: DeveloperApi ::
+    * Close this Event Store interface object, e.g. close connection, release
+    * resources, etc.
+    */
+  @DeveloperApi
+  def close(): Unit
+
+  /** :: DeveloperApi ::
+    * Insert an [[Event]] in a non-blocking fashion.
+    *
+    * @param event An [[Event]] to be inserted
+    * @param appId App ID for the [[Event]] to be inserted to
+    */
+  @DeveloperApi
+  def futureInsert(event: Event, appId: Int)(implicit ec: ExecutionContext):
+    Future[String] = futureInsert(event, appId, None)
+
+  /** :: DeveloperApi ::
+    * Insert an [[Event]] in a non-blocking fashion.
+    *
+    * @param event An [[Event]] to be inserted
+    * @param appId App ID for the [[Event]] to be inserted to
+    * @param channelId Optional channel ID for the [[Event]] to be inserted to
+    */
+  @DeveloperApi
+  def futureInsert(
+    event: Event, appId: Int, channelId: Option[Int])(implicit ec: ExecutionContext): Future[String]
+
+  /** :: DeveloperApi ::
+    * Get an [[Event]] in a non-blocking fashion.
+    *
+    * @param eventId ID of the [[Event]]
+    * @param appId ID of the app that contains the [[Event]]
+    */
+  @DeveloperApi
+  def futureGet(eventId: String, appId: Int)(implicit ec: ExecutionContext):
+    Future[Option[Event]] = futureGet(eventId, appId, None)
+
+  /** :: DeveloperApi ::
+    * Get an [[Event]] in a non-blocking fashion.
+    *
+    * @param eventId ID of the [[Event]]
+    * @param appId ID of the app that contains the [[Event]]
+    * @param channelId Optional channel ID that contains the [[Event]]
+    */
+  @DeveloperApi
+  def futureGet(
+      eventId: String,
+      appId: Int,
+      channelId: Option[Int]
+    )(implicit ec: ExecutionContext): Future[Option[Event]]
+
+  /** :: DeveloperApi ::
+    * Delete an [[Event]] in a non-blocking fashion.
+    *
+    * @param eventId ID of the [[Event]]
+    * @param appId ID of the app that contains the [[Event]]
+    */
+  @DeveloperApi
+  def futureDelete(eventId: String, appId: Int)(implicit ec: ExecutionContext):
+    Future[Boolean] = futureDelete(eventId, appId, None)
+
+  /** :: DeveloperApi ::
+    * Delete an [[Event]] in a non-blocking fashion.
+    *
+    * @param eventId ID of the [[Event]]
+    * @param appId ID of the app that contains the [[Event]]
+    * @param channelId Optional channel ID that contains the [[Event]]
+    */
+  @DeveloperApi
+  def futureDelete(
+      eventId: String,
+      appId: Int,
+      channelId: Option[Int]
+    )(implicit ec: ExecutionContext): Future[Boolean]
+
+  /** :: DeveloperApi ::
+    * Read from the database and return a Future of an Iterator of [[Event]]s.
+    *
+    * @param appId return events of this app ID
+    * @param channelId return events of this channel ID (default channel if it's None)
+    * @param startTime return events with eventTime >= startTime
+    * @param untilTime return events with eventTime < untilTime
+    * @param entityType return events of this entityType
+    * @param entityId return events of this entityId
+    * @param eventNames return events with any of these event names.
+    * @param targetEntityType return events of this targetEntityType:
+    *   - None means no restriction on targetEntityType
+    *   - Some(None) means no targetEntityType for this event
+    *   - Some(Some(x)) means targetEntityType should match x.
+    * @param targetEntityId return events of this targetEntityId
+    *   - None means no restriction on targetEntityId
+    *   - Some(None) means no targetEntityId for this event
+    *   - Some(Some(x)) means targetEntityId should match x.
+    * @param limit Limit number of events. Get all events if None or Some(-1)
+    * @param reversed Reverse the order.
+    *   - return oldest events first if None or Some(false) (default)
+    *   - return latest events first if Some(true)
+    * @param ec ExecutionContext
+    * @return Future[Iterator[Event]]
+    */
+  @DeveloperApi
+  def futureFind(
+      appId: Int,
+      channelId: Option[Int] = None,
+      startTime: Option[DateTime] = None,
+      untilTime: Option[DateTime] = None,
+      entityType: Option[String] = None,
+      entityId: Option[String] = None,
+      eventNames: Option[Seq[String]] = None,
+      targetEntityType: Option[Option[String]] = None,
+      targetEntityId: Option[Option[String]] = None,
+      limit: Option[Int] = None,
+      reversed: Option[Boolean] = None
+    )(implicit ec: ExecutionContext): Future[Iterator[Event]]
+
+  /** Aggregate properties of entities based on the special events \$set,
+    * \$unset, and \$delete, and return a Future of a Map from entityId to
+    * properties.
+    *
+    * @param appId use events of this app ID
+    * @param channelId use events of this channel ID (default channel if it's None)
+    * @param entityType aggregate properties of the entities of this entityType
+    * @param startTime use events with eventTime >= startTime
+    * @param untilTime use events with eventTime < untilTime
+    * @param required only keep entities with these required properties defined
+    * @param ec ExecutionContext
+    * @return Future[Map[String, PropertyMap]]
+    */
+  private[predictionio] def futureAggregateProperties(
+    appId: Int,
+    channelId: Option[Int] = None,
+    entityType: String,
+    startTime: Option[DateTime] = None,
+    untilTime: Option[DateTime] = None,
+    required: Option[Seq[String]] = None)(implicit ec: ExecutionContext):
+    Future[Map[String, PropertyMap]] = {
+      futureFind(
+        appId = appId,
+        channelId = channelId,
+        startTime = startTime,
+        untilTime = untilTime,
+        entityType = Some(entityType),
+        eventNames = Some(LEventAggregator.eventNames)
+      ).map{ eventIt =>
+        val dm = LEventAggregator.aggregateProperties(eventIt)
+        if (required.isDefined) {
+          dm.filter { case (k, v) =>
+            required.get.map(v.contains(_)).reduce(_ && _)
+          }
+        } else dm
+      }
+    }
+
+  /**
+    * :: Experimental ::
+    *
+    * Aggregate properties of the specified entity (entityType + entityId)
+    * based on the special events \$set, \$unset, and \$delete, and return a
+    * Future of Option[PropertyMap].
+    *
+    * @param appId use events of this app ID
+    * @param channelId use events of this channel ID (default channel if it's None)
+    * @param entityType the entityType
+    * @param entityId the entityId
+    * @param startTime use events with eventTime >= startTime
+    * @param untilTime use events with eventTime < untilTime
+    * @param ec ExecutionContext
+    * @return Future[Option[PropertyMap]]
+    */
+  @Experimental
+  private[predictionio] def futureAggregatePropertiesOfEntity(
+    appId: Int,
+    channelId: Option[Int] = None,
+    entityType: String,
+    entityId: String,
+    startTime: Option[DateTime] = None,
+    untilTime: Option[DateTime] = None)(implicit ec: ExecutionContext):
+    Future[Option[PropertyMap]] = {
+      futureFind(
+        appId = appId,
+        channelId = channelId,
+        startTime = startTime,
+        untilTime = untilTime,
+        entityType = Some(entityType),
+        entityId = Some(entityId),
+        eventNames = Some(LEventAggregator.eventNames)
+      ).map{ eventIt =>
+        LEventAggregator.aggregatePropertiesSingle(eventIt)
+      }
+    }
+
+  // The following are blocking counterparts of the futureXxx methods above.
+  private[predictionio] def insert(event: Event, appId: Int,
+    channelId: Option[Int] = None,
+    timeout: Duration = defaultTimeout)(implicit ec: ExecutionContext):
+    String = {
+    Await.result(futureInsert(event, appId, channelId), timeout)
+  }
+
+  private[predictionio] def get(eventId: String, appId: Int,
+    channelId: Option[Int] = None,
+    timeout: Duration = defaultTimeout)(implicit ec: ExecutionContext):
+    Option[Event] = {
+    Await.result(futureGet(eventId, appId, channelId), timeout)
+  }
+
+  private[predictionio] def delete(eventId: String, appId: Int,
+    channelId: Option[Int] = None,
+    timeout: Duration = defaultTimeout)(implicit ec: ExecutionContext):
+    Boolean = {
+    Await.result(futureDelete(eventId, appId, channelId), timeout)
+  }
+
+  /** Read from the database and return an iterator of events.
+    *
+    * @param appId return events of this app ID
+    * @param channelId return events of this channel ID (default channel if it's None)
+    * @param startTime return events with eventTime >= startTime
+    * @param untilTime return events with eventTime < untilTime
+    * @param entityType return events of this entityType
+    * @param entityId return events of this entityId
+    * @param eventNames return events with any of these event names.
+    * @param targetEntityType return events of this targetEntityType:
+    *   - None means no restriction on targetEntityType
+    *   - Some(None) means no targetEntityType for this event
+    *   - Some(Some(x)) means targetEntityType should match x.
+    * @param targetEntityId return events of this targetEntityId
+    *   - None means no restriction on targetEntityId
+    *   - Some(None) means no targetEntityId for this event
+    *   - Some(Some(x)) means targetEntityId should match x.
+    * @param limit Limit number of events. Get all events if None or Some(-1)
+    * @param reversed Reverse the order (should be used with both
+    *   targetEntityType and targetEntityId specified)
+    *   - return oldest events first if None or Some(false) (default)
+    *   - return latest events first if Some(true)
+    * @param ec ExecutionContext
+    * @return Iterator[Event]
+    */
+  private[predictionio] def find(
+    appId: Int,
+    channelId: Option[Int] = None,
+    startTime: Option[DateTime] = None,
+    untilTime: Option[DateTime] = None,
+    entityType: Option[String] = None,
+    entityId: Option[String] = None,
+    eventNames: Option[Seq[String]] = None,
+    targetEntityType: Option[Option[String]] = None,
+    targetEntityId: Option[Option[String]] = None,
+    limit: Option[Int] = None,
+    reversed: Option[Boolean] = None,
+    timeout: Duration = defaultTimeout)(implicit ec: ExecutionContext):
+    Iterator[Event] = {
+      Await.result(futureFind(
+        appId = appId,
+        channelId = channelId,
+        startTime = startTime,
+        untilTime = untilTime,
+        entityType = entityType,
+        entityId = entityId,
+        eventNames = eventNames,
+        targetEntityType = targetEntityType,
+        targetEntityId = targetEntityId,
+        limit = limit,
+        reversed = reversed), timeout)
+  }
+
+  // NOTE: remove in next release
+  @deprecated("Use find() instead.", "0.9.2")
+  private[predictionio] def findLegacy(
+    appId: Int,
+    channelId: Option[Int] = None,
+    startTime: Option[DateTime] = None,
+    untilTime: Option[DateTime] = None,
+    entityType: Option[String] = None,
+    entityId: Option[String] = None,
+    eventNames: Option[Seq[String]] = None,
+    targetEntityType: Option[Option[String]] = None,
+    targetEntityId: Option[Option[String]] = None,
+    limit: Option[Int] = None,
+    reversed: Option[Boolean] = None,
+    timeout: Duration = defaultTimeout)(implicit ec: ExecutionContext):
+    Either[StorageError, Iterator[Event]] = {
+      try {
+        // return Either for legacy usage
+        Right(Await.result(futureFind(
+          appId = appId,
+          channelId = channelId,
+          startTime = startTime,
+          untilTime = untilTime,
+          entityType = entityType,
+          entityId = entityId,
+          eventNames = eventNames,
+          targetEntityType = targetEntityType,
+          targetEntityId = targetEntityId,
+          limit = limit,
+          reversed = reversed), timeout))
+      } catch {
+        case e: TimeoutException => Left(StorageError(s"${e}"))
+        case e: Exception => Left(StorageError(s"${e}"))
+      }
+  }
+
+  /** Read events of the specified entity.
+    *
+    * @param appId return events of this app ID
+    * @param channelId return events of this channel ID (default channel if it's None)
+    * @param entityType return events of this entityType
+    * @param entityId return events of this entityId
+    * @param eventNames return events with any of these event names.
+    * @param targetEntityType return events of this targetEntityType:
+    *   - None means no restriction on targetEntityType
+    *   - Some(None) means no targetEntityType for this event
+    *   - Some(Some(x)) means targetEntityType should match x.
+    * @param targetEntityId return events of this targetEntityId
+    *   - None means no restriction on targetEntityId
+    *   - Some(None) means no targetEntityId for this event
+    *   - Some(Some(x)) means targetEntityId should match x.
+    * @param startTime return events with eventTime >= startTime
+    * @param untilTime return events with eventTime < untilTime
+    * @param limit Limit number of events. Get all events if None or Some(-1)
+    * @param latest Return latest event first (default true)
+    * @param ec ExecutionContext
+    * @return Either[StorageError, Iterator[Event]]
+    */
+  // NOTE: remove this function in next release
+  @deprecated("Use LEventStore.findByEntity() instead.", "0.9.2")
+  def findSingleEntity(
+    appId: Int,
+    channelId: Option[Int] = None,
+    entityType: String,
+    entityId: String,
+    eventNames: Option[Seq[String]] = None,
+    targetEntityType: Option[Option[String]] = None,
+    targetEntityId: Option[Option[String]] = None,
+    startTime: Option[DateTime] = None,
+    untilTime: Option[DateTime] = None,
+    limit: Option[Int] = None,
+    latest: Boolean = true,
+    timeout: Duration = defaultTimeout)(implicit ec: ExecutionContext):
+    Either[StorageError, Iterator[Event]] = {
+
+    findLegacy(
+      appId = appId,
+      channelId = channelId,
+      startTime = startTime,
+      untilTime = untilTime,
+      entityType = Some(entityType),
+      entityId = Some(entityId),
+      eventNames = eventNames,
+      targetEntityType = targetEntityType,
+      targetEntityId = targetEntityId,
+      limit = limit,
+      reversed = Some(latest),
+      timeout = timeout)
+
+  }
+
+  /** Aggregate properties of entities based on the special events \$set,
+    * \$unset, and \$delete, and return a Map from entityId to properties.
+    *
+    * @param appId use events of this app ID
+    * @param channelId use events of this channel ID (default channel if it's None)
+    * @param entityType aggregate properties of the entities of this entityType
+    * @param startTime use events with eventTime >= startTime
+    * @param untilTime use events with eventTime < untilTime
+    * @param required only keep entities with these required properties defined
+    * @param ec ExecutionContext
+    * @return Map[String, PropertyMap]
+    */
+  private[predictionio] def aggregateProperties(
+    appId: Int,
+    channelId: Option[Int] = None,
+    entityType: String,
+    startTime: Option[DateTime] = None,
+    untilTime: Option[DateTime] = None,
+    required: Option[Seq[String]] = None,
+    timeout: Duration = defaultTimeout)(implicit ec: ExecutionContext):
+    Map[String, PropertyMap] = {
+    Await.result(futureAggregateProperties(
+      appId = appId,
+      channelId = channelId,
+      entityType = entityType,
+      startTime = startTime,
+      untilTime = untilTime,
+      required = required), timeout)
+  }
+
+  /**
+    * :: Experimental ::
+    *
+    * Aggregate properties of the specified entity (entityType + entityId)
+    * based on the special events \$set, \$unset, and \$delete, and return an
+    * Option[PropertyMap].
+    *
+    * @param appId use events of this app ID
+    * @param channelId use events of this channel ID
+    * @param entityType the entityType
+    * @param entityId the entityId
+    * @param startTime use events with eventTime >= startTime
+    * @param untilTime use events with eventTime < untilTime
+    * @param ec ExecutionContext
+    * @return Option[PropertyMap]
+    */
+  @Experimental
+  private[predictionio] def aggregatePropertiesOfEntity(
+    appId: Int,
+    channelId: Option[Int] = None,
+    entityType: String,
+    entityId: String,
+    startTime: Option[DateTime] = None,
+    untilTime: Option[DateTime] = None,
+    timeout: Duration = defaultTimeout)(implicit ec: ExecutionContext):
+    Option[PropertyMap] = {
+
+    Await.result(futureAggregatePropertiesOfEntity(
+      appId = appId,
+      channelId = channelId,
+      entityType = entityType,
+      entityId = entityId,
+      startTime = startTime,
+      untilTime = untilTime), timeout)
+  }
+
+}
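
For reference, a minimal sketch of exercising the non-blocking read path above,
including the nested-Option semantics of targetEntityType documented in
futureFind(); the app ID, entity type, and event names are illustrative, and a
storage source is assumed to be configured via the usual PIO_STORAGE_*
environment variables:

    import org.apache.predictionio.data.storage.Storage
    import scala.concurrent.Await
    import scala.concurrent.ExecutionContext.Implicits.global
    import scala.concurrent.duration.Duration

    object LEventsFindSketch extends App {
      val events = Storage.getLEvents()
      // targetEntityType = None          -> no restriction on the target entity
      // targetEntityType = Some(None)    -> only events without a target entity
      // targetEntityType = Some(Some(x)) -> only events whose target type is x
      val it = Await.result(
        events.futureFind(
          appId = 1,                               // illustrative app ID
          entityType = Some("user"),
          eventNames = Some(Seq("view", "buy")),
          targetEntityType = Some(Some("item"))),
        Duration(60, "seconds"))
      it.take(10).foreach(println)
      events.close()
    }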

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/data/src/main/scala/org/apache/predictionio/data/storage/Models.scala
----------------------------------------------------------------------
diff --git a/data/src/main/scala/org/apache/predictionio/data/storage/Models.scala b/data/src/main/scala/org/apache/predictionio/data/storage/Models.scala
new file mode 100644
index 0000000..15d7444
--- /dev/null
+++ b/data/src/main/scala/org/apache/predictionio/data/storage/Models.scala
@@ -0,0 +1,80 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package org.apache.predictionio.data.storage
+
+import com.google.common.io.BaseEncoding
+import org.apache.predictionio.annotation.DeveloperApi
+import org.json4s._
+
+/** :: DeveloperApi ::
+  * Stores model for each engine instance
+  *
+  * @param id ID of the model, which should be the same as engine instance ID
+  * @param models Trained models of all algorithms
+  * @group Model Data
+  */
+@DeveloperApi
+case class Model(
+  id: String,
+  models: Array[Byte])
+
+/** :: DeveloperApi ::
+  * Base trait of the [[Model]] data access object
+  *
+  * @group Model Data
+  */
+@DeveloperApi
+trait Models {
+  /** Insert a new [[Model]] */
+  def insert(i: Model): Unit
+
+  /** Get a [[Model]] by ID */
+  def get(id: String): Option[Model]
+
+  /** Delete a [[Model]] */
+  def delete(id: String): Unit
+}
+
+/** :: DeveloperApi ::
+  * JSON4S serializer for [[Model]]
+  *
+  * @group Model Data
+  */
+@DeveloperApi
+class ModelSerializer extends CustomSerializer[Model](
+  format => ({
+    case JObject(fields) =>
+      implicit val formats = DefaultFormats
+      val seed = Model(
+          id = "",
+          models = Array[Byte]())
+      fields.foldLeft(seed) { case (i, field) =>
+        field match {
+          case JField("id", JString(id)) => i.copy(id = id)
+          case JField("models", JString(models)) =>
+            i.copy(models = BaseEncoding.base64.decode(models))
+          case _ => i
+        }
+      }
+  },
+  {
+    case i: Model =>
+      JObject(
+        JField("id", JString(i.id)) ::
+        JField("models", JString(BaseEncoding.base64.encode(i.models))) ::
+        Nil)
+  }
+))
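
A quick round-trip sketch of the serializer above; the instance ID and byte
payload are made up:

    import org.apache.predictionio.data.storage.{Model, ModelSerializer}
    import org.json4s.DefaultFormats
    import org.json4s.native.Serialization.{read, write}

    object ModelSerializerSketch extends App {
      implicit val formats = DefaultFormats + new ModelSerializer
      val original = Model(id = "engine-instance-1", models = Array[Byte](1, 2, 3))
      val json = write(original)        // the byte payload is base64-encoded
      val restored = read[Model](json)
      assert(restored.id == original.id)
      assert(restored.models.sameElements(original.models))
    }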

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/data/src/main/scala/org/apache/predictionio/data/storage/PEventAggregator.scala
----------------------------------------------------------------------
diff --git a/data/src/main/scala/org/apache/predictionio/data/storage/PEventAggregator.scala b/data/src/main/scala/org/apache/predictionio/data/storage/PEventAggregator.scala
new file mode 100644
index 0000000..72287dd
--- /dev/null
+++ b/data/src/main/scala/org/apache/predictionio/data/storage/PEventAggregator.scala
@@ -0,0 +1,209 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package org.apache.predictionio.data.storage
+
+import org.joda.time.DateTime
+
+import org.json4s.JValue
+
+import org.apache.spark.SparkContext
+import org.apache.spark.SparkContext._
+import org.apache.spark.rdd.RDD
+
+// each JValue is associated with the time at which it was set
+private[predictionio] case class PropTime(val d: JValue, val t: Long)
+    extends Serializable
+
+private[predictionio] case class SetProp (
+  val fields: Map[String, PropTime],
+  // last set time. Note: fields can be empty while the set time is valid
+  val t: Long) extends Serializable {
+
+  def ++ (that: SetProp): SetProp = {
+    val commonKeys = fields.keySet.intersect(that.fields.keySet)
+
+    val common: Map[String, PropTime] = commonKeys.map { k =>
+      val thisData = this.fields(k)
+      val thatData = that.fields(k)
+      // only keep the value with the latest time
+      val v = if (thisData.t > thatData.t) thisData else thatData
+      (k, v)
+    }.toMap
+
+    val combinedFields = common ++
+      (this.fields -- commonKeys) ++ (that.fields -- commonKeys)
+
+    // keep the latest set time
+    val combinedT = if (this.t > that.t) this.t else that.t
+
+    SetProp(
+      fields = combinedFields,
+      t = combinedT
+    )
+  }
+}
+
+private[predictionio] case class UnsetProp (fields: Map[String, Long])
+    extends Serializable {
+  def ++ (that: UnsetProp): UnsetProp = {
+    val commonKeys = fields.keySet.intersect(that.fields.keySet)
+
+    val common: Map[String, Long] = commonKeys.map { k =>
+      val thisData = this.fields(k)
+      val thatData = that.fields(k)
+      // only keep the value with the latest time
+      val v = if (thisData > thatData) thisData else thatData
+      (k, v)
+    }.toMap
+
+    val combinedFields = common ++
+      (this.fields -- commonKeys) ++ (that.fields -- commonKeys)
+
+    UnsetProp(
+      fields = combinedFields
+    )
+  }
+}
+
+private[predictionio] case class DeleteEntity (t: Long) extends Serializable {
+  def ++ (that: DeleteEntity): DeleteEntity = {
+    if (this.t > that.t) this else that
+  }
+}
+
+private[predictionio] case class EventOp (
+  val setProp: Option[SetProp] = None,
+  val unsetProp: Option[UnsetProp] = None,
+  val deleteEntity: Option[DeleteEntity] = None,
+  val firstUpdated: Option[DateTime] = None,
+  val lastUpdated: Option[DateTime] = None
+) extends Serializable {
+
+  def ++ (that: EventOp): EventOp = {
+    val firstUp = (this.firstUpdated ++ that.firstUpdated).reduceOption{
+      (a, b) => if (b.getMillis < a.getMillis) b else a
+    }
+    val lastUp = (this.lastUpdated ++ that.lastUpdated).reduceOption {
+      (a, b) => if (b.getMillis > a.getMillis) b else a
+    }
+
+    EventOp(
+      setProp = (setProp ++ that.setProp).reduceOption(_ ++ _),
+      unsetProp = (unsetProp ++ that.unsetProp).reduceOption(_ ++ _),
+      deleteEntity = (deleteEntity ++ that.deleteEntity).reduceOption(_ ++ _),
+      firstUpdated = firstUp,
+      lastUpdated = lastUp
+    )
+  }
+
+  def toPropertyMap(): Option[PropertyMap] = {
+    setProp.flatMap { set =>
+
+      val unsetKeys: Set[String] = unsetProp.map( unset =>
+        unset.fields.filter{ case (k, v) => (v >= set.fields(k).t) }.keySet
+      ).getOrElse(Set())
+
+      val combinedFields = deleteEntity.map { delete =>
+        if (delete.t >= set.t) {
+          None
+        } else {
+          val deleteKeys: Set[String] = set.fields
+            .filter { case (k, PropTime(kv, t)) =>
+              (delete.t >= t)
+            }.keySet
+          Some(set.fields -- unsetKeys -- deleteKeys)
+        }
+      }.getOrElse{
+        Some(set.fields -- unsetKeys)
+      }
+
+      // Note: mapValues() doesn't return concrete Map and causes
+      // NotSerializableException issue. Use map(identity) to work around this.
+      // see https://issues.scala-lang.org/browse/SI-7005
+      combinedFields.map{ f =>
+        require(firstUpdated.isDefined,
+          "Unexpected Error: firstUpdated cannot be None.")
+        require(lastUpdated.isDefined,
+          "Unexpected Error: lastUpdated cannot be None.")
+        PropertyMap(
+          fields = f.mapValues(_.d).map(identity),
+          firstUpdated = firstUpdated.get,
+          lastUpdated = lastUpdated.get
+        )
+      }
+    }
+  }
+
+}
+
+private[predictionio] object EventOp {
+  // create an EventOp from an Event object
+  def apply(e: Event): EventOp = {
+    val t = e.eventTime.getMillis
+    e.event match {
+      case "$set" => {
+        val fields = e.properties.fields.mapValues(jv =>
+          PropTime(jv, t)
+        ).map(identity)
+
+        EventOp(
+          setProp = Some(SetProp(fields = fields, t = t)),
+          firstUpdated = Some(e.eventTime),
+          lastUpdated = Some(e.eventTime)
+        )
+      }
+      case "$unset" => {
+        val fields = e.properties.fields.mapValues(jv => t).map(identity)
+        EventOp(
+          unsetProp = Some(UnsetProp(fields = fields)),
+          firstUpdated = Some(e.eventTime),
+          lastUpdated = Some(e.eventTime)
+        )
+      }
+      case "$delete" => {
+        EventOp(
+          deleteEntity = Some(DeleteEntity(t)),
+          firstUpdated = Some(e.eventTime),
+          lastUpdated = Some(e.eventTime)
+        )
+      }
+      case _ => {
+        EventOp()
+      }
+    }
+  }
+}
+
+
+private[predictionio] object PEventAggregator {
+
+  val eventNames = List("$set", "$unset", "$delete")
+
+  def aggregateProperties(eventsRDD: RDD[Event]): RDD[(String, PropertyMap)] = {
+    eventsRDD
+      .map( e => (e.entityId, EventOp(e) ))
+      .aggregateByKey[EventOp](EventOp())(
+        // within the same partition
+        seqOp = { case (u, v) => u ++ v },
+        // across partitions
+        combOp = { case (accu, u) => accu ++ u }
+      )
+      .mapValues(_.toPropertyMap)
+      .filter{ case (k, v) => v.isDefined }
+      .map{ case (k, v) => (k, v.get) }
+  }
+
+}
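
The merge above is commutative and associative so that aggregateByKey can
combine partial results in any order; per field, the value with the latest set
time wins. A self-contained sketch of that per-field rule, with plain String
values standing in for the JValue payloads:

    object LastWriteWinsSketch extends App {
      // field -> (value, setTime), mirroring SetProp.fields above
      type Fields = Map[String, (String, Long)]

      // mirrors SetProp.++ : per field, keep the value with the latest time
      def merge(a: Fields, b: Fields): Fields =
        (a.toSeq ++ b.toSeq)
          .groupBy(_._1)
          .mapValues(_.map(_._2).maxBy(_._2))
          .map(identity)   // force a concrete Map (see the SI-7005 note above)

      val earlier = Map("plan" -> ("free", 1L), "name" -> ("Ada", 1L))
      val later   = Map("plan" -> ("paid", 2L))
      println(merge(earlier, later))
      // Map(plan -> (paid,2), name -> (Ada,1)): the later $set wins per field
    }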

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/data/src/main/scala/org/apache/predictionio/data/storage/PEvents.scala
----------------------------------------------------------------------
diff --git a/data/src/main/scala/org/apache/predictionio/data/storage/PEvents.scala b/data/src/main/scala/org/apache/predictionio/data/storage/PEvents.scala
new file mode 100644
index 0000000..49e5a5e
--- /dev/null
+++ b/data/src/main/scala/org/apache/predictionio/data/storage/PEvents.scala
@@ -0,0 +1,182 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package org.apache.predictionio.data.storage
+
+import grizzled.slf4j.Logger
+import org.apache.predictionio.annotation.DeveloperApi
+import org.apache.predictionio.annotation.Experimental
+import org.apache.spark.SparkContext
+import org.apache.spark.rdd.RDD
+import org.joda.time.DateTime
+
+import scala.reflect.ClassTag
+
+/** :: DeveloperApi ::
+  * Base trait of a data access object that returns [[Event]] related RDD data
+  * structure. Engine developers should use
+  * [[org.apache.predictionio.data.store.PEventStore]] instead of using this directly.
+  *
+  * @group Event Data
+  */
+@DeveloperApi
+trait PEvents extends Serializable {
+  @transient protected lazy val logger = Logger[this.type]
+  @deprecated("Use PEventStore.find() instead.", "0.9.2")
+  def getByAppIdAndTimeAndEntity(appId: Int,
+    startTime: Option[DateTime],
+    untilTime: Option[DateTime],
+    entityType: Option[String],
+    entityId: Option[String])(sc: SparkContext): RDD[Event] = {
+      find(
+        appId = appId,
+        startTime = startTime,
+        untilTime = untilTime,
+        entityType = entityType,
+        entityId = entityId,
+        eventNames = None
+      )(sc)
+    }
+
+  /** :: DeveloperApi ::
+    * Read from the database and return the events. The deprecation here is
+    * intended for engine developers only.
+    *
+    * @param appId return events of this app ID
+    * @param channelId return events of this channel ID (default channel if it's None)
+    * @param startTime return events with eventTime >= startTime
+    * @param untilTime return events with eventTime < untilTime
+    * @param entityType return events of this entityType
+    * @param entityId return events of this entityId
+    * @param eventNames return events with any of these event names.
+    * @param targetEntityType return events of this targetEntityType:
+    *   - None means no restriction on targetEntityType
+    *   - Some(None) means no targetEntityType for this event
+    *   - Some(Some(x)) means targetEntityType should match x.
+    * @param targetEntityId return events of this targetEntityId
+    *   - None means no restriction on targetEntityId
+    *   - Some(None) means no targetEntityId for this event
+    *   - Some(Some(x)) means targetEntityId should match x.
+    * @param sc Spark context
+    * @return RDD[Event]
+    */
+  @deprecated("Use PEventStore.find() instead.", "0.9.2")
+  @DeveloperApi
+  def find(
+    appId: Int,
+    channelId: Option[Int] = None,
+    startTime: Option[DateTime] = None,
+    untilTime: Option[DateTime] = None,
+    entityType: Option[String] = None,
+    entityId: Option[String] = None,
+    eventNames: Option[Seq[String]] = None,
+    targetEntityType: Option[Option[String]] = None,
+    targetEntityId: Option[Option[String]] = None)(sc: SparkContext): RDD[Event]
+
+  /** Aggregate properties of entities based on the special events \$set,
+    * \$unset, and \$delete. The deprecation here is intended for engine
+    * developers only.
+    *
+    * @param appId use events of this app ID
+    * @param channelId use events of this channel ID (default channel if it's None)
+    * @param entityType aggregate properties of the entities of this entityType
+    * @param startTime use events with eventTime >= startTime
+    * @param untilTime use events with eventTime < untilTime
+    * @param required only keep entities with these required properties defined
+    * @param sc Spark context
+    * @return RDD[(String, PropertyMap)] RDD of entityId and PropertyMap pair
+    */
+  @deprecated("Use PEventStore.aggregateProperties() instead.", "0.9.2")
+  def aggregateProperties(
+    appId: Int,
+    channelId: Option[Int] = None,
+    entityType: String,
+    startTime: Option[DateTime] = None,
+    untilTime: Option[DateTime] = None,
+    required: Option[Seq[String]] = None)
+    (sc: SparkContext): RDD[(String, PropertyMap)] = {
+    val eventRDD = find(
+      appId = appId,
+      channelId = channelId,
+      startTime = startTime,
+      untilTime = untilTime,
+      entityType = Some(entityType),
+      eventNames = Some(PEventAggregator.eventNames))(sc)
+
+    val dmRDD = PEventAggregator.aggregateProperties(eventRDD)
+
+    required map { r =>
+      dmRDD.filter { case (k, v) =>
+        r.map(v.contains(_)).reduce(_ && _)
+      }
+    } getOrElse dmRDD
+  }
+
+  /** :: Experimental ::
+    * Extract an EntityMap[A] from events of the given entityType.
+    * NOTE: the resulting EntityMap[A] is local to the driver.
+    */
+  @deprecated("Use PEventStore.aggregateProperties() instead.", "0.9.2")
+  @Experimental
+  def extractEntityMap[A: ClassTag](
+    appId: Int,
+    entityType: String,
+    startTime: Option[DateTime] = None,
+    untilTime: Option[DateTime] = None,
+    required: Option[Seq[String]] = None)
+    (sc: SparkContext)(extract: DataMap => A): EntityMap[A] = {
+    val idToData: Map[String, A] = aggregateProperties(
+      appId = appId,
+      entityType = entityType,
+      startTime = startTime,
+      untilTime = untilTime,
+      required = required
+    )(sc).map{ case (id, dm) =>
+      try {
+        (id, extract(dm))
+      } catch {
+        case e: Exception => {
+          logger.error(s"Failed to get extract entity from DataMap $dm of " +
+            s"entityId $id.", e)
+          throw e
+        }
+      }
+    }.collectAsMap.toMap
+
+    new EntityMap(idToData)
+  }
+
+  /** :: DeveloperApi ::
+    * Write events to database
+    *
+    * @param events RDD of Event
+    * @param appId the app ID
+    * @param sc Spark Context
+    */
+  @DeveloperApi
+  def write(events: RDD[Event], appId: Int)(sc: SparkContext): Unit =
+    write(events, appId, None)(sc)
+
+  /** :: DeveloperApi ::
+    * Write events to database
+    *
+    * @param events RDD of Event
+    * @param appId the app ID
+    * @param channelId  channel ID (default channel if it's None)
+    * @param sc Spark Context
+    */
+  @DeveloperApi
+  def write(events: RDD[Event], appId: Int, channelId: Option[Int])(sc: SparkContext): Unit
+}
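
A sketch of driving the parallel DAO from a SparkContext; the app ID, entity
type, and property name are illustrative, and engine code would normally go
through PEventStore as noted above:

    import org.apache.predictionio.data.storage.Storage
    import org.apache.spark.{SparkConf, SparkContext}

    object PEventsSketch extends App {
      val sc = new SparkContext(new SparkConf().setAppName("pevents-sketch"))
      val users = Storage.getPEvents().aggregateProperties(
        appId = 1,                            // illustrative app ID
        entityType = "user",
        required = Some(Seq("plan")))(sc)     // drop users without "plan"
      users.take(5).foreach { case (entityId, props) =>
        println(s"$entityId -> ${props.get[String]("plan")}")
      }
      sc.stop()
    }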

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/data/src/main/scala/org/apache/predictionio/data/storage/PropertyMap.scala
----------------------------------------------------------------------
diff --git a/data/src/main/scala/org/apache/predictionio/data/storage/PropertyMap.scala b/data/src/main/scala/org/apache/predictionio/data/storage/PropertyMap.scala
new file mode 100644
index 0000000..9935558
--- /dev/null
+++ b/data/src/main/scala/org/apache/predictionio/data/storage/PropertyMap.scala
@@ -0,0 +1,96 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+package org.apache.predictionio.data.storage
+
+import org.joda.time.DateTime
+
+import org.json4s.JValue
+import org.json4s.JObject
+import org.json4s.native.JsonMethods.parse
+
+/** A PropertyMap stores the aggregated properties of an entity. Internally it
+  * is a Map whose keys are property names and whose values are the
+  * corresponding JSON values. Use the get() method to retrieve the value of a
+  * mandatory property, or getOpt() to retrieve the value of an optional
+  * property.
+  *
+  * @param fields Map of property name to JValue
+  * @param firstUpdated first updated time of this PropertyMap
+  * @param lastUpdated last updated time of this PropertyMap
+  */
+class PropertyMap(
+  fields: Map[String, JValue],
+  val firstUpdated: DateTime,
+  val lastUpdated: DateTime
+) extends DataMap(fields) {
+
+  override
+  def toString: String = s"PropertyMap(${fields}, ${firstUpdated}, ${lastUpdated})"
+
+  override
+  def hashCode: Int =
+    41 * (
+      41 * (
+        41 + fields.hashCode
+      ) + firstUpdated.hashCode
+    ) + lastUpdated.hashCode
+
+  override
+  def equals(other: Any): Boolean = other match {
+    case that: PropertyMap => {
+      (that.canEqual(this)) &&
+      (super.equals(that)) &&
+      (this.firstUpdated.equals(that.firstUpdated)) &&
+      (this.lastUpdated.equals(that.lastUpdated))
+    }
+    case that: DataMap => { // for testing purposes
+      super.equals(that)
+    }
+    case _ => false
+  }
+
+  override
+  def canEqual(other: Any): Boolean = other.isInstanceOf[PropertyMap]
+}
+
+/** Companion object of the [[PropertyMap]] class. */
+object PropertyMap {
+
+  /** Create a PropertyMap from a Map of String to JValue,
+    * firstUpdated and lastUpdated time.
+    *
+    * @param fields a Map of String to JValue
+    * @param firstUpdated First updated time
+    * @param lastUpdated Last updated time
+    * @return a new PropertyMap
+    */
+  def apply(fields: Map[String, JValue],
+    firstUpdated: DateTime, lastUpdated: DateTime): PropertyMap =
+    new PropertyMap(fields, firstUpdated, lastUpdated)
+
+  /** Create a PropertyMap from a JSON String and firstUpdated and lastUpdated
+    * time.
+    * @param js JSON String. eg """{ "a": 1, "b": "foo" }"""
+    * @param firstUpdated First updated time
+    * @param lastUpdated Last updated time
+    * @return a new PropertyMap
+    */
+  def apply(js: String, firstUpdated: DateTime, lastUpdated: DateTime)
+  : PropertyMap = apply(
+      fields = parse(js).asInstanceOf[JObject].obj.toMap,
+      firstUpdated = firstUpdated,
+      lastUpdated = lastUpdated
+    )
+}
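
A quick sketch of the two accessors mentioned in the class comment; the JSON
and timestamps are made up, and get[T]/getOpt[T] are assumed to be the typed
accessors inherited from DataMap:

    import org.apache.predictionio.data.storage.PropertyMap
    import org.joda.time.DateTime

    object PropertyMapSketch extends App {
      val t = new DateTime(2015, 1, 1, 0, 0)
      val props = PropertyMap("""{ "a": 1, "b": "foo" }""", t, t)
      println(props.get[Int]("a"))         // 1: mandatory property
      println(props.getOpt[String]("c"))   // None: optional property is absent
    }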

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/data/src/main/scala/org/apache/predictionio/data/storage/Storage.scala
----------------------------------------------------------------------
diff --git a/data/src/main/scala/org/apache/predictionio/data/storage/Storage.scala b/data/src/main/scala/org/apache/predictionio/data/storage/Storage.scala
new file mode 100644
index 0000000..1f170be
--- /dev/null
+++ b/data/src/main/scala/org/apache/predictionio/data/storage/Storage.scala
@@ -0,0 +1,403 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package org.apache.predictionio.data.storage
+
+import java.lang.reflect.InvocationTargetException
+
+import grizzled.slf4j.Logging
+import org.apache.predictionio.annotation.DeveloperApi
+
+import scala.concurrent.ExecutionContext.Implicits.global
+import scala.language.existentials
+import scala.reflect.runtime.universe._
+
+/** :: DeveloperApi ::
+  * Every storage backend driver needs to implement this trait with exactly
+  * '''StorageClient''' as the class name. The PredictionIO storage layer looks
+  * for this class when it instantiates the actual backend for use by higher
+  * level storage access APIs.
+  *
+  * @group Storage System
+  */
+@DeveloperApi
+trait BaseStorageClient {
+  /** Configuration of the '''StorageClient''' */
+  val config: StorageClientConfig
+
+  /** The actual client object. This could be a database connection or any kind
+    * of database access object.
+    */
+  val client: AnyRef
+
+  /** Set a prefix for storage class discovery. As an example, if this prefix
+    * is set as ''JDBC'', when the storage layer instantiates an implementation
+    * of [[Apps]], it will try to look for a class named ''JDBCApps''.
+    */
+  val prefix: String = ""
+}
+
+/** :: DeveloperApi ::
+  * A wrapper of storage client configuration that will be populated by
+  * PredictionIO automatically, and passed to the StorageClient during
+  * instantiation.
+  *
+  * @param parallel This is set to true by PredictionIO when the storage client
+  *                 is instantiated in a parallel data source.
+  * @param test This is set to true by PredictionIO when tests are being run.
+  * @param properties This is populated by PredictionIO automatically from
+  *                   environment variables. If you have these
+  *                   variables,
+  *                   - PIO_STORAGE_SOURCES_PGSQL_TYPE=jdbc
+  *                   - PIO_STORAGE_SOURCES_PGSQL_USERNAME=abc
+  *                   - PIO_STORAGE_SOURCES_PGSQL_PASSWORD=xyz
+  *
+  *                   this field will be filled as a map of string to string:
+  *                   - TYPE -> jdbc
+  *                   - USERNAME -> abc
+  *                   - PASSWORD -> xyz
+  *
+  * @group Storage System
+  */
+@DeveloperApi
+case class StorageClientConfig(
+  parallel: Boolean = false, // parallelized access (RDD)?
+  test: Boolean = false, // test mode config
+  properties: Map[String, String] = Map())
+
+/** :: DeveloperApi ::
+  * Thrown when a StorageClient runs into an exceptional condition
+  *
+  * @param message Exception error message
+  * @param cause The underlying exception that caused the exception
+  * @group Storage System
+  */
+@DeveloperApi
+class StorageClientException(message: String, cause: Throwable)
+  extends RuntimeException(message, cause)
+
+@deprecated("Use StorageException", "0.9.2")
+private[predictionio] case class StorageError(message: String)
+
+/** :: DeveloperApi ::
+  * Thrown by data access objects when they run into exceptional conditions
+  *
+  * @param message Exception error message
+  * @param cause The underlying exception that caused the exception
+  *
+  * @group Storage System
+  */
+@DeveloperApi
+class StorageException(message: String, cause: Throwable)
+  extends Exception(message, cause) {
+
+  def this(message: String) = this(message, null)
+}
+
+/** Backend-agnostic data storage layer with lazy initialization. Use this
+  * object when you need to interface with Event Store in your engine.
+  *
+  * @group Storage System
+  */
+object Storage extends Logging {
+  private case class ClientMeta(
+    sourceType: String,
+    client: BaseStorageClient,
+    config: StorageClientConfig)
+
+  private case class DataObjectMeta(sourceName: String, namespace: String)
+
+  private var errors = 0
+
+  private val sourcesPrefix = "PIO_STORAGE_SOURCES"
+
+  private val sourceTypesRegex = """PIO_STORAGE_SOURCES_([^_]+)_TYPE""".r
+
+  private val sourceKeys: Seq[String] = sys.env.keys.toSeq.flatMap { k =>
+    sourceTypesRegex findFirstIn k match {
+      case Some(sourceTypesRegex(sourceType)) => Seq(sourceType)
+      case None => Nil
+    }
+  }
+
+  if (sourceKeys.size == 0) warn("There is no properly configured data source.")
+
+  private val s2cm = scala.collection.mutable.Map[String, Option[ClientMeta]]()
+
+  /** Names of the data repositories. */
+  private val EventDataRepository = "EVENTDATA"
+  private val ModelDataRepository = "MODELDATA"
+  private val MetaDataRepository = "METADATA"
+
+  private val repositoriesPrefix = "PIO_STORAGE_REPOSITORIES"
+
+  private val repositoryNamesRegex =
+    """PIO_STORAGE_REPOSITORIES_([^_]+)_NAME""".r
+
+  private val repositoryKeys: Seq[String] = sys.env.keys.toSeq.flatMap { k =>
+    repositoryNamesRegex findFirstIn k match {
+      case Some(repositoryNamesRegex(repositoryName)) => Seq(repositoryName)
+      case None => Nil
+    }
+  }
+
+  if (repositoryKeys.size == 0) {
+    warn("There is no properly configured repository.")
+  }
+
+  private val requiredRepositories = Seq(MetaDataRepository)
+
+  requiredRepositories foreach { r =>
+    if (!repositoryKeys.contains(r)) {
+      error(s"Required repository (${r}) configuration is missing.")
+      errors += 1
+    }
+  }
+  private val repositoriesToDataObjectMeta: Map[String, DataObjectMeta] =
+    repositoryKeys.map(r =>
+      try {
+        val keyedPath = repositoriesPrefixPath(r)
+        val name = sys.env(prefixPath(keyedPath, "NAME"))
+        val sourceName = sys.env(prefixPath(keyedPath, "SOURCE"))
+        if (sourceKeys.contains(sourceName)) {
+          r -> DataObjectMeta(
+            sourceName = sourceName,
+            namespace = name)
+        } else {
+          error(s"$sourceName is not a configured storage source.")
+          r -> DataObjectMeta("", "")
+        }
+      } catch {
+        case e: Throwable =>
+          error(e.getMessage)
+          errors += 1
+          r -> DataObjectMeta("", "")
+      }
+    ).toMap
+
+  if (errors > 0) {
+    error(s"There were $errors configuration errors. Exiting.")
+    sys.exit(errors)
+  }
+
+  // End of constructor and field definitions; method definitions begin here
+
+  private def prefixPath(prefix: String, body: String) = s"${prefix}_$body"
+
+  private def sourcesPrefixPath(body: String) = prefixPath(sourcesPrefix, body)
+
+  private def repositoriesPrefixPath(body: String) =
+    prefixPath(repositoriesPrefix, body)
+
+  private def sourcesToClientMeta(
+      source: String,
+      parallel: Boolean,
+      test: Boolean): Option[ClientMeta] = {
+    val sourceName = if (parallel) s"parallel-$source" else source
+    s2cm.getOrElseUpdate(sourceName, updateS2CM(source, parallel, test))
+  }
+
+  private def getClient(
+    clientConfig: StorageClientConfig,
+    pkg: String): BaseStorageClient = {
+    val className = "org.apache.predictionio.data.storage." + pkg + ".StorageClient"
+    try {
+      Class.forName(className).getConstructors()(0).newInstance(clientConfig).
+        asInstanceOf[BaseStorageClient]
+    } catch {
+      case e: ClassNotFoundException =>
+        val originalClassName = pkg + ".StorageClient"
+        Class.forName(originalClassName).getConstructors()(0).
+          newInstance(clientConfig).asInstanceOf[BaseStorageClient]
+      case e: java.lang.reflect.InvocationTargetException =>
+        throw e.getCause
+    }
+  }
+
+  /** Get the StorageClient config data from PIO Framework's environment variables */
+  def getConfig(sourceName: String): Option[StorageClientConfig] =
+    s2cm.get(sourceName).flatten.map(_.config)
+
+  private def updateS2CM(k: String, parallel: Boolean, test: Boolean):
+  Option[ClientMeta] = {
+    try {
+      val keyedPath = sourcesPrefixPath(k)
+      val sourceType = sys.env(prefixPath(keyedPath, "TYPE"))
+      val props = sys.env.filter(t => t._1.startsWith(keyedPath)).map(
+        t => t._1.replace(s"${keyedPath}_", "") -> t._2)
+      val clientConfig = StorageClientConfig(
+        properties = props,
+        parallel = parallel,
+        test = test)
+      val client = getClient(clientConfig, sourceType)
+      Some(ClientMeta(sourceType, client, clientConfig))
+    } catch {
+      case e: Throwable =>
+        error(s"Error initializing storage client for source ${k}", e)
+        errors += 1
+        None
+    }
+  }
+
+  private[predictionio]
+  def getDataObjectFromRepo[T](repo: String, test: Boolean = false)
+    (implicit tag: TypeTag[T]): T = {
+    val repoDOMeta = repositoriesToDataObjectMeta(repo)
+    val repoDOSourceName = repoDOMeta.sourceName
+    getDataObject[T](repoDOSourceName, repoDOMeta.namespace, test = test)
+  }
+
+  private[predictionio]
+  def getPDataObject[T](repo: String)(implicit tag: TypeTag[T]): T = {
+    val repoDOMeta = repositoriesToDataObjectMeta(repo)
+    val repoDOSourceName = repoDOMeta.sourceName
+    getPDataObject[T](repoDOSourceName, repoDOMeta.namespace)
+  }
+
+  private[predictionio] def getDataObject[T](
+      sourceName: String,
+      namespace: String,
+      parallel: Boolean = false,
+      test: Boolean = false)(implicit tag: TypeTag[T]): T = {
+    val clientMeta = sourcesToClientMeta(sourceName, parallel, test) getOrElse {
+      throw new StorageClientException(
+        s"Data source $sourceName was not properly initialized.", null)
+    }
+    val sourceType = clientMeta.sourceType
+    val ctorArgs = dataObjectCtorArgs(clientMeta.client, namespace)
+    val classPrefix = clientMeta.client.prefix
+    val originalClassName = tag.tpe.toString.split('.')
+    val rawClassName = sourceType + "." + classPrefix + originalClassName.last
+    val className = "org.apache.predictionio.data.storage." + rawClassName
+    val clazz = try {
+      Class.forName(className)
+    } catch {
+      case e: ClassNotFoundException =>
+        try {
+          Class.forName(rawClassName)
+        } catch {
+          case e: ClassNotFoundException =>
+            throw new StorageClientException("No storage backend " +
+              "implementation can be found (tried both " +
+              s"$className and $rawClassName)", e)
+        }
+    }
+    val constructor = clazz.getConstructors()(0)
+    try {
+      constructor.newInstance(ctorArgs: _*).
+        asInstanceOf[T]
+    } catch {
+      case e: IllegalArgumentException =>
+        error(
+          "Unable to instantiate data object with class '" +
+          constructor.getDeclaringClass.getName + "' because its constructor" +
+          " does not have the right number of arguments." +
+          " Number of required constructor arguments: " +
+          ctorArgs.size + "." +
+          " Number of existing constructor arguments: " +
+          constructor.getParameterTypes.size + "." +
+          s" Storage source name: ${sourceName}." +
+          s" Exception message: ${e.getMessage}).", e)
+        errors += 1
+        throw e
+      case e: java.lang.reflect.InvocationTargetException =>
+        throw e.getCause
+    }
+  }
+
+  private def getPDataObject[T](
+      sourceName: String,
+      databaseName: String)(implicit tag: TypeTag[T]): T =
+    getDataObject[T](sourceName, databaseName, true)
+
+  private def dataObjectCtorArgs(
+      client: BaseStorageClient,
+      namespace: String): Seq[AnyRef] = {
+    Seq(client.client, client.config, namespace)
+  }
+
+  private[predictionio] def verifyAllDataObjects(): Unit = {
+    info("Verifying Meta Data Backend (Source: " +
+      s"${repositoriesToDataObjectMeta(MetaDataRepository).sourceName})...")
+    getMetaDataEngineManifests()
+    getMetaDataEngineInstances()
+    getMetaDataEvaluationInstances()
+    getMetaDataApps()
+    getMetaDataAccessKeys()
+    info("Verifying Model Data Backend (Source: " +
+      s"${repositoriesToDataObjectMeta(ModelDataRepository).sourceName})...")
+    getModelDataModels()
+    info("Verifying Event Data Backend (Source: " +
+      s"${repositoriesToDataObjectMeta(EventDataRepository).sourceName})...")
+    val eventsDb = getLEvents(test = true)
+    info("Test writing to Event Store (App Id 0)...")
+    // use appId=0 for testing purposes
+    eventsDb.init(0)
+    eventsDb.insert(Event(
+      event = "test",
+      entityType = "test",
+      entityId = "test"), 0)
+    eventsDb.remove(0)
+    eventsDb.close()
+  }
+
+  private[predictionio] def getMetaDataEngineManifests(): EngineManifests =
+    getDataObjectFromRepo[EngineManifests](MetaDataRepository)
+
+  private[predictionio] def getMetaDataEngineInstances(): EngineInstances =
+    getDataObjectFromRepo[EngineInstances](MetaDataRepository)
+
+  private[predictionio] def getMetaDataEvaluationInstances(): EvaluationInstances =
+    getDataObjectFromRepo[EvaluationInstances](MetaDataRepository)
+
+  private[predictionio] def getMetaDataApps(): Apps =
+    getDataObjectFromRepo[Apps](MetaDataRepository)
+
+  private[predictionio] def getMetaDataAccessKeys(): AccessKeys =
+    getDataObjectFromRepo[AccessKeys](MetaDataRepository)
+
+  private[predictionio] def getMetaDataChannels(): Channels =
+    getDataObjectFromRepo[Channels](MetaDataRepository)
+
+  private[predictionio] def getModelDataModels(): Models =
+    getDataObjectFromRepo[Models](ModelDataRepository)
+
+  /** Obtains a data access object that returns [[Event]] related local data
+    * structure.
+    */
+  def getLEvents(test: Boolean = false): LEvents =
+    getDataObjectFromRepo[LEvents](EventDataRepository, test = test)
+
+  /** Obtains a data access object that returns [[Event]] related RDD data
+    * structure.
+    */
+  def getPEvents(): PEvents =
+    getPDataObject[PEvents](EventDataRepository)
+
+  def config: Map[String, Map[String, Map[String, String]]] = Map(
+    "sources" -> s2cm.toMap.map { case (source, clientMeta) =>
+      source -> clientMeta.map { cm =>
+        Map(
+          "type" -> cm.sourceType,
+          "config" -> cm.config.properties.map(t => s"${t._1} -> ${t._2}").mkString(", ")
+        )
+      }.getOrElse(Map.empty)
+    }
+  )
+}
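
A sketch of the environment wiring this object expects, here with one JDBC
source named PGSQL backing all three repositories; the repository names and
the URL are illustrative:

    // export PIO_STORAGE_REPOSITORIES_METADATA_NAME=pio_meta
    // export PIO_STORAGE_REPOSITORIES_METADATA_SOURCE=PGSQL
    // export PIO_STORAGE_REPOSITORIES_EVENTDATA_NAME=pio_event
    // export PIO_STORAGE_REPOSITORIES_EVENTDATA_SOURCE=PGSQL
    // export PIO_STORAGE_REPOSITORIES_MODELDATA_NAME=pio_model
    // export PIO_STORAGE_REPOSITORIES_MODELDATA_SOURCE=PGSQL
    // export PIO_STORAGE_SOURCES_PGSQL_TYPE=jdbc
    // export PIO_STORAGE_SOURCES_PGSQL_URL=jdbc:postgresql://localhost/pio
    import org.apache.predictionio.data.storage.Storage

    object StorageConfigSketch extends App {
      // With the variables above set, getClient resolves
      // org.apache.predictionio.data.storage.jdbc.StorageClient by reflection,
      // and each repository is wired to the PGSQL source.
      println(Storage.config)
    }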

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/data/src/main/scala/org/apache/predictionio/data/storage/Utils.scala
----------------------------------------------------------------------
diff --git a/data/src/main/scala/org/apache/predictionio/data/storage/Utils.scala b/data/src/main/scala/org/apache/predictionio/data/storage/Utils.scala
new file mode 100644
index 0000000..321b245
--- /dev/null
+++ b/data/src/main/scala/org/apache/predictionio/data/storage/Utils.scala
@@ -0,0 +1,47 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package org.apache.predictionio.data.storage
+
+import org.joda.time.DateTime
+import org.joda.time.format.ISODateTimeFormat
+
+/** Backend-agnostic storage utilities. */
+private[predictionio] object Utils {
+  /**
+   * Add prefix to custom attribute keys.
+   */
+  def addPrefixToAttributeKeys[T](
+      attributes: Map[String, T],
+      prefix: String = "ca_"): Map[String, T] = {
+    attributes map { case (k, v) => (prefix + k, v) }
+  }
+
+  /** Remove prefix from custom attribute keys. */
+  def removePrefixFromAttributeKeys[T](
+      attributes: Map[String, T],
+      prefix: String = "ca_"): Map[String, T] = {
+    attributes map { case (k, v) => (k.stripPrefix(prefix), v) }
+  }
+
+  /**
+   * Appends an App ID to any ID.
+   * Used for distinguishing different apps' data within a single collection.
+   */
+  def idWithAppid(appid: Int, id: String): String = appid + "_" + id
+
+  def stringToDateTime(dt: String): DateTime =
+    ISODateTimeFormat.dateTimeParser.parseDateTime(dt)
+}
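
A self-contained sketch of the helpers above; since the object is
package-private, the sketch lives in the same package:

    package org.apache.predictionio.data.storage

    object UtilsSketch extends App {
      val attrs = Map("color" -> "red", "size" -> "L")
      val prefixed = Utils.addPrefixToAttributeKeys(attrs)
      println(prefixed)                 // Map(ca_color -> red, ca_size -> L)
      assert(Utils.removePrefixFromAttributeKeys(prefixed) == attrs)
      assert(Utils.idWithAppid(7, "user-1") == "7_user-1")
      println(Utils.stringToDateTime("2015-01-01T00:00:00.000Z"))
    }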

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/data/src/main/scala/org/apache/predictionio/data/storage/elasticsearch/ESAccessKeys.scala
----------------------------------------------------------------------
diff --git a/data/src/main/scala/org/apache/predictionio/data/storage/elasticsearch/ESAccessKeys.scala b/data/src/main/scala/org/apache/predictionio/data/storage/elasticsearch/ESAccessKeys.scala
new file mode 100644
index 0000000..7853d97
--- /dev/null
+++ b/data/src/main/scala/org/apache/predictionio/data/storage/elasticsearch/ESAccessKeys.scala
@@ -0,0 +1,116 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package org.apache.predictionio.data.storage.elasticsearch
+
+import grizzled.slf4j.Logging
+import org.apache.predictionio.data.storage.StorageClientConfig
+import org.apache.predictionio.data.storage.AccessKey
+import org.apache.predictionio.data.storage.AccessKeys
+import org.elasticsearch.ElasticsearchException
+import org.elasticsearch.client.Client
+import org.elasticsearch.index.query.FilterBuilders._
+import org.json4s.JsonDSL._
+import org.json4s._
+import org.json4s.native.JsonMethods._
+import org.json4s.native.Serialization.read
+import org.json4s.native.Serialization.write
+
+import scala.util.Random
+
+/** Elasticsearch implementation of AccessKeys. */
+class ESAccessKeys(client: Client, config: StorageClientConfig, index: String)
+    extends AccessKeys with Logging {
+  implicit val formats = DefaultFormats.lossless
+  private val estype = "accesskeys"
+
+  val indices = client.admin.indices
+  val indexExistResponse = indices.prepareExists(index).get
+  if (!indexExistResponse.isExists) {
+    indices.prepareCreate(index).get
+  }
+  val typeExistResponse = indices.prepareTypesExists(index).setTypes(estype).get
+  if (!typeExistResponse.isExists) {
+    val json =
+      (estype ->
+        ("properties" ->
+          ("key" -> ("type" -> "string") ~ ("index" -> "not_analyzed")) ~
+          ("events" -> ("type" -> "string") ~ ("index" -> "not_analyzed"))))
+    indices.preparePutMapping(index).setType(estype).
+      setSource(compact(render(json))).get
+  }
+
+  def insert(accessKey: AccessKey): Option[String] = {
+    val key = if (accessKey.key.isEmpty) generateKey else accessKey.key
+    update(accessKey.copy(key = key))
+    Some(key)
+  }
+
+  def get(key: String): Option[AccessKey] = {
+    try {
+      val response = client.prepareGet(
+        index,
+        estype,
+        key).get()
+      Some(read[AccessKey](response.getSourceAsString))
+    } catch {
+      case e: ElasticsearchException =>
+        error(e.getMessage)
+        None
+      case e: NullPointerException => None
+    }
+  }
+
+  def getAll(): Seq[AccessKey] = {
+    try {
+      val builder = client.prepareSearch(index).setTypes(estype)
+      ESUtils.getAll[AccessKey](client, builder)
+    } catch {
+      case e: ElasticsearchException =>
+        error(e.getMessage)
+        Seq[AccessKey]()
+    }
+  }
+
+  def getByAppid(appid: Int): Seq[AccessKey] = {
+    try {
+      val builder = client.prepareSearch(index).setTypes(estype).
+        setPostFilter(termFilter("appid", appid))
+      ESUtils.getAll[AccessKey](client, builder)
+    } catch {
+      case e: ElasticsearchException =>
+        error(e.getMessage)
+        Seq[AccessKey]()
+    }
+  }
+
+  def update(accessKey: AccessKey): Unit = {
+    try {
+      client.prepareIndex(index, estype, accessKey.key).setSource(write(accessKey)).get()
+    } catch {
+      case e: ElasticsearchException =>
+        error(e.getMessage)
+    }
+  }
+
+  def delete(key: String): Unit = {
+    try {
+      client.prepareDelete(index, estype, key).get
+    } catch {
+      case e: ElasticsearchException =>
+        error(e.getMessage)
+    }
+  }
+}
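
ESAccessKeys is a thin CRUD wrapper over one Elasticsearch type: the constructor creates the index and a mapping with not_analyzed string fields on demand, insert() generates a random key when the caller passes an empty one, and every Elasticsearch failure is logged and swallowed rather than rethrown. A minimal usage sketch (the index name and AccessKey field names are assumptions based on the storage package; client and config come from the storage bootstrap):

    import org.apache.predictionio.data.storage.{AccessKey, StorageClientConfig}
    import org.elasticsearch.client.Client

    def accessKeyDemo(client: Client, config: StorageClientConfig): Unit = {
      val accessKeys = new ESAccessKeys(client, config, "pio_meta")  // assumed index name
      // An empty key asks insert() to generate a random key and return it.
      val key = accessKeys.insert(AccessKey(key = "", appid = 1, events = Seq()))
      key.foreach { k =>
        accessKeys.get(k)     // Some(AccessKey(...)); None if missing or on ES error
        accessKeys.delete(k)  // best effort: an ElasticsearchException is only logged
      }
    }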

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/data/src/main/scala/org/apache/predictionio/data/storage/elasticsearch/ESApps.scala
----------------------------------------------------------------------
diff --git a/data/src/main/scala/org/apache/predictionio/data/storage/elasticsearch/ESApps.scala b/data/src/main/scala/org/apache/predictionio/data/storage/elasticsearch/ESApps.scala
new file mode 100644
index 0000000..6790b52
--- /dev/null
+++ b/data/src/main/scala/org/apache/predictionio/data/storage/elasticsearch/ESApps.scala
@@ -0,0 +1,127 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package org.apache.predictionio.data.storage.elasticsearch
+
+import grizzled.slf4j.Logging
+import org.apache.predictionio.data.storage.StorageClientConfig
+import org.apache.predictionio.data.storage.App
+import org.apache.predictionio.data.storage.Apps
+import org.elasticsearch.ElasticsearchException
+import org.elasticsearch.client.Client
+import org.elasticsearch.index.query.FilterBuilders._
+import org.json4s.JsonDSL._
+import org.json4s._
+import org.json4s.native.JsonMethods._
+import org.json4s.native.Serialization.read
+import org.json4s.native.Serialization.write
+
+/** Elasticsearch implementation of Apps. */
+class ESApps(client: Client, config: StorageClientConfig, index: String)
+  extends Apps with Logging {
+  implicit val formats = DefaultFormats.lossless
+  private val estype = "apps"
+  private val seq = new ESSequences(client, config, index)
+
+  val indices = client.admin.indices
+  val indexExistResponse = indices.prepareExists(index).get
+  if (!indexExistResponse.isExists) {
+    indices.prepareCreate(index).get
+  }
+  val typeExistResponse = indices.prepareTypesExists(index).setTypes(estype).get
+  if (!typeExistResponse.isExists) {
+    val json =
+      (estype ->
+        ("properties" ->
+          ("name" -> ("type" -> "string") ~ ("index" -> "not_analyzed"))))
+    indices.preparePutMapping(index).setType(estype).
+      setSource(compact(render(json))).get
+  }
+
+  def insert(app: App): Option[Int] = {
+    val id =
+      if (app.id == 0) {
+        var roll = seq.genNext("apps")
+        while (!get(roll).isEmpty) roll = seq.genNext("apps")
+        roll
+      }
+      else app.id
+    val realapp = app.copy(id = id)
+    update(realapp)
+    Some(id)
+  }
+
+  def get(id: Int): Option[App] = {
+    try {
+      val response = client.prepareGet(
+        index,
+        estype,
+        id.toString).get()
+      Some(read[App](response.getSourceAsString))
+    } catch {
+      case e: ElasticsearchException =>
+        error(e.getMessage)
+        None
+      case e: NullPointerException => None
+    }
+  }
+
+  def getByName(name: String): Option[App] = {
+    try {
+      val response = client.prepareSearch(index).setTypes(estype).
+        setPostFilter(termFilter("name", name)).get
+      val hits = response.getHits().hits()
+      if (hits.size > 0) {
+        Some(read[App](hits.head.getSourceAsString))
+      } else {
+        None
+      }
+    } catch {
+      case e: ElasticsearchException =>
+        error(e.getMessage)
+        None
+    }
+  }
+
+  def getAll(): Seq[App] = {
+    try {
+      val builder = client.prepareSearch(index).setTypes(estype)
+      ESUtils.getAll[App](client, builder)
+    } catch {
+      case e: ElasticsearchException =>
+        error(e.getMessage)
+        Seq[App]()
+    }
+  }
+
+  def update(app: App): Unit = {
+    try {
+      val response = client.prepareIndex(index, estype, app.id.toString).
+        setSource(write(app)).get()
+    } catch {
+      case e: ElasticsearchException =>
+        error(e.getMessage)
+    }
+  }
+
+  def delete(id: Int): Unit = {
+    try {
+      client.prepareDelete(index, estype, id.toString).get
+    } catch {
+      case e: ElasticsearchException =>
+        error(e.getMessage)
+    }
+  }
+}
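
The only non-obvious piece of ESApps is ID allocation: an App with id == 0 draws candidates from the Elasticsearch-backed "apps" sequence (ESSequences) and keeps drawing until it hits an ID with no existing document, so generated and explicitly pinned IDs can coexist. A sketch of both paths (same assumptions as the ESAccessKeys sketch above; App field names follow the storage package):

    import org.apache.predictionio.data.storage.{App, StorageClientConfig}
    import org.elasticsearch.client.Client

    def appsDemo(client: Client, config: StorageClientConfig): Unit = {
      val apps = new ESApps(client, config, "pio_meta")  // assumed index name
      // id = 0: insert() pulls the next free ID from the "apps" sequence.
      apps.insert(App(id = 0, name = "MyApp", description = None))
      // Non-zero id: used as-is; an existing document with that ID is overwritten.
      apps.insert(App(id = 42, name = "Pinned", description = Some("fixed ID")))
      // Lookup by name uses a term filter against the not_analyzed "name" field.
      apps.getByName("MyApp")
    }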



[31/34] incubator-predictionio git commit: change private scope to predictionio

Posted by do...@apache.org.
change private scope to predictionio


Project: http://git-wip-us.apache.org/repos/asf/incubator-predictionio/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-predictionio/commit/d7c14178
Tree: http://git-wip-us.apache.org/repos/asf/incubator-predictionio/tree/d7c14178
Diff: http://git-wip-us.apache.org/repos/asf/incubator-predictionio/diff/d7c14178

Branch: refs/heads/develop
Commit: d7c14178d14610d69cb3453a49c7dc830db98d07
Parents: 6d160b6
Author: Xusen Yin <yi...@gmail.com>
Authored: Thu Jul 7 17:05:48 2016 -0700
Committer: Xusen Yin <yi...@gmail.com>
Committed: Thu Jul 7 17:05:48 2016 -0700

----------------------------------------------------------------------
 .../apache/predictionio/controller/Engine.scala |  6 ++--
 .../predictionio/workflow/FakeWorkflow.scala    |  6 ++--
 .../predictionio/workflow/WorkflowUtils.scala   |  2 +-
 .../org/apache/predictionio/data/Utils.scala    |  2 +-
 .../apache/predictionio/data/api/Webhooks.scala |  2 +-
 .../data/api/WebhooksConnectors.scala           |  2 +-
 .../predictionio/data/storage/BiMap.scala       |  2 +-
 .../predictionio/data/storage/LEvents.scala     | 18 +++++-----
 .../data/storage/PEventAggregator.scala         | 14 ++++----
 .../predictionio/data/storage/Storage.scala     | 24 ++++++-------
 .../predictionio/data/storage/Utils.scala       |  2 +-
 .../data/storage/jdbc/JDBCLEvents.scala         |  2 +-
 .../apache/predictionio/data/store/Common.scala |  2 +-
 .../predictionio/data/view/PBatchView.scala     | 12 +++----
 .../data/webhooks/ConnectorException.scala      |  2 +-
 .../data/webhooks/ConnectorUtil.scala           |  2 +-
 .../data/webhooks/FormConnector.scala           |  2 +-
 .../data/webhooks/JsonConnector.scala           |  2 +-
 .../exampleform/ExampleFormConnector.scala      |  2 +-
 .../examplejson/ExampleJsonConnector.scala      |  2 +-
 .../webhooks/mailchimp/MailChimpConnector.scala |  2 +-
 .../webhooks/segmentio/SegmentIOConnector.scala | 38 ++++++++++----------
 22 files changed, 74 insertions(+), 74 deletions(-)
----------------------------------------------------------------------
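
The mechanical reason for this commit: a Scala qualified-private modifier must name an enclosing package, so once the root package moved from io.prediction to org.apache.predictionio, every private[prediction] qualifier no longer named an enclosing package and had to become private[predictionio]. A minimal illustration of the semantics:

    package org.apache.predictionio.data

    // Visible from any code under org.apache.predictionio, hidden outside that tree.
    private[predictionio] object ScopeDemo {
      def visibleInsideTree(): Int = 42
    }

    // A class in org.apache.predictionio.core may call ScopeDemo.visibleInsideTree();
    // code outside the org.apache.predictionio tree cannot, and private[prediction]
    // here would not even compile, since no enclosing package is named "prediction".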


http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/d7c14178/core/src/main/scala/org/apache/predictionio/controller/Engine.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/predictionio/controller/Engine.scala b/core/src/main/scala/org/apache/predictionio/controller/Engine.scala
index c875a9f..1bc177a 100644
--- a/core/src/main/scala/org/apache/predictionio/controller/Engine.scala
+++ b/core/src/main/scala/org/apache/predictionio/controller/Engine.scala
@@ -85,7 +85,7 @@ class Engine[TD, EI, PD, Q, P, A](
     val servingClassMap: Map[String, Class[_ <: BaseServing[Q, P]]])
   extends BaseEngine[EI, Q, P, A] {
 
-  private[prediction]
+  private[predictionio]
   implicit lazy val formats = Utils.json4sDefaultFormats +
     new NameParamsSerializer
 
@@ -192,7 +192,7 @@ class Engine[TD, EI, PD, Q, P, A](
     * possible that models are not persisted. This method retrains non-persisted
     * models and returns a list of models that can be used directly in deploy.
     */
-  private[prediction]
+  private[predictionio]
   def prepareDeploy(
     sc: SparkContext,
     engineParams: EngineParams,
@@ -416,7 +416,7 @@ class Engine[TD, EI, PD, Q, P, A](
       servingParams = servingParams)
   }
 
-  private[prediction] def engineInstanceToEngineParams(
+  private[predictionio] def engineInstanceToEngineParams(
     engineInstance: EngineInstance,
     jsonExtractor: JsonExtractorOption): EngineParams = {
 

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/d7c14178/core/src/main/scala/org/apache/predictionio/workflow/FakeWorkflow.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/predictionio/workflow/FakeWorkflow.scala b/core/src/main/scala/org/apache/predictionio/workflow/FakeWorkflow.scala
index f11ea2e..c980b97 100644
--- a/core/src/main/scala/org/apache/predictionio/workflow/FakeWorkflow.scala
+++ b/core/src/main/scala/org/apache/predictionio/workflow/FakeWorkflow.scala
@@ -27,7 +27,7 @@ import org.apache.spark.rdd.RDD
 
 
 @Experimental
-private[prediction] class FakeEngine
+private[predictionio] class FakeEngine
 extends BaseEngine[EmptyParams, EmptyParams, EmptyParams, EmptyParams] {
   @transient lazy val logger = Logger[this.type]
 
@@ -49,7 +49,7 @@ extends BaseEngine[EmptyParams, EmptyParams, EmptyParams, EmptyParams] {
 }
 
 @Experimental
-private[prediction] class FakeRunner(f: (SparkContext => Unit))
+private[predictionio] class FakeRunner(f: (SparkContext => Unit))
     extends BaseEvaluator[EmptyParams, EmptyParams, EmptyParams, EmptyParams,
       FakeEvalResult] {
   @transient private lazy val logger = Logger[this.type]
@@ -65,7 +65,7 @@ private[prediction] class FakeRunner(f: (SparkContext => Unit))
 }
 
 @Experimental
-private[prediction] case class FakeEvalResult() extends BaseEvaluatorResult {
+private[predictionio] case class FakeEvalResult() extends BaseEvaluatorResult {
   override val noSave: Boolean = true
 }
 

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/d7c14178/core/src/main/scala/org/apache/predictionio/workflow/WorkflowUtils.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/predictionio/workflow/WorkflowUtils.scala b/core/src/main/scala/org/apache/predictionio/workflow/WorkflowUtils.scala
index e26b754..cd80fd9 100644
--- a/core/src/main/scala/org/apache/predictionio/workflow/WorkflowUtils.scala
+++ b/core/src/main/scala/org/apache/predictionio/workflow/WorkflowUtils.scala
@@ -211,7 +211,7 @@ object WorkflowUtils extends Logging {
     */
   def javaObjectToJValue(params: AnyRef): JValue = parse(gson.toJson(params))
 
-  private[prediction] def checkUpgrade(
+  private[predictionio] def checkUpgrade(
       component: String = "core",
       engine: String = ""): Unit = {
     val runner = new Thread(new UpgradeCheckRunner(component, engine))

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/d7c14178/data/src/main/scala/org/apache/predictionio/data/Utils.scala
----------------------------------------------------------------------
diff --git a/data/src/main/scala/org/apache/predictionio/data/Utils.scala b/data/src/main/scala/org/apache/predictionio/data/Utils.scala
index db8c7a2..11816b4 100644
--- a/data/src/main/scala/org/apache/predictionio/data/Utils.scala
+++ b/data/src/main/scala/org/apache/predictionio/data/Utils.scala
@@ -20,7 +20,7 @@ import org.joda.time.format.ISODateTimeFormat
 
 import java.lang.IllegalArgumentException
 
-private[prediction] object Utils {
+private[predictionio] object Utils {
 
   // use dateTime() for strict ISO8601 format
   val dateTimeFormatter = ISODateTimeFormat.dateTime().withOffsetParsed()

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/d7c14178/data/src/main/scala/org/apache/predictionio/data/api/Webhooks.scala
----------------------------------------------------------------------
diff --git a/data/src/main/scala/org/apache/predictionio/data/api/Webhooks.scala b/data/src/main/scala/org/apache/predictionio/data/api/Webhooks.scala
index 04ff78f..7347dcb 100644
--- a/data/src/main/scala/org/apache/predictionio/data/api/Webhooks.scala
+++ b/data/src/main/scala/org/apache/predictionio/data/api/Webhooks.scala
@@ -39,7 +39,7 @@ import akka.actor.ActorSelection
 import scala.concurrent.{ExecutionContext, Future}
 
 
-private[prediction] object Webhooks {
+private[predictionio] object Webhooks {
 
   def postJson(
     appId: Int,

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/d7c14178/data/src/main/scala/org/apache/predictionio/data/api/WebhooksConnectors.scala
----------------------------------------------------------------------
diff --git a/data/src/main/scala/org/apache/predictionio/data/api/WebhooksConnectors.scala b/data/src/main/scala/org/apache/predictionio/data/api/WebhooksConnectors.scala
index c2578ee..9fc6a7f 100644
--- a/data/src/main/scala/org/apache/predictionio/data/api/WebhooksConnectors.scala
+++ b/data/src/main/scala/org/apache/predictionio/data/api/WebhooksConnectors.scala
@@ -21,7 +21,7 @@ import org.apache.predictionio.data.webhooks.FormConnector
 import org.apache.predictionio.data.webhooks.segmentio.SegmentIOConnector
 import org.apache.predictionio.data.webhooks.mailchimp.MailChimpConnector
 
-private[prediction] object WebhooksConnectors {
+private[predictionio] object WebhooksConnectors {
 
   val json: Map[String, JsonConnector] = Map(
     "segmentio" -> SegmentIOConnector

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/d7c14178/data/src/main/scala/org/apache/predictionio/data/storage/BiMap.scala
----------------------------------------------------------------------
diff --git a/data/src/main/scala/org/apache/predictionio/data/storage/BiMap.scala b/data/src/main/scala/org/apache/predictionio/data/storage/BiMap.scala
index ad845b3..3236b99 100644
--- a/data/src/main/scala/org/apache/predictionio/data/storage/BiMap.scala
+++ b/data/src/main/scala/org/apache/predictionio/data/storage/BiMap.scala
@@ -22,7 +22,7 @@ import org.apache.spark.rdd.RDD
 /** Immutable Bi-directional Map
   *
   */
-class BiMap[K, V] private[prediction] (
+class BiMap[K, V] private[predictionio] (
   private val m: Map[K, V],
   private val i: Option[BiMap[V, K]] = None
   ) extends Serializable {

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/d7c14178/data/src/main/scala/org/apache/predictionio/data/storage/LEvents.scala
----------------------------------------------------------------------
diff --git a/data/src/main/scala/org/apache/predictionio/data/storage/LEvents.scala b/data/src/main/scala/org/apache/predictionio/data/storage/LEvents.scala
index d6e753c..519af2c 100644
--- a/data/src/main/scala/org/apache/predictionio/data/storage/LEvents.scala
+++ b/data/src/main/scala/org/apache/predictionio/data/storage/LEvents.scala
@@ -188,7 +188,7 @@ trait LEvents {
     * @param ec ExecutionContext
     * @return Future[Map[String, PropertyMap]]
     */
-  private[prediction] def futureAggregateProperties(
+  private[predictionio] def futureAggregateProperties(
     appId: Int,
     channelId: Option[Int] = None,
     entityType: String,
@@ -231,7 +231,7 @@ trait LEvents {
     * @return Future[Option[PropertyMap]]
     */
   @Experimental
-  private[prediction] def futureAggregatePropertiesOfEntity(
+  private[predictionio] def futureAggregatePropertiesOfEntity(
     appId: Int,
     channelId: Option[Int] = None,
     entityType: String,
@@ -253,21 +253,21 @@ trait LEvents {
     }
 
   // following is blocking
-  private[prediction] def insert(event: Event, appId: Int,
+  private[predictionio] def insert(event: Event, appId: Int,
     channelId: Option[Int] = None,
     timeout: Duration = defaultTimeout)(implicit ec: ExecutionContext):
     String = {
     Await.result(futureInsert(event, appId, channelId), timeout)
   }
 
-  private[prediction] def get(eventId: String, appId: Int,
+  private[predictionio] def get(eventId: String, appId: Int,
     channelId: Option[Int] = None,
     timeout: Duration = defaultTimeout)(implicit ec: ExecutionContext):
     Option[Event] = {
     Await.result(futureGet(eventId, appId, channelId), timeout)
   }
 
-  private[prediction] def delete(eventId: String, appId: Int,
+  private[predictionio] def delete(eventId: String, appId: Int,
     channelId: Option[Int] = None,
     timeout: Duration = defaultTimeout)(implicit ec: ExecutionContext):
     Boolean = {
@@ -299,7 +299,7 @@ trait LEvents {
     * @param ec ExecutionContext
     * @return Iterator[Event]
     */
-  private[prediction] def find(
+  private[predictionio] def find(
     appId: Int,
     channelId: Option[Int] = None,
     startTime: Option[DateTime] = None,
@@ -329,7 +329,7 @@ trait LEvents {
 
   // NOTE: remove in next release
   @deprecated("Use find() instead.", "0.9.2")
-  private[prediction] def findLegacy(
+  private[predictionio] def findLegacy(
     appId: Int,
     channelId: Option[Int] = None,
     startTime: Option[DateTime] = None,
@@ -431,7 +431,7 @@ trait LEvents {
     * @param ec ExecutionContext
     * @return Map[String, PropertyMap]
     */
-  private[prediction] def aggregateProperties(
+  private[predictionio] def aggregateProperties(
     appId: Int,
     channelId: Option[Int] = None,
     entityType: String,
@@ -467,7 +467,7 @@ trait LEvents {
     * @return Future[Option[PropertyMap]]
     */
   @Experimental
-  private[prediction] def aggregatePropertiesOfEntity(
+  private[predictionio] def aggregatePropertiesOfEntity(
     appId: Int,
     channelId: Option[Int] = None,
     entityType: String,

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/d7c14178/data/src/main/scala/org/apache/predictionio/data/storage/PEventAggregator.scala
----------------------------------------------------------------------
diff --git a/data/src/main/scala/org/apache/predictionio/data/storage/PEventAggregator.scala b/data/src/main/scala/org/apache/predictionio/data/storage/PEventAggregator.scala
index 72287dd..7b946ab 100644
--- a/data/src/main/scala/org/apache/predictionio/data/storage/PEventAggregator.scala
+++ b/data/src/main/scala/org/apache/predictionio/data/storage/PEventAggregator.scala
@@ -24,10 +24,10 @@ import org.apache.spark.SparkContext._
 import org.apache.spark.rdd.RDD
 
 // each JValue is associated with the time at which it was set
-private[prediction] case class PropTime(val d: JValue, val t: Long)
+private[predictionio] case class PropTime(val d: JValue, val t: Long)
     extends Serializable
 
-private[prediction] case class SetProp (
+private[predictionio] case class SetProp (
   val fields: Map[String, PropTime],
   // last set time. Note: fields could be empty with valid set time
   val t: Long) extends Serializable {
@@ -56,7 +56,7 @@ private[prediction] case class SetProp (
   }
 }
 
-private[prediction] case class UnsetProp (fields: Map[String, Long])
+private[predictionio] case class UnsetProp (fields: Map[String, Long])
     extends Serializable {
   def ++ (that: UnsetProp): UnsetProp = {
     val commonKeys = fields.keySet.intersect(that.fields.keySet)
@@ -78,13 +78,13 @@ private[prediction] case class UnsetProp (fields: Map[String, Long])
   }
 }
 
-private[prediction] case class DeleteEntity (t: Long) extends Serializable {
+private[predictionio] case class DeleteEntity (t: Long) extends Serializable {
   def ++ (that: DeleteEntity): DeleteEntity = {
     if (this.t > that.t) this else that
   }
 }
 
-private[prediction] case class EventOp (
+private[predictionio] case class EventOp (
   val setProp: Option[SetProp] = None,
   val unsetProp: Option[UnsetProp] = None,
   val deleteEntity: Option[DeleteEntity] = None,
@@ -149,7 +149,7 @@ private[prediction] case class EventOp (
 
 }
 
-private[prediction] object EventOp {
+private[predictionio] object EventOp {
   // create EventOp from Event object
   def apply(e: Event): EventOp = {
     val t = e.eventTime.getMillis
@@ -188,7 +188,7 @@ private[prediction] object EventOp {
 }
 
 
-private[prediction] object PEventAggregator {
+private[predictionio] object PEventAggregator {
 
   val eventNames = List("$set", "$unset", "$delete")
 

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/d7c14178/data/src/main/scala/org/apache/predictionio/data/storage/Storage.scala
----------------------------------------------------------------------
diff --git a/data/src/main/scala/org/apache/predictionio/data/storage/Storage.scala b/data/src/main/scala/org/apache/predictionio/data/storage/Storage.scala
index 1f170be..6c150e7 100644
--- a/data/src/main/scala/org/apache/predictionio/data/storage/Storage.scala
+++ b/data/src/main/scala/org/apache/predictionio/data/storage/Storage.scala
@@ -89,7 +89,7 @@ class StorageClientException(message: String, cause: Throwable)
   extends RuntimeException(message, cause)
 
 @deprecated("Use StorageException", "0.9.2")
-private[prediction] case class StorageError(message: String)
+private[predictionio] case class StorageError(message: String)
 
 /** :: DeveloperApi ::
   * Thrown by data access objects when they run into exceptional conditions
@@ -255,7 +255,7 @@ object Storage extends Logging {
     }
   }
 
-  private[prediction]
+  private[predictionio]
   def getDataObjectFromRepo[T](repo: String, test: Boolean = false)
     (implicit tag: TypeTag[T]): T = {
     val repoDOMeta = repositoriesToDataObjectMeta(repo)
@@ -263,14 +263,14 @@ object Storage extends Logging {
     getDataObject[T](repoDOSourceName, repoDOMeta.namespace, test = test)
   }
 
-  private[prediction]
+  private[predictionio]
   def getPDataObject[T](repo: String)(implicit tag: TypeTag[T]): T = {
     val repoDOMeta = repositoriesToDataObjectMeta(repo)
     val repoDOSourceName = repoDOMeta.sourceName
     getPDataObject[T](repoDOSourceName, repoDOMeta.namespace)
   }
 
-  private[prediction] def getDataObject[T](
+  private[predictionio] def getDataObject[T](
       sourceName: String,
       namespace: String,
       parallel: Boolean = false,
@@ -332,7 +332,7 @@ object Storage extends Logging {
     Seq(client.client, client.config, namespace)
   }
 
-  private[prediction] def verifyAllDataObjects(): Unit = {
+  private[predictionio] def verifyAllDataObjects(): Unit = {
     info("Verifying Meta Data Backend (Source: " +
       s"${repositoriesToDataObjectMeta(MetaDataRepository).sourceName})...")
     getMetaDataEngineManifests()
@@ -357,25 +357,25 @@ object Storage extends Logging {
     eventsDb.close()
   }
 
-  private[prediction] def getMetaDataEngineManifests(): EngineManifests =
+  private[predictionio] def getMetaDataEngineManifests(): EngineManifests =
     getDataObjectFromRepo[EngineManifests](MetaDataRepository)
 
-  private[prediction] def getMetaDataEngineInstances(): EngineInstances =
+  private[predictionio] def getMetaDataEngineInstances(): EngineInstances =
     getDataObjectFromRepo[EngineInstances](MetaDataRepository)
 
-  private[prediction] def getMetaDataEvaluationInstances(): EvaluationInstances =
+  private[predictionio] def getMetaDataEvaluationInstances(): EvaluationInstances =
     getDataObjectFromRepo[EvaluationInstances](MetaDataRepository)
 
-  private[prediction] def getMetaDataApps(): Apps =
+  private[predictionio] def getMetaDataApps(): Apps =
     getDataObjectFromRepo[Apps](MetaDataRepository)
 
-  private[prediction] def getMetaDataAccessKeys(): AccessKeys =
+  private[predictionio] def getMetaDataAccessKeys(): AccessKeys =
     getDataObjectFromRepo[AccessKeys](MetaDataRepository)
 
-  private[prediction] def getMetaDataChannels(): Channels =
+  private[predictionio] def getMetaDataChannels(): Channels =
     getDataObjectFromRepo[Channels](MetaDataRepository)
 
-  private[prediction] def getModelDataModels(): Models =
+  private[predictionio] def getModelDataModels(): Models =
     getDataObjectFromRepo[Models](ModelDataRepository)
 
   /** Obtains a data access object that returns [[Event]] related local data

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/d7c14178/data/src/main/scala/org/apache/predictionio/data/storage/Utils.scala
----------------------------------------------------------------------
diff --git a/data/src/main/scala/org/apache/predictionio/data/storage/Utils.scala b/data/src/main/scala/org/apache/predictionio/data/storage/Utils.scala
index 321b245..c11d08c 100644
--- a/data/src/main/scala/org/apache/predictionio/data/storage/Utils.scala
+++ b/data/src/main/scala/org/apache/predictionio/data/storage/Utils.scala
@@ -19,7 +19,7 @@ import org.joda.time.DateTime
 import org.joda.time.format.ISODateTimeFormat
 
 /** Backend-agnostic storage utilities. */
-private[prediction] object Utils {
+private[predictionio] object Utils {
   /**
    * Add prefix to custom attribute keys.
    */

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/d7c14178/data/src/main/scala/org/apache/predictionio/data/storage/jdbc/JDBCLEvents.scala
----------------------------------------------------------------------
diff --git a/data/src/main/scala/org/apache/predictionio/data/storage/jdbc/JDBCLEvents.scala b/data/src/main/scala/org/apache/predictionio/data/storage/jdbc/JDBCLEvents.scala
index 945879c..e048dc4 100644
--- a/data/src/main/scala/org/apache/predictionio/data/storage/jdbc/JDBCLEvents.scala
+++ b/data/src/main/scala/org/apache/predictionio/data/storage/jdbc/JDBCLEvents.scala
@@ -220,7 +220,7 @@ class JDBCLEvents(
     }
   }
 
-  private[prediction] def resultToEvent(rs: WrappedResultSet): Event = {
+  private[predictionio] def resultToEvent(rs: WrappedResultSet): Event = {
     Event(
       eventId = rs.stringOpt("id"),
       event = rs.string("event"),

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/d7c14178/data/src/main/scala/org/apache/predictionio/data/store/Common.scala
----------------------------------------------------------------------
diff --git a/data/src/main/scala/org/apache/predictionio/data/store/Common.scala b/data/src/main/scala/org/apache/predictionio/data/store/Common.scala
index 81b4b28..b86ebce 100644
--- a/data/src/main/scala/org/apache/predictionio/data/store/Common.scala
+++ b/data/src/main/scala/org/apache/predictionio/data/store/Common.scala
@@ -18,7 +18,7 @@ package org.apache.predictionio.data.store
 import org.apache.predictionio.data.storage.Storage
 import grizzled.slf4j.Logger
 
-private[prediction] object Common {
+private[predictionio] object Common {
 
   @transient lazy val logger = Logger[this.type]
   @transient lazy private val appsDb = Storage.getMetaDataApps()

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/d7c14178/data/src/main/scala/org/apache/predictionio/data/view/PBatchView.scala
----------------------------------------------------------------------
diff --git a/data/src/main/scala/org/apache/predictionio/data/view/PBatchView.scala b/data/src/main/scala/org/apache/predictionio/data/view/PBatchView.scala
index 6c75402..4cf32cd 100644
--- a/data/src/main/scala/org/apache/predictionio/data/view/PBatchView.scala
+++ b/data/src/main/scala/org/apache/predictionio/data/view/PBatchView.scala
@@ -31,9 +31,9 @@ import org.apache.spark.rdd.RDD
 
 
 // each JValue is associated with the time at which it was set
-private[prediction] case class PropTime(val d: JValue, val t: Long) extends Serializable
+private[predictionio] case class PropTime(val d: JValue, val t: Long) extends Serializable
 
-private[prediction] case class SetProp (
+private[predictionio] case class SetProp (
   val fields: Map[String, PropTime],
   // last set time. Note: fields could be empty with valid set time
   val t: Long) extends Serializable {
@@ -62,7 +62,7 @@ private[prediction] case class SetProp (
   }
 }
 
-private[prediction] case class UnsetProp (fields: Map[String, Long]) extends Serializable {
+private[predictionio] case class UnsetProp (fields: Map[String, Long]) extends Serializable {
   def ++ (that: UnsetProp): UnsetProp = {
     val commonKeys = fields.keySet.intersect(that.fields.keySet)
 
@@ -83,13 +83,13 @@ private[prediction] case class UnsetProp (fields: Map[String, Long]) extends Ser
   }
 }
 
-private[prediction] case class DeleteEntity (t: Long) extends Serializable {
+private[predictionio] case class DeleteEntity (t: Long) extends Serializable {
   def ++ (that: DeleteEntity): DeleteEntity = {
     if (this.t > that.t) this else that
   }
 }
 
-private[prediction] case class EventOp (
+private[predictionio] case class EventOp (
   val setProp: Option[SetProp] = None,
   val unsetProp: Option[UnsetProp] = None,
   val deleteEntity: Option[DeleteEntity] = None
@@ -133,7 +133,7 @@ private[prediction] case class EventOp (
 
 }
 
-private[prediction] object EventOp {
+private[predictionio] object EventOp {
   def apply(e: Event): EventOp = {
     val t = e.eventTime.getMillis
     e.event match {

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/d7c14178/data/src/main/scala/org/apache/predictionio/data/webhooks/ConnectorException.scala
----------------------------------------------------------------------
diff --git a/data/src/main/scala/org/apache/predictionio/data/webhooks/ConnectorException.scala b/data/src/main/scala/org/apache/predictionio/data/webhooks/ConnectorException.scala
index ee47a9c..a895e7d 100644
--- a/data/src/main/scala/org/apache/predictionio/data/webhooks/ConnectorException.scala
+++ b/data/src/main/scala/org/apache/predictionio/data/webhooks/ConnectorException.scala
@@ -20,7 +20,7 @@ package org.apache.predictionio.data.webhooks
   * @param message the detail message
   * @param cause the cause
   */
-private[prediction] class ConnectorException(message: String, cause: Throwable)
+private[predictionio] class ConnectorException(message: String, cause: Throwable)
   extends Exception(message, cause) {
 
   /** Webhooks Connector Exception with cause being set to null

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/d7c14178/data/src/main/scala/org/apache/predictionio/data/webhooks/ConnectorUtil.scala
----------------------------------------------------------------------
diff --git a/data/src/main/scala/org/apache/predictionio/data/webhooks/ConnectorUtil.scala b/data/src/main/scala/org/apache/predictionio/data/webhooks/ConnectorUtil.scala
index 40feb98..c77c138 100644
--- a/data/src/main/scala/org/apache/predictionio/data/webhooks/ConnectorUtil.scala
+++ b/data/src/main/scala/org/apache/predictionio/data/webhooks/ConnectorUtil.scala
@@ -25,7 +25,7 @@ import org.json4s.native.Serialization.read
 import org.json4s.native.Serialization.write
 
 
-private[prediction] object ConnectorUtil {
+private[predictionio] object ConnectorUtil {
 
   implicit val eventJson4sFormats: Formats = DefaultFormats +
     new EventJson4sSupport.APISerializer

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/d7c14178/data/src/main/scala/org/apache/predictionio/data/webhooks/FormConnector.scala
----------------------------------------------------------------------
diff --git a/data/src/main/scala/org/apache/predictionio/data/webhooks/FormConnector.scala b/data/src/main/scala/org/apache/predictionio/data/webhooks/FormConnector.scala
index dd04a21..74a79e3 100644
--- a/data/src/main/scala/org/apache/predictionio/data/webhooks/FormConnector.scala
+++ b/data/src/main/scala/org/apache/predictionio/data/webhooks/FormConnector.scala
@@ -19,7 +19,7 @@ import org.json4s.JObject
 
 /** Connector for Webhooks connection with Form submission data format
   */
-private[prediction] trait FormConnector {
+private[predictionio] trait FormConnector {
 
   // TODO: support conversion to multiple events?
 

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/d7c14178/data/src/main/scala/org/apache/predictionio/data/webhooks/JsonConnector.scala
----------------------------------------------------------------------
diff --git a/data/src/main/scala/org/apache/predictionio/data/webhooks/JsonConnector.scala b/data/src/main/scala/org/apache/predictionio/data/webhooks/JsonConnector.scala
index eda8059..04cebbf 100644
--- a/data/src/main/scala/org/apache/predictionio/data/webhooks/JsonConnector.scala
+++ b/data/src/main/scala/org/apache/predictionio/data/webhooks/JsonConnector.scala
@@ -18,7 +18,7 @@ package org.apache.predictionio.data.webhooks
 import org.json4s.JObject
 
 /** Connector for Webhooks connection */
-private[prediction] trait JsonConnector {
+private[predictionio] trait JsonConnector {
 
   // TODO: support conversion to multiple events?
 

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/d7c14178/data/src/main/scala/org/apache/predictionio/data/webhooks/exampleform/ExampleFormConnector.scala
----------------------------------------------------------------------
diff --git a/data/src/main/scala/org/apache/predictionio/data/webhooks/exampleform/ExampleFormConnector.scala b/data/src/main/scala/org/apache/predictionio/data/webhooks/exampleform/ExampleFormConnector.scala
index adf8791..14667b0 100644
--- a/data/src/main/scala/org/apache/predictionio/data/webhooks/exampleform/ExampleFormConnector.scala
+++ b/data/src/main/scala/org/apache/predictionio/data/webhooks/exampleform/ExampleFormConnector.scala
@@ -49,7 +49,7 @@ import org.json4s.JObject
   *   "timestamp"="2015-01-15T04:20:23.567Z"
   *
   */
-private[prediction] object ExampleFormConnector extends FormConnector {
+private[predictionio] object ExampleFormConnector extends FormConnector {
 
   override
   def toEventJson(data: Map[String, String]): JObject = {

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/d7c14178/data/src/main/scala/org/apache/predictionio/data/webhooks/examplejson/ExampleJsonConnector.scala
----------------------------------------------------------------------
diff --git a/data/src/main/scala/org/apache/predictionio/data/webhooks/examplejson/ExampleJsonConnector.scala b/data/src/main/scala/org/apache/predictionio/data/webhooks/examplejson/ExampleJsonConnector.scala
index 2129134..204a3c4 100644
--- a/data/src/main/scala/org/apache/predictionio/data/webhooks/examplejson/ExampleJsonConnector.scala
+++ b/data/src/main/scala/org/apache/predictionio/data/webhooks/examplejson/ExampleJsonConnector.scala
@@ -57,7 +57,7 @@ import org.json4s.JObject
   *   "timestamp": "2015-01-15T04:20:23.567Z"
   * }
   */
-private[prediction] object ExampleJsonConnector extends JsonConnector {
+private[predictionio] object ExampleJsonConnector extends JsonConnector {
 
   implicit val json4sFormats: Formats = DefaultFormats
 

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/d7c14178/data/src/main/scala/org/apache/predictionio/data/webhooks/mailchimp/MailChimpConnector.scala
----------------------------------------------------------------------
diff --git a/data/src/main/scala/org/apache/predictionio/data/webhooks/mailchimp/MailChimpConnector.scala b/data/src/main/scala/org/apache/predictionio/data/webhooks/mailchimp/MailChimpConnector.scala
index abf8a7f..3c4d32f 100644
--- a/data/src/main/scala/org/apache/predictionio/data/webhooks/mailchimp/MailChimpConnector.scala
+++ b/data/src/main/scala/org/apache/predictionio/data/webhooks/mailchimp/MailChimpConnector.scala
@@ -26,7 +26,7 @@ import org.json4s.JObject
 import org.joda.time.DateTime
 import org.joda.time.format.DateTimeFormat
 
-private[prediction] object MailChimpConnector extends FormConnector {
+private[predictionio] object MailChimpConnector extends FormConnector {
 
   override
   def toEventJson(data: Map[String, String]): JObject = {

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/d7c14178/data/src/main/scala/org/apache/predictionio/data/webhooks/segmentio/SegmentIOConnector.scala
----------------------------------------------------------------------
diff --git a/data/src/main/scala/org/apache/predictionio/data/webhooks/segmentio/SegmentIOConnector.scala b/data/src/main/scala/org/apache/predictionio/data/webhooks/segmentio/SegmentIOConnector.scala
index b7548b0..99ecb90 100644
--- a/data/src/main/scala/org/apache/predictionio/data/webhooks/segmentio/SegmentIOConnector.scala
+++ b/data/src/main/scala/org/apache/predictionio/data/webhooks/segmentio/SegmentIOConnector.scala
@@ -18,7 +18,7 @@ package org.apache.predictionio.data.webhooks.segmentio
 import org.apache.predictionio.data.webhooks.{ConnectorException, JsonConnector}
 import org.json4s._
 
-private[prediction] object SegmentIOConnector extends JsonConnector {
+private[predictionio] object SegmentIOConnector extends JsonConnector {
 
   // private lazy val supportedAPI = Vector("2", "2.0", "2.0.0")
 
@@ -183,29 +183,29 @@ private[prediction] object SegmentIOConnector extends JsonConnector {
 
 object Events {
 
-  private[prediction] case class Track(
+  private[predictionio] case class Track(
     event: String,
     properties: Option[JObject] = None
   )
 
-  private[prediction] case class Alias(previous_id: String, user_id: String)
+  private[predictionio] case class Alias(previous_id: String, user_id: String)
 
-  private[prediction] case class Group(
+  private[predictionio] case class Group(
     group_id: String,
     traits: Option[JObject] = None
   )
 
-  private[prediction] case class Screen(
+  private[predictionio] case class Screen(
     name: Option[String] = None,
     properties: Option[JObject] = None
   )
 
-  private[prediction] case class Page(
+  private[predictionio] case class Page(
     name: Option[String] = None,
     properties: Option[JObject] = None
   )
 
-  private[prediction] case class Identify(
+  private[predictionio] case class Identify(
     user_id: String,
     traits: Option[JObject]
   )
@@ -214,14 +214,14 @@ object Events {
 
 object Common {
 
-  private[prediction] case class Integrations(
+  private[predictionio] case class Integrations(
     All: Boolean = false,
     Mixpanel: Boolean = false,
     Marketo: Boolean = false,
     Salesforse: Boolean = false
   )
 
-  private[prediction] case class Context(
+  private[predictionio] case class Context(
     ip: String,
     library: Library,
     user_agent: String,
@@ -236,13 +236,13 @@ object Common {
     timezone: Option[String] = None
   )
 
-  private[prediction] case class Screen(width: Int, height: Int, density: Int)
+  private[predictionio] case class Screen(width: Int, height: Int, density: Int)
 
-  private[prediction] case class Referrer(id: String, `type`: String)
+  private[predictionio] case class Referrer(id: String, `type`: String)
 
-  private[prediction] case class OS(name: String, version: String)
+  private[predictionio] case class OS(name: String, version: String)
 
-  private[prediction] case class Location(
+  private[predictionio] case class Location(
     city: Option[String] = None,
     country: Option[String] = None,
     latitude: Option[Double] = None,
@@ -258,16 +258,16 @@ object Common {
     url: String
   )
 
-  private[prediction] case class Network(
+  private[predictionio] case class Network(
     bluetooth: Option[Boolean] = None,
     carrier: Option[String] = None,
     cellular: Option[Boolean] = None,
     wifi: Option[Boolean] = None
   )
 
-  private[prediction] case class Library(name: String, version: String)
+  private[predictionio] case class Library(name: String, version: String)
 
-  private[prediction] case class Device(
+  private[predictionio] case class Device(
     id: Option[String] = None,
     advertising_id: Option[String] = None,
     ad_tracking_enabled: Option[Boolean] = None,
@@ -278,7 +278,7 @@ object Common {
     token: Option[String] = None
   )
 
-  private[prediction] case class Campaign(
+  private[predictionio] case class Campaign(
     name: Option[String] = None,
     source: Option[String] = None,
     medium: Option[String] = None,
@@ -286,7 +286,7 @@ object Common {
     content: Option[String] = None
   )
 
-  private[prediction] case class App(
+  private[predictionio] case class App(
     name: Option[String] = None,
     version: Option[String] = None,
     build: Option[String] = None
@@ -294,7 +294,7 @@ object Common {
 
 }
 
-private[prediction] case class Common(
+private[predictionio] case class Common(
   `type`: String,
   sent_at: String,
   timestamp: String,


[29/34] incubator-predictionio git commit: change all to org.apache.predictionio except docs

Posted by do...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/experimental/java-local-tutorial/src/main/java/recommendations/tutorial4/Query.java
----------------------------------------------------------------------
diff --git a/examples/experimental/java-local-tutorial/src/main/java/recommendations/tutorial4/Query.java b/examples/experimental/java-local-tutorial/src/main/java/recommendations/tutorial4/Query.java
index e06e826..d4a9854 100644
--- a/examples/experimental/java-local-tutorial/src/main/java/recommendations/tutorial4/Query.java
+++ b/examples/experimental/java-local-tutorial/src/main/java/recommendations/tutorial4/Query.java
@@ -1,4 +1,4 @@
-package io.prediction.examples.java.recommendations.tutorial4;
+package org.apache.predictionio.examples.java.recommendations.tutorial4;
 
 import java.io.Serializable;
 

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/experimental/java-local-tutorial/src/main/java/recommendations/tutorial4/Runner4a.java
----------------------------------------------------------------------
diff --git a/examples/experimental/java-local-tutorial/src/main/java/recommendations/tutorial4/Runner4a.java b/examples/experimental/java-local-tutorial/src/main/java/recommendations/tutorial4/Runner4a.java
index 316f266..5ada77f 100644
--- a/examples/experimental/java-local-tutorial/src/main/java/recommendations/tutorial4/Runner4a.java
+++ b/examples/experimental/java-local-tutorial/src/main/java/recommendations/tutorial4/Runner4a.java
@@ -1,17 +1,17 @@
-package io.prediction.examples.java.recommendations.tutorial4;
+package org.apache.predictionio.examples.java.recommendations.tutorial4;
 
-import io.prediction.controller.java.EmptyParams;
-import io.prediction.controller.java.IJavaEngineFactory;
-import io.prediction.controller.java.JavaEngine;
-import io.prediction.controller.java.JavaEngineBuilder;
-import io.prediction.controller.java.JavaEngineParams;
-import io.prediction.controller.java.JavaEngineParamsBuilder;
-import io.prediction.controller.java.JavaWorkflow;
-import io.prediction.controller.java.WorkflowParamsBuilder;
+import org.apache.predictionio.controller.java.EmptyParams;
+import org.apache.predictionio.controller.java.IJavaEngineFactory;
+import org.apache.predictionio.controller.java.JavaEngine;
+import org.apache.predictionio.controller.java.JavaEngineBuilder;
+import org.apache.predictionio.controller.java.JavaEngineParams;
+import org.apache.predictionio.controller.java.JavaEngineParamsBuilder;
+import org.apache.predictionio.controller.java.JavaWorkflow;
+import org.apache.predictionio.controller.java.WorkflowParamsBuilder;
 
 import java.util.HashMap;
 
-import io.prediction.controller.IdentityPreparator;
+import org.apache.predictionio.controller.IdentityPreparator;
 
 public class Runner4a {
   

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/experimental/java-local-tutorial/src/main/java/recommendations/tutorial4/Runner4b.java
----------------------------------------------------------------------
diff --git a/examples/experimental/java-local-tutorial/src/main/java/recommendations/tutorial4/Runner4b.java b/examples/experimental/java-local-tutorial/src/main/java/recommendations/tutorial4/Runner4b.java
index ed53d2c..871cc06 100644
--- a/examples/experimental/java-local-tutorial/src/main/java/recommendations/tutorial4/Runner4b.java
+++ b/examples/experimental/java-local-tutorial/src/main/java/recommendations/tutorial4/Runner4b.java
@@ -1,17 +1,17 @@
-package io.prediction.examples.java.recommendations.tutorial4;
+package org.apache.predictionio.examples.java.recommendations.tutorial4;
 
-import io.prediction.controller.java.EmptyParams;
-import io.prediction.controller.java.IJavaEngineFactory;
-import io.prediction.controller.java.JavaEngine;
-import io.prediction.controller.java.JavaEngineBuilder;
-import io.prediction.controller.java.JavaEngineParams;
-import io.prediction.controller.java.JavaEngineParamsBuilder;
-import io.prediction.controller.java.JavaWorkflow;
-import io.prediction.controller.java.WorkflowParamsBuilder;
+import org.apache.predictionio.controller.java.EmptyParams;
+import org.apache.predictionio.controller.java.IJavaEngineFactory;
+import org.apache.predictionio.controller.java.JavaEngine;
+import org.apache.predictionio.controller.java.JavaEngineBuilder;
+import org.apache.predictionio.controller.java.JavaEngineParams;
+import org.apache.predictionio.controller.java.JavaEngineParamsBuilder;
+import org.apache.predictionio.controller.java.JavaWorkflow;
+import org.apache.predictionio.controller.java.WorkflowParamsBuilder;
 
 import java.util.HashMap;
 
-import io.prediction.controller.IdentityPreparator;
+import org.apache.predictionio.controller.IdentityPreparator;
 
 public class Runner4b {
   

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/experimental/java-local-tutorial/src/main/java/recommendations/tutorial4/Runner4c.java
----------------------------------------------------------------------
diff --git a/examples/experimental/java-local-tutorial/src/main/java/recommendations/tutorial4/Runner4c.java b/examples/experimental/java-local-tutorial/src/main/java/recommendations/tutorial4/Runner4c.java
index c359f1f..9806c45 100644
--- a/examples/experimental/java-local-tutorial/src/main/java/recommendations/tutorial4/Runner4c.java
+++ b/examples/experimental/java-local-tutorial/src/main/java/recommendations/tutorial4/Runner4c.java
@@ -1,18 +1,18 @@
-package io.prediction.examples.java.recommendations.tutorial4;
-
-import io.prediction.controller.java.EmptyParams;
-import io.prediction.controller.java.IJavaEngineFactory;
-import io.prediction.controller.java.JavaEngine;
-import io.prediction.controller.java.JavaEngineBuilder;
-import io.prediction.controller.java.JavaEngineParams;
-import io.prediction.controller.java.JavaEngineParamsBuilder;
-import io.prediction.controller.java.LJavaFirstServing;
-import io.prediction.controller.java.JavaWorkflow;
-import io.prediction.controller.java.WorkflowParamsBuilder;
+package org.apache.predictionio.examples.java.recommendations.tutorial4;
+
+import org.apache.predictionio.controller.java.EmptyParams;
+import org.apache.predictionio.controller.java.IJavaEngineFactory;
+import org.apache.predictionio.controller.java.JavaEngine;
+import org.apache.predictionio.controller.java.JavaEngineBuilder;
+import org.apache.predictionio.controller.java.JavaEngineParams;
+import org.apache.predictionio.controller.java.JavaEngineParamsBuilder;
+import org.apache.predictionio.controller.java.LJavaFirstServing;
+import org.apache.predictionio.controller.java.JavaWorkflow;
+import org.apache.predictionio.controller.java.WorkflowParamsBuilder;
 
 import java.util.HashMap;
 
-import io.prediction.controller.IdentityPreparator;
+import org.apache.predictionio.controller.IdentityPreparator;
 
 public class Runner4c {
   public static void main(String[] args) {

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/experimental/java-local-tutorial/src/main/java/recommendations/tutorial4/Runner4d.java
----------------------------------------------------------------------
diff --git a/examples/experimental/java-local-tutorial/src/main/java/recommendations/tutorial4/Runner4d.java b/examples/experimental/java-local-tutorial/src/main/java/recommendations/tutorial4/Runner4d.java
index a1d5361..c40b2cb 100644
--- a/examples/experimental/java-local-tutorial/src/main/java/recommendations/tutorial4/Runner4d.java
+++ b/examples/experimental/java-local-tutorial/src/main/java/recommendations/tutorial4/Runner4d.java
@@ -1,18 +1,18 @@
-package io.prediction.examples.java.recommendations.tutorial4;
-
-import io.prediction.controller.java.EmptyParams;
-import io.prediction.controller.java.IJavaEngineFactory;
-import io.prediction.controller.java.JavaEngine;
-import io.prediction.controller.java.JavaEngineBuilder;
-import io.prediction.controller.java.JavaEngineParams;
-import io.prediction.controller.java.JavaEngineParamsBuilder;
-import io.prediction.controller.java.LJavaFirstServing;
-import io.prediction.controller.java.JavaWorkflow;
-import io.prediction.controller.java.WorkflowParamsBuilder;
+package org.apache.predictionio.examples.java.recommendations.tutorial4;
+
+import org.apache.predictionio.controller.java.EmptyParams;
+import org.apache.predictionio.controller.java.IJavaEngineFactory;
+import org.apache.predictionio.controller.java.JavaEngine;
+import org.apache.predictionio.controller.java.JavaEngineBuilder;
+import org.apache.predictionio.controller.java.JavaEngineParams;
+import org.apache.predictionio.controller.java.JavaEngineParamsBuilder;
+import org.apache.predictionio.controller.java.LJavaFirstServing;
+import org.apache.predictionio.controller.java.JavaWorkflow;
+import org.apache.predictionio.controller.java.WorkflowParamsBuilder;
 
 import java.util.HashMap;
 
-import io.prediction.controller.IdentityPreparator;
+import org.apache.predictionio.controller.IdentityPreparator;
 
 public class Runner4d {
   public static void main(String[] args) {

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/experimental/java-local-tutorial/src/main/java/recommendations/tutorial4/Serving.java
----------------------------------------------------------------------
diff --git a/examples/experimental/java-local-tutorial/src/main/java/recommendations/tutorial4/Serving.java b/examples/experimental/java-local-tutorial/src/main/java/recommendations/tutorial4/Serving.java
index 3fc1525..5c081a4 100644
--- a/examples/experimental/java-local-tutorial/src/main/java/recommendations/tutorial4/Serving.java
+++ b/examples/experimental/java-local-tutorial/src/main/java/recommendations/tutorial4/Serving.java
@@ -1,7 +1,7 @@
-package io.prediction.examples.java.recommendations.tutorial4;
+package org.apache.predictionio.examples.java.recommendations.tutorial4;
 
-import io.prediction.controller.java.LJavaServing;
-import io.prediction.controller.java.EmptyParams;
+import org.apache.predictionio.controller.java.LJavaServing;
+import org.apache.predictionio.controller.java.EmptyParams;
 import java.lang.Iterable;
 
 public class Serving extends LJavaServing<EmptyParams, Query, Float> {

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/experimental/java-local-tutorial/src/main/java/recommendations/tutorial4/SingleEngineFactory.java
----------------------------------------------------------------------
diff --git a/examples/experimental/java-local-tutorial/src/main/java/recommendations/tutorial4/SingleEngineFactory.java b/examples/experimental/java-local-tutorial/src/main/java/recommendations/tutorial4/SingleEngineFactory.java
index 75fb5eb..40b3831 100644
--- a/examples/experimental/java-local-tutorial/src/main/java/recommendations/tutorial4/SingleEngineFactory.java
+++ b/examples/experimental/java-local-tutorial/src/main/java/recommendations/tutorial4/SingleEngineFactory.java
@@ -1,10 +1,10 @@
-package io.prediction.examples.java.recommendations.tutorial4;
+package org.apache.predictionio.examples.java.recommendations.tutorial4;
 
-import io.prediction.controller.java.EmptyParams;
-import io.prediction.controller.java.IJavaEngineFactory;
-import io.prediction.controller.java.JavaEngine;
-import io.prediction.controller.java.JavaEngineBuilder;
-import io.prediction.controller.java.LJavaFirstServing;
+import org.apache.predictionio.controller.java.EmptyParams;
+import org.apache.predictionio.controller.java.IJavaEngineFactory;
+import org.apache.predictionio.controller.java.JavaEngine;
+import org.apache.predictionio.controller.java.JavaEngineBuilder;
+import org.apache.predictionio.controller.java.LJavaFirstServing;
 
 public class SingleEngineFactory implements IJavaEngineFactory {
   public JavaEngine<TrainingData, EmptyParams, PreparedData, Query, Float, Object> apply() {

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/experimental/java-local-tutorial/src/main/java/recommendations/tutorial4/TrainingData.java
----------------------------------------------------------------------
diff --git a/examples/experimental/java-local-tutorial/src/main/java/recommendations/tutorial4/TrainingData.java b/examples/experimental/java-local-tutorial/src/main/java/recommendations/tutorial4/TrainingData.java
index c91ad9c..cd9752c 100644
--- a/examples/experimental/java-local-tutorial/src/main/java/recommendations/tutorial4/TrainingData.java
+++ b/examples/experimental/java-local-tutorial/src/main/java/recommendations/tutorial4/TrainingData.java
@@ -1,4 +1,4 @@
-package io.prediction.examples.java.recommendations.tutorial4;
+package org.apache.predictionio.examples.java.recommendations.tutorial4;
 
 import java.io.Serializable;
 import java.util.List;

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/experimental/java-local-tutorial/src/main/java/recommendations/tutorial4/multiple-algo-engine.json
----------------------------------------------------------------------
diff --git a/examples/experimental/java-local-tutorial/src/main/java/recommendations/tutorial4/multiple-algo-engine.json b/examples/experimental/java-local-tutorial/src/main/java/recommendations/tutorial4/multiple-algo-engine.json
index dda4710..2f757c2 100644
--- a/examples/experimental/java-local-tutorial/src/main/java/recommendations/tutorial4/multiple-algo-engine.json
+++ b/examples/experimental/java-local-tutorial/src/main/java/recommendations/tutorial4/multiple-algo-engine.json
@@ -1,7 +1,7 @@
 {
-  "id": "io.prediction.examples.java.recommendations.tutorial4.EngineFactory",
+  "id": "org.apache.predictionio.examples.java.recommendations.tutorial4.EngineFactory",
   "version": "0.9.1",
   "name": "FeatureBased Recommendations Engine",
-  "engineFactory": "io.prediction.examples.java.recommendations.tutorial4.EngineFactory"
+  "engineFactory": "org.apache.predictionio.examples.java.recommendations.tutorial4.EngineFactory"
 }
 

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/experimental/java-local-tutorial/src/main/java/recommendations/tutorial4/single-algo-engine.json
----------------------------------------------------------------------
diff --git a/examples/experimental/java-local-tutorial/src/main/java/recommendations/tutorial4/single-algo-engine.json b/examples/experimental/java-local-tutorial/src/main/java/recommendations/tutorial4/single-algo-engine.json
index 56a6462..ceaf576 100644
--- a/examples/experimental/java-local-tutorial/src/main/java/recommendations/tutorial4/single-algo-engine.json
+++ b/examples/experimental/java-local-tutorial/src/main/java/recommendations/tutorial4/single-algo-engine.json
@@ -1,6 +1,6 @@
 {
-  "id": "io.prediction.examples.java.recommendations.tutorial4.SingleEngineFactory",
+  "id": "org.apache.predictionio.examples.java.recommendations.tutorial4.SingleEngineFactory",
   "version": "0.9.1",
   "name": "FeatureBased Recommendations Engine",
-  "engineFactory": "io.prediction.examples.java.recommendations.tutorial4.SingleEngineFactory"
+  "engineFactory": "org.apache.predictionio.examples.java.recommendations.tutorial4.SingleEngineFactory"
 }
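
Note that the rename reaches beyond source files: the engineFactory entry in these engine.json manifests is a fully qualified class name resolved reflectively at runtime, so it must track the package move as well. A sketch of the kind of lookup involved (illustrative only, not the exact PredictionIO loader):

    object ManifestLookupDemo {
      def main(args: Array[String]): Unit = {
        // Fails with ClassNotFoundException if a manifest still says io.prediction.*
        val cls = Class.forName(
          "org.apache.predictionio.examples.java.recommendations.tutorial4.SingleEngineFactory")
        println(cls.getDeclaredConstructor().newInstance())
      }
    }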

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/experimental/java-local-tutorial/src/main/java/recommendations/tutorial5/EngineFactory.java
----------------------------------------------------------------------
diff --git a/examples/experimental/java-local-tutorial/src/main/java/recommendations/tutorial5/EngineFactory.java b/examples/experimental/java-local-tutorial/src/main/java/recommendations/tutorial5/EngineFactory.java
index c7edefa..b69c923 100644
--- a/examples/experimental/java-local-tutorial/src/main/java/recommendations/tutorial5/EngineFactory.java
+++ b/examples/experimental/java-local-tutorial/src/main/java/recommendations/tutorial5/EngineFactory.java
@@ -1,12 +1,12 @@
-package io.prediction.examples.java.recommendations.tutorial5;
+package org.apache.predictionio.examples.java.recommendations.tutorial5;
 
-import io.prediction.examples.java.recommendations.tutorial3.DataSource;
-import io.prediction.examples.java.recommendations.tutorial1.TrainingData;
-import io.prediction.examples.java.recommendations.tutorial1.Query;
+import org.apache.predictionio.examples.java.recommendations.tutorial3.DataSource;
+import org.apache.predictionio.examples.java.recommendations.tutorial1.TrainingData;
+import org.apache.predictionio.examples.java.recommendations.tutorial1.Query;
 
-import io.prediction.controller.java.IJavaEngineFactory;
-import io.prediction.controller.java.JavaSimpleEngine;
-import io.prediction.controller.java.JavaSimpleEngineBuilder;
+import org.apache.predictionio.controller.java.IJavaEngineFactory;
+import org.apache.predictionio.controller.java.JavaSimpleEngine;
+import org.apache.predictionio.controller.java.JavaSimpleEngineBuilder;
 
 public class EngineFactory implements IJavaEngineFactory {
   public JavaSimpleEngine<TrainingData, Object, Query, Float, Float> apply() {

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/experimental/java-local-tutorial/src/main/java/recommendations/tutorial5/MahoutAlgoModel.java
----------------------------------------------------------------------
diff --git a/examples/experimental/java-local-tutorial/src/main/java/recommendations/tutorial5/MahoutAlgoModel.java b/examples/experimental/java-local-tutorial/src/main/java/recommendations/tutorial5/MahoutAlgoModel.java
index 11332e1..c134e54 100644
--- a/examples/experimental/java-local-tutorial/src/main/java/recommendations/tutorial5/MahoutAlgoModel.java
+++ b/examples/experimental/java-local-tutorial/src/main/java/recommendations/tutorial5/MahoutAlgoModel.java
@@ -1,4 +1,4 @@
-package io.prediction.examples.java.recommendations.tutorial5;
+package org.apache.predictionio.examples.java.recommendations.tutorial5;
 
 import java.io.Serializable;
 import java.io.IOException;

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/experimental/java-local-tutorial/src/main/java/recommendations/tutorial5/MahoutAlgoParams.java
----------------------------------------------------------------------
diff --git a/examples/experimental/java-local-tutorial/src/main/java/recommendations/tutorial5/MahoutAlgoParams.java b/examples/experimental/java-local-tutorial/src/main/java/recommendations/tutorial5/MahoutAlgoParams.java
index 123bffa..128a114 100644
--- a/examples/experimental/java-local-tutorial/src/main/java/recommendations/tutorial5/MahoutAlgoParams.java
+++ b/examples/experimental/java-local-tutorial/src/main/java/recommendations/tutorial5/MahoutAlgoParams.java
@@ -1,6 +1,6 @@
-package io.prediction.examples.java.recommendations.tutorial5;
+package org.apache.predictionio.examples.java.recommendations.tutorial5;
 
-import io.prediction.controller.java.JavaParams;
+import org.apache.predictionio.controller.java.JavaParams;
 
 public class MahoutAlgoParams implements JavaParams {
 

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/experimental/java-local-tutorial/src/main/java/recommendations/tutorial5/MahoutAlgorithm.java
----------------------------------------------------------------------
diff --git a/examples/experimental/java-local-tutorial/src/main/java/recommendations/tutorial5/MahoutAlgorithm.java b/examples/experimental/java-local-tutorial/src/main/java/recommendations/tutorial5/MahoutAlgorithm.java
index bf99690..bc26444 100644
--- a/examples/experimental/java-local-tutorial/src/main/java/recommendations/tutorial5/MahoutAlgorithm.java
+++ b/examples/experimental/java-local-tutorial/src/main/java/recommendations/tutorial5/MahoutAlgorithm.java
@@ -1,10 +1,10 @@
-package io.prediction.examples.java.recommendations.tutorial5;
+package org.apache.predictionio.examples.java.recommendations.tutorial5;
 
-import io.prediction.controller.java.LJavaAlgorithm;
-import io.prediction.controller.java.EmptyParams;
-import io.prediction.examples.java.recommendations.tutorial1.TrainingData;
-import io.prediction.examples.java.recommendations.tutorial1.Query;
-import io.prediction.engines.util.MahoutUtil;
+import org.apache.predictionio.controller.java.LJavaAlgorithm;
+import org.apache.predictionio.controller.java.EmptyParams;
+import org.apache.predictionio.examples.java.recommendations.tutorial1.TrainingData;
+import org.apache.predictionio.examples.java.recommendations.tutorial1.Query;
+import org.apache.predictionio.engines.util.MahoutUtil;
 
 import org.apache.mahout.cf.taste.recommender.Recommender;
 import org.apache.mahout.cf.taste.model.DataModel;

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/experimental/java-local-tutorial/src/main/java/recommendations/tutorial5/Runner5.java
----------------------------------------------------------------------
diff --git a/examples/experimental/java-local-tutorial/src/main/java/recommendations/tutorial5/Runner5.java b/examples/experimental/java-local-tutorial/src/main/java/recommendations/tutorial5/Runner5.java
index 9967563..ea98348 100644
--- a/examples/experimental/java-local-tutorial/src/main/java/recommendations/tutorial5/Runner5.java
+++ b/examples/experimental/java-local-tutorial/src/main/java/recommendations/tutorial5/Runner5.java
@@ -1,16 +1,16 @@
-package io.prediction.examples.java.recommendations.tutorial5;
+package org.apache.predictionio.examples.java.recommendations.tutorial5;
 
-import io.prediction.examples.java.recommendations.tutorial1.DataSourceParams;
-import io.prediction.examples.java.recommendations.tutorial3.Evaluator;
+import org.apache.predictionio.examples.java.recommendations.tutorial1.DataSourceParams;
+import org.apache.predictionio.examples.java.recommendations.tutorial3.Evaluator;
 
-import io.prediction.controller.java.EmptyParams;
-import io.prediction.controller.java.IJavaEngineFactory;
-import io.prediction.controller.java.JavaSimpleEngine;
-import io.prediction.controller.java.JavaSimpleEngineBuilder;
-import io.prediction.controller.java.JavaEngineParams;
-import io.prediction.controller.java.JavaEngineParamsBuilder;
-import io.prediction.controller.java.JavaWorkflow;
-import io.prediction.controller.java.WorkflowParamsBuilder;
+import org.apache.predictionio.controller.java.EmptyParams;
+import org.apache.predictionio.controller.java.IJavaEngineFactory;
+import org.apache.predictionio.controller.java.JavaSimpleEngine;
+import org.apache.predictionio.controller.java.JavaSimpleEngineBuilder;
+import org.apache.predictionio.controller.java.JavaEngineParams;
+import org.apache.predictionio.controller.java.JavaEngineParamsBuilder;
+import org.apache.predictionio.controller.java.JavaWorkflow;
+import org.apache.predictionio.controller.java.WorkflowParamsBuilder;
 
 import java.util.HashMap;
 

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/experimental/java-local-tutorial/src/main/java/recommendations/tutorial5/manifest.json
----------------------------------------------------------------------
diff --git a/examples/experimental/java-local-tutorial/src/main/java/recommendations/tutorial5/manifest.json b/examples/experimental/java-local-tutorial/src/main/java/recommendations/tutorial5/manifest.json
index aa33cf4..b9cdbe3 100644
--- a/examples/experimental/java-local-tutorial/src/main/java/recommendations/tutorial5/manifest.json
+++ b/examples/experimental/java-local-tutorial/src/main/java/recommendations/tutorial5/manifest.json
@@ -1,6 +1,6 @@
 {
-  "id": "io.prediction.examples.java.recommendations.tutorial5.EngineFactory",
+  "id": "org.apache.predictionio.examples.java.recommendations.tutorial5.EngineFactory",
   "version": "0.8.1-SNAPSHOT",
   "name": "Simple Mahout Recommendations Engine",
-  "engineFactory": "io.prediction.examples.java.recommendations.tutorial5.EngineFactory"
+  "engineFactory": "org.apache.predictionio.examples.java.recommendations.tutorial5.EngineFactory"
 }

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/experimental/java-parallel-helloworld/build.sbt
----------------------------------------------------------------------
diff --git a/examples/experimental/java-parallel-helloworld/build.sbt b/examples/experimental/java-parallel-helloworld/build.sbt
index 9b7f186..46c1306 100644
--- a/examples/experimental/java-parallel-helloworld/build.sbt
+++ b/examples/experimental/java-parallel-helloworld/build.sbt
@@ -7,5 +7,5 @@ name := "example-java-parallel-helloworld"
 organization := "org.sample"
 
 libraryDependencies ++= Seq(
-  "io.prediction" %% "core" % "0.9.1" % "provided",
+  "org.apache.predictionio" %% "core" % "0.9.1" % "provided",
   "org.apache.spark" %% "spark-core" % "1.2.0" % "provided")

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/experimental/java-parallel-helloworld/engine.json
----------------------------------------------------------------------
diff --git a/examples/experimental/java-parallel-helloworld/engine.json b/examples/experimental/java-parallel-helloworld/engine.json
index fd53f80..5ce3737 100644
--- a/examples/experimental/java-parallel-helloworld/engine.json
+++ b/examples/experimental/java-parallel-helloworld/engine.json
@@ -1,7 +1,7 @@
 {
   "id": "default",
   "description": "Parallel Hello World Engine",
-  "engineFactory": "io.prediction.examples.java.parallel.EngineFactory",
+  "engineFactory": "org.apache.predictionio.examples.java.parallel.EngineFactory",
   "algorithms": [
     {
       "name": "ParallelAlgorithm",

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/experimental/java-parallel-helloworld/src/main/java/parallel/Algorithm.java
----------------------------------------------------------------------
diff --git a/examples/experimental/java-parallel-helloworld/src/main/java/parallel/Algorithm.java b/examples/experimental/java-parallel-helloworld/src/main/java/parallel/Algorithm.java
index e4a188d..f0dc121 100644
--- a/examples/experimental/java-parallel-helloworld/src/main/java/parallel/Algorithm.java
+++ b/examples/experimental/java-parallel-helloworld/src/main/java/parallel/Algorithm.java
@@ -1,7 +1,7 @@
-package io.prediction.examples.java.parallel;
+package org.apache.predictionio.examples.java.parallel;
 
-import io.prediction.controller.java.EmptyParams;
-import io.prediction.controller.java.PJavaAlgorithm;
+import org.apache.predictionio.controller.java.EmptyParams;
+import org.apache.predictionio.controller.java.PJavaAlgorithm;
 
 import java.io.Serializable;
 import java.util.List;

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/experimental/java-parallel-helloworld/src/main/java/parallel/DataSource.java
----------------------------------------------------------------------
diff --git a/examples/experimental/java-parallel-helloworld/src/main/java/parallel/DataSource.java b/examples/experimental/java-parallel-helloworld/src/main/java/parallel/DataSource.java
index 1e88da9..1429aef 100644
--- a/examples/experimental/java-parallel-helloworld/src/main/java/parallel/DataSource.java
+++ b/examples/experimental/java-parallel-helloworld/src/main/java/parallel/DataSource.java
@@ -1,7 +1,7 @@
-package io.prediction.examples.java.parallel;
+package org.apache.predictionio.examples.java.parallel;
 
-import io.prediction.controller.java.EmptyParams;
-import io.prediction.controller.java.PJavaDataSource;
+import org.apache.predictionio.controller.java.EmptyParams;
+import org.apache.predictionio.controller.java.PJavaDataSource;
 
 import java.util.List;
 import java.util.ArrayList;

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/experimental/java-parallel-helloworld/src/main/java/parallel/EngineFactory.java
----------------------------------------------------------------------
diff --git a/examples/experimental/java-parallel-helloworld/src/main/java/parallel/EngineFactory.java b/examples/experimental/java-parallel-helloworld/src/main/java/parallel/EngineFactory.java
index ffd7d12..85b4b12 100644
--- a/examples/experimental/java-parallel-helloworld/src/main/java/parallel/EngineFactory.java
+++ b/examples/experimental/java-parallel-helloworld/src/main/java/parallel/EngineFactory.java
@@ -1,9 +1,9 @@
-package io.prediction.examples.java.parallel;
+package org.apache.predictionio.examples.java.parallel;
 
-import io.prediction.controller.java.IJavaEngineFactory;
-import io.prediction.controller.java.LJavaFirstServing;
-import io.prediction.controller.java.PJavaEngine;
-import io.prediction.controller.java.PJavaEngineBuilder;
+import org.apache.predictionio.controller.java.IJavaEngineFactory;
+import org.apache.predictionio.controller.java.LJavaFirstServing;
+import org.apache.predictionio.controller.java.PJavaEngine;
+import org.apache.predictionio.controller.java.PJavaEngineBuilder;
 
 import java.util.HashMap;
 

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/experimental/java-parallel-helloworld/src/main/java/parallel/Model.java
----------------------------------------------------------------------
diff --git a/examples/experimental/java-parallel-helloworld/src/main/java/parallel/Model.java b/examples/experimental/java-parallel-helloworld/src/main/java/parallel/Model.java
index efdf643..8d5f63d 100644
--- a/examples/experimental/java-parallel-helloworld/src/main/java/parallel/Model.java
+++ b/examples/experimental/java-parallel-helloworld/src/main/java/parallel/Model.java
@@ -1,4 +1,4 @@
-package io.prediction.examples.java.parallel;
+package org.apache.predictionio.examples.java.parallel;
 
 import java.io.Serializable;
 import java.lang.StringBuilder;

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/experimental/java-parallel-helloworld/src/main/java/parallel/Preparator.java
----------------------------------------------------------------------
diff --git a/examples/experimental/java-parallel-helloworld/src/main/java/parallel/Preparator.java b/examples/experimental/java-parallel-helloworld/src/main/java/parallel/Preparator.java
index b6dce44..0934776 100644
--- a/examples/experimental/java-parallel-helloworld/src/main/java/parallel/Preparator.java
+++ b/examples/experimental/java-parallel-helloworld/src/main/java/parallel/Preparator.java
@@ -1,7 +1,7 @@
-package io.prediction.examples.java.parallel;
+package org.apache.predictionio.examples.java.parallel;
 
-import io.prediction.controller.java.EmptyParams;
-import io.prediction.controller.java.PJavaPreparator;
+import org.apache.predictionio.controller.java.EmptyParams;
+import org.apache.predictionio.controller.java.PJavaPreparator;
 
 import org.apache.spark.api.java.JavaPairRDD;
 import org.apache.spark.api.java.JavaSparkContext;

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/experimental/java-parallel-helloworld/src/main/java/parallel/Query.java
----------------------------------------------------------------------
diff --git a/examples/experimental/java-parallel-helloworld/src/main/java/parallel/Query.java b/examples/experimental/java-parallel-helloworld/src/main/java/parallel/Query.java
index bbd9af4..4b56160 100644
--- a/examples/experimental/java-parallel-helloworld/src/main/java/parallel/Query.java
+++ b/examples/experimental/java-parallel-helloworld/src/main/java/parallel/Query.java
@@ -1,4 +1,4 @@
-package io.prediction.examples.java.parallel;
+package org.apache.predictionio.examples.java.parallel;
 
 import java.io.Serializable;
 

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/experimental/java-parallel-helloworld/src/main/java/parallel/Runner.java
----------------------------------------------------------------------
diff --git a/examples/experimental/java-parallel-helloworld/src/main/java/parallel/Runner.java b/examples/experimental/java-parallel-helloworld/src/main/java/parallel/Runner.java
index 07edd9b..48b30d2 100644
--- a/examples/experimental/java-parallel-helloworld/src/main/java/parallel/Runner.java
+++ b/examples/experimental/java-parallel-helloworld/src/main/java/parallel/Runner.java
@@ -1,14 +1,14 @@
-package io.prediction.examples.java.parallel;
+package org.apache.predictionio.examples.java.parallel;
 
-import io.prediction.controller.IEngineFactory;
-import io.prediction.controller.java.EmptyParams;
-import io.prediction.controller.java.IJavaEngineFactory;
-import io.prediction.controller.java.JavaEngineParams;
-import io.prediction.controller.java.JavaEngineParamsBuilder;
-import io.prediction.controller.java.JavaWorkflow;
-import io.prediction.controller.java.PJavaEngine;
-import io.prediction.controller.java.PJavaEngineBuilder;
-import io.prediction.controller.java.WorkflowParamsBuilder;
+import org.apache.predictionio.controller.IEngineFactory;
+import org.apache.predictionio.controller.java.EmptyParams;
+import org.apache.predictionio.controller.java.IJavaEngineFactory;
+import org.apache.predictionio.controller.java.JavaEngineParams;
+import org.apache.predictionio.controller.java.JavaEngineParamsBuilder;
+import org.apache.predictionio.controller.java.JavaWorkflow;
+import org.apache.predictionio.controller.java.PJavaEngine;
+import org.apache.predictionio.controller.java.PJavaEngineBuilder;
+import org.apache.predictionio.controller.java.WorkflowParamsBuilder;
 
 import java.util.HashMap;
 

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/experimental/java-parallel-helloworld/src/main/java/parallel/Serving.java
----------------------------------------------------------------------
diff --git a/examples/experimental/java-parallel-helloworld/src/main/java/parallel/Serving.java b/examples/experimental/java-parallel-helloworld/src/main/java/parallel/Serving.java
index 2f85f36..4dc57ba 100644
--- a/examples/experimental/java-parallel-helloworld/src/main/java/parallel/Serving.java
+++ b/examples/experimental/java-parallel-helloworld/src/main/java/parallel/Serving.java
@@ -1,7 +1,7 @@
-package io.prediction.examples.java.parallel;
+package org.apache.predictionio.examples.java.parallel;
 
-import io.prediction.controller.java.EmptyParams;
-import io.prediction.controller.java.LJavaServing;
+import org.apache.predictionio.controller.java.EmptyParams;
+import org.apache.predictionio.controller.java.LJavaServing;
 
 public class Serving extends LJavaServing<EmptyParams, Query, Float> {
 

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/experimental/java-parallel-helloworld/src/main/java/parallel/build.sbt
----------------------------------------------------------------------
diff --git a/examples/experimental/java-parallel-helloworld/src/main/java/parallel/build.sbt b/examples/experimental/java-parallel-helloworld/src/main/java/parallel/build.sbt
index 4e4dce7..1b3ac2d 100644
--- a/examples/experimental/java-parallel-helloworld/src/main/java/parallel/build.sbt
+++ b/examples/experimental/java-parallel-helloworld/src/main/java/parallel/build.sbt
@@ -4,11 +4,11 @@ assemblySettings
 
 name := "example-java-parallel"
 
-organization := "io.prediction.examples.java"
+organization := "org.apache.predictionio.examples.java"
 
 resolvers += Resolver.sonatypeRepo("snapshots")
 
 libraryDependencies ++= Seq(
-  "io.prediction" %% "core" % "0.8.0-SNAPSHOT" % "provided",
-  "io.prediction" %% "data" % "0.8.0-SNAPSHOT" % "provided",
+  "org.apache.predictionio" %% "core" % "0.8.0-SNAPSHOT" % "provided",
+  "org.apache.predictionio" %% "data" % "0.8.0-SNAPSHOT" % "provided",
   "org.apache.spark" %% "spark-core" % "1.0.2" % "provided")

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/experimental/scala-cleanup-app/build.sbt
----------------------------------------------------------------------
diff --git a/examples/experimental/scala-cleanup-app/build.sbt b/examples/experimental/scala-cleanup-app/build.sbt
index 7788d35..82ce614 100644
--- a/examples/experimental/scala-cleanup-app/build.sbt
+++ b/examples/experimental/scala-cleanup-app/build.sbt
@@ -4,9 +4,9 @@ assemblySettings
 
 name := "template-scala-parallel-vanilla"
 
-organization := "io.prediction"
+organization := "org.apache.predictionio"
 
 libraryDependencies ++= Seq(
-  "io.prediction"    %% "core"          % "0.9.5" % "provided",
+  "org.apache.predictionio"    %% "core"          % "0.9.5" % "provided",
   "org.apache.spark" %% "spark-core"    % "1.3.1" % "provided",
   "org.apache.spark" %% "spark-mllib"   % "1.3.1" % "provided")

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/experimental/scala-cleanup-app/engine.json
----------------------------------------------------------------------
diff --git a/examples/experimental/scala-cleanup-app/engine.json b/examples/experimental/scala-cleanup-app/engine.json
index ab467c9..6f7f334 100644
--- a/examples/experimental/scala-cleanup-app/engine.json
+++ b/examples/experimental/scala-cleanup-app/engine.json
@@ -1,7 +1,7 @@
 {
   "id": "default",
   "description": "Default settings",
-  "engineFactory": "io.prediction.examples.experimental.cleanupapp.VanillaEngine",
+  "engineFactory": "org.apache.predictionio.examples.experimental.cleanupapp.VanillaEngine",
   "datasource": {
     "params" : {
       "appId": 1000000000,

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/experimental/scala-cleanup-app/src/main/scala/Algorithm.scala
----------------------------------------------------------------------
diff --git a/examples/experimental/scala-cleanup-app/src/main/scala/Algorithm.scala b/examples/experimental/scala-cleanup-app/src/main/scala/Algorithm.scala
index 2b3bbab..f5dd839 100644
--- a/examples/experimental/scala-cleanup-app/src/main/scala/Algorithm.scala
+++ b/examples/experimental/scala-cleanup-app/src/main/scala/Algorithm.scala
@@ -1,7 +1,7 @@
-package io.prediction.examples.experimental.cleanupapp
+package org.apache.predictionio.examples.experimental.cleanupapp
 
-import io.prediction.controller.P2LAlgorithm
-import io.prediction.controller.Params
+import org.apache.predictionio.controller.P2LAlgorithm
+import org.apache.predictionio.controller.Params
 
 import org.apache.spark.SparkContext
 import org.apache.spark.SparkContext._

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/experimental/scala-cleanup-app/src/main/scala/DataSource.scala
----------------------------------------------------------------------
diff --git a/examples/experimental/scala-cleanup-app/src/main/scala/DataSource.scala b/examples/experimental/scala-cleanup-app/src/main/scala/DataSource.scala
index 41ce53e..8e0b0f6 100644
--- a/examples/experimental/scala-cleanup-app/src/main/scala/DataSource.scala
+++ b/examples/experimental/scala-cleanup-app/src/main/scala/DataSource.scala
@@ -1,12 +1,12 @@
-package io.prediction.examples.experimental.cleanupapp
+package org.apache.predictionio.examples.experimental.cleanupapp
 
-import io.prediction.controller.PDataSource
-import io.prediction.controller.EmptyEvaluationInfo
-import io.prediction.controller.EmptyActualResult
-import io.prediction.controller.Params
-import io.prediction.data.storage.Event
-import io.prediction.data.storage.Storage
-import io.prediction.workflow.StopAfterReadInterruption
+import org.apache.predictionio.controller.PDataSource
+import org.apache.predictionio.controller.EmptyEvaluationInfo
+import org.apache.predictionio.controller.EmptyActualResult
+import org.apache.predictionio.controller.Params
+import org.apache.predictionio.data.storage.Event
+import org.apache.predictionio.data.storage.Storage
+import org.apache.predictionio.workflow.StopAfterReadInterruption
 
 import org.apache.spark.SparkContext
 import org.apache.spark.SparkContext._

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/experimental/scala-cleanup-app/src/main/scala/Engine.scala
----------------------------------------------------------------------
diff --git a/examples/experimental/scala-cleanup-app/src/main/scala/Engine.scala b/examples/experimental/scala-cleanup-app/src/main/scala/Engine.scala
index b7ac9dc..4caf7ba 100644
--- a/examples/experimental/scala-cleanup-app/src/main/scala/Engine.scala
+++ b/examples/experimental/scala-cleanup-app/src/main/scala/Engine.scala
@@ -1,8 +1,8 @@
-package io.prediction.examples.experimental.cleanupapp
+package org.apache.predictionio.examples.experimental.cleanupapp
 
-import io.prediction.controller.IEngineFactory
-import io.prediction.controller.Engine
-import io.prediction.controller._
+import org.apache.predictionio.controller.IEngineFactory
+import org.apache.predictionio.controller.Engine
+import org.apache.predictionio.controller._
 
 case class Query(q: String) extends Serializable
 

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/experimental/scala-cleanup-app/src/main/scala/Preparator.scala
----------------------------------------------------------------------
diff --git a/examples/experimental/scala-cleanup-app/src/main/scala/Preparator.scala b/examples/experimental/scala-cleanup-app/src/main/scala/Preparator.scala
index 54bba9f..35d8513 100644
--- a/examples/experimental/scala-cleanup-app/src/main/scala/Preparator.scala
+++ b/examples/experimental/scala-cleanup-app/src/main/scala/Preparator.scala
@@ -1,7 +1,7 @@
-package io.prediction.examples.experimental.cleanupapp
+package org.apache.predictionio.examples.experimental.cleanupapp
 
-import io.prediction.controller.PPreparator
-import io.prediction.data.storage.Event
+import org.apache.predictionio.controller.PPreparator
+import org.apache.predictionio.data.storage.Event
 
 import org.apache.spark.SparkContext
 import org.apache.spark.SparkContext._

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/experimental/scala-cleanup-app/src/main/scala/Serving.scala
----------------------------------------------------------------------
diff --git a/examples/experimental/scala-cleanup-app/src/main/scala/Serving.scala b/examples/experimental/scala-cleanup-app/src/main/scala/Serving.scala
index 9898307..123aeb4 100644
--- a/examples/experimental/scala-cleanup-app/src/main/scala/Serving.scala
+++ b/examples/experimental/scala-cleanup-app/src/main/scala/Serving.scala
@@ -1,6 +1,6 @@
-package io.prediction.examples.experimental.cleanupapp
+package org.apache.predictionio.examples.experimental.cleanupapp
 
-import io.prediction.controller.LServing
+import org.apache.predictionio.controller.LServing
 
 class Serving
   extends LServing[Query, PredictedResult] {
@@ -10,4 +10,4 @@ class Serving
     predictedResults: Seq[PredictedResult]): PredictedResult = {
     predictedResults.head
   }
-}
\ No newline at end of file
+}

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/experimental/scala-local-friend-recommendation/build.sbt
----------------------------------------------------------------------
diff --git a/examples/experimental/scala-local-friend-recommendation/build.sbt b/examples/experimental/scala-local-friend-recommendation/build.sbt
index 3c6ca7f..659f345 100644
--- a/examples/experimental/scala-local-friend-recommendation/build.sbt
+++ b/examples/experimental/scala-local-friend-recommendation/build.sbt
@@ -4,9 +4,9 @@ assemblySettings
 
 name := "examples-friendrecommendation"
 
-organization := "io.prediction"
+organization := "org.apache.predictionio"
 
 libraryDependencies ++= Seq(
-  "io.prediction" %% "core" % "0.9.1" % "provided",
-  "io.prediction" %% "data" % "0.9.1" % "provided",
+  "org.apache.predictionio" %% "core" % "0.9.1" % "provided",
+  "org.apache.predictionio" %% "data" % "0.9.1" % "provided",
   "org.apache.spark" %% "spark-core" % "1.2.0" % "provided")

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/experimental/scala-local-friend-recommendation/keyword_similarity_engine.json
----------------------------------------------------------------------
diff --git a/examples/experimental/scala-local-friend-recommendation/keyword_similarity_engine.json b/examples/experimental/scala-local-friend-recommendation/keyword_similarity_engine.json
index acba39a..f80fe5d 100644
--- a/examples/experimental/scala-local-friend-recommendation/keyword_similarity_engine.json
+++ b/examples/experimental/scala-local-friend-recommendation/keyword_similarity_engine.json
@@ -1,8 +1,8 @@
 {
-  "id": "io.prediction.examples.friendrecommendation.keywordsimilarity",
+  "id": "org.apache.predictionio.examples.friendrecommendation.keywordsimilarity",
   "version": "0.1",
   "name": "Friend Recommendation Engine with Keyword Similarity Method",
-  "engineFactory": "io.prediction.examples.friendrecommendation.KeywordSimilarityEngineFactory",
+  "engineFactory": "org.apache.predictionio.examples.friendrecommendation.KeywordSimilarityEngineFactory",
   "datasource": {
     "itemFilePath": "data/item.txt",
     "userKeywordFilePath": "data/user_key_word.txt",

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/experimental/scala-local-friend-recommendation/random_engine.json
----------------------------------------------------------------------
diff --git a/examples/experimental/scala-local-friend-recommendation/random_engine.json b/examples/experimental/scala-local-friend-recommendation/random_engine.json
index 40f2f61..5aba3c3 100644
--- a/examples/experimental/scala-local-friend-recommendation/random_engine.json
+++ b/examples/experimental/scala-local-friend-recommendation/random_engine.json
@@ -1,8 +1,8 @@
 {
-  "id": "io.prediction.examples.friendrecommendation.random",
+  "id": "org.apache.predictionio.examples.friendrecommendation.random",
   "version": "0.1",
   "name": "Friend Recommendation Engine with Random Method",
-  "engineFactory": "io.prediction.examples.friendrecommendation.RandomEngineFactory",
+  "engineFactory": "org.apache.predictionio.examples.friendrecommendation.RandomEngineFactory",
   "datasource": {
     "itemFilePath": "data/item.txt",
     "userKeywordFilePath": "data/user_key_word.txt",

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/experimental/scala-local-friend-recommendation/src/main/scala/FriendRecommendationAlgoParams.scala
----------------------------------------------------------------------
diff --git a/examples/experimental/scala-local-friend-recommendation/src/main/scala/FriendRecommendationAlgoParams.scala b/examples/experimental/scala-local-friend-recommendation/src/main/scala/FriendRecommendationAlgoParams.scala
index 375825a..7a73001 100644
--- a/examples/experimental/scala-local-friend-recommendation/src/main/scala/FriendRecommendationAlgoParams.scala
+++ b/examples/experimental/scala-local-friend-recommendation/src/main/scala/FriendRecommendationAlgoParams.scala
@@ -1,6 +1,6 @@
-package io.prediction.examples.friendrecommendation
+package org.apache.predictionio.examples.friendrecommendation
 
-import io.prediction.controller._
+import org.apache.predictionio.controller._
 
 class FriendRecommendationAlgoParams (
 ) extends Params

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/experimental/scala-local-friend-recommendation/src/main/scala/FriendRecommendationDataSource.scala
----------------------------------------------------------------------
diff --git a/examples/experimental/scala-local-friend-recommendation/src/main/scala/FriendRecommendationDataSource.scala b/examples/experimental/scala-local-friend-recommendation/src/main/scala/FriendRecommendationDataSource.scala
index 757a6e7..04bf21e 100644
--- a/examples/experimental/scala-local-friend-recommendation/src/main/scala/FriendRecommendationDataSource.scala
+++ b/examples/experimental/scala-local-friend-recommendation/src/main/scala/FriendRecommendationDataSource.scala
@@ -1,6 +1,6 @@
-package io.prediction.examples.friendrecommendation
+package org.apache.predictionio.examples.friendrecommendation
 
-import io.prediction.controller._
+import org.apache.predictionio.controller._
 import scala.io.Source
 import scala.collection.immutable.HashMap
 

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/experimental/scala-local-friend-recommendation/src/main/scala/FriendRecommendationDataSourceParams.scala
----------------------------------------------------------------------
diff --git a/examples/experimental/scala-local-friend-recommendation/src/main/scala/FriendRecommendationDataSourceParams.scala b/examples/experimental/scala-local-friend-recommendation/src/main/scala/FriendRecommendationDataSourceParams.scala
index 4f3d4b3..88e8b17 100644
--- a/examples/experimental/scala-local-friend-recommendation/src/main/scala/FriendRecommendationDataSourceParams.scala
+++ b/examples/experimental/scala-local-friend-recommendation/src/main/scala/FriendRecommendationDataSourceParams.scala
@@ -1,6 +1,6 @@
-package io.prediction.examples.friendrecommendation
+package org.apache.predictionio.examples.friendrecommendation
 
-import io.prediction.controller._
+import org.apache.predictionio.controller._
 
 class FriendRecommendationDataSourceParams(
   val itemFilePath: String,

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/experimental/scala-local-friend-recommendation/src/main/scala/FriendRecommendationPrediction.scala
----------------------------------------------------------------------
diff --git a/examples/experimental/scala-local-friend-recommendation/src/main/scala/FriendRecommendationPrediction.scala b/examples/experimental/scala-local-friend-recommendation/src/main/scala/FriendRecommendationPrediction.scala
index 74a23ca..6a57390 100644
--- a/examples/experimental/scala-local-friend-recommendation/src/main/scala/FriendRecommendationPrediction.scala
+++ b/examples/experimental/scala-local-friend-recommendation/src/main/scala/FriendRecommendationPrediction.scala
@@ -1,4 +1,4 @@
-package io.prediction.examples.friendrecommendation
+package org.apache.predictionio.examples.friendrecommendation
 
 class FriendRecommendationPrediction (
   val confidence: Double,

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/experimental/scala-local-friend-recommendation/src/main/scala/FriendRecommendationQuery.scala
----------------------------------------------------------------------
diff --git a/examples/experimental/scala-local-friend-recommendation/src/main/scala/FriendRecommendationQuery.scala b/examples/experimental/scala-local-friend-recommendation/src/main/scala/FriendRecommendationQuery.scala
index 5f37e75..a7a64d2 100644
--- a/examples/experimental/scala-local-friend-recommendation/src/main/scala/FriendRecommendationQuery.scala
+++ b/examples/experimental/scala-local-friend-recommendation/src/main/scala/FriendRecommendationQuery.scala
@@ -1,4 +1,4 @@
-package io.prediction.examples.friendrecommendation
+package org.apache.predictionio.examples.friendrecommendation
 
 class FriendRecommendationQuery (
   // To align with the KDD 2012 scenario

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/experimental/scala-local-friend-recommendation/src/main/scala/FriendRecommendationTrainingData.scala
----------------------------------------------------------------------
diff --git a/examples/experimental/scala-local-friend-recommendation/src/main/scala/FriendRecommendationTrainingData.scala b/examples/experimental/scala-local-friend-recommendation/src/main/scala/FriendRecommendationTrainingData.scala
index 0f0056c..18f9fb0 100644
--- a/examples/experimental/scala-local-friend-recommendation/src/main/scala/FriendRecommendationTrainingData.scala
+++ b/examples/experimental/scala-local-friend-recommendation/src/main/scala/FriendRecommendationTrainingData.scala
@@ -1,4 +1,4 @@
-package io.prediction.examples.friendrecommendation
+package org.apache.predictionio.examples.friendrecommendation
 
 import scala.collection.immutable.HashMap
 

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/experimental/scala-local-friend-recommendation/src/main/scala/KeywordSimilarityAlgorithm.scala
----------------------------------------------------------------------
diff --git a/examples/experimental/scala-local-friend-recommendation/src/main/scala/KeywordSimilarityAlgorithm.scala b/examples/experimental/scala-local-friend-recommendation/src/main/scala/KeywordSimilarityAlgorithm.scala
index eace058..bb83e2b 100644
--- a/examples/experimental/scala-local-friend-recommendation/src/main/scala/KeywordSimilarityAlgorithm.scala
+++ b/examples/experimental/scala-local-friend-recommendation/src/main/scala/KeywordSimilarityAlgorithm.scala
@@ -1,6 +1,6 @@
-package io.prediction.examples.friendrecommendation
+package org.apache.predictionio.examples.friendrecommendation
 
-import io.prediction.controller._
+import org.apache.predictionio.controller._
 import scala.collection.immutable.HashMap
 import scala.math
 import scala.io.Source

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/experimental/scala-local-friend-recommendation/src/main/scala/KeywordSimilarityEngineFactory.scala
----------------------------------------------------------------------
diff --git a/examples/experimental/scala-local-friend-recommendation/src/main/scala/KeywordSimilarityEngineFactory.scala b/examples/experimental/scala-local-friend-recommendation/src/main/scala/KeywordSimilarityEngineFactory.scala
index 5ea6663..a96f1b0 100644
--- a/examples/experimental/scala-local-friend-recommendation/src/main/scala/KeywordSimilarityEngineFactory.scala
+++ b/examples/experimental/scala-local-friend-recommendation/src/main/scala/KeywordSimilarityEngineFactory.scala
@@ -1,6 +1,6 @@
-package io.prediction.examples.friendrecommendation
+package org.apache.predictionio.examples.friendrecommendation
 
-import io.prediction.controller._
+import org.apache.predictionio.controller._
 
 object KeywordSimilarityEngineFactory extends IEngineFactory {
   override

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/experimental/scala-local-friend-recommendation/src/main/scala/KeywordSimilarityModel.scala
----------------------------------------------------------------------
diff --git a/examples/experimental/scala-local-friend-recommendation/src/main/scala/KeywordSimilarityModel.scala b/examples/experimental/scala-local-friend-recommendation/src/main/scala/KeywordSimilarityModel.scala
index 4523327..0e9f046 100644
--- a/examples/experimental/scala-local-friend-recommendation/src/main/scala/KeywordSimilarityModel.scala
+++ b/examples/experimental/scala-local-friend-recommendation/src/main/scala/KeywordSimilarityModel.scala
@@ -1,4 +1,4 @@
-package io.prediction.examples.friendrecommendation
+package org.apache.predictionio.examples.friendrecommendation
 
 import scala.collection.immutable.HashMap
 

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/experimental/scala-local-friend-recommendation/src/main/scala/RandomAlgorithm.scala
----------------------------------------------------------------------
diff --git a/examples/experimental/scala-local-friend-recommendation/src/main/scala/RandomAlgorithm.scala b/examples/experimental/scala-local-friend-recommendation/src/main/scala/RandomAlgorithm.scala
index 392feac..47badea 100644
--- a/examples/experimental/scala-local-friend-recommendation/src/main/scala/RandomAlgorithm.scala
+++ b/examples/experimental/scala-local-friend-recommendation/src/main/scala/RandomAlgorithm.scala
@@ -1,6 +1,6 @@
-package io.prediction.examples.friendrecommendation
+package org.apache.predictionio.examples.friendrecommendation
 
-import io.prediction.controller._
+import org.apache.predictionio.controller._
 
 // For random algorithm
 import scala.util.Random

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/experimental/scala-local-friend-recommendation/src/main/scala/RandomEngineFactory.scala
----------------------------------------------------------------------
diff --git a/examples/experimental/scala-local-friend-recommendation/src/main/scala/RandomEngineFactory.scala b/examples/experimental/scala-local-friend-recommendation/src/main/scala/RandomEngineFactory.scala
index a1dc073..2f4938a 100644
--- a/examples/experimental/scala-local-friend-recommendation/src/main/scala/RandomEngineFactory.scala
+++ b/examples/experimental/scala-local-friend-recommendation/src/main/scala/RandomEngineFactory.scala
@@ -1,6 +1,6 @@
-package io.prediction.examples.friendrecommendation
+package org.apache.predictionio.examples.friendrecommendation
 
-import io.prediction.controller._
+import org.apache.predictionio.controller._
 
 object RandomEngineFactory extends IEngineFactory {
   override

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/experimental/scala-local-friend-recommendation/src/main/scala/RandomModel.scala
----------------------------------------------------------------------
diff --git a/examples/experimental/scala-local-friend-recommendation/src/main/scala/RandomModel.scala b/examples/experimental/scala-local-friend-recommendation/src/main/scala/RandomModel.scala
index ab1955d..1e4bf7c 100644
--- a/examples/experimental/scala-local-friend-recommendation/src/main/scala/RandomModel.scala
+++ b/examples/experimental/scala-local-friend-recommendation/src/main/scala/RandomModel.scala
@@ -1,4 +1,4 @@
-package io.prediction.examples.friendrecommendation
+package org.apache.predictionio.examples.friendrecommendation
 
 class RandomModel(
   val randomThreshold: Double

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/experimental/scala-local-helloworld/HelloWorld.scala
----------------------------------------------------------------------
diff --git a/examples/experimental/scala-local-helloworld/HelloWorld.scala b/examples/experimental/scala-local-helloworld/HelloWorld.scala
index 5fa3d68..90caf65 100644
--- a/examples/experimental/scala-local-helloworld/HelloWorld.scala
+++ b/examples/experimental/scala-local-helloworld/HelloWorld.scala
@@ -1,6 +1,6 @@
 package org.sample.helloworld
 
-import io.prediction.controller._
+import org.apache.predictionio.controller._
 
 import scala.io.Source
 import scala.collection.immutable.HashMap

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/experimental/scala-local-helloworld/build.sbt
----------------------------------------------------------------------
diff --git a/examples/experimental/scala-local-helloworld/build.sbt b/examples/experimental/scala-local-helloworld/build.sbt
index 8d4f042..579a8ee 100644
--- a/examples/experimental/scala-local-helloworld/build.sbt
+++ b/examples/experimental/scala-local-helloworld/build.sbt
@@ -7,6 +7,6 @@ name := "example-scala-local-helloworld"
 organization := "org.sample"
 
 libraryDependencies ++= Seq(
-  "io.prediction" %% "core" % "0.9.1" % "provided",
-  "io.prediction" %% "data" % "0.9.1" % "provided",
+  "org.apache.predictionio" %% "core" % "0.9.1" % "provided",
+  "org.apache.predictionio" %% "data" % "0.9.1" % "provided",
   "org.apache.spark" %% "spark-core" % "1.2.0" % "provided")

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/experimental/scala-local-movielens-evaluation/build.sbt
----------------------------------------------------------------------
diff --git a/examples/experimental/scala-local-movielens-evaluation/build.sbt b/examples/experimental/scala-local-movielens-evaluation/build.sbt
index 8e8417a..156bab7 100644
--- a/examples/experimental/scala-local-movielens-evaluation/build.sbt
+++ b/examples/experimental/scala-local-movielens-evaluation/build.sbt
@@ -10,6 +10,6 @@ organization := "myorg"
 version := "0.0.1-SNAPSHOT"
 
 libraryDependencies ++= Seq(
-  "io.prediction"    %% "core"          % "0.9.1" % "provided",
-  "io.prediction"    %% "engines"          % "0.9.1" % "provided",
+  "org.apache.predictionio"    %% "core"          % "0.9.1" % "provided",
+  "org.apache.predictionio"    %% "engines"          % "0.9.1" % "provided",
   "org.apache.spark" %% "spark-core"    % "1.2.0" % "provided")

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/experimental/scala-local-movielens-evaluation/src/main/scala/Evaluation.scala
----------------------------------------------------------------------
diff --git a/examples/experimental/scala-local-movielens-evaluation/src/main/scala/Evaluation.scala b/examples/experimental/scala-local-movielens-evaluation/src/main/scala/Evaluation.scala
index 0186903..e5a8061 100644
--- a/examples/experimental/scala-local-movielens-evaluation/src/main/scala/Evaluation.scala
+++ b/examples/experimental/scala-local-movielens-evaluation/src/main/scala/Evaluation.scala
@@ -1,18 +1,18 @@
-package io.prediction.examples.mlc
+package org.apache.predictionio.examples.mlc
 
-import io.prediction.engines.itemrank.PreparatorParams
-import io.prediction.engines.itemrank.EventsDataSourceParams
-import io.prediction.engines.itemrank.ItemRankEngine
-import io.prediction.engines.itemrank.ItemRankDetailedEvaluator
-import io.prediction.engines.itemrank.DetailedEvaluatorParams
-import io.prediction.engines.itemrank.MeasureType
-import io.prediction.engines.itemrank.mahout.ItemBasedAlgoParams
-import io.prediction.engines.base.AttributeNames
-import io.prediction.engines.base.EventsSlidingEvalParams
-import io.prediction.engines.base.BinaryRatingParams
-import io.prediction.controller.WorkflowParams
-import io.prediction.controller.Workflow
-import io.prediction.controller.EngineParams
+import org.apache.predictionio.engines.itemrank.PreparatorParams
+import org.apache.predictionio.engines.itemrank.EventsDataSourceParams
+import org.apache.predictionio.engines.itemrank.ItemRankEngine
+import org.apache.predictionio.engines.itemrank.ItemRankDetailedEvaluator
+import org.apache.predictionio.engines.itemrank.DetailedEvaluatorParams
+import org.apache.predictionio.engines.itemrank.MeasureType
+import org.apache.predictionio.engines.itemrank.mahout.ItemBasedAlgoParams
+import org.apache.predictionio.engines.base.AttributeNames
+import org.apache.predictionio.engines.base.EventsSlidingEvalParams
+import org.apache.predictionio.engines.base.BinaryRatingParams
+import org.apache.predictionio.controller.WorkflowParams
+import org.apache.predictionio.controller.Workflow
+import org.apache.predictionio.controller.EngineParams
 
 import com.github.nscala_time.time.Imports._
 

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/experimental/scala-local-movielens-evaluation/src/main/scala/ItemRecEvaluation.scala
----------------------------------------------------------------------
diff --git a/examples/experimental/scala-local-movielens-evaluation/src/main/scala/ItemRecEvaluation.scala b/examples/experimental/scala-local-movielens-evaluation/src/main/scala/ItemRecEvaluation.scala
index bc48cbd..c8fa643 100644
--- a/examples/experimental/scala-local-movielens-evaluation/src/main/scala/ItemRecEvaluation.scala
+++ b/examples/experimental/scala-local-movielens-evaluation/src/main/scala/ItemRecEvaluation.scala
@@ -1,19 +1,19 @@
-package io.prediction.examples.mlc
+package org.apache.predictionio.examples.mlc
 
-import io.prediction.engines.itemrec.ItemRecEngine
-import io.prediction.engines.itemrec.EventsDataSourceParams
-import io.prediction.engines.itemrec.PreparatorParams
-import io.prediction.engines.itemrec.NCItemBasedAlgorithmParams
-import io.prediction.engines.itemrec.EvalParams
-import io.prediction.engines.itemrec.ItemRecEvaluator
-import io.prediction.engines.itemrec.ItemRecEvaluatorParams
-import io.prediction.engines.itemrec.MeasureType
-import io.prediction.engines.base.EventsSlidingEvalParams
-import io.prediction.engines.base.BinaryRatingParams
+import org.apache.predictionio.engines.itemrec.ItemRecEngine
+import org.apache.predictionio.engines.itemrec.EventsDataSourceParams
+import org.apache.predictionio.engines.itemrec.PreparatorParams
+import org.apache.predictionio.engines.itemrec.NCItemBasedAlgorithmParams
+import org.apache.predictionio.engines.itemrec.EvalParams
+import org.apache.predictionio.engines.itemrec.ItemRecEvaluator
+import org.apache.predictionio.engines.itemrec.ItemRecEvaluatorParams
+import org.apache.predictionio.engines.itemrec.MeasureType
+import org.apache.predictionio.engines.base.EventsSlidingEvalParams
+import org.apache.predictionio.engines.base.BinaryRatingParams
 
-import io.prediction.controller.EngineParams
-import io.prediction.controller.Workflow
-import io.prediction.controller.WorkflowParams
+import org.apache.predictionio.controller.EngineParams
+import org.apache.predictionio.controller.Workflow
+import org.apache.predictionio.controller.WorkflowParams
 
 import com.github.nscala_time.time.Imports._
 

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/experimental/scala-local-movielens-filtering/build.sbt
----------------------------------------------------------------------
diff --git a/examples/experimental/scala-local-movielens-filtering/build.sbt b/examples/experimental/scala-local-movielens-filtering/build.sbt
index fd490a0..04d59ad 100644
--- a/examples/experimental/scala-local-movielens-filtering/build.sbt
+++ b/examples/experimental/scala-local-movielens-filtering/build.sbt
@@ -10,6 +10,6 @@ organization := "myorg"
 version := "0.0.1-SNAPSHOT"
 
 libraryDependencies ++= Seq(
-  "io.prediction"    %% "core"          % "0.9.1" % "provided",
-  "io.prediction"    %% "engines"       % "0.9.1" % "provided",
+  "org.apache.predictionio"    %% "core"          % "0.9.1" % "provided",
+  "org.apache.predictionio"    %% "engines"       % "0.9.1" % "provided",
   "org.apache.spark" %% "spark-core"    % "1.2.0" % "provided")

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/experimental/scala-local-movielens-filtering/src/main/scala/Engine.scala
----------------------------------------------------------------------
diff --git a/examples/experimental/scala-local-movielens-filtering/src/main/scala/Engine.scala b/examples/experimental/scala-local-movielens-filtering/src/main/scala/Engine.scala
index 89daefd..bc7da4a 100644
--- a/examples/experimental/scala-local-movielens-filtering/src/main/scala/Engine.scala
+++ b/examples/experimental/scala-local-movielens-filtering/src/main/scala/Engine.scala
@@ -1,10 +1,10 @@
 package myorg
 
-import io.prediction.controller.Engine
-import io.prediction.controller.IEngineFactory
-import io.prediction.engines.itemrec.EventsDataSource
-import io.prediction.engines.itemrec.ItemRecPreparator
-import io.prediction.engines.itemrec.NCItemBasedAlgorithm
+import org.apache.predictionio.controller.Engine
+import org.apache.predictionio.controller.IEngineFactory
+import org.apache.predictionio.engines.itemrec.EventsDataSource
+import org.apache.predictionio.engines.itemrec.ItemRecPreparator
+import org.apache.predictionio.engines.itemrec.NCItemBasedAlgorithm
 
 object TempFilterEngine extends IEngineFactory {
   def apply() = {

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/experimental/scala-local-movielens-filtering/src/main/scala/Filtering.scala
----------------------------------------------------------------------
diff --git a/examples/experimental/scala-local-movielens-filtering/src/main/scala/Filtering.scala b/examples/experimental/scala-local-movielens-filtering/src/main/scala/Filtering.scala
index 5c201a0..2751426 100644
--- a/examples/experimental/scala-local-movielens-filtering/src/main/scala/Filtering.scala
+++ b/examples/experimental/scala-local-movielens-filtering/src/main/scala/Filtering.scala
@@ -1,9 +1,9 @@
 package myorg
 
-import io.prediction.controller.LServing
-import io.prediction.controller.Params
-import io.prediction.engines.itemrec.Prediction
-import io.prediction.engines.itemrec.Query
+import org.apache.predictionio.controller.LServing
+import org.apache.predictionio.controller.Params
+import org.apache.predictionio.engines.itemrec.Prediction
+import org.apache.predictionio.engines.itemrec.Query
 import scala.io.Source
 
 case class TempFilterParams(val filepath: String) extends Params

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/experimental/scala-local-regression/README.md
----------------------------------------------------------------------
diff --git a/examples/experimental/scala-local-regression/README.md b/examples/experimental/scala-local-regression/README.md
index dc5bdc8..3b98dc1 100644
--- a/examples/experimental/scala-local-regression/README.md
+++ b/examples/experimental/scala-local-regression/README.md
@@ -86,10 +86,10 @@ Running Evaluation Metrics
 To run evaluation metrics, use the following command.
 ```
 $ cd $PIO_HOME/examples/scala-local-regression
-$ ../../bin/pio eval --metrics-class io.prediction.controller.MeanSquareError
+$ ../../bin/pio eval --metrics-class org.apache.predictionio.controller.MeanSquareError
 ```
 Notice the extra required argument `--metrics-class
-io.prediction.controller.MeanSquareError` for the `eval` command. This instructs
+org.apache.predictionio.controller.MeanSquareError` for the `eval` command. This instructs
 PredictionIO to run the specified metrics during evaluation. When you look at
 the console output again, you should be able to see a mean square error
 computed, like the following.

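As a point of reference for the `eval` hunk above, here is a minimal sketch of the arithmetic that a mean square error metric reports; it is illustrative only, not PredictionIO's MeanSquareError implementation.

```
// Illustrative arithmetic only, not the MeanSquareError class itself:
// MSE = (1/n) * sum_i (predicted_i - actual_i)^2
def meanSquareError(pairs: Seq[(Double, Double)]): Double = {
  require(pairs.nonEmpty, "need at least one (predicted, actual) pair")
  pairs.map { case (p, a) => (p - a) * (p - a) }.sum / pairs.size
}

// Example: meanSquareError(Seq((2.5, 3.0), (0.0, -0.5))) returns 0.25.
```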
http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/experimental/scala-local-regression/Run.scala
----------------------------------------------------------------------
diff --git a/examples/experimental/scala-local-regression/Run.scala b/examples/experimental/scala-local-regression/Run.scala
index 3708a1c..7bbe900 100644
--- a/examples/experimental/scala-local-regression/Run.scala
+++ b/examples/experimental/scala-local-regression/Run.scala
@@ -1,18 +1,18 @@
-package io.prediction.examples.regression.local
-
-import io.prediction.controller.EmptyParams
-import io.prediction.controller.Engine
-import io.prediction.controller.IEngineFactory
-import io.prediction.controller.EngineParams
-import io.prediction.controller.LFirstServing
-import io.prediction.controller.LAlgorithm
-import io.prediction.controller.LDataSource
-import io.prediction.controller.LPreparator
-import io.prediction.controller.MeanSquareError
-import io.prediction.controller.Params
-import io.prediction.controller.Utils
-import io.prediction.controller.Workflow
-import io.prediction.controller.WorkflowParams
+package org.apache.predictionio.examples.regression.local
+
+import org.apache.predictionio.controller.EmptyParams
+import org.apache.predictionio.controller.Engine
+import org.apache.predictionio.controller.IEngineFactory
+import org.apache.predictionio.controller.EngineParams
+import org.apache.predictionio.controller.LFirstServing
+import org.apache.predictionio.controller.LAlgorithm
+import org.apache.predictionio.controller.LDataSource
+import org.apache.predictionio.controller.LPreparator
+import org.apache.predictionio.controller.MeanSquareError
+import org.apache.predictionio.controller.Params
+import org.apache.predictionio.controller.Utils
+import org.apache.predictionio.controller.Workflow
+import org.apache.predictionio.controller.WorkflowParams
 
 import breeze.linalg.DenseMatrix
 import breeze.linalg.DenseVector

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/experimental/scala-local-regression/build.sbt
----------------------------------------------------------------------
diff --git a/examples/experimental/scala-local-regression/build.sbt b/examples/experimental/scala-local-regression/build.sbt
index 2f99692..67c1977 100644
--- a/examples/experimental/scala-local-regression/build.sbt
+++ b/examples/experimental/scala-local-regression/build.sbt
@@ -4,10 +4,10 @@ assemblySettings
 
 name := "example-scala-local-regression"
 
-organization := "io.prediction"
+organization := "org.apache.predictionio"
 
 libraryDependencies ++= Seq(
-  "io.prediction"    %% "core"          % "0.9.1" % "provided",
+  "org.apache.predictionio"    %% "core"          % "0.9.1" % "provided",
   "org.apache.spark" %% "spark-core"    % "1.2.0" % "provided",
   "org.json4s"       %% "json4s-native" % "3.2.10",
   "org.scalanlp"     %% "nak"           % "1.3")

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/experimental/scala-local-regression/engine.json
----------------------------------------------------------------------
diff --git a/examples/experimental/scala-local-regression/engine.json b/examples/experimental/scala-local-regression/engine.json
index c1818ac..eaa8f14 100644
--- a/examples/experimental/scala-local-regression/engine.json
+++ b/examples/experimental/scala-local-regression/engine.json
@@ -1,7 +1,7 @@
 {
   "id": "default",
   "description": "Default settings",
-  "engineFactory": "io.prediction.examples.regression.local.RegressionEngineFactory",
+  "engineFactory": "org.apache.predictionio.examples.regression.local.RegressionEngineFactory",
   "datasource": {
     "params": {
       "filepath": "../data/lr_data.txt",

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/experimental/scala-parallel-friend-recommendation/README.md
----------------------------------------------------------------------
diff --git a/examples/experimental/scala-parallel-friend-recommendation/README.md b/examples/experimental/scala-parallel-friend-recommendation/README.md
index d5b66be..44f193d 100644
--- a/examples/experimental/scala-parallel-friend-recommendation/README.md
+++ b/examples/experimental/scala-parallel-friend-recommendation/README.md
@@ -14,7 +14,7 @@ Prerequisite: GraphX package.
 
 Parameter Explained
 -------------------
-datasource - graphEdgelistPath : The edge-list passed to GraphX's graph loader. For efficient memory storage of intermediate SimRank score calculations, the vertex ids should be in a contiguous range from 0 to (#Vertex-1). There is a utility function for re-mapping the vertex Id values : io.prediction.examples.pfriendrecommendation.DeltaSimRankRDD.normalizeGraph. 
+datasource - graphEdgelistPath : The edge-list passed to GraphX's graph loader. For efficient memory storage of intermediate SimRank score calculations, the vertex IDs should be in a contiguous range from 0 to (#Vertex-1). There is a utility function for re-mapping the vertex ID values: org.apache.predictionio.examples.pfriendrecommendation.DeltaSimRankRDD.normalizeGraph.
 
 The provided DataSource class uses the GraphLoader provided by GraphX. Graphs can be specified by a tab-separated edge list, where each line specifies one edge.
 The user can refer to the provided example edge list at `$EXAMPLE_HOME/data/edge_list_small.txt` for a graph specification with 10 vertices and 20 edges.
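
Since the SimRank code expects contiguous ids, here is a hedged sketch of one
way such a remapping can be done with Spark (the `normalize` function below is
hypothetical and is not the project's DeltaSimRankRDD.normalizeGraph):

```
// Hypothetical remap of arbitrary vertex ids onto the contiguous range
// 0..(#Vertex-1); illustrative only, not the project's normalizeGraph.
import org.apache.spark.SparkContext._
import org.apache.spark.graphx.{Edge, Graph}
import org.apache.spark.rdd.RDD

def normalize(edges: RDD[(Long, Long)]): Graph[Int, Int] = {
  // Assign each distinct vertex id a new id from 0 to (#Vertex-1).
  val idMap: RDD[(Long, Long)] =
    edges.flatMap { case (s, d) => Seq(s, d) }.distinct().zipWithIndex()
  val remapped = edges
    .join(idMap)                                  // (src, (dst, newSrc))
    .map { case (_, (dst, newSrc)) => (dst, newSrc) }
    .join(idMap)                                  // (dst, (newSrc, newDst))
    .map { case (_, (newSrc, newDst)) => Edge(newSrc, newDst, 1) }
  Graph.fromEdges(remapped, defaultValue = 1)
}
```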

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/experimental/scala-parallel-friend-recommendation/build.sbt
----------------------------------------------------------------------
diff --git a/examples/experimental/scala-parallel-friend-recommendation/build.sbt b/examples/experimental/scala-parallel-friend-recommendation/build.sbt
index 64b0f68..63a1734 100644
--- a/examples/experimental/scala-parallel-friend-recommendation/build.sbt
+++ b/examples/experimental/scala-parallel-friend-recommendation/build.sbt
@@ -4,9 +4,9 @@ assemblySettings
 
 name := "template-scala-parallel-recommendation-custom-preparator"
 
-organization := "io.prediction"
+organization := "org.apache.predictionio"
 
 libraryDependencies ++= Seq(
-  "io.prediction"    %% "core"          % "0.9.1" % "provided",
+  "org.apache.predictionio"    %% "core"          % "0.9.1" % "provided",
   "org.apache.spark" %% "spark-core"    % "1.2.0" % "provided",
   "org.apache.spark" %% "spark-graphx"  % "1.2.0" % "provided")

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/experimental/scala-parallel-friend-recommendation/engine-forest.json
----------------------------------------------------------------------
diff --git a/examples/experimental/scala-parallel-friend-recommendation/engine-forest.json b/examples/experimental/scala-parallel-friend-recommendation/engine-forest.json
index 67cf332..2820578 100644
--- a/examples/experimental/scala-parallel-friend-recommendation/engine-forest.json
+++ b/examples/experimental/scala-parallel-friend-recommendation/engine-forest.json
@@ -1,7 +1,7 @@
 {
   "id": "SimRankParallelEngine",
   "description": "Engine for graph vertex similarity using SimRank",
-  "engineFactory": "io.prediction.examples.pfriendrecommendation.PSimRankEngineFactory",
+  "engineFactory": "org.apache.predictionio.examples.pfriendrecommendation.PSimRankEngineFactory",
 
   "datasource": {
     "name" : "forest",

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/experimental/scala-parallel-friend-recommendation/engine.json
----------------------------------------------------------------------
diff --git a/examples/experimental/scala-parallel-friend-recommendation/engine.json b/examples/experimental/scala-parallel-friend-recommendation/engine.json
index 229d2eb..413928f 100644
--- a/examples/experimental/scala-parallel-friend-recommendation/engine.json
+++ b/examples/experimental/scala-parallel-friend-recommendation/engine.json
@@ -1,7 +1,7 @@
 {
   "id": "SimRankParallelEngine",
   "description": "Engine for graph vertex similarity using SimRank",
-  "engineFactory": "io.prediction.examples.pfriendrecommendation.PSimRankEngineFactory",
+  "engineFactory": "org.apache.predictionio.examples.pfriendrecommendation.PSimRankEngineFactory",
 
   "datasource": {
     "name" : "default",

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/experimental/scala-parallel-friend-recommendation/src/main/scala/DataSource.scala
----------------------------------------------------------------------
diff --git a/examples/experimental/scala-parallel-friend-recommendation/src/main/scala/DataSource.scala b/examples/experimental/scala-parallel-friend-recommendation/src/main/scala/DataSource.scala
index 6b586f4..f12022b 100644
--- a/examples/experimental/scala-parallel-friend-recommendation/src/main/scala/DataSource.scala
+++ b/examples/experimental/scala-parallel-friend-recommendation/src/main/scala/DataSource.scala
@@ -1,11 +1,11 @@
-package io.prediction.examples.pfriendrecommendation
+package org.apache.predictionio.examples.pfriendrecommendation
 
-import io.prediction.controller.PDataSource
-import io.prediction.controller.EmptyEvaluationInfo
-import io.prediction.controller.EmptyActualResult
-import io.prediction.controller.Params
-import io.prediction.data.storage.Event
-import io.prediction.data.storage.Storage
+import org.apache.predictionio.controller.PDataSource
+import org.apache.predictionio.controller.EmptyEvaluationInfo
+import org.apache.predictionio.controller.EmptyActualResult
+import org.apache.predictionio.controller.Params
+import org.apache.predictionio.data.storage.Event
+import org.apache.predictionio.data.storage.Storage
 
 import org.apache.spark.SparkContext
 import org.apache.spark.SparkContext._

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/experimental/scala-parallel-friend-recommendation/src/main/scala/DeltaSimRankRDD.scala
----------------------------------------------------------------------
diff --git a/examples/experimental/scala-parallel-friend-recommendation/src/main/scala/DeltaSimRankRDD.scala b/examples/experimental/scala-parallel-friend-recommendation/src/main/scala/DeltaSimRankRDD.scala
index f94575f..a290399 100644
--- a/examples/experimental/scala-parallel-friend-recommendation/src/main/scala/DeltaSimRankRDD.scala
+++ b/examples/experimental/scala-parallel-friend-recommendation/src/main/scala/DeltaSimRankRDD.scala
@@ -1,4 +1,4 @@
-package io.prediction.examples.pfriendrecommendation
+package org.apache.predictionio.examples.pfriendrecommendation
 
 import org.apache.spark.SparkContext
 import org.apache.spark.SparkContext._

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/experimental/scala-parallel-friend-recommendation/src/main/scala/Engine.scala
----------------------------------------------------------------------
diff --git a/examples/experimental/scala-parallel-friend-recommendation/src/main/scala/Engine.scala b/examples/experimental/scala-parallel-friend-recommendation/src/main/scala/Engine.scala
index e189d0f..d984ac5 100644
--- a/examples/experimental/scala-parallel-friend-recommendation/src/main/scala/Engine.scala
+++ b/examples/experimental/scala-parallel-friend-recommendation/src/main/scala/Engine.scala
@@ -1,7 +1,7 @@
-package io.prediction.examples.pfriendrecommendation
+package org.apache.predictionio.examples.pfriendrecommendation
 
-import io.prediction.controller.IEngineFactory
-import io.prediction.controller.Engine
+import org.apache.predictionio.controller.IEngineFactory
+import org.apache.predictionio.controller.Engine
 
 case class Query(
   val item1: Long,

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/experimental/scala-parallel-friend-recommendation/src/main/scala/Preparator.scala
----------------------------------------------------------------------
diff --git a/examples/experimental/scala-parallel-friend-recommendation/src/main/scala/Preparator.scala b/examples/experimental/scala-parallel-friend-recommendation/src/main/scala/Preparator.scala
index cbf647f..869fde2 100644
--- a/examples/experimental/scala-parallel-friend-recommendation/src/main/scala/Preparator.scala
+++ b/examples/experimental/scala-parallel-friend-recommendation/src/main/scala/Preparator.scala
@@ -1,15 +1,15 @@
-package io.prediction.examples.pfriendrecommendation
+package org.apache.predictionio.examples.pfriendrecommendation
 
-import io.prediction.controller.EmptyParams
-import io.prediction.controller.PPreparator
-import io.prediction.controller.EmptyPreparatorParams
+import org.apache.predictionio.controller.EmptyParams
+import org.apache.predictionio.controller.PPreparator
+import org.apache.predictionio.controller.EmptyPreparatorParams
 
 import org.apache.spark.SparkContext
 import org.apache.spark.SparkContext._
 import org.apache.spark.rdd.RDD
 
 import scala.io.Source // ADDED
-import io.prediction.controller.Params // ADDED
+import org.apache.predictionio.controller.Params // ADDED
 
  // ADDED CustomPreparatorParams case class
 case class CustomPreparatorParams(

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/experimental/scala-parallel-friend-recommendation/src/main/scala/Sampling.scala
----------------------------------------------------------------------
diff --git a/examples/experimental/scala-parallel-friend-recommendation/src/main/scala/Sampling.scala b/examples/experimental/scala-parallel-friend-recommendation/src/main/scala/Sampling.scala
index 623341c..5f1e3ea 100644
--- a/examples/experimental/scala-parallel-friend-recommendation/src/main/scala/Sampling.scala
+++ b/examples/experimental/scala-parallel-friend-recommendation/src/main/scala/Sampling.scala
@@ -1,4 +1,4 @@
-package io.prediction.examples.pfriendrecommendation
+package org.apache.predictionio.examples.pfriendrecommendation
 
 import org.apache.spark.SparkContext._
 import org.apache.spark.graphx._

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/experimental/scala-parallel-friend-recommendation/src/main/scala/Serving.scala
----------------------------------------------------------------------
diff --git a/examples/experimental/scala-parallel-friend-recommendation/src/main/scala/Serving.scala b/examples/experimental/scala-parallel-friend-recommendation/src/main/scala/Serving.scala
index 84e8992..769280a 100644
--- a/examples/experimental/scala-parallel-friend-recommendation/src/main/scala/Serving.scala
+++ b/examples/experimental/scala-parallel-friend-recommendation/src/main/scala/Serving.scala
@@ -1,7 +1,7 @@
-package io.prediction.examples.pfriendrecommendation
+package org.apache.predictionio.examples.pfriendrecommendation
 
-import io.prediction.controller.LServing
-import io.prediction.controller.EmptyServingParams
+import org.apache.predictionio.controller.LServing
+import org.apache.predictionio.controller.EmptyServingParams
 
 class Serving
   extends LServing[Query, Double] {

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/experimental/scala-parallel-friend-recommendation/src/main/scala/SimRankAlgorithm.scala
----------------------------------------------------------------------
diff --git a/examples/experimental/scala-parallel-friend-recommendation/src/main/scala/SimRankAlgorithm.scala b/examples/experimental/scala-parallel-friend-recommendation/src/main/scala/SimRankAlgorithm.scala
index 29c7cc7..7246cf4 100644
--- a/examples/experimental/scala-parallel-friend-recommendation/src/main/scala/SimRankAlgorithm.scala
+++ b/examples/experimental/scala-parallel-friend-recommendation/src/main/scala/SimRankAlgorithm.scala
@@ -1,6 +1,6 @@
-package io.prediction.examples.pfriendrecommendation
-import io.prediction.controller.PAlgorithm
-import io.prediction.controller.Params
+package org.apache.predictionio.examples.pfriendrecommendation
+import org.apache.predictionio.controller.PAlgorithm
+import org.apache.predictionio.controller.Params
 
 import org.apache.spark.SparkContext
 import org.apache.spark.SparkContext._

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/experimental/scala-parallel-recommendation-cat/build.sbt
----------------------------------------------------------------------
diff --git a/examples/experimental/scala-parallel-recommendation-cat/build.sbt b/examples/experimental/scala-parallel-recommendation-cat/build.sbt
index 9f8187e..c402953 100644
--- a/examples/experimental/scala-parallel-recommendation-cat/build.sbt
+++ b/examples/experimental/scala-parallel-recommendation-cat/build.sbt
@@ -4,9 +4,9 @@ assemblySettings
 
 name := "template-scala-parallel-recommendation-cat"
 
-organization := "io.prediction"
+organization := "org.apache.predictionio"
 
 libraryDependencies ++= Seq(
-  "io.prediction"    %% "core"          % "0.8.6" % "provided",
+  "org.apache.predictionio"    %% "core"          % "0.8.6" % "provided",
   "org.apache.spark" %% "spark-core"    % "1.2.0" % "provided",
   "org.apache.spark" %% "spark-mllib"   % "1.2.0" % "provided")

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/experimental/scala-parallel-recommendation-cat/src/main/scala/ALSAlgorithm.scala
----------------------------------------------------------------------
diff --git a/examples/experimental/scala-parallel-recommendation-cat/src/main/scala/ALSAlgorithm.scala b/examples/experimental/scala-parallel-recommendation-cat/src/main/scala/ALSAlgorithm.scala
index 0a60f8b..8128fb5 100644
--- a/examples/experimental/scala-parallel-recommendation-cat/src/main/scala/ALSAlgorithm.scala
+++ b/examples/experimental/scala-parallel-recommendation-cat/src/main/scala/ALSAlgorithm.scala
@@ -1,10 +1,10 @@
 package org.template.recommendation
 
-import io.prediction.controller.P2LAlgorithm
-import io.prediction.controller.Params
-import io.prediction.data.storage.BiMap
-import io.prediction.data.storage.Event
-import io.prediction.data.storage.Storage
+import org.apache.predictionio.controller.P2LAlgorithm
+import org.apache.predictionio.controller.Params
+import org.apache.predictionio.data.storage.BiMap
+import org.apache.predictionio.data.storage.Event
+import org.apache.predictionio.data.storage.Storage
 
 import org.apache.spark.SparkContext
 import org.apache.spark.SparkContext._

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/experimental/scala-parallel-recommendation-cat/src/main/scala/DataSource.scala
----------------------------------------------------------------------
diff --git a/examples/experimental/scala-parallel-recommendation-cat/src/main/scala/DataSource.scala b/examples/experimental/scala-parallel-recommendation-cat/src/main/scala/DataSource.scala
index 6a08060..3c4a4ea 100644
--- a/examples/experimental/scala-parallel-recommendation-cat/src/main/scala/DataSource.scala
+++ b/examples/experimental/scala-parallel-recommendation-cat/src/main/scala/DataSource.scala
@@ -1,11 +1,11 @@
 package org.template.recommendation
 
-import io.prediction.controller.PDataSource
-import io.prediction.controller.EmptyEvaluationInfo
-import io.prediction.controller.EmptyActualResult
-import io.prediction.controller.Params
-import io.prediction.data.storage.Event
-import io.prediction.data.storage.Storage
+import org.apache.predictionio.controller.PDataSource
+import org.apache.predictionio.controller.EmptyEvaluationInfo
+import org.apache.predictionio.controller.EmptyActualResult
+import org.apache.predictionio.controller.Params
+import org.apache.predictionio.data.storage.Event
+import org.apache.predictionio.data.storage.Storage
 
 import org.apache.spark.SparkContext
 import org.apache.spark.SparkContext._



[20/34] incubator-predictionio git commit: rename all except examples

Posted by do...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/core/src/main/scala/org/apache/predictionio/core/BasePreparator.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/predictionio/core/BasePreparator.scala b/core/src/main/scala/org/apache/predictionio/core/BasePreparator.scala
new file mode 100644
index 0000000..2075bbb
--- /dev/null
+++ b/core/src/main/scala/org/apache/predictionio/core/BasePreparator.scala
@@ -0,0 +1,42 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package org.apache.predictionio.core
+
+import org.apache.predictionio.annotation.DeveloperApi
+import org.apache.spark.SparkContext
+
+/** :: DeveloperApi ::
+  * Base class of all preparator controller classes
+  *
+  * Dev note: Probably will add an extra parameter for ad hoc JSON formatter
+  *
+  * @tparam TD Training data class
+  * @tparam PD Prepared data class
+  */
+@DeveloperApi
+abstract class BasePreparator[TD, PD]
+  extends AbstractDoer {
+  /** :: DeveloperApi ::
+    * Engine developers should not use this directly. This is called by the
+    * training workflow to prepare data before handing it over to the algorithm
+    *
+    * @param sc Spark context
+    * @param td Training data
+    * @return Prepared data
+    */
+  @DeveloperApi
+  def prepareBase(sc: SparkContext, td: TD): PD
+}
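
For illustration, a minimal concrete subclass that satisfies this contract by
returning the training data unchanged (engine developers would normally extend
LPreparator or PPreparator instead, per the note above):

```
// Illustrative only: an identity preparator against the BasePreparator
// contract; it hands the training data to the algorithm as-is.
import org.apache.predictionio.core.BasePreparator
import org.apache.spark.SparkContext

class IdentityPreparator[TD] extends BasePreparator[TD, TD] {
  def prepareBase(sc: SparkContext, td: TD): TD = td
}
```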

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/core/src/main/scala/org/apache/predictionio/core/BaseServing.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/predictionio/core/BaseServing.scala b/core/src/main/scala/org/apache/predictionio/core/BaseServing.scala
new file mode 100644
index 0000000..bf1c842
--- /dev/null
+++ b/core/src/main/scala/org/apache/predictionio/core/BaseServing.scala
@@ -0,0 +1,51 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package org.apache.predictionio.core
+
+import org.apache.predictionio.annotation.DeveloperApi
+import org.apache.predictionio.annotation.Experimental
+
+/** :: DeveloperApi ::
+  * Base class of all serving controller classes
+  *
+  * @tparam Q Query class
+  * @tparam P Predicted result class
+  */
+@DeveloperApi
+abstract class BaseServing[Q, P]
+  extends AbstractDoer {
+  /** :: Experimental ::
+    * Engine developers should not use this directly. This is called by the
+    * serving layer to supplement the query before sending it to the
+    * algorithms.
+    *
+    * @param q Query
+    * @return A supplemented query
+    */
+  @Experimental
+  def supplementBase(q: Q): Q
+
+  /** :: DeveloperApi ::
+    * Engine developers should not use this directly. This is called by the
+    * serving layer to combine predicted results from multiple algorithms and
+    * apply custom business logic before serving the end user.
+    *
+    * @param q Query
+    * @param ps List of predicted results
+    * @return A single predicted result
+    */
+  @DeveloperApi
+  def serveBase(q: Q, ps: Seq[P]): P
+}
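
Likewise illustrative, a trivial serving component against this contract: the
query passes through unchanged and the algorithms' numeric predictions are
averaged.

```
// Illustrative only: pass the query through and average the predictions
// collected from all algorithms.
import org.apache.predictionio.core.BaseServing

class AveragingServing[Q] extends BaseServing[Q, Double] {
  def supplementBase(q: Q): Q = q
  def serveBase(q: Q, ps: Seq[Double]): Double = ps.sum / ps.size
}
```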

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/core/src/main/scala/org/apache/predictionio/core/package.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/predictionio/core/package.scala b/core/src/main/scala/org/apache/predictionio/core/package.scala
new file mode 100644
index 0000000..0f3098c
--- /dev/null
+++ b/core/src/main/scala/org/apache/predictionio/core/package.scala
@@ -0,0 +1,21 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package org.apache.predictionio
+
+/** Core base classes of PredictionIO controller components. Engine developers
+  * should not use these directly.
+  */
+package object core {}

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/core/src/main/scala/org/apache/predictionio/package.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/predictionio/package.scala b/core/src/main/scala/org/apache/predictionio/package.scala
new file mode 100644
index 0000000..7b1989f
--- /dev/null
+++ b/core/src/main/scala/org/apache/predictionio/package.scala
@@ -0,0 +1,19 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package org.apache
+
+/** PredictionIO Scala API */
+package object predictionio {}

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/core/src/main/scala/org/apache/predictionio/workflow/CoreWorkflow.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/predictionio/workflow/CoreWorkflow.scala b/core/src/main/scala/org/apache/predictionio/workflow/CoreWorkflow.scala
new file mode 100644
index 0000000..6a27e87
--- /dev/null
+++ b/core/src/main/scala/org/apache/predictionio/workflow/CoreWorkflow.scala
@@ -0,0 +1,163 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package org.apache.predictionio.workflow
+
+import org.apache.predictionio.controller.EngineParams
+import org.apache.predictionio.controller.Evaluation
+import org.apache.predictionio.core.BaseEngine
+import org.apache.predictionio.core.BaseEvaluator
+import org.apache.predictionio.core.BaseEvaluatorResult
+import org.apache.predictionio.data.storage.EngineInstance
+import org.apache.predictionio.data.storage.EvaluationInstance
+import org.apache.predictionio.data.storage.Model
+import org.apache.predictionio.data.storage.Storage
+
+import com.github.nscala_time.time.Imports.DateTime
+import grizzled.slf4j.Logger
+
+import scala.language.existentials
+
+/** CoreWorkflow handles PredictionIO metadata and environment variables for
+  * training and evaluation.
+  */
+object CoreWorkflow {
+  @transient lazy val logger = Logger[this.type]
+  @transient lazy val engineInstances = Storage.getMetaDataEngineInstances
+  @transient lazy val evaluationInstances =
+    Storage.getMetaDataEvaluationInstances()
+
+  def runTrain[EI, Q, P, A](
+      engine: BaseEngine[EI, Q, P, A],
+      engineParams: EngineParams,
+      engineInstance: EngineInstance,
+      env: Map[String, String] = WorkflowUtils.pioEnvVars,
+      params: WorkflowParams = WorkflowParams()) {
+    logger.debug("Starting SparkContext")
+    val mode = "training"
+    WorkflowUtils.checkUpgrade(mode, engineInstance.engineFactory)
+
+    val batch = if (params.batch.nonEmpty) {
+      s"{engineInstance.engineFactory} (${params.batch}})"
+    } else {
+      engineInstance.engineFactory
+    }
+    val sc = WorkflowContext(
+      batch,
+      env,
+      params.sparkEnv,
+      mode.capitalize)
+
+    try {
+
+      val models: Seq[Any] = engine.train(
+        sc = sc,
+        engineParams = engineParams,
+        engineInstanceId = engineInstance.id,
+        params = params
+      )
+
+      val instanceId = Storage.getMetaDataEngineInstances
+
+      val kryo = KryoInstantiator.newKryoInjection
+
+      logger.info("Inserting persistent model")
+      Storage.getModelDataModels.insert(Model(
+        id = engineInstance.id,
+        models = kryo(models)))
+
+      logger.info("Updating engine instance")
+      val engineInstances = Storage.getMetaDataEngineInstances
+      engineInstances.update(engineInstance.copy(
+        status = "COMPLETED",
+        endTime = DateTime.now
+        ))
+
+      logger.info("Training completed successfully.")
+    } catch {
+      case e @(
+          _: StopAfterReadInterruption |
+          _: StopAfterPrepareInterruption) => {
+        logger.info(s"Training interrupted by $e.")
+      }
+    } finally {
+      logger.debug("Stopping SparkContext")
+      sc.stop()
+    }
+  }
+
+  def runEvaluation[EI, Q, P, A, R <: BaseEvaluatorResult](
+      evaluation: Evaluation,
+      engine: BaseEngine[EI, Q, P, A],
+      engineParamsList: Seq[EngineParams],
+      evaluationInstance: EvaluationInstance,
+      evaluator: BaseEvaluator[EI, Q, P, A, R],
+      env: Map[String, String] = WorkflowUtils.pioEnvVars,
+      params: WorkflowParams = WorkflowParams()) {
+    logger.info("runEvaluation started")
+    logger.debug("Start SparkContext")
+
+    val mode = "evaluation"
+
+    WorkflowUtils.checkUpgrade(mode, engine.getClass.getName)
+
+    val batch = if (params.batch.nonEmpty) {
+      s"{evaluation.getClass.getName} (${params.batch}})"
+    } else {
+      evaluation.getClass.getName
+    }
+    val sc = WorkflowContext(
+      batch,
+      env,
+      params.sparkEnv,
+      mode.capitalize)
+    val evaluationInstanceId = evaluationInstances.insert(evaluationInstance)
+
+    logger.info(s"Starting evaluation instance ID: $evaluationInstanceId")
+
+    val evaluatorResult: BaseEvaluatorResult = EvaluationWorkflow.runEvaluation(
+      sc,
+      evaluation,
+      engine,
+      engineParamsList,
+      evaluator,
+      params)
+
+    if (evaluatorResult.noSave) {
+      logger.info(s"This evaluation result is not inserted into database: $evaluatorResult")
+    } else {
+      val evaluatedEvaluationInstance = evaluationInstance.copy(
+        status = "EVALCOMPLETED",
+        id = evaluationInstanceId,
+        endTime = DateTime.now,
+        evaluatorResults = evaluatorResult.toOneLiner,
+        evaluatorResultsHTML = evaluatorResult.toHTML,
+        evaluatorResultsJSON = evaluatorResult.toJSON
+      )
+
+      logger.info(s"Updating evaluation instance with result: $evaluatorResult")
+
+      evaluationInstances.update(evaluatedEvaluationInstance)
+    }
+
+    logger.debug("Stop SparkContext")
+
+    sc.stop()
+
+    logger.info("runEvaluation completed")
+  }
+}
+
+
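
The model bytes persisted by runTrain above are read back on the serving side
with the same Kryo injection (see CreateServer.scala below). A hedged
round-trip sketch of that contract, using only calls that appear in these two
files:

```
// Round-trip sketch of the model persistence contract: training stores
// kryo(models); serving restores it with kryo.invert(...).get.
import org.apache.predictionio.workflow.KryoInstantiator

object KryoRoundTrip extends App {
  val kryo = KryoInstantiator.newKryoInjection
  val models: Seq[Any] = Seq("a-model", Vector(1.0, 2.0, 3.0))
  val bytes: Array[Byte] = kryo(models)                        // as in runTrain
  val restored = kryo.invert(bytes).get.asInstanceOf[Seq[Any]] // as in CreateServer
  println(restored)                                            // expect the original Seq
}
```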

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/core/src/main/scala/org/apache/predictionio/workflow/CreateServer.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/predictionio/workflow/CreateServer.scala b/core/src/main/scala/org/apache/predictionio/workflow/CreateServer.scala
new file mode 100644
index 0000000..d4f6323
--- /dev/null
+++ b/core/src/main/scala/org/apache/predictionio/workflow/CreateServer.scala
@@ -0,0 +1,737 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package org.apache.predictionio.workflow
+
+import java.io.PrintWriter
+import java.io.Serializable
+import java.io.StringWriter
+import java.util.concurrent.TimeUnit
+
+import akka.actor._
+import akka.event.Logging
+import akka.io.IO
+import akka.pattern.ask
+import akka.util.Timeout
+import com.github.nscala_time.time.Imports.DateTime
+import com.twitter.bijection.Injection
+import com.twitter.chill.KryoBase
+import com.twitter.chill.KryoInjection
+import com.twitter.chill.ScalaKryoInstantiator
+import com.typesafe.config.ConfigFactory
+import de.javakaffee.kryoserializers.SynchronizedCollectionsSerializer
+import grizzled.slf4j.Logging
+import org.apache.predictionio.authentication.KeyAuthentication
+import org.apache.predictionio.configuration.SSLConfiguration
+import org.apache.predictionio.controller.Engine
+import org.apache.predictionio.controller.Params
+import org.apache.predictionio.controller.Utils
+import org.apache.predictionio.controller.WithPrId
+import org.apache.predictionio.core.BaseAlgorithm
+import org.apache.predictionio.core.BaseServing
+import org.apache.predictionio.core.Doer
+import org.apache.predictionio.data.storage.EngineInstance
+import org.apache.predictionio.data.storage.EngineManifest
+import org.apache.predictionio.data.storage.Storage
+import org.apache.predictionio.workflow.JsonExtractorOption.JsonExtractorOption
+import org.json4s._
+import org.json4s.native.JsonMethods._
+import org.json4s.native.Serialization.write
+import spray.can.Http
+import spray.can.server.ServerSettings
+import spray.http.MediaTypes._
+import spray.http._
+import spray.httpx.Json4sSupport
+import spray.routing._
+import spray.routing.authentication.{UserPass, BasicAuth}
+
+import scala.concurrent.ExecutionContext.Implicits.global
+import scala.concurrent.Future
+import scala.concurrent.duration._
+import scala.concurrent.future
+import scala.language.existentials
+import scala.util.Failure
+import scala.util.Random
+import scala.util.Success
+import scalaj.http.HttpOptions
+
+class KryoInstantiator(classLoader: ClassLoader) extends ScalaKryoInstantiator {
+  override def newKryo(): KryoBase = {
+    val kryo = super.newKryo()
+    kryo.setClassLoader(classLoader)
+    SynchronizedCollectionsSerializer.registerSerializers(kryo)
+    kryo
+  }
+}
+
+object KryoInstantiator extends Serializable {
+  def newKryoInjection : Injection[Any, Array[Byte]] = {
+    val kryoInstantiator = new KryoInstantiator(getClass.getClassLoader)
+    KryoInjection.instance(kryoInstantiator)
+  }
+}
+
+case class ServerConfig(
+  batch: String = "",
+  engineInstanceId: String = "",
+  engineId: Option[String] = None,
+  engineVersion: Option[String] = None,
+  engineVariant: String = "",
+  env: Option[String] = None,
+  ip: String = "0.0.0.0",
+  port: Int = 8000,
+  feedback: Boolean = false,
+  eventServerIp: String = "0.0.0.0",
+  eventServerPort: Int = 7070,
+  accessKey: Option[String] = None,
+  logUrl: Option[String] = None,
+  logPrefix: Option[String] = None,
+  logFile: Option[String] = None,
+  verbose: Boolean = false,
+  debug: Boolean = false,
+  jsonExtractor: JsonExtractorOption = JsonExtractorOption.Both)
+
+case class StartServer()
+case class BindServer()
+case class StopServer()
+case class ReloadServer()
+case class UpgradeCheck()
+
+
+object CreateServer extends Logging {
+  val actorSystem = ActorSystem("pio-server")
+  val engineInstances = Storage.getMetaDataEngineInstances
+  val engineManifests = Storage.getMetaDataEngineManifests
+  val modeldata = Storage.getModelDataModels
+
+  def main(args: Array[String]): Unit = {
+    val parser = new scopt.OptionParser[ServerConfig]("CreateServer") {
+      opt[String]("batch") action { (x, c) =>
+        c.copy(batch = x)
+      } text("Batch label of the deployment.")
+      opt[String]("engineId") action { (x, c) =>
+        c.copy(engineId = Some(x))
+      } text("Engine ID.")
+      opt[String]("engineVersion") action { (x, c) =>
+        c.copy(engineVersion = Some(x))
+      } text("Engine version.")
+      opt[String]("engine-variant") required() action { (x, c) =>
+        c.copy(engineVariant = x)
+      } text("Engine variant JSON.")
+      opt[String]("ip") action { (x, c) =>
+        c.copy(ip = x)
+      }
+      opt[String]("env") action { (x, c) =>
+        c.copy(env = Some(x))
+      } text("Comma-separated list of environmental variables (in 'FOO=BAR' " +
+        "format) to pass to the Spark execution environment.")
+      opt[Int]("port") action { (x, c) =>
+        c.copy(port = x)
+      } text("Port to bind to (default: 8000).")
+      opt[String]("engineInstanceId") required() action { (x, c) =>
+        c.copy(engineInstanceId = x)
+      } text("Engine instance ID.")
+      opt[Unit]("feedback") action { (_, c) =>
+        c.copy(feedback = true)
+      } text("Enable feedback loop to event server.")
+      opt[String]("event-server-ip") action { (x, c) =>
+        c.copy(eventServerIp = x)
+      }
+      opt[Int]("event-server-port") action { (x, c) =>
+        c.copy(eventServerPort = x)
+      } text("Event server port. Default: 7070")
+      opt[String]("accesskey") action { (x, c) =>
+        c.copy(accessKey = Some(x))
+      } text("Event server access key.")
+      opt[String]("log-url") action { (x, c) =>
+        c.copy(logUrl = Some(x))
+      }
+      opt[String]("log-prefix") action { (x, c) =>
+        c.copy(logPrefix = Some(x))
+      }
+      opt[String]("log-file") action { (x, c) =>
+        c.copy(logFile = Some(x))
+      }
+      opt[Unit]("verbose") action { (x, c) =>
+        c.copy(verbose = true)
+      } text("Enable verbose output.")
+      opt[Unit]("debug") action { (x, c) =>
+        c.copy(debug = true)
+      } text("Enable debug output.")
+      opt[String]("json-extractor") action { (x, c) =>
+        c.copy(jsonExtractor = JsonExtractorOption.withName(x))
+      }
+    }
+
+    parser.parse(args, ServerConfig()) map { sc =>
+      WorkflowUtils.modifyLogging(sc.verbose)
+      engineInstances.get(sc.engineInstanceId) map { engineInstance =>
+        val engineId = sc.engineId.getOrElse(engineInstance.engineId)
+        val engineVersion = sc.engineVersion.getOrElse(
+          engineInstance.engineVersion)
+        engineManifests.get(engineId, engineVersion) map { manifest =>
+          val engineFactoryName = engineInstance.engineFactory
+          val upgrade = actorSystem.actorOf(Props(
+            classOf[UpgradeActor],
+            engineFactoryName))
+          actorSystem.scheduler.schedule(
+            0.seconds,
+            1.days,
+            upgrade,
+            UpgradeCheck())
+          val master = actorSystem.actorOf(Props(
+            classOf[MasterActor],
+            sc,
+            engineInstance,
+            engineFactoryName,
+            manifest),
+          "master")
+          implicit val timeout = Timeout(5.seconds)
+          master ? StartServer()
+          actorSystem.awaitTermination
+        } getOrElse {
+          error(s"Invalid engine ID or version. Aborting server.")
+        }
+      } getOrElse {
+        error(s"Invalid engine instance ID. Aborting server.")
+      }
+    }
+  }
+
+  def createServerActorWithEngine[TD, EIN, PD, Q, P, A](
+    sc: ServerConfig,
+    engineInstance: EngineInstance,
+    engine: Engine[TD, EIN, PD, Q, P, A],
+    engineLanguage: EngineLanguage.Value,
+    manifest: EngineManifest): ActorRef = {
+
+    val engineParams = engine.engineInstanceToEngineParams(engineInstance, sc.jsonExtractor)
+
+    val kryo = KryoInstantiator.newKryoInjection
+
+    val modelsFromEngineInstance =
+      kryo.invert(modeldata.get(engineInstance.id).get.models).get.
+      asInstanceOf[Seq[Any]]
+
+    val batch = if (engineInstance.batch.nonEmpty) {
+      s"${engineInstance.engineFactory} (${engineInstance.batch})"
+    } else {
+      engineInstance.engineFactory
+    }
+
+    val sparkContext = WorkflowContext(
+      batch = batch,
+      executorEnv = engineInstance.env,
+      mode = "Serving",
+      sparkEnv = engineInstance.sparkConf)
+
+    val models = engine.prepareDeploy(
+      sparkContext,
+      engineParams,
+      engineInstance.id,
+      modelsFromEngineInstance,
+      params = WorkflowParams()
+    )
+
+    val algorithms = engineParams.algorithmParamsList.map { case (n, p) =>
+      Doer(engine.algorithmClassMap(n), p)
+    }
+
+    val servingParamsWithName = engineParams.servingParams
+
+    val serving = Doer(engine.servingClassMap(servingParamsWithName._1),
+      servingParamsWithName._2)
+
+    actorSystem.actorOf(
+      Props(
+        classOf[ServerActor[Q, P]],
+        sc,
+        engineInstance,
+        engine,
+        engineLanguage,
+        manifest,
+        engineParams.dataSourceParams._2,
+        engineParams.preparatorParams._2,
+        algorithms,
+        engineParams.algorithmParamsList.map(_._2),
+        models,
+        serving,
+        engineParams.servingParams._2))
+  }
+}
+
+class UpgradeActor(engineClass: String) extends Actor {
+  val log = Logging(context.system, this)
+  implicit val system = context.system
+  def receive: Actor.Receive = {
+    case x: UpgradeCheck =>
+      WorkflowUtils.checkUpgrade("deployment", engineClass)
+  }
+}
+
+class MasterActor (
+    sc: ServerConfig,
+    engineInstance: EngineInstance,
+    engineFactoryName: String,
+    manifest: EngineManifest) extends Actor with SSLConfiguration with KeyAuthentication {
+  val log = Logging(context.system, this)
+  implicit val system = context.system
+  var sprayHttpListener: Option[ActorRef] = None
+  var currentServerActor: Option[ActorRef] = None
+  var retry = 3
+
+  def undeploy(ip: String, port: Int): Unit = {
+    val serverUrl = s"https://${ip}:${port}"
+    log.info(
+      s"Undeploying any existing engine instance at $serverUrl")
+    try {
+      val code = scalaj.http.Http(s"$serverUrl/stop")
+        .option(HttpOptions.allowUnsafeSSL)
+        .param(ServerKey.param, ServerKey.get)
+        .method("POST").asString.code
+      code match {
+        case 200 => Unit
+        case 404 => log.error(
+          s"Another process is using $serverUrl. Unable to undeploy.")
+        case _ => log.error(
+          s"Another process is using $serverUrl, or an existing " +
+          s"engine server is not responding properly (HTTP $code). " +
+          "Unable to undeploy.")
+      }
+    } catch {
+      case e: java.net.ConnectException =>
+        log.warning(s"Nothing at $serverUrl")
+      case _: Throwable =>
+        log.error("Another process might be occupying " +
+          s"$ip:$port. Unable to undeploy.")
+    }
+  }
+
+  def receive: Actor.Receive = {
+    case x: StartServer =>
+      val actor = createServerActor(
+        sc,
+        engineInstance,
+        engineFactoryName,
+        manifest)
+      currentServerActor = Some(actor)
+      undeploy(sc.ip, sc.port)
+      self ! BindServer()
+    case x: BindServer =>
+      currentServerActor map { actor =>
+        val settings = ServerSettings(system)
+        IO(Http) ! Http.Bind(
+          actor,
+          interface = sc.ip,
+          port = sc.port,
+          settings = Some(settings.copy(sslEncryption = true)))
+      } getOrElse {
+        log.error("Cannot bind a non-existing server backend.")
+      }
+    case x: StopServer =>
+      log.info(s"Stop server command received.")
+      sprayHttpListener.map { l =>
+        log.info("Server is shutting down.")
+        l ! Http.Unbind(5.seconds)
+        system.shutdown
+      } getOrElse {
+        log.warning("No active server is running.")
+      }
+    case x: ReloadServer =>
+      log.info("Reload server command received.")
+      val latestEngineInstance =
+        CreateServer.engineInstances.getLatestCompleted(
+          manifest.id,
+          manifest.version,
+          engineInstance.engineVariant)
+      latestEngineInstance map { lr =>
+        val actor = createServerActor(sc, lr, engineFactoryName, manifest)
+        sprayHttpListener.map { l =>
+          l ! Http.Unbind(5.seconds)
+          val settings = ServerSettings(system)
+          IO(Http) ! Http.Bind(
+            actor,
+            interface = sc.ip,
+            port = sc.port,
+            settings = Some(settings.copy(sslEncryption = true)))
+          currentServerActor.get ! Kill
+          currentServerActor = Some(actor)
+        } getOrElse {
+          log.warning("No active server is running. Abort reloading.")
+        }
+      } getOrElse {
+        log.warning(
+          s"No latest completed engine instance for ${manifest.id} " +
+          s"${manifest.version}. Abort reloading.")
+      }
+    case x: Http.Bound =>
+      val serverUrl = s"https://${sc.ip}:${sc.port}"
+      log.info(s"Engine is deployed and running. Engine API is live at ${serverUrl}.")
+      sprayHttpListener = Some(sender)
+    case x: Http.CommandFailed =>
+      if (retry > 0) {
+        retry -= 1
+        log.error(s"Bind failed. Retrying... ($retry more trial(s))")
+        context.system.scheduler.scheduleOnce(1.seconds) {
+          self ! BindServer()
+        }
+      } else {
+        log.error("Bind failed. Shutting down.")
+        system.shutdown
+      }
+  }
+
+  def createServerActor(
+      sc: ServerConfig,
+      engineInstance: EngineInstance,
+      engineFactoryName: String,
+      manifest: EngineManifest): ActorRef = {
+    val (engineLanguage, engineFactory) =
+      WorkflowUtils.getEngine(engineFactoryName, getClass.getClassLoader)
+    val engine = engineFactory()
+
+    // EngineFactory returns a base engine, which may not be deployable.
+    if (!engine.isInstanceOf[Engine[_,_,_,_,_,_]]) {
+      throw new NoSuchMethodException(s"Engine $engine is not deployable")
+    }
+
+    val deployableEngine = engine.asInstanceOf[Engine[_,_,_,_,_,_]]
+
+    CreateServer.createServerActorWithEngine(
+      sc,
+      engineInstance,
+      // engine,
+      deployableEngine,
+      engineLanguage,
+      manifest)
+  }
+}
+
+class ServerActor[Q, P](
+    val args: ServerConfig,
+    val engineInstance: EngineInstance,
+    val engine: Engine[_, _, _, Q, P, _],
+    val engineLanguage: EngineLanguage.Value,
+    val manifest: EngineManifest,
+    val dataSourceParams: Params,
+    val preparatorParams: Params,
+    val algorithms: Seq[BaseAlgorithm[_, _, Q, P]],
+    val algorithmsParams: Seq[Params],
+    val models: Seq[Any],
+    val serving: BaseServing[Q, P],
+    val servingParams: Params) extends Actor with HttpService with KeyAuthentication {
+  val serverStartTime = DateTime.now
+  val log = Logging(context.system, this)
+
+  var requestCount: Int = 0
+  var avgServingSec: Double = 0.0
+  var lastServingSec: Double = 0.0
+
+  /** The following is required by HttpService */
+  def actorRefFactory: ActorContext = context
+
+  implicit val timeout = Timeout(5, TimeUnit.SECONDS)
+  val pluginsActorRef =
+    context.actorOf(Props(classOf[PluginsActor], args.engineVariant), "PluginsActor")
+  val pluginContext = EngineServerPluginContext(log, args.engineVariant)
+
+  def receive: Actor.Receive = runRoute(myRoute)
+
+  val feedbackEnabled = if (args.feedback) {
+    if (args.accessKey.isEmpty) {
+      log.error("Feedback loop cannot be enabled because accessKey is empty.")
+      false
+    } else {
+      true
+    }
+  } else false
+
+  def remoteLog(logUrl: String, logPrefix: String, message: String): Unit = {
+    implicit val formats = Utils.json4sDefaultFormats
+    try {
+      scalaj.http.Http(logUrl).postData(
+        logPrefix + write(Map(
+          "engineInstance" -> engineInstance,
+          "message" -> message))).asString
+    } catch {
+      case e: Throwable =>
+        log.error(s"Unable to send remote log: ${e.getMessage}")
+    }
+  }
+
+  def getStackTraceString(e: Throwable): String = {
+    val writer = new StringWriter()
+    val printWriter = new PrintWriter(writer)
+    e.printStackTrace(printWriter)
+    writer.toString
+  }
+
+  val myRoute =
+    path("") {
+      get {
+        respondWithMediaType(`text/html`) {
+          detach() {
+            complete {
+              html.index(
+                args,
+                manifest,
+                engineInstance,
+                algorithms.map(_.toString),
+                algorithmsParams.map(_.toString),
+                models.map(_.toString),
+                dataSourceParams.toString,
+                preparatorParams.toString,
+                servingParams.toString,
+                serverStartTime,
+                feedbackEnabled,
+                args.eventServerIp,
+                args.eventServerPort,
+                requestCount,
+                avgServingSec,
+                lastServingSec
+              ).toString
+            }
+          }
+        }
+      }
+    } ~
+    path("queries.json") {
+      post {
+        detach() {
+          entity(as[String]) { queryString =>
+            try {
+              val servingStartTime = DateTime.now
+              val jsonExtractorOption = args.jsonExtractor
+              val queryTime = DateTime.now
+              // Extract Query from Json
+              val query = JsonExtractor.extract(
+                jsonExtractorOption,
+                queryString,
+                algorithms.head.queryClass,
+                algorithms.head.querySerializer,
+                algorithms.head.gsonTypeAdapterFactories
+              )
+              val queryJValue = JsonExtractor.toJValue(
+                jsonExtractorOption,
+                query,
+                algorithms.head.querySerializer,
+                algorithms.head.gsonTypeAdapterFactories)
+              // Deploy logic. First call Serving.supplement, then Algo.predict,
+              // finally Serving.serve.
+              val supplementedQuery = serving.supplementBase(query)
+              // TODO: Parallelize the following.
+              val predictions = algorithms.zipWithIndex.map { case (a, ai) =>
+                a.predictBase(models(ai), supplementedQuery)
+              }
+              // Notice that it is by design to call Serving.serve with the
+              // *original* query.
+              val prediction = serving.serveBase(query, predictions)
+              val predictionJValue = JsonExtractor.toJValue(
+                jsonExtractorOption,
+                prediction,
+                algorithms.head.querySerializer,
+                algorithms.head.gsonTypeAdapterFactories)
+              /** Handle feedback to Event Server
+                * Send the following back to the Event Server
+                * - appId
+                * - engineInstanceId
+                * - query
+                * - prediction
+                * - prId
+                */
+              val result = if (feedbackEnabled) {
+                implicit val formats =
+                  algorithms.headOption map { alg =>
+                    alg.querySerializer
+                  } getOrElse {
+                    Utils.json4sDefaultFormats
+                  }
+                // val genPrId = Random.alphanumeric.take(64).mkString
+                def genPrId: String = Random.alphanumeric.take(64).mkString
+                val newPrId = prediction match {
+                  case id: WithPrId =>
+                    val org = id.prId
+                    if (org.isEmpty) genPrId else org
+                  case _ => genPrId
+                }
+
+                // Also save the query's prId as the prId of this pio_pr predict event.
+                val queryPrId =
+                  query match {
+                    case id: WithPrId =>
+                      Map("prId" -> id.prId)
+                    case _ =>
+                      Map()
+                  }
+                val data = Map(
+                  // "appId" -> dataSourceParams.asInstanceOf[ParamsWithAppId].appId,
+                  "event" -> "predict",
+                  "eventTime" -> queryTime.toString(),
+                  "entityType" -> "pio_pr", // prediction result
+                  "entityId" -> newPrId,
+                  "properties" -> Map(
+                    "engineInstanceId" -> engineInstance.id,
+                    "query" -> query,
+                    "prediction" -> prediction)) ++ queryPrId
+                // At this point args.accessKey should be Some(String).
+                val accessKey = args.accessKey.getOrElse("")
+                val f: Future[Int] = future {
+                  scalaj.http.Http(
+                    s"http://${args.eventServerIp}:${args.eventServerPort}/" +
+                    s"events.json?accessKey=$accessKey").postData(
+                    write(data)).header(
+                    "content-type", "application/json").asString.code
+                }
+                f onComplete {
+                  case Success(code) => {
+                    if (code != 201) {
+                      log.error(s"Feedback event failed. Status code: $code."
+                        + s"Data: ${write(data)}.")
+                    }
+                  }
+                  case Failure(t) => {
+                    log.error(s"Feedback event failed: ${t.getMessage}") }
+                }
+                // overwrite prId in predictedResult
+                // - if it is WithPrId,
+                //   then overwrite with new prId
+                // - if it is not WithPrId, no prId injection
+                if (prediction.isInstanceOf[WithPrId]) {
+                  predictionJValue merge parse(s"""{"prId" : "$newPrId"}""")
+                } else {
+                  predictionJValue
+                }
+              } else predictionJValue
+
+              val pluginResult =
+                pluginContext.outputBlockers.values.foldLeft(result) { case (r, p) =>
+                  p.process(engineInstance, queryJValue, r, pluginContext)
+                }
+
+              // Bookkeeping
+              val servingEndTime = DateTime.now
+              lastServingSec =
+                (servingEndTime.getMillis - servingStartTime.getMillis) / 1000.0
+              avgServingSec =
+                ((avgServingSec * requestCount) + lastServingSec) /
+                (requestCount + 1)
+              requestCount += 1
+
+              respondWithMediaType(`application/json`) {
+                complete(compact(render(pluginResult)))
+              }
+            } catch {
+              case e: MappingException =>
+                log.error(
+                  s"Query '$queryString' is invalid. Reason: ${e.getMessage}")
+                args.logUrl map { url =>
+                  remoteLog(
+                    url,
+                    args.logPrefix.getOrElse(""),
+                    s"Query:\n$queryString\n\nStack Trace:\n" +
+                      s"${getStackTraceString(e)}\n\n")
+                  }
+                complete(StatusCodes.BadRequest, e.getMessage)
+              case e: Throwable =>
+                val msg = s"Query:\n$queryString\n\nStack Trace:\n" +
+                  s"${getStackTraceString(e)}\n\n"
+                log.error(msg)
+                args.logUrl map { url =>
+                  remoteLog(
+                    url,
+                    args.logPrefix.getOrElse(""),
+                    msg)
+                  }
+                complete(StatusCodes.InternalServerError, msg)
+            }
+          }
+        }
+      }
+    } ~
+    path("reload") {
+      authenticate(withAccessKeyFromFile) { request =>
+        post {
+          complete {
+            context.actorSelection("/user/master") ! ReloadServer()
+            "Reloading..."
+          }
+        }
+      }
+    } ~
+    path("stop") {
+      authenticate(withAccessKeyFromFile) { request =>
+        post {
+          complete {
+            context.system.scheduler.scheduleOnce(1.seconds) {
+              context.actorSelection("/user/master") ! StopServer()
+            }
+            "Shutting down..."
+          }
+        }
+      }
+    } ~
+    pathPrefix("assets") {
+      getFromResourceDirectory("assets")
+    } ~
+    path("plugins.json") {
+      import EngineServerJson4sSupport._
+      get {
+        respondWithMediaType(MediaTypes.`application/json`) {
+          complete {
+            Map("plugins" -> Map(
+              "outputblockers" -> pluginContext.outputBlockers.map { case (n, p) =>
+                n -> Map(
+                  "name" -> p.pluginName,
+                  "description" -> p.pluginDescription,
+                  "class" -> p.getClass.getName,
+                  "params" -> pluginContext.pluginParams(p.pluginName))
+              },
+              "outputsniffers" -> pluginContext.outputSniffers.map { case (n, p) =>
+                n -> Map(
+                  "name" -> p.pluginName,
+                  "description" -> p.pluginDescription,
+                  "class" -> p.getClass.getName,
+                  "params" -> pluginContext.pluginParams(p.pluginName))
+              }
+            ))
+          }
+        }
+      }
+    } ~
+    path("plugins" / Segments) { segments =>
+      import EngineServerJson4sSupport._
+      get {
+        respondWithMediaType(MediaTypes.`application/json`) {
+          complete {
+            val pluginArgs = segments.drop(2)
+            val pluginType = segments(0)
+            val pluginName = segments(1)
+            pluginType match {
+              case EngineServerPlugin.outputSniffer =>
+                pluginsActorRef ? PluginsActor.HandleREST(
+                  pluginName = pluginName,
+                  pluginArgs = pluginArgs) map {
+                  _.asInstanceOf[String]
+                }
+            }
+          }
+        }
+      }
+    }
+}
+
+object EngineServerJson4sSupport extends Json4sSupport {
+  implicit def json4sFormats: Formats = DefaultFormats
+}
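
For orientation, the plugin endpoints above can be exercised with plain HTTP
once an engine is deployed. A minimal sketch, assuming the default deploy port
of 8000 and a hypothetical output sniffer named "querylogger"; the second
request is routed through the plugins actor with pluginArgs = Seq("stats"):

    curl http://localhost:8000/plugins.json
    curl http://localhost:8000/plugins/outputsniffer/querylogger/stats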

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/core/src/main/scala/org/apache/predictionio/workflow/CreateWorkflow.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/predictionio/workflow/CreateWorkflow.scala b/core/src/main/scala/org/apache/predictionio/workflow/CreateWorkflow.scala
new file mode 100644
index 0000000..a4f3227
--- /dev/null
+++ b/core/src/main/scala/org/apache/predictionio/workflow/CreateWorkflow.scala
@@ -0,0 +1,274 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package org.apache.predictionio.workflow
+
+import java.net.URI
+
+import com.github.nscala_time.time.Imports._
+import com.google.common.io.ByteStreams
+import grizzled.slf4j.Logging
+import org.apache.predictionio.controller.Engine
+import org.apache.predictionio.core.BaseEngine
+import org.apache.predictionio.data.storage.EngineInstance
+import org.apache.predictionio.data.storage.EvaluationInstance
+import org.apache.predictionio.data.storage.Storage
+import org.apache.predictionio.workflow.JsonExtractorOption.JsonExtractorOption
+import org.apache.hadoop.conf.Configuration
+import org.apache.hadoop.fs.FileSystem
+import org.apache.hadoop.fs.Path
+import org.json4s.JValue
+import org.json4s.JString
+import org.json4s.native.JsonMethods.parse
+
+import scala.language.existentials
+
+object CreateWorkflow extends Logging {
+
+  case class WorkflowConfig(
+    deployMode: String = "",
+    batch: String = "",
+    engineId: String = "",
+    engineVersion: String = "",
+    engineVariant: String = "",
+    engineFactory: String = "",
+    engineParamsKey: String = "",
+    evaluationClass: Option[String] = None,
+    engineParamsGeneratorClass: Option[String] = None,
+    env: Option[String] = None,
+    skipSanityCheck: Boolean = false,
+    stopAfterRead: Boolean = false,
+    stopAfterPrepare: Boolean = false,
+    verbosity: Int = 0,
+    verbose: Boolean = false,
+    debug: Boolean = false,
+    logFile: Option[String] = None,
+    jsonExtractor: JsonExtractorOption = JsonExtractorOption.Both)
+
+  case class AlgorithmParams(name: String, params: JValue)
+
+  private def stringFromFile(filePath: String): String = {
+    try {
+      val uri = new URI(filePath)
+      val fs = FileSystem.get(uri, new Configuration())
+      new String(ByteStreams.toByteArray(fs.open(new Path(uri))).map(_.toChar))
+    } catch {
+      case e: java.io.IOException =>
+        error(s"Error reading from file: ${e.getMessage}. Aborting workflow.")
+        sys.exit(1)
+    }
+  }
+
+  val parser = new scopt.OptionParser[WorkflowConfig]("CreateWorkflow") {
+    override def errorOnUnknownArgument: Boolean = false
+    opt[String]("batch") action { (x, c) =>
+      c.copy(batch = x)
+    } text("Batch label of the workflow run.")
+    opt[String]("engine-id") required() action { (x, c) =>
+      c.copy(engineId = x)
+    } text("Engine's ID.")
+    opt[String]("engine-version") required() action { (x, c) =>
+      c.copy(engineVersion = x)
+    } text("Engine's version.")
+    opt[String]("engine-variant") required() action { (x, c) =>
+      c.copy(engineVariant = x)
+    } text("Engine variant JSON.")
+    opt[String]("evaluation-class") action { (x, c) =>
+      c.copy(evaluationClass = Some(x))
+    } text("Class name of the run's evaluator.")
+    opt[String]("engine-params-generator-class") action { (x, c) =>
+      c.copy(engineParamsGeneratorClass = Some(x))
+    } text("Path to evaluator parameters")
+    opt[String]("env") action { (x, c) =>
+      c.copy(env = Some(x))
+    } text("Comma-separated list of environmental variables (in 'FOO=BAR' " +
+      "format) to pass to the Spark execution environment.")
+    opt[Unit]("verbose") action { (x, c) =>
+      c.copy(verbose = true)
+    } text("Enable verbose output.")
+    opt[Unit]("debug") action { (x, c) =>
+      c.copy(debug = true)
+    } text("Enable debug output.")
+    opt[Unit]("skip-sanity-check") action { (x, c) =>
+      c.copy(skipSanityCheck = true)
+    }
+    opt[Unit]("stop-after-read") action { (x, c) =>
+      c.copy(stopAfterRead = true)
+    }
+    opt[Unit]("stop-after-prepare") action { (x, c) =>
+      c.copy(stopAfterPrepare = true)
+    }
+    opt[String]("deploy-mode") action { (x, c) =>
+      c.copy(deployMode = x)
+    }
+    opt[Int]("verbosity") action { (x, c) =>
+      c.copy(verbosity = x)
+    }
+    opt[String]("engine-factory") action { (x, c) =>
+      c.copy(engineFactory = x)
+    }
+    opt[String]("engine-params-key") action { (x, c) =>
+      c.copy(engineParamsKey = x)
+    }
+    opt[String]("log-file") action { (x, c) =>
+      c.copy(logFile = Some(x))
+    }
+    opt[String]("json-extractor") action { (x, c) =>
+      c.copy(jsonExtractor = JsonExtractorOption.withName(x))
+    }
+  }
+
+  def main(args: Array[String]): Unit = {
+    val wfcOpt = parser.parse(args, WorkflowConfig())
+    if (wfcOpt.isEmpty) {
+      logger.error("WorkflowConfig is empty. Quitting")
+      return
+    }
+
+    val wfc = wfcOpt.get
+
+    WorkflowUtils.modifyLogging(wfc.verbose)
+
+    val evaluation = wfc.evaluationClass.map { ec =>
+      try {
+        WorkflowUtils.getEvaluation(ec, getClass.getClassLoader)._2
+      } catch {
+        case e @ (_: ClassNotFoundException | _: NoSuchMethodException) =>
+          error(s"Unable to obtain evaluation $ec. Aborting workflow.", e)
+          sys.exit(1)
+      }
+    }
+
+    val engineParamsGenerator = wfc.engineParamsGeneratorClass.map { epg =>
+      try {
+        WorkflowUtils.getEngineParamsGenerator(epg, getClass.getClassLoader)._2
+      } catch {
+        case e @ (_: ClassNotFoundException | _: NoSuchMethodException) =>
+          error(s"Unable to obtain engine parameters generator $epg. " +
+            "Aborting workflow.", e)
+          sys.exit(1)
+      }
+    }
+
+    val pioEnvVars = wfc.env.map(e =>
+      e.split(',').flatMap(p =>
+        p.split('=') match {
+          case Array(k, v) => List(k -> v)
+          case _ => Nil
+        }
+      ).toMap
+    ).getOrElse(Map())
+
+    if (evaluation.isEmpty) {
+      val variantJson = parse(stringFromFile(wfc.engineVariant))
+      val engineFactory = if (wfc.engineFactory == "") {
+        variantJson \ "engineFactory" match {
+          case JString(s) => s
+          case _ =>
+            error("Unable to read engine factory class name from " +
+              s"${wfc.engineVariant}. Aborting.")
+            sys.exit(1)
+        }
+      } else wfc.engineFactory
+      val variantId = variantJson \ "id" match {
+        case JString(s) => s
+        case _ =>
+          error("Unable to read engine variant ID from " +
+            s"${wfc.engineVariant}. Aborting.")
+          sys.exit(1)
+      }
+      val (engineLanguage, engineFactoryObj) = try {
+        WorkflowUtils.getEngine(engineFactory, getClass.getClassLoader)
+      } catch {
+        case e @ (_: ClassNotFoundException | _: NoSuchMethodException) =>
+          error(s"Unable to obtain engine: ${e.getMessage}. Aborting workflow.")
+          sys.exit(1)
+      }
+
+      val engine: BaseEngine[_, _, _, _] = engineFactoryObj()
+
+      val customSparkConf = WorkflowUtils.extractSparkConf(variantJson)
+      val workflowParams = WorkflowParams(
+        verbose = wfc.verbosity,
+        skipSanityCheck = wfc.skipSanityCheck,
+        stopAfterRead = wfc.stopAfterRead,
+        stopAfterPrepare = wfc.stopAfterPrepare,
+        sparkEnv = WorkflowParams().sparkEnv ++ customSparkConf)
+
+      // Evaluator not specified; do training.
+      if (!engine.isInstanceOf[Engine[_, _, _, _, _, _]]) {
+        throw new NoSuchMethodException(s"Engine $engine is not trainable")
+      }
+
+      val trainableEngine = engine.asInstanceOf[Engine[_, _, _, _, _, _]]
+
+      val engineParams = if (wfc.engineParamsKey == "") {
+        trainableEngine.jValueToEngineParams(variantJson, wfc.jsonExtractor)
+      } else {
+        engineFactoryObj.engineParams(wfc.engineParamsKey)
+      }
+
+      val engineInstance = EngineInstance(
+        id = "",
+        status = "INIT",
+        startTime = DateTime.now,
+        endTime = DateTime.now,
+        engineId = wfc.engineId,
+        engineVersion = wfc.engineVersion,
+        engineVariant = variantId,
+        engineFactory = engineFactory,
+        batch = wfc.batch,
+        env = pioEnvVars,
+        sparkConf = workflowParams.sparkEnv,
+        dataSourceParams =
+          JsonExtractor.paramToJson(wfc.jsonExtractor, engineParams.dataSourceParams),
+        preparatorParams =
+          JsonExtractor.paramToJson(wfc.jsonExtractor, engineParams.preparatorParams),
+        algorithmsParams =
+          JsonExtractor.paramsToJson(wfc.jsonExtractor, engineParams.algorithmParamsList),
+        servingParams =
+          JsonExtractor.paramToJson(wfc.jsonExtractor, engineParams.servingParams))
+
+      val engineInstanceId = Storage.getMetaDataEngineInstances.insert(
+        engineInstance)
+
+      CoreWorkflow.runTrain(
+        env = pioEnvVars,
+        params = workflowParams,
+        engine = trainableEngine,
+        engineParams = engineParams,
+        engineInstance = engineInstance.copy(id = engineInstanceId))
+    } else {
+      val workflowParams = WorkflowParams(
+        verbose = wfc.verbosity,
+        skipSanityCheck = wfc.skipSanityCheck,
+        stopAfterRead = wfc.stopAfterRead,
+        stopAfterPrepare = wfc.stopAfterPrepare,
+        sparkEnv = WorkflowParams().sparkEnv)
+      val evaluationInstance = EvaluationInstance(
+        evaluationClass = wfc.evaluationClass.get,
+        engineParamsGeneratorClass = wfc.engineParamsGeneratorClass.get,
+        batch = wfc.batch,
+        env = pioEnvVars,
+        sparkConf = workflowParams.sparkEnv
+      )
+      Workflow.runEvaluation(
+        evaluation = evaluation.get,
+        engineParamsGenerator = engineParamsGenerator.get,
+        evaluationInstance = evaluationInstance,
+        params = workflowParams)
+    }
+  }
+}
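
As a usage sketch: this entry point is normally launched by the pio command
line through spark-submit. Invoked directly, a minimal run would look like the
following, where the assembly JAR path and engine identifiers are hypothetical
placeholders and only the three required options are shown:

    spark-submit \
      --class org.apache.predictionio.workflow.CreateWorkflow \
      pio-assembly.jar \
      --engine-id com.example.MyEngine \
      --engine-version 1.0 \
      --engine-variant engine.json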

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/core/src/main/scala/org/apache/predictionio/workflow/EngineServerPlugin.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/predictionio/workflow/EngineServerPlugin.scala b/core/src/main/scala/org/apache/predictionio/workflow/EngineServerPlugin.scala
new file mode 100644
index 0000000..5393e71
--- /dev/null
+++ b/core/src/main/scala/org/apache/predictionio/workflow/EngineServerPlugin.scala
@@ -0,0 +1,40 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package org.apache.predictionio.workflow
+
+import org.apache.predictionio.data.storage.EngineInstance
+import org.json4s._
+
+trait EngineServerPlugin {
+  val pluginName: String
+  val pluginDescription: String
+  val pluginType: String
+
+  def start(context: EngineServerPluginContext): Unit
+
+  def process(
+    engineInstance: EngineInstance,
+    query: JValue,
+    prediction: JValue,
+    context: EngineServerPluginContext): JValue
+
+  def handleREST(arguments: Seq[String]): String
+}
+
+object EngineServerPlugin {
+  val outputBlocker = "outputblocker"
+  val outputSniffer = "outputsniffer"
+}
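
To make the contract concrete, below is a minimal sketch of an output sniffer
implementing this trait. The class and plugin name are hypothetical, and a
real plugin must also be declared as a Java service (in
META-INF/services/org.apache.predictionio.workflow.EngineServerPlugin),
since plugins are discovered through ServiceLoader as shown in
EngineServerPluginContext below:

    package com.example

    import org.apache.predictionio.data.storage.EngineInstance
    import org.apache.predictionio.workflow.EngineServerPlugin
    import org.apache.predictionio.workflow.EngineServerPluginContext
    import org.json4s.JValue

    class QueryLoggerPlugin extends EngineServerPlugin {
      val pluginName = "querylogger"
      val pluginDescription = "Logs every query/prediction pair"
      val pluginType = EngineServerPlugin.outputSniffer

      def start(context: EngineServerPluginContext): Unit =
        context.log.info(s"$pluginName started")

      // For sniffers dispatched through the plugins actor, the return value
      // is discarded, so simply pass the prediction through.
      def process(
        engineInstance: EngineInstance,
        query: JValue,
        prediction: JValue,
        context: EngineServerPluginContext): JValue = {
        context.log.info(s"query: $query prediction: $prediction")
        prediction
      }

      def handleREST(arguments: Seq[String]): String =
        """{"status": "ok"}"""
    }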

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/core/src/main/scala/org/apache/predictionio/workflow/EngineServerPluginContext.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/predictionio/workflow/EngineServerPluginContext.scala b/core/src/main/scala/org/apache/predictionio/workflow/EngineServerPluginContext.scala
new file mode 100644
index 0000000..3742e01
--- /dev/null
+++ b/core/src/main/scala/org/apache/predictionio/workflow/EngineServerPluginContext.scala
@@ -0,0 +1,88 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package org.apache.predictionio.workflow
+
+import java.net.URI
+import java.util.ServiceLoader
+
+import akka.event.LoggingAdapter
+import com.google.common.io.ByteStreams
+import grizzled.slf4j.Logging
+import org.apache.hadoop.conf.Configuration
+import org.apache.hadoop.fs.FileSystem
+import org.apache.hadoop.fs.Path
+import org.json4s.DefaultFormats
+import org.json4s.Formats
+import org.json4s.JObject
+import org.json4s.JValue
+import org.json4s.native.JsonMethods._
+
+import scala.collection.JavaConversions._
+import scala.collection.mutable
+
+class EngineServerPluginContext(
+    val plugins: mutable.Map[String, mutable.Map[String, EngineServerPlugin]],
+    val pluginParams: mutable.Map[String, JValue],
+    val log: LoggingAdapter) {
+  def outputBlockers: Map[String, EngineServerPlugin] =
+    plugins.getOrElse(EngineServerPlugin.outputBlocker, Map()).toMap
+  def outputSniffers: Map[String, EngineServerPlugin] =
+    plugins.getOrElse(EngineServerPlugin.outputSniffer, Map()).toMap
+}
+
+object EngineServerPluginContext extends Logging {
+  implicit val formats: Formats = DefaultFormats
+
+  def apply(log: LoggingAdapter, engineVariant: String): EngineServerPluginContext = {
+    val plugins = mutable.Map[String, mutable.Map[String, EngineServerPlugin]](
+      EngineServerPlugin.outputBlocker -> mutable.Map(),
+      EngineServerPlugin.outputSniffer -> mutable.Map())
+    val pluginParams = mutable.Map[String, JValue]()
+    val serviceLoader = ServiceLoader.load(classOf[EngineServerPlugin])
+    val variantJson = parse(stringFromFile(engineVariant))
+    (variantJson \ "plugins").extractOpt[JObject].foreach { pluginDefs =>
+      pluginDefs.obj.foreach { pluginParams += _ }
+    }
+    serviceLoader foreach { service =>
+      pluginParams.get(service.pluginName) map { params =>
+        if ((params \ "enabled").extractOrElse(false)) {
+          info(s"Plugin ${service.pluginName} is enabled.")
+          plugins(service.pluginType) += service.pluginName -> service
+        } else {
+          info(s"Plugin ${service.pluginName} is disabled.")
+        }
+      } getOrElse {
+        info(s"Plugin ${service.pluginName} is disabled.")
+      }
+    }
+    new EngineServerPluginContext(
+      plugins,
+      pluginParams,
+      log)
+  }
+
+  private def stringFromFile(filePath: String): String = {
+    try {
+      val uri = new URI(filePath)
+      val fs = FileSystem.get(uri, new Configuration())
+      new String(ByteStreams.toByteArray(fs.open(new Path(uri))).map(_.toChar))
+    } catch {
+      case e: java.io.IOException =>
+        error(s"Error reading from file: ${e.getMessage}. Aborting.")
+        sys.exit(1)
+    }
+  }
+}
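
Concretely, the "plugins" section read above lives in the engine variant JSON,
keyed by plugin name, and a plugin is only activated when its parameters carry
"enabled": true. A minimal sketch with a hypothetical plugin name:

    {
      "id": "default",
      "engineFactory": "com.example.MyEngine",
      "plugins": {
        "querylogger": {
          "enabled": true
        }
      }
    }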

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/core/src/main/scala/org/apache/predictionio/workflow/EngineServerPluginsActor.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/predictionio/workflow/EngineServerPluginsActor.scala b/core/src/main/scala/org/apache/predictionio/workflow/EngineServerPluginsActor.scala
new file mode 100644
index 0000000..0068751
--- /dev/null
+++ b/core/src/main/scala/org/apache/predictionio/workflow/EngineServerPluginsActor.scala
@@ -0,0 +1,46 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package org.apache.predictionio.workflow
+
+import akka.actor.Actor
+import akka.event.Logging
+import org.apache.predictionio.data.storage.EngineInstance
+import org.json4s.JValue
+
+class PluginsActor(engineVariant: String) extends Actor {
+  implicit val system = context.system
+  val log = Logging(system, this)
+
+  val pluginContext = EngineServerPluginContext(log, engineVariant)
+
+  def receive: PartialFunction[Any, Unit] = {
+    case (ei: EngineInstance, q: JValue, p: JValue) =>
+      pluginContext.outputSniffers.values.foreach(_.process(ei, q, p, pluginContext))
+    case h: PluginsActor.HandleREST =>
+      try {
+        sender() ! pluginContext.outputSniffers(h.pluginName).handleREST(h.pluginArgs)
+      } catch {
+        case e: Exception =>
+          sender() ! s"""{"message":"${e.getMessage}"}"""
+      }
+    case _ =>
+      log.error("Unknown message sent to the Engine Server output sniffer plugin host.")
+  }
+}
+
+object PluginsActor {
+  case class HandleREST(pluginName: String, pluginArgs: Seq[String])
+}
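
As in the engine server route shown earlier, the actor is queried with the ask
pattern and replies with a JSON string. A minimal sketch, assuming an ActorRef
to this actor and an execution context in scope:

    import akka.pattern.ask
    import akka.util.Timeout
    import scala.concurrent.Future
    import scala.concurrent.duration._

    implicit val timeout: Timeout = Timeout(5.seconds)
    val result: Future[String] =
      (pluginsActorRef ? PluginsActor.HandleREST(
        pluginName = "querylogger",
        pluginArgs = Seq("stats"))).mapTo[String]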

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/core/src/main/scala/org/apache/predictionio/workflow/EvaluationWorkflow.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/predictionio/workflow/EvaluationWorkflow.scala b/core/src/main/scala/org/apache/predictionio/workflow/EvaluationWorkflow.scala
new file mode 100644
index 0000000..6c7e731
--- /dev/null
+++ b/core/src/main/scala/org/apache/predictionio/workflow/EvaluationWorkflow.scala
@@ -0,0 +1,42 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package org.apache.predictionio.workflow
+
+import org.apache.predictionio.controller.EngineParams
+import org.apache.predictionio.controller.Evaluation
+import org.apache.predictionio.core.BaseEvaluator
+import org.apache.predictionio.core.BaseEvaluatorResult
+import org.apache.predictionio.core.BaseEngine
+
+import grizzled.slf4j.Logger
+import org.apache.spark.SparkContext
+
+import scala.language.existentials
+
+object EvaluationWorkflow {
+  @transient lazy val logger = Logger[this.type]
+  def runEvaluation[EI, Q, P, A, R <: BaseEvaluatorResult](
+      sc: SparkContext,
+      evaluation: Evaluation,
+      engine: BaseEngine[EI, Q, P, A],
+      engineParamsList: Seq[EngineParams],
+      evaluator: BaseEvaluator[EI, Q, P, A, R],
+      params: WorkflowParams)
+    : R = {
+    val engineEvalDataSet = engine.batchEval(sc, engineParamsList, params)
+    evaluator.evaluateBase(sc, evaluation, engineEvalDataSet, params)
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/core/src/main/scala/org/apache/predictionio/workflow/FakeWorkflow.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/predictionio/workflow/FakeWorkflow.scala b/core/src/main/scala/org/apache/predictionio/workflow/FakeWorkflow.scala
new file mode 100644
index 0000000..f11ea2e
--- /dev/null
+++ b/core/src/main/scala/org/apache/predictionio/workflow/FakeWorkflow.scala
@@ -0,0 +1,106 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package org.apache.predictionio.workflow
+
+import org.apache.predictionio.annotation.Experimental
+// FIXME(yipjustin): Remove wildcard import.
+import org.apache.predictionio.core._
+import org.apache.predictionio.controller._
+
+import grizzled.slf4j.Logger
+import org.apache.spark.SparkContext
+import org.apache.spark.SparkContext._
+import org.apache.spark.rdd.RDD
+
+
+@Experimental
+private[predictionio] class FakeEngine
+extends BaseEngine[EmptyParams, EmptyParams, EmptyParams, EmptyParams] {
+  @transient lazy val logger = Logger[this.type]
+
+  def train(
+    sc: SparkContext,
+    engineParams: EngineParams,
+    engineInstanceId: String,
+    params: WorkflowParams): Seq[Any] = {
+    throw new StopAfterReadInterruption()
+  }
+
+  def eval(
+    sc: SparkContext,
+    engineParams: EngineParams,
+    params: WorkflowParams)
+  : Seq[(EmptyParams, RDD[(EmptyParams, EmptyParams, EmptyParams)])] = {
+    Seq[(EmptyParams, RDD[(EmptyParams, EmptyParams, EmptyParams)])]()
+  }
+}
+
+@Experimental
+private[predictionio] class FakeRunner(f: (SparkContext => Unit))
+    extends BaseEvaluator[EmptyParams, EmptyParams, EmptyParams, EmptyParams,
+      FakeEvalResult] {
+  @transient private lazy val logger = Logger[this.type]
+  def evaluateBase(
+    sc: SparkContext,
+    evaluation: Evaluation,
+    engineEvalDataSet:
+        Seq[(EngineParams, Seq[(EmptyParams, RDD[(EmptyParams, EmptyParams, EmptyParams)])])],
+    params: WorkflowParams): FakeEvalResult = {
+    f(sc)
+    FakeEvalResult()
+  }
+}
+
+@Experimental
+private[predictionio] case class FakeEvalResult() extends BaseEvaluatorResult {
+  override val noSave: Boolean = true
+}
+
+/** FakeRun allows users to implement a custom function that runs in the exact
+  * environment as any other PredictionIO workflow.
+  *
+  * Useful for developing new features. Simply extend this trait and implement
+  * a function of type (SparkContext => Unit). For example, the code below can
+  * be run with `pio eval HelloWorld`.
+  *
+  * {{{
+  * object HelloWorld extends FakeRun {
+  *   // func defines the function pio runs; it must have signature (SparkContext => Unit).
+  *   func = f
+  *
+  *   def f(sc: SparkContext): Unit = {
+  *     val logger = Logger[this.type]
+  *     logger.info("HelloWorld")
+  *   }
+  * }
+  * }}}
+  *
+  */
+@Experimental
+trait FakeRun extends Evaluation with EngineParamsGenerator {
+  private[this] var _runner: FakeRunner = _
+
+  def runner: FakeRunner = _runner
+  def runner_=(r: FakeRunner) {
+    engineEvaluator = (new FakeEngine(), r)
+    engineParamsList = Seq(new EngineParams())
+  }
+
+  def func: (SparkContext => Unit) = { (sc: SparkContext) => () }
+  def func_=(f: SparkContext => Unit) {
+    runner = new FakeRunner(f)
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/core/src/main/scala/org/apache/predictionio/workflow/JsonExtractor.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/predictionio/workflow/JsonExtractor.scala b/core/src/main/scala/org/apache/predictionio/workflow/JsonExtractor.scala
new file mode 100644
index 0000000..b9737a6
--- /dev/null
+++ b/core/src/main/scala/org/apache/predictionio/workflow/JsonExtractor.scala
@@ -0,0 +1,164 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package org.apache.predictionio.workflow
+
+import com.google.gson.Gson
+import com.google.gson.GsonBuilder
+import com.google.gson.TypeAdapterFactory
+import org.apache.predictionio.controller.EngineParams
+import org.apache.predictionio.controller.Params
+import org.apache.predictionio.controller.Utils
+import org.apache.predictionio.workflow.JsonExtractorOption.JsonExtractorOption
+import org.json4s.Extraction
+import org.json4s.Formats
+import org.json4s.JsonAST.{JArray, JValue}
+import org.json4s.native.JsonMethods.compact
+import org.json4s.native.JsonMethods.pretty
+import org.json4s.native.JsonMethods.parse
+import org.json4s.native.JsonMethods.render
+import org.json4s.reflect.TypeInfo
+
+object JsonExtractor {
+
+  def toJValue(
+    extractorOption: JsonExtractorOption,
+    o: Any,
+    json4sFormats: Formats = Utils.json4sDefaultFormats,
+    gsonTypeAdapterFactories: Seq[TypeAdapterFactory] = Seq.empty[TypeAdapterFactory]): JValue = {
+
+    extractorOption match {
+      case JsonExtractorOption.Both =>
+
+          val json4sResult = Extraction.decompose(o)(json4sFormats)
+          json4sResult.children.size match {
+            case 0 => parse(gson(gsonTypeAdapterFactories).toJson(o))
+            case _ => json4sResult
+          }
+      case JsonExtractorOption.Json4sNative =>
+        Extraction.decompose(o)(json4sFormats)
+      case JsonExtractorOption.Gson =>
+        parse(gson(gsonTypeAdapterFactories).toJson(o))
+    }
+  }
+
+  def extract[T](
+    extractorOption: JsonExtractorOption,
+    json: String,
+    clazz: Class[T],
+    json4sFormats: Formats = Utils.json4sDefaultFormats,
+    gsonTypeAdapterFactories: Seq[TypeAdapterFactory] = Seq.empty[TypeAdapterFactory]): T = {
+
+    extractorOption match {
+      case JsonExtractorOption.Both =>
+        try {
+          extractWithJson4sNative(json, json4sFormats, clazz)
+        } catch {
+          case e: Exception =>
+            extractWithGson(json, clazz, gsonTypeAdapterFactories)
+        }
+      case JsonExtractorOption.Json4sNative =>
+        extractWithJson4sNative(json, json4sFormats, clazz)
+      case JsonExtractorOption.Gson =>
+        extractWithGson(json, clazz, gsonTypeAdapterFactories)
+    }
+  }
+
+  def paramToJson(extractorOption: JsonExtractorOption, param: (String, Params)): String = {
+    // The to-be-replaced JValue must be produced by Json4s; otherwise the tuple JValue will be wrong
+    val toBeReplacedJValue =
+      JsonExtractor.toJValue(JsonExtractorOption.Json4sNative, (param._1, null))
+    val paramJValue = JsonExtractor.toJValue(extractorOption, param._2)
+
+    compact(render(toBeReplacedJValue.replace(param._1 :: Nil, paramJValue)))
+  }
+
+  def paramsToJson(extractorOption: JsonExtractorOption, params: Seq[(String, Params)]): String = {
+    compact(render(paramsToJValue(extractorOption, params)))
+  }
+
+  def engineParamsToJson(extractorOption: JsonExtractorOption, params: EngineParams) : String = {
+    compact(render(engineParamsToJValue(extractorOption, params)))
+  }
+
+  def engineParamstoPrettyJson(
+    extractorOption: JsonExtractorOption,
+    params: EngineParams) : String = {
+
+    pretty(render(engineParamsToJValue(extractorOption, params)))
+  }
+
+  private def engineParamsToJValue(extractorOption: JsonExtractorOption, params: EngineParams) = {
+    var jValue = toJValue(JsonExtractorOption.Json4sNative, params)
+
+    val dataSourceParamsJValue = toJValue(extractorOption, params.dataSourceParams._2)
+    jValue = jValue.replace(
+      "dataSourceParams" :: params.dataSourceParams._1 :: Nil,
+      dataSourceParamsJValue)
+
+    val preparatorParamsJValue = toJValue(extractorOption, params.preparatorParams._2)
+    jValue = jValue.replace(
+      "preparatorParams" :: params.preparatorParams._1 :: Nil,
+      preparatorParamsJValue)
+
+    val algorithmParamsJValue = paramsToJValue(extractorOption, params.algorithmParamsList)
+    jValue = jValue.replace("algorithmParamsList" :: Nil, algorithmParamsJValue)
+
+    val servingParamsJValue = toJValue(extractorOption, params.servingParams._2)
+    jValue = jValue.replace("servingParams" :: params.servingParams._1 :: Nil, servingParamsJValue)
+
+    jValue
+  }
+
+  private
+  def paramsToJValue(extractorOption: JsonExtractorOption, params: Seq[(String, Params)]) = {
+    val jValues = params.map { case (name, param) =>
+      // The to-be-replaced JValue must be produced by Json4s; otherwise the tuple JValue will be wrong
+      val toBeReplacedJValue =
+        JsonExtractor.toJValue(JsonExtractorOption.Json4sNative, (name, null))
+      val paramJValue = JsonExtractor.toJValue(extractorOption, param)
+
+      toBeReplacedJValue.replace(name :: Nil, paramJValue)
+    }
+
+    JArray(jValues.toList)
+  }
+
+  private def extractWithJson4sNative[T](
+    json: String,
+    formats: Formats,
+    clazz: Class[T]): T = {
+
+    Extraction.extract(parse(json), TypeInfo(clazz, None))(formats).asInstanceOf[T]
+  }
+
+  private def extractWithGson[T](
+    json: String,
+    clazz: Class[T],
+    gsonTypeAdapterFactories: Seq[TypeAdapterFactory]): T = {
+
+    gson(gsonTypeAdapterFactories).fromJson(json, clazz)
+  }
+
+  private def gson(gsonTypeAdapterFactories: Seq[TypeAdapterFactory]): Gson = {
+    val gsonBuilder = new GsonBuilder()
+    gsonTypeAdapterFactories.foreach { typeAdapterFactory =>
+      gsonBuilder.registerTypeAdapterFactory(typeAdapterFactory)
+    }
+
+    gsonBuilder.create()
+  }
+
+}
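
A usage sketch of the round trip with a hypothetical params case class. With
JsonExtractorOption.Both, extraction is attempted with Json4s first and falls
back to Gson on failure, which is what lets the same code path serve both
Scala and Java controllers:

    import org.apache.predictionio.controller.Params
    import org.apache.predictionio.workflow.{JsonExtractor, JsonExtractorOption}

    case class AlgoParams(rank: Int, lambda: Double) extends Params

    val json: String = JsonExtractor.paramToJson(
      JsonExtractorOption.Both, ("als", AlgoParams(10, 0.01)))
    // json is {"als":{"rank":10,"lambda":0.01}} (modulo number formatting)

    val params: AlgoParams = JsonExtractor.extract(
      JsonExtractorOption.Both,
      """{"rank":10,"lambda":0.01}""",
      classOf[AlgoParams])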

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/core/src/main/scala/org/apache/predictionio/workflow/JsonExtractorOption.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/predictionio/workflow/JsonExtractorOption.scala b/core/src/main/scala/org/apache/predictionio/workflow/JsonExtractorOption.scala
new file mode 100644
index 0000000..a7915a6
--- /dev/null
+++ b/core/src/main/scala/org/apache/predictionio/workflow/JsonExtractorOption.scala
@@ -0,0 +1,23 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package org.apache.predictionio.workflow
+
+object JsonExtractorOption extends Enumeration {
+  type JsonExtractorOption = Value
+  val Json4sNative = Value
+  val Gson = Value
+  val Both = Value
+}

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/core/src/main/scala/org/apache/predictionio/workflow/PersistentModelManifest.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/predictionio/workflow/PersistentModelManifest.scala b/core/src/main/scala/org/apache/predictionio/workflow/PersistentModelManifest.scala
new file mode 100644
index 0000000..7cf7ede
--- /dev/null
+++ b/core/src/main/scala/org/apache/predictionio/workflow/PersistentModelManifest.scala
@@ -0,0 +1,18 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package org.apache.predictionio.workflow
+
+case class PersistentModelManifest(className: String)

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/core/src/main/scala/org/apache/predictionio/workflow/Workflow.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/predictionio/workflow/Workflow.scala b/core/src/main/scala/org/apache/predictionio/workflow/Workflow.scala
new file mode 100644
index 0000000..d88c8d0
--- /dev/null
+++ b/core/src/main/scala/org/apache/predictionio/workflow/Workflow.scala
@@ -0,0 +1,135 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package org.apache.predictionio.workflow
+
+import org.apache.predictionio.annotation.Experimental
+import org.apache.predictionio.controller.EngineParams
+import org.apache.predictionio.controller.EngineParamsGenerator
+import org.apache.predictionio.controller.Evaluation
+import org.apache.predictionio.core.BaseEngine
+import org.apache.predictionio.core.BaseEvaluator
+import org.apache.predictionio.core.BaseEvaluatorResult
+import org.apache.predictionio.data.storage.EvaluationInstance
+
+/** Collection of workflow creation methods.
+  * @group Workflow
+  */
+object Workflow {
+  // evaluator is already instantiated.
+  // This is an undocumented way of using evaluator. Still experimental.
+  // evaluatorParams is used to write into EngineInstance, will be shown in
+  // dashboard.
+  /*
+  def runEval[EI, Q, P, A, ER <: AnyRef](
+      engine: BaseEngine[EI, Q, P, A],
+      engineParams: EngineParams,
+      evaluator: BaseEvaluator[EI, Q, P, A, ER],
+      evaluatorParams: Params,
+      env: Map[String, String] = WorkflowUtils.pioEnvVars,
+      params: WorkflowParams = WorkflowParams()) {
+
+    implicit lazy val formats = Utils.json4sDefaultFormats +
+      new NameParamsSerializer
+
+    val engineInstance = EngineInstance(
+      id = "",
+      status = "INIT",
+      startTime = DateTime.now,
+      endTime = DateTime.now,
+      engineId = "",
+      engineVersion = "",
+      engineVariant = "",
+      engineFactory = "FIXME",
+      evaluatorClass = evaluator.getClass.getName(),
+      batch = params.batch,
+      env = env,
+      sparkConf = params.sparkEnv,
+      dataSourceParams = write(engineParams.dataSourceParams),
+      preparatorParams = write(engineParams.preparatorParams),
+      algorithmsParams = write(engineParams.algorithmParamsList),
+      servingParams = write(engineParams.servingParams),
+      evaluatorParams = write(evaluatorParams),
+      evaluatorResults = "",
+      evaluatorResultsHTML = "",
+      evaluatorResultsJSON = "")
+
+    CoreWorkflow.runEval(
+      engine = engine,
+      engineParams = engineParams,
+      engineInstance = engineInstance,
+      evaluator = evaluator,
+      evaluatorParams = evaluatorParams,
+      env = env,
+      params = params)
+  }
+  */
+
+  def runEvaluation(
+      evaluation: Evaluation,
+      engineParamsGenerator: EngineParamsGenerator,
+      env: Map[String, String] = WorkflowUtils.pioEnvVars,
+      evaluationInstance: EvaluationInstance = EvaluationInstance(),
+      params: WorkflowParams = WorkflowParams()) {
+    runEvaluationTypeless(
+      evaluation = evaluation,
+      engine = evaluation.engine,
+      engineParamsList = engineParamsGenerator.engineParamsList,
+      evaluationInstance = evaluationInstance,
+      evaluator = evaluation.evaluator,
+      env = env,
+      params = params
+    )
+  }
+
+  def runEvaluationTypeless[
+      EI, Q, P, A, EEI, EQ, EP, EA, ER <: BaseEvaluatorResult](
+      evaluation: Evaluation,
+      engine: BaseEngine[EI, Q, P, A],
+      engineParamsList: Seq[EngineParams],
+      evaluationInstance: EvaluationInstance,
+      evaluator: BaseEvaluator[EEI, EQ, EP, EA, ER],
+      env: Map[String, String] = WorkflowUtils.pioEnvVars,
+      params: WorkflowParams = WorkflowParams()) {
+    runEvaluationViaCoreWorkflow(
+      evaluation = evaluation,
+      engine = engine,
+      engineParamsList = engineParamsList,
+      evaluationInstance = evaluationInstance,
+      evaluator = evaluator.asInstanceOf[BaseEvaluator[EI, Q, P, A, ER]],
+      env = env,
+      params = params)
+  }
+
+  /** :: Experimental :: */
+  @Experimental
+  def runEvaluationViaCoreWorkflow[EI, Q, P, A, R <: BaseEvaluatorResult](
+      evaluation: Evaluation,
+      engine: BaseEngine[EI, Q, P, A],
+      engineParamsList: Seq[EngineParams],
+      evaluationInstance: EvaluationInstance,
+      evaluator: BaseEvaluator[EI, Q, P, A, R],
+      env: Map[String, String] = WorkflowUtils.pioEnvVars,
+      params: WorkflowParams = WorkflowParams()) {
+    CoreWorkflow.runEvaluation(
+      evaluation = evaluation,
+      engine = engine,
+      engineParamsList = engineParamsList,
+      evaluationInstance = evaluationInstance,
+      evaluator = evaluator,
+      env = env,
+      params = params)
+  }
+}
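
A typical call mirrors what CreateWorkflow does for "pio eval"; MyEvaluation
and MyEngineParamsGenerator below are hypothetical objects extending
Evaluation and EngineParamsGenerator:

    Workflow.runEvaluation(
      evaluation = MyEvaluation,
      engineParamsGenerator = MyEngineParamsGenerator)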

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/core/src/main/scala/org/apache/predictionio/workflow/WorkflowContext.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/predictionio/workflow/WorkflowContext.scala b/core/src/main/scala/org/apache/predictionio/workflow/WorkflowContext.scala
new file mode 100644
index 0000000..2abb79a
--- /dev/null
+++ b/core/src/main/scala/org/apache/predictionio/workflow/WorkflowContext.scala
@@ -0,0 +1,45 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package org.apache.predictionio.workflow
+
+import grizzled.slf4j.Logging
+import org.apache.spark.SparkContext
+import org.apache.spark.SparkConf
+
+import scala.language.existentials
+
+// FIXME: move to better location.
+object WorkflowContext extends Logging {
+  def apply(
+      batch: String = "",
+      executorEnv: Map[String, String] = Map(),
+      sparkEnv: Map[String, String] = Map(),
+      mode: String = ""
+    ): SparkContext = {
+    val conf = new SparkConf()
+    val prefix = if (mode == "") "PredictionIO" else s"PredictionIO ${mode}"
+    conf.setAppName(s"${prefix}: ${batch}")
+    debug(s"Executor environment received: ${executorEnv}")
+    executorEnv.foreach { case (k, v) => conf.setExecutorEnv(k, v) }
+    debug(s"SparkConf executor environment: ${conf.getExecutorEnv}")
+    debug(s"Application environment received: ${sparkEnv}")
+    conf.setAll(sparkEnv)
+    val sparkConfString = conf.getAll.toSeq
+    debug(s"SparkConf environment: $sparkConfString")
+    new SparkContext(conf)
+  }
+}
+
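
A sketch of how a workflow obtains its SparkContext through this helper; the
Spark property below is an ordinary SparkConf key used purely as an example:

    val sc = WorkflowContext(
      batch = "nightly training",
      sparkEnv = Map("spark.master" -> "local[*]"),
      mode = "Training")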

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/core/src/main/scala/org/apache/predictionio/workflow/WorkflowParams.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/predictionio/workflow/WorkflowParams.scala b/core/src/main/scala/org/apache/predictionio/workflow/WorkflowParams.scala
new file mode 100644
index 0000000..8727a50
--- /dev/null
+++ b/core/src/main/scala/org/apache/predictionio/workflow/WorkflowParams.scala
@@ -0,0 +1,42 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package org.apache.predictionio.workflow
+
+/** Workflow parameters.
+  *
+  * @param batch Batch label of the run.
+  * @param verbose Verbosity level.
+  * @param saveModel Controls whether trained models are persisted.
+  * @param sparkEnv Spark properties that will be set in SparkConf.setAll().
+  * @param skipSanityCheck Skips all data sanity checks.
+  * @param stopAfterRead Stops workflow after reading from data source.
+  * @param stopAfterPrepare Stops workflow after data preparation.
+  * @group Workflow
+  */
+case class WorkflowParams(
+  batch: String = "",
+  verbose: Int = 2,
+  saveModel: Boolean = true,
+  sparkEnv: Map[String, String] =
+    Map[String, String]("spark.executor.extraClassPath" -> "."),
+  skipSanityCheck: Boolean = false,
+  stopAfterRead: Boolean = false,
+  stopAfterPrepare: Boolean = false) {
+  // Temporary workaround for WorkflowParamsBuilder for Java, which does not
+  // support custom Spark environments yet.
+  def this(batch: String, verbose: Int, saveModel: Boolean)
+  = this(batch, verbose, saveModel, Map[String, String]())
+}
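
For example, CreateWorkflow above merges the default Spark environment with
per-variant overrides; a standalone construction would follow the same pattern
(the memory setting is illustrative only):

    val params = WorkflowParams(
      batch = "nightly training",
      verbose = 3,
      sparkEnv = WorkflowParams().sparkEnv ++
        Map("spark.executor.memory" -> "4g"))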


[24/34] incubator-predictionio git commit: rename all except examples

Posted by do...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/core/src/main/scala/io/prediction/controller/Utils.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/io/prediction/controller/Utils.scala b/core/src/main/scala/io/prediction/controller/Utils.scala
deleted file mode 100644
index 5098fba..0000000
--- a/core/src/main/scala/io/prediction/controller/Utils.scala
+++ /dev/null
@@ -1,69 +0,0 @@
-/** Copyright 2015 TappingStone, Inc.
-  *
-  * Licensed under the Apache License, Version 2.0 (the "License");
-  * you may not use this file except in compliance with the License.
-  * You may obtain a copy of the License at
-  *
-  *     http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-package io.prediction.controller
-
-import io.prediction.workflow.KryoInstantiator
-
-import org.json4s._
-import org.json4s.ext.JodaTimeSerializers
-
-import scala.io.Source
-
-import _root_.java.io.File
-import _root_.java.io.FileOutputStream
-
-/** Controller utilities.
-  *
-  * @group Helper
-  */
-object Utils {
-  /** Default JSON4S serializers for PredictionIO controllers. */
-  val json4sDefaultFormats = DefaultFormats.lossless ++ JodaTimeSerializers.all
-
-  /** Save a model object as a file to a temporary location on local filesystem.
-    * It will first try to use the location indicated by the environmental
-    * variable PIO_FS_TMPDIR, then fall back to the java.io.tmpdir property.
-    *
-    * @param id Used as the filename of the file.
-    * @param model Model object.
-    */
-  def save(id: String, model: Any): Unit = {
-    val tmpdir = sys.env.getOrElse("PIO_FS_TMPDIR", System.getProperty("java.io.tmpdir"))
-    val modelFile = tmpdir + File.separator + id
-    (new File(tmpdir)).mkdirs
-    val fos = new FileOutputStream(modelFile)
-    val kryo = KryoInstantiator.newKryoInjection
-    fos.write(kryo(model))
-    fos.close
-  }
-
-  /** Load a model object from a file in a temporary location on local
-    * filesystem. It will first try to use the location indicated by the
-    * environmental variable PIO_FS_TMPDIR, then fall back to the java.io.tmpdir
-    * property.
-    *
-    * @param id Used as the filename of the file.
-    */
-  def load(id: String): Any = {
-    val tmpdir = sys.env.getOrElse("PIO_FS_TMPDIR", System.getProperty("java.io.tmpdir"))
-    val modelFile = tmpdir + File.separator + id
-    val src = Source.fromFile(modelFile)(scala.io.Codec.ISO8859)
-    val kryo = KryoInstantiator.newKryoInjection
-    val m = kryo.invert(src.map(_.toByte).toArray).get
-    src.close
-    m
-  }
-}

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/core/src/main/scala/io/prediction/controller/java/JavaEngineParamsGenerator.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/io/prediction/controller/java/JavaEngineParamsGenerator.scala b/core/src/main/scala/io/prediction/controller/java/JavaEngineParamsGenerator.scala
deleted file mode 100644
index f932012..0000000
--- a/core/src/main/scala/io/prediction/controller/java/JavaEngineParamsGenerator.scala
+++ /dev/null
@@ -1,39 +0,0 @@
-/** Copyright 2015 TappingStone, Inc.
-  *
-  * Licensed under the Apache License, Version 2.0 (the "License");
-  * you may not use this file except in compliance with the License.
-  * You may obtain a copy of the License at
-  *
-  *     http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-package io.prediction.controller.java
-
-import io.prediction.controller.EngineParams
-import io.prediction.controller.EngineParamsGenerator
-
-import scala.collection.JavaConversions.asScalaBuffer
-
-/** Define an engine parameter generator in Java
-  *
-  * Implementations of this abstract class can be supplied to "pio eval" as the second
-  * command line argument.
-  *
-  * @group Evaluation
-  */
-abstract class JavaEngineParamsGenerator extends EngineParamsGenerator {
-
-  /** Set the list of [[EngineParams]].
-    *
-    * @param engineParams A list of engine params
-    */
-  def setEngineParamsList(engineParams: java.util.List[_ <: EngineParams]) {
-    engineParamsList = asScalaBuffer(engineParams)
-  }
-}

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/core/src/main/scala/io/prediction/controller/java/JavaEvaluation.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/io/prediction/controller/java/JavaEvaluation.scala b/core/src/main/scala/io/prediction/controller/java/JavaEvaluation.scala
deleted file mode 100644
index 3db89bf..0000000
--- a/core/src/main/scala/io/prediction/controller/java/JavaEvaluation.scala
+++ /dev/null
@@ -1,66 +0,0 @@
-/** Copyright 2015 TappingStone, Inc.
-  *
-  * Licensed under the Apache License, Version 2.0 (the "License");
-  * you may not use this file except in compliance with the License.
-  * You may obtain a copy of the License at
-  *
-  *     http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-package io.prediction.controller.java
-
-import io.prediction.controller.Evaluation
-import io.prediction.controller.Metric
-import io.prediction.core.BaseEngine
-
-import scala.collection.JavaConversions.asScalaBuffer
-
-/** Define an evaluation in Java.
-  *
-  * Implementations of this abstract class can be supplied to "pio eval" as the first
-  * argument.
-  *
-  * @group Evaluation
-  */
-
-abstract class JavaEvaluation extends Evaluation {
-  /** Set the [[BaseEngine]] and [[Metric]] for this [[Evaluation]]
-    *
-    * @param baseEngine [[BaseEngine]] for this [[JavaEvaluation]]
-    * @param metric [[Metric]] for this [[JavaEvaluation]]
-    * @tparam EI Evaluation information class
-    * @tparam Q Query class
-    * @tparam P Predicted result class
-    * @tparam A Actual result class
-    */
-  def setEngineMetric[EI, Q, P, A](
-    baseEngine: BaseEngine[EI, Q, P, A],
-    metric: Metric[EI, Q, P, A, _]) {
-
-    engineMetric = (baseEngine, metric)
-  }
-
-  /** Set the [[BaseEngine]] and [[Metric]]s for this [[JavaEvaluation]]
-    *
-    * @param baseEngine [[BaseEngine]] for this [[JavaEvaluation]]
-    * @param metric [[Metric]] for this [[JavaEvaluation]]
-    * @param metrics Other [[Metric]]s for this [[JavaEvaluation]]
-    * @tparam EI Evaluation information class
-    * @tparam Q Query class
-    * @tparam P Predicted result class
-    * @tparam A Actual result class
-    */
-  def setEngineMetrics[EI, Q, P, A](
-    baseEngine: BaseEngine[EI, Q, P, A],
-    metric: Metric[EI, Q, P, A, _],
-    metrics: java.util.List[_ <: Metric[EI, Q, P, A, _]]) {
-
-    engineMetrics = (baseEngine, metric, asScalaBuffer(metrics))
-  }
-}

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/core/src/main/scala/io/prediction/controller/java/LJavaAlgorithm.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/io/prediction/controller/java/LJavaAlgorithm.scala b/core/src/main/scala/io/prediction/controller/java/LJavaAlgorithm.scala
deleted file mode 100644
index ba6ed2d..0000000
--- a/core/src/main/scala/io/prediction/controller/java/LJavaAlgorithm.scala
+++ /dev/null
@@ -1,31 +0,0 @@
-/** Copyright 2015 TappingStone, Inc.
-  *
-  * Licensed under the Apache License, Version 2.0 (the "License");
-  * you may not use this file except in compliance with the License.
-  * You may obtain a copy of the License at
-  *
-  *     http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-package io.prediction.controller.java
-
-import io.prediction.controller.LAlgorithm
-
-import scala.reflect.ClassTag
-
-/** Base class of a Java local algorithm. Refer to [[LAlgorithm]] for documentation.
-  *
-  * @tparam PD Prepared data class.
-  * @tparam M Trained model class.
-  * @tparam Q Input query class.
-  * @tparam P Output prediction class.
-  * @group Algorithm
-  */
-abstract class LJavaAlgorithm[PD, M, Q, P]
-  extends LAlgorithm[PD, M, Q, P]()(ClassTag.AnyRef.asInstanceOf[ClassTag[M]])

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/core/src/main/scala/io/prediction/controller/java/LJavaDataSource.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/io/prediction/controller/java/LJavaDataSource.scala b/core/src/main/scala/io/prediction/controller/java/LJavaDataSource.scala
deleted file mode 100644
index dfafba4..0000000
--- a/core/src/main/scala/io/prediction/controller/java/LJavaDataSource.scala
+++ /dev/null
@@ -1,31 +0,0 @@
-/** Copyright 2015 TappingStone, Inc.
-  *
-  * Licensed under the Apache License, Version 2.0 (the "License");
-  * you may not use this file except in compliance with the License.
-  * You may obtain a copy of the License at
-  *
-  *     http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-package io.prediction.controller.java
-
-import io.prediction.controller.LDataSource
-
-import scala.reflect.ClassTag
-
-/** Base class of a Java local data source. Refer to [[LDataSource]] for documentation.
-  *
-  * @tparam TD Training data class.
-  * @tparam EI Evaluation Info class.
-  * @tparam Q Input query class.
-  * @tparam A Actual value class.
-  * @group Data Source
-  */
-abstract class LJavaDataSource[TD, EI, Q, A]
-  extends LDataSource[TD, EI, Q, A]()(ClassTag.AnyRef.asInstanceOf[ClassTag[TD]])

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/core/src/main/scala/io/prediction/controller/java/LJavaPreparator.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/io/prediction/controller/java/LJavaPreparator.scala b/core/src/main/scala/io/prediction/controller/java/LJavaPreparator.scala
deleted file mode 100644
index 321a100..0000000
--- a/core/src/main/scala/io/prediction/controller/java/LJavaPreparator.scala
+++ /dev/null
@@ -1,29 +0,0 @@
-/** Copyright 2015 TappingStone, Inc.
-  *
-  * Licensed under the Apache License, Version 2.0 (the "License");
-  * you may not use this file except in compliance with the License.
-  * You may obtain a copy of the License at
-  *
-  *     http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-package io.prediction.controller.java
-
-import io.prediction.controller.LPreparator
-
-import scala.reflect.ClassTag
-
-/** Base class of a Java local preparator. Refer to [[LPreparator]] for documentation.
-  *
-  * @tparam TD Training data class.
-  * @tparam PD Prepared data class.
-  * @group Preparator
-  */
-abstract class LJavaPreparator[TD, PD]
-  extends LPreparator[TD, PD]()(ClassTag.AnyRef.asInstanceOf[ClassTag[PD]])

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/core/src/main/scala/io/prediction/controller/java/LJavaServing.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/io/prediction/controller/java/LJavaServing.scala b/core/src/main/scala/io/prediction/controller/java/LJavaServing.scala
deleted file mode 100644
index f664c38..0000000
--- a/core/src/main/scala/io/prediction/controller/java/LJavaServing.scala
+++ /dev/null
@@ -1,26 +0,0 @@
-/** Copyright 2015 TappingStone, Inc.
-  *
-  * Licensed under the Apache License, Version 2.0 (the "License");
-  * you may not use this file except in compliance with the License.
-  * You may obtain a copy of the License at
-  *
-  *     http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-package io.prediction.controller.java
-
-import io.prediction.controller.LServing
-
-/** Base class of Java local serving. Refer to [[LServing]] for documentation.
-  *
-  * @tparam Q Input query class.
-  * @tparam P Output prediction class.
-  * @group Serving
-  */
-abstract class LJavaServing[Q, P] extends LServing[Q, P]

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/core/src/main/scala/io/prediction/controller/java/P2LJavaAlgorithm.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/io/prediction/controller/java/P2LJavaAlgorithm.scala b/core/src/main/scala/io/prediction/controller/java/P2LJavaAlgorithm.scala
deleted file mode 100644
index fcf81a0..0000000
--- a/core/src/main/scala/io/prediction/controller/java/P2LJavaAlgorithm.scala
+++ /dev/null
@@ -1,33 +0,0 @@
-/** Copyright 2015 TappingStone, Inc.
-  *
-  * Licensed under the Apache License, Version 2.0 (the "License");
-  * you may not use this file except in compliance with the License.
-  * You may obtain a copy of the License at
-  *
-  *     http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-package io.prediction.controller.java
-
-import io.prediction.controller.P2LAlgorithm
-
-import scala.reflect.ClassTag
-
-/** Base class of a Java parallel-to-local algorithm. Refer to [[P2LAlgorithm]] for documentation.
-  *
-  * @tparam PD Prepared data class.
-  * @tparam M Trained model class.
-  * @tparam Q Input query class.
-  * @tparam P Output prediction class.
-  * @group Algorithm
-  */
-abstract class P2LJavaAlgorithm[PD, M, Q, P]
-  extends P2LAlgorithm[PD, M, Q, P]()(
-    ClassTag.AnyRef.asInstanceOf[ClassTag[M]],
-    ClassTag.AnyRef.asInstanceOf[ClassTag[Q]])

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/core/src/main/scala/io/prediction/controller/java/PJavaAlgorithm.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/io/prediction/controller/java/PJavaAlgorithm.scala b/core/src/main/scala/io/prediction/controller/java/PJavaAlgorithm.scala
deleted file mode 100644
index d3a370a..0000000
--- a/core/src/main/scala/io/prediction/controller/java/PJavaAlgorithm.scala
+++ /dev/null
@@ -1,28 +0,0 @@
-/** Copyright 2015 TappingStone, Inc.
-  *
-  * Licensed under the Apache License, Version 2.0 (the "License");
-  * you may not use this file except in compliance with the License.
-  * You may obtain a copy of the License at
-  *
-  *     http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-package io.prediction.controller.java
-
-import io.prediction.controller.PAlgorithm
-
-/** Base class of a Java parallel algorithm. Refer to [[PAlgorithm]] for documentation.
-  *
-  * @tparam PD Prepared data class.
-  * @tparam M Trained model class.
-  * @tparam Q Input query class.
-  * @tparam P Output prediction class.
-  * @group Algorithm
-  */
-abstract class PJavaAlgorithm[PD, M, Q, P] extends PAlgorithm[PD, M, Q, P]

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/core/src/main/scala/io/prediction/controller/java/PJavaDataSource.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/io/prediction/controller/java/PJavaDataSource.scala b/core/src/main/scala/io/prediction/controller/java/PJavaDataSource.scala
deleted file mode 100644
index 11b962d..0000000
--- a/core/src/main/scala/io/prediction/controller/java/PJavaDataSource.scala
+++ /dev/null
@@ -1,28 +0,0 @@
-/** Copyright 2015 TappingStone, Inc.
-  *
-  * Licensed under the Apache License, Version 2.0 (the "License");
-  * you may not use this file except in compliance with the License.
-  * You may obtain a copy of the License at
-  *
-  *     http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-package io.prediction.controller.java
-
-import io.prediction.controller.PDataSource
-
-/** Base class of a Java parallel data source. Refer to [[PDataSource]] for documentation.
-  *
-  * @tparam TD Training data class.
-  * @tparam EI Evaluation Info class.
-  * @tparam Q Input query class.
-  * @tparam A Actual value class.
-  * @group Data Source
-  */
-abstract class PJavaDataSource[TD, EI, Q, A] extends PDataSource[TD, EI, Q, A]

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/core/src/main/scala/io/prediction/controller/java/PJavaPreparator.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/io/prediction/controller/java/PJavaPreparator.scala b/core/src/main/scala/io/prediction/controller/java/PJavaPreparator.scala
deleted file mode 100644
index 2a9c8f9..0000000
--- a/core/src/main/scala/io/prediction/controller/java/PJavaPreparator.scala
+++ /dev/null
@@ -1,26 +0,0 @@
-/** Copyright 2015 TappingStone, Inc.
-  *
-  * Licensed under the Apache License, Version 2.0 (the "License");
-  * you may not use this file except in compliance with the License.
-  * You may obtain a copy of the License at
-  *
-  *     http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-package io.prediction.controller.java
-
-import io.prediction.controller.PPreparator
-
-/** Base class of a Java parallel preparator. Refer to [[PPreparator]] for documentation.
-  *
-  * @tparam TD Training data class.
-  * @tparam PD Prepared data class.
-  * @group Preparator
-  */
-abstract class PJavaPreparator[TD, PD] extends PPreparator[TD, PD]

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/core/src/main/scala/io/prediction/controller/java/SerializableComparator.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/io/prediction/controller/java/SerializableComparator.scala b/core/src/main/scala/io/prediction/controller/java/SerializableComparator.scala
deleted file mode 100644
index 0e92f32..0000000
--- a/core/src/main/scala/io/prediction/controller/java/SerializableComparator.scala
+++ /dev/null
@@ -1,20 +0,0 @@
-/** Copyright 2015 TappingStone, Inc.
-  *
-  * Licensed under the Apache License, Version 2.0 (the "License");
-  * you may not use this file except in compliance with the License.
-  * You may obtain a copy of the License at
-  *
-  *     http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-package io.prediction.controller.java
-
-import java.util.Comparator
-
-trait SerializableComparator[T] extends Comparator[T] with java.io.Serializable

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/core/src/main/scala/io/prediction/controller/package.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/io/prediction/controller/package.scala b/core/src/main/scala/io/prediction/controller/package.scala
deleted file mode 100644
index bcb4b0d..0000000
--- a/core/src/main/scala/io/prediction/controller/package.scala
+++ /dev/null
@@ -1,168 +0,0 @@
-/** Copyright 2015 TappingStone, Inc.
-  *
-  * Licensed under the Apache License, Version 2.0 (the "License");
-  * you may not use this file except in compliance with the License.
-  * You may obtain a copy of the License at
-  *
-  *     http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-package io.prediction
-
-/** Provides building blocks for writing a complete prediction engine
-  * consisting of DataSource, Preparator, Algorithm, Serving, and Evaluation.
-  *
-  * == Start Building an Engine ==
-  * The starting point of a prediction engine is the [[Engine]] class.
-  *
-  * == The DASE Paradigm ==
-  * The building blocks together form the DASE paradigm. Learn more about DASE
-  * [[http://docs.prediction.io/customize/ here]].
-  *
-  * == Types of Building Blocks ==
-  * Depending on the problem you are solving, you would need to pick appropriate
-  * flavors of building blocks.
-  *
-  * === Engines ===
-  * There are 3 typical engine configurations:
-  *
-  *  1. [[PDataSource]], [[PPreparator]], [[P2LAlgorithm]], [[LServing]]
-  *  2. [[PDataSource]], [[PPreparator]], [[PAlgorithm]], [[LServing]]
-  *  3. [[LDataSource]], [[LPreparator]], [[LAlgorithm]], [[LServing]]
-  *
-  * In both configurations 1 and 2, data is sourced and prepared in a
-  * parallelized fashion, with data type as RDD.
-  *
-  * The difference between configurations 1 and 2 comes at the algorithm stage.
-  * In configuration 1, the algorithm operates on potentially large data as RDDs
-  * in the Spark cluster, and eventually outputs a model that is small enough to
-  * fit in a single machine.
-  *
-  * On the other hand, configuration 2 outputs a model that is potentially too
-  * large to fit in a single machine, and must reside in the Spark cluster as
-  * RDD(s).
-  *
-  * With configuration 1 ([[P2LAlgorithm]]), PredictionIO will automatically
-  * try to persist the model to local disk or HDFS if the model is serializable.
-  *
-  * With configuration 2 ([[PAlgorithm]]), PredictionIO will not automatically
-  * try to persist the model, unless the model implements the [[PersistentModel]]
-  * trait.
-  *
-  * In special circumstances where both the data and the model are small,
-  * configuration 3 may be used. Beware that RDDs cannot be used with
-  * configuration 3.
-  *
-  * === Data Source ===
-  * [[PDataSource]] is probably the most used data source base class with the
-  * ability to process RDD-based data. [[LDataSource]] '''cannot''' handle
-  * RDD-based data. Use only when you have a special requirement.
-  *
-  * === Preparator ===
-  * With [[PDataSource]], you must pick [[PPreparator]]. The same applies to
-  * [[LDataSource]] and [[LPreparator]].
-  *
-  * === Algorithm ===
-  * The workhorse of the engine comes in 3 different flavors.
-  *
-  * ==== P2LAlgorithm ====
-  * Produces a model that is small enough to fit in a single machine from
-  * [[PDataSource]] and [[PPreparator]]. The model '''cannot''' contain any RDD.
-  * If the produced model is serializable, PredictionIO will try to
-  * automatically persist it. In addition, P2LAlgorithm.batchPredict is
-  * already implemented for [[Evaluation]] purposes.
-  *
-  * ==== PAlgorithm ====
-  * Produces a model that could contain RDDs from [[PDataSource]] and
-  * [[PPreparator]]. PredictionIO will not try to persist it automatically
-  * unless the model implements [[PersistentModel]]. [[PAlgorithm.batchPredict]]
-  * must be implemented for [[Evaluation]].
-  *
-  * ==== LAlgorithm ====
-  * Produces a model that is small enough to fit in a single machine from
-  * [[LDataSource]] and [[LPreparator]]. The model '''cannot''' contain any RDD.
-  * If the produced model is serializable, PredictionIO will try to
-  * automatically persist it. In addition, LAlgorithm.batchPredict is
-  * already implemented for [[Evaluation]] purposes.
-  *
-  * === Serving ===
-  * The serving component comes with only 1 flavor--[[LServing]]. At the serving
-  * stage, it is assumed that the result being served is already at a human-
-  * consumable size.
-  *
-  * == Model Persistence ==
-  * PredictionIO tries its best to persist trained models automatically. Please
-  * refer to [[LAlgorithm.makePersistentModel]],
-  * [[P2LAlgorithm.makePersistentModel]], and [[PAlgorithm.makePersistentModel]]
-  * for descriptions on different strategies.
-  */
-package object controller {
-
-  /** Base class of several helper types that represent emptiness
-    *
-    * @group Helper
-    */
-  class SerializableClass() extends Serializable
-
-  /** Empty data source parameters.
-    * @group Helper
-    */
-  type EmptyDataSourceParams = EmptyParams
-
-  /** Empty data parameters.
-    * @group Helper
-    */
-  type EmptyDataParams = EmptyParams
-
-  /** Empty evaluation info.
-    * @group Helper
-    */
-  type EmptyEvaluationInfo = SerializableClass
-
-  /** Empty preparator parameters.
-    * @group Helper
-    */
-  type EmptyPreparatorParams = EmptyParams
-
-  /** Empty algorithm parameters.
-    * @group Helper
-    */
-  type EmptyAlgorithmParams = EmptyParams
-
-  /** Empty serving parameters.
-    * @group Helper
-    */
-  type EmptyServingParams = EmptyParams
-
-  /** Empty metrics parameters.
-    * @group Helper
-    */
-  type EmptyMetricsParams = EmptyParams
-
-  /** Empty training data.
-    * @group Helper
-    */
-  type EmptyTrainingData = SerializableClass
-
-  /** Empty prepared data.
-    * @group Helper
-    */
-  type EmptyPreparedData = SerializableClass
-
-  /** Empty model.
-    * @group Helper
-    */
-  type EmptyModel = SerializableClass
-
-  /** Empty actual result.
-    * @group Helper
-    */
-  type EmptyActualResult = SerializableClass
-
-}
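
For reference, a minimal sketch of how these empty helper types are used in practice, assuming the 0.9.x io.prediction.controller API (MyDataSource, TrainingData, and Query are hypothetical names):

    import io.prediction.controller.{EmptyActualResult, EmptyEvaluationInfo, PDataSource}
    import org.apache.spark.SparkContext
    import org.apache.spark.rdd.RDD

    case class TrainingData(ratings: RDD[(String, String, Double)])
    case class Query(user: String)

    // A training-only data source: the evaluation slots are filled with the
    // empty helper types from the controller package object.
    class MyDataSource
      extends PDataSource[TrainingData, EmptyEvaluationInfo, Query, EmptyActualResult] {

      override def readTraining(sc: SparkContext): TrainingData =
        TrainingData(sc.parallelize(Seq(("u1", "i1", 5.0))))
    }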

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/core/src/main/scala/io/prediction/core/AbstractDoer.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/io/prediction/core/AbstractDoer.scala b/core/src/main/scala/io/prediction/core/AbstractDoer.scala
deleted file mode 100644
index 0635b27..0000000
--- a/core/src/main/scala/io/prediction/core/AbstractDoer.scala
+++ /dev/null
@@ -1,66 +0,0 @@
-/** Copyright 2015 TappingStone, Inc.
-  *
-  * Licensed under the Apache License, Version 2.0 (the "License");
-  * you may not use this file except in compliance with the License.
-  * You may obtain a copy of the License at
-  *
-  *     http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-package io.prediction.core
-
-import grizzled.slf4j.Logging
-import io.prediction.annotation.DeveloperApi
-import io.prediction.controller.Params
-
-/** :: DeveloperApi ::
-  * Base class for all controllers
-  */
-@DeveloperApi
-abstract class AbstractDoer extends Serializable
-
-/** :: DeveloperApi ::
-  * Provides facility to instantiate controller classes
-  */
-@DeveloperApi
-object Doer extends Logging {
-  /** :: DeveloperApi ::
-    * Instantiates a controller class using supplied controller parameters as
-    * constructor parameters
-    *
-    * @param cls Class of the controller class
-    * @param params Parameters of the controller class
-    * @tparam C Controller class
-    * @return An instance of the controller class
-    */
-  @DeveloperApi
-  def apply[C <: AbstractDoer] (
-    cls: Class[_ <: C], params: Params): C = {
-
-    // Subclasses only allow two kinds of constructors:
-    // 1. a constructor taking a single P <: Params argument, or
-    // 2. an empty constructor.
-    // Try (1) first; if that fails, fall back to (2).
-    try {
-      val constr = cls.getConstructor(params.getClass)
-      constr.newInstance(params)
-    } catch {
-      case e: NoSuchMethodException => try {
-        val zeroConstr = cls.getConstructor()
-        zeroConstr.newInstance()
-      } catch {
-        case e: NoSuchMethodException =>
-          error(s"${params.getClass.getName} was used as the constructor " +
-            s"argument to ${e.getMessage}, but no constructor can handle it. " +
-            "Aborting.")
-          sys.exit(1)
-      }
-    }
-  }
-}
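
A short sketch of the two constructor shapes Doer accepts (ParamDoer, PlainDoer, and MyParams are hypothetical names):

    import io.prediction.controller.Params
    import io.prediction.core.{AbstractDoer, Doer}

    case class MyParams(rank: Int = 10) extends Params

    // Shape 1: a single constructor argument of a Params subtype.
    class ParamDoer(val p: MyParams) extends AbstractDoer

    // Shape 2: an empty constructor; the supplied params are simply unused.
    class PlainDoer() extends AbstractDoer

    val a = Doer(classOf[ParamDoer], MyParams(rank = 20)) // uses shape 1
    val b = Doer(classOf[PlainDoer], MyParams())          // falls back to shape 2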

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/core/src/main/scala/io/prediction/core/BaseAlgorithm.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/io/prediction/core/BaseAlgorithm.scala b/core/src/main/scala/io/prediction/core/BaseAlgorithm.scala
deleted file mode 100644
index a3d3fad..0000000
--- a/core/src/main/scala/io/prediction/core/BaseAlgorithm.scala
+++ /dev/null
@@ -1,123 +0,0 @@
-/** Copyright 2015 TappingStone, Inc.
-  *
-  * Licensed under the Apache License, Version 2.0 (the "License");
-  * you may not use this file except in compliance with the License.
-  * You may obtain a copy of the License at
-  *
-  *     http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-package io.prediction.core
-
-import com.google.gson.TypeAdapterFactory
-import io.prediction.annotation.DeveloperApi
-import io.prediction.controller.Params
-import io.prediction.controller.Utils
-import net.jodah.typetools.TypeResolver
-import org.apache.spark.SparkContext
-import org.apache.spark.rdd.RDD
-
-/** :: DeveloperApi ::
-  * Base trait with default custom query serializer, exposed to engine developer
-  * via [[io.prediction.controller.CustomQuerySerializer]]
-  */
-@DeveloperApi
-trait BaseQuerySerializer {
-  /** :: DeveloperApi ::
-    * Serializer for Scala query classes using
-    * [[io.prediction.controller.Utils.json4sDefaultFormats]]
-    */
-  @DeveloperApi
-  @transient lazy val querySerializer = Utils.json4sDefaultFormats
-
-  /** :: DeveloperApi ::
-    * Serializer for Java query classes using Gson
-    */
-  @DeveloperApi
-  @transient lazy val gsonTypeAdapterFactories = Seq.empty[TypeAdapterFactory]
-}
-
-/** :: DeveloperApi ::
-  * Base class of all algorithm controllers
-  *
-  * @tparam PD Prepared data class
-  * @tparam M Model class
-  * @tparam Q Query class
-  * @tparam P Predicted result class
-  */
-@DeveloperApi
-abstract class BaseAlgorithm[PD, M, Q, P]
-  extends AbstractDoer with BaseQuerySerializer {
-  /** :: DeveloperApi ::
-    * Engine developers should not use this directly. This is called by workflow
-    * to train a model.
-    *
-    * @param sc Spark context
-    * @param pd Prepared data
-    * @return Trained model
-    */
-  @DeveloperApi
-  def trainBase(sc: SparkContext, pd: PD): M
-
-  /** :: DeveloperApi ::
-    * Engine developers should not use this directly. This is called by
-    * evaluation workflow to perform batch prediction.
-    *
-    * @param sc Spark context
-    * @param bm Model
-    * @param qs Batch of queries
-    * @return Batch of predicted results
-    */
-  @DeveloperApi
-  def batchPredictBase(sc: SparkContext, bm: Any, qs: RDD[(Long, Q)])
-  : RDD[(Long, P)]
-
-  /** :: DeveloperApi ::
-    * Engine developers should not use this directly. Called by serving to
-    * perform a single prediction.
-    *
-    * @param bm Model
-    * @param q Query
-    * @return Predicted result
-    */
-  @DeveloperApi
-  def predictBase(bm: Any, q: Q): P
-
-  /** :: DeveloperApi ::
-    * Engine developers should not use this directly. Prepare a model for
-    * persistence in the downstream consumer. PredictionIO supports 3 types of
-    * model persistence: automatic persistence, manual persistence, and
-    * re-training on deployment. This method provides a way for downstream
-    * modules to determine in which mode the model should be persisted.
-    *
-    * @param sc Spark context
-    * @param modelId Model ID
-    * @param algoParams Algorithm parameters that trained this model
-    * @param bm Model
-    * @return The model itself for automatic persistence, an instance of
-    *         [[io.prediction.workflow.PersistentModelManifest]] for manual
-    *         persistence, or Unit for re-training on deployment
-    */
-  @DeveloperApi
-  def makePersistentModel(
-    sc: SparkContext,
-    modelId: String,
-    algoParams: Params,
-    bm: Any): Any = Unit
-
-  /** :: DeveloperApi ::
-    * Obtains the type signature of query for this algorithm
-    *
-    * @return Type signature of query
-    */
-  def queryClass: Class[Q] = {
-    val types = TypeResolver.resolveRawArguments(classOf[BaseAlgorithm[PD, M, Q, P]], getClass)
-    types(2).asInstanceOf[Class[Q]]
-  }
-}
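
queryClass above leans on the typetools TypeResolver to recover the concrete query class bound by a subclass. A standalone sketch of that mechanism, independent of the rest of the codebase (Handler, MyQuery, and MyHandler are hypothetical):

    import net.jodah.typetools.TypeResolver

    abstract class Handler[Q, P] {
      // Recover the concrete Q bound by the subclass, mirroring queryClass.
      def queryClass: Class[Q] = {
        val types = TypeResolver.resolveRawArguments(classOf[Handler[Q, P]], getClass)
        types(0).asInstanceOf[Class[Q]]
      }
    }

    case class MyQuery(user: String)
    class MyHandler extends Handler[MyQuery, String]

    // (new MyHandler).queryClass == classOf[MyQuery]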

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/core/src/main/scala/io/prediction/core/BaseDataSource.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/io/prediction/core/BaseDataSource.scala b/core/src/main/scala/io/prediction/core/BaseDataSource.scala
deleted file mode 100644
index dd1157d..0000000
--- a/core/src/main/scala/io/prediction/core/BaseDataSource.scala
+++ /dev/null
@@ -1,52 +0,0 @@
-/** Copyright 2015 TappingStone, Inc.
-  *
-  * Licensed under the Apache License, Version 2.0 (the "License");
-  * you may not use this file except in compliance with the License.
-  * You may obtain a copy of the License at
-  *
-  *     http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-package io.prediction.core
-
-import io.prediction.annotation.DeveloperApi
-import org.apache.spark.SparkContext
-import org.apache.spark.rdd.RDD
-
-/** :: DeveloperApi ::
-  * Base class of all data source controllers
-  *
-  * @tparam TD Training data class
-  * @tparam EI Evaluation information class
-  * @tparam Q Query class
-  * @tparam A Actual result class
-  */
-@DeveloperApi
-abstract class BaseDataSource[TD, EI, Q, A] extends AbstractDoer {
-  /** :: DeveloperApi ::
-    * Engine developers should not use this directly. This is called by workflow
-    * to read training data.
-    *
-    * @param sc Spark context
-    * @return Training data
-    */
-  @DeveloperApi
-  def readTrainingBase(sc: SparkContext): TD
-
-  /** :: DeveloperApi ::
-    * Engine developers should not use this directly. This is called by
-    * evaluation workflow to read training and validation data.
-    *
-    * @param sc Spark context
-    * @return Sets of training data, evaluation information, queries, and actual
-    *         results
-    */
-  @DeveloperApi
-  def readEvalBase(sc: SparkContext): Seq[(TD, EI, RDD[(Q, A)])]
-}
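
A sketch of a data source that also supports evaluation, assuming the 0.9.x PDataSource API whose readEval backs readEvalBase (all class names hypothetical):

    import io.prediction.controller.PDataSource
    import org.apache.spark.SparkContext
    import org.apache.spark.rdd.RDD

    case class LabeledData(points: RDD[(Double, Double)]) // (feature, label)
    case class EvalInfo(name: String)
    case class EvalQuery(x: Double)
    case class Actual(y: Double)

    class EvalDataSource extends PDataSource[LabeledData, EvalInfo, EvalQuery, Actual] {
      override def readTraining(sc: SparkContext): LabeledData =
        LabeledData(sc.parallelize(Seq((1.0, 2.0), (2.0, 4.0))))

      // A single train/test split; the workflow reaches this via readEvalBase.
      override def readEval(
        sc: SparkContext): Seq[(LabeledData, EvalInfo, RDD[(EvalQuery, Actual)])] = {
        val testSet = sc.parallelize(Seq((EvalQuery(3.0), Actual(6.0))))
        Seq((readTraining(sc), EvalInfo("single-split"), testSet))
      }
    }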

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/core/src/main/scala/io/prediction/core/BaseEngine.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/io/prediction/core/BaseEngine.scala b/core/src/main/scala/io/prediction/core/BaseEngine.scala
deleted file mode 100644
index 5356fa7..0000000
--- a/core/src/main/scala/io/prediction/core/BaseEngine.scala
+++ /dev/null
@@ -1,100 +0,0 @@
-/** Copyright 2015 TappingStone, Inc.
-  *
-  * Licensed under the Apache License, Version 2.0 (the "License");
-  * you may not use this file except in compliance with the License.
-  * You may obtain a copy of the License at
-  *
-  *     http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-package io.prediction.core
-
-import io.prediction.annotation.DeveloperApi
-import io.prediction.controller.EngineParams
-import io.prediction.workflow.JsonExtractorOption.JsonExtractorOption
-import io.prediction.workflow.WorkflowParams
-import org.apache.spark.SparkContext
-import org.apache.spark.rdd.RDD
-import org.json4s.JValue
-
-/** :: DeveloperApi ::
-  * Base class of all engine controller classes
-  *
-  * @tparam EI Evaluation information class
-  * @tparam Q Query class
-  * @tparam P Predicted result class
-  * @tparam A Actual result class
-  */
-@DeveloperApi
-abstract class BaseEngine[EI, Q, P, A] extends Serializable {
-  /** :: DeveloperApi ::
-    * Implement this method so that training this engine would return a list of
-    * models.
-    *
-    * @param sc An instance of SparkContext.
-    * @param engineParams An instance of [[EngineParams]] for running a single training.
-    * @param params An instance of [[WorkflowParams]] that controls the workflow.
-    * @return A list of models.
-    */
-  @DeveloperApi
-  def train(
-    sc: SparkContext,
-    engineParams: EngineParams,
-    engineInstanceId: String,
-    params: WorkflowParams): Seq[Any]
-
-  /** :: DeveloperApi ::
-    * Implement this method so that [[io.prediction.controller.Evaluation]] can
-    * use this method to generate inputs for [[io.prediction.controller.Metric]].
-    *
-    * @param sc An instance of SparkContext.
-    * @param engineParams An instance of [[EngineParams]] for running a single evaluation.
-    * @param params An instance of [[WorkflowParams]] that controls the workflow.
-    * @return A list of evaluation information and RDD of query, predicted
-    *         result, and actual result tuples.
-    */
-  @DeveloperApi
-  def eval(
-    sc: SparkContext,
-    engineParams: EngineParams,
-    params: WorkflowParams): Seq[(EI, RDD[(Q, P, A)])]
-
-  /** :: DeveloperApi ::
-    * Override this method to further optimize the process that runs multiple
-    * evaluations (during tuning, for example). By default, this method calls
-    * [[eval]] for each element in the engine parameters list.
-    *
-    * @param sc An instance of SparkContext.
-    * @param engineParamsList A list of [[EngineParams]] for running batch evaluation.
-    * @param params An instance of [[WorkflowParams]] that controls the workflow.
-    * @return A list of engine parameters and evaluation result (from [[eval]]) tuples.
-    */
-  @DeveloperApi
-  def batchEval(
-    sc: SparkContext,
-    engineParamsList: Seq[EngineParams],
-    params: WorkflowParams)
-  : Seq[(EngineParams, Seq[(EI, RDD[(Q, P, A)])])] = {
-    engineParamsList.map { engineParams =>
-      (engineParams, eval(sc, engineParams, params))
-    }
-  }
-
-  /** :: DeveloperApi ::
-    * Implement this method to convert a JValue (read from an engine variant
-    * JSON file) to an instance of [[EngineParams]].
-    *
-    * @param variantJson Content of the engine variant JSON as JValue.
-    * @param jsonExtractor The JSON extractor option used for the conversion.
-    * @return An instance of [[EngineParams]] converted from JSON.
-    */
-  @DeveloperApi
-  def jValueToEngineParams(variantJson: JValue, jsonExtractor: JsonExtractorOption): EngineParams =
-    throw new NotImplementedError("JSON to EngineParams is not implemented.")
-}
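
jValueToEngineParams implementations typically walk the variant JSON with json4s before building EngineParams. A minimal sketch of that extraction step (AlgoParams and the JSON shape are illustrative assumptions):

    import org.json4s._
    import org.json4s.native.JsonMethods.parse

    implicit val formats: Formats = DefaultFormats

    case class AlgoParams(rank: Int, iters: Int)

    // A fragment shaped like an engine variant JSON.
    val variantJson: JValue = parse(
      """{"algorithms": [{"name": "als", "params": {"rank": 10, "iters": 20}}]}""")

    // Extract the first algorithm's params, as an implementation would
    // before wrapping them in EngineParams.
    val firstAlgo = (variantJson \ "algorithms").children.head
    val parsed = (firstAlgo \ "params").extract[AlgoParams] // AlgoParams(10, 20)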

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/core/src/main/scala/io/prediction/core/BaseEvaluator.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/io/prediction/core/BaseEvaluator.scala b/core/src/main/scala/io/prediction/core/BaseEvaluator.scala
deleted file mode 100644
index 23fe826..0000000
--- a/core/src/main/scala/io/prediction/core/BaseEvaluator.scala
+++ /dev/null
@@ -1,72 +0,0 @@
-/** Copyright 2015 TappingStone, Inc.
-  *
-  * Licensed under the Apache License, Version 2.0 (the "License");
-  * you may not use this file except in compliance with the License.
-  * You may obtain a copy of the License at
-  *
-  *     http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-package io.prediction.core
-
-import io.prediction.annotation.DeveloperApi
-import io.prediction.annotation.Experimental
-import io.prediction.controller.EngineParams
-import io.prediction.controller.Evaluation
-import io.prediction.workflow.WorkflowParams
-import org.apache.spark.SparkContext
-import org.apache.spark.rdd.RDD
-
-/** :: DeveloperApi ::
-  * Base class of all evaluator controller classes
-  *
-  * @tparam EI Evaluation information class
-  * @tparam Q Query class
-  * @tparam P Predicted result class
-  * @tparam A Actual result class
-  * @tparam ER Evaluation result class
-  */
-@DeveloperApi
-abstract class BaseEvaluator[EI, Q, P, A, ER <: BaseEvaluatorResult]
-  extends AbstractDoer {
-  /** :: DeveloperApi ::
-    * Engine developers should not use this directly. This is called by
-    * evaluation workflow to perform evaluation.
-    *
-    * @param sc Spark context
-    * @param evaluation Evaluation to run
-    * @param engineEvalDataSet Sets of engine parameters and data for evaluation
-    * @param params Evaluation workflow parameters
-    * @return Evaluation result
-    */
-  @DeveloperApi
-  def evaluateBase(
-    sc: SparkContext,
-    evaluation: Evaluation,
-    engineEvalDataSet: Seq[(EngineParams, Seq[(EI, RDD[(Q, P, A)])])],
-    params: WorkflowParams): ER
-}
-
-/** Base trait of evaluator result */
-trait BaseEvaluatorResult extends Serializable {
-  /** A short description of the result */
-  def toOneLiner(): String = ""
-
-  /** HTML portion of the rendered evaluator results */
-  def toHTML(): String = ""
-
-  /** JSON portion of the rendered evaluator results */
-  def toJSON(): String = ""
-
-  /** :: Experimental ::
-    * Indicates if this result is inserted into the database
-    */
-  @Experimental
-  val noSave: Boolean = false
-}
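
A sketch of a concrete result type built on this trait (AccuracyResult is a hypothetical name):

    import io.prediction.core.BaseEvaluatorResult

    // A hypothetical result carrying one score per engine-params candidate.
    case class AccuracyResult(scores: Seq[(String, Double)]) extends BaseEvaluatorResult {
      override def toOneLiner(): String =
        scores.map { case (n, s) => f"$n: $s%.4f" }.mkString(", ")

      override def toJSON(): String =
        scores.map { case (n, s) => s"""{"params":"$n","score":$s}""" }
          .mkString("[", ",", "]")

      // Keep exploratory runs out of the metadata store.
      override val noSave: Boolean = true
    }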

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/core/src/main/scala/io/prediction/core/BasePreparator.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/io/prediction/core/BasePreparator.scala b/core/src/main/scala/io/prediction/core/BasePreparator.scala
deleted file mode 100644
index d6d0e45..0000000
--- a/core/src/main/scala/io/prediction/core/BasePreparator.scala
+++ /dev/null
@@ -1,42 +0,0 @@
-/** Copyright 2015 TappingStone, Inc.
-  *
-  * Licensed under the Apache License, Version 2.0 (the "License");
-  * you may not use this file except in compliance with the License.
-  * You may obtain a copy of the License at
-  *
-  *     http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-package io.prediction.core
-
-import io.prediction.annotation.DeveloperApi
-import org.apache.spark.SparkContext
-
-/** :: DeveloperApi ::
-  * Base class of all preparator controller classes
-  *
-  * Dev note: Probably will add an extra parameter for ad hoc JSON formatter
-  *
-  * @tparam TD Training data class
-  * @tparam PD Prepared data class
-  */
-@DeveloperApi
-abstract class BasePreparator[TD, PD]
-  extends AbstractDoer {
-  /** :: DeveloperApi ::
-    * Engine developers should not use this directly. This is called by the
-    * training workflow to prepare data before handing it over to the algorithm.
-    *
-    * @param sc Spark context
-    * @param td Training data
-    * @return Prepared data
-    */
-  @DeveloperApi
-  def prepareBase(sc: SparkContext, td: TD): PD
-}
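
A sketch of a concrete preparator, assuming the 0.9.x PPreparator API where prepare backs prepareBase (RawData, CleanData, and MyPreparator are hypothetical):

    import io.prediction.controller.PPreparator
    import org.apache.spark.SparkContext
    import org.apache.spark.rdd.RDD

    case class RawData(ratings: RDD[(String, String, Double)])
    case class CleanData(ratings: RDD[(String, String, Double)])

    // Drop non-positive ratings before the algorithm sees them.
    class MyPreparator extends PPreparator[RawData, CleanData] {
      override def prepare(sc: SparkContext, td: RawData): CleanData =
        CleanData(td.ratings.filter(_._3 > 0))
    }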

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/core/src/main/scala/io/prediction/core/BaseServing.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/io/prediction/core/BaseServing.scala b/core/src/main/scala/io/prediction/core/BaseServing.scala
deleted file mode 100644
index d8bde9e..0000000
--- a/core/src/main/scala/io/prediction/core/BaseServing.scala
+++ /dev/null
@@ -1,51 +0,0 @@
-/** Copyright 2015 TappingStone, Inc.
-  *
-  * Licensed under the Apache License, Version 2.0 (the "License");
-  * you may not use this file except in compliance with the License.
-  * You may obtain a copy of the License at
-  *
-  *     http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-package io.prediction.core
-
-import io.prediction.annotation.DeveloperApi
-import io.prediction.annotation.Experimental
-
-/** :: DeveloperApi ::
-  * Base class of all serving controller classes
-  *
-  * @tparam Q Query class
-  * @tparam P Predicted result class
-  */
-@DeveloperApi
-abstract class BaseServing[Q, P]
-  extends AbstractDoer {
-  /** :: Experimental ::
-    * Engine developers should not use this directly. This is called by serving
-    * layer to supplement the query before sending it to the algorithms.
-    *
-    * @param q Query
-    * @return A supplemented query
-    */
-  @Experimental
-  def supplementBase(q: Q): Q
-
-  /** :: DeveloperApi ::
-    * Engine developers should not use this directly. This is called by serving
-    * layer to combine multiple predicted results from multiple algorithms, and
-    * custom business logic before serving to the end user.
-    *
-    * @param q Query
-    * @param ps List of predicted results
-    * @return A single predicted result
-    */
-  @DeveloperApi
-  def serveBase(q: Q, ps: Seq[P]): P
-}
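
A sketch of a concrete serving component, assuming the 0.9.x LServing API where serve backs serveBase (all names hypothetical):

    import io.prediction.controller.LServing

    case class ServeQuery(user: String)
    case class Prediction(score: Double)

    // Combine results from multiple algorithms by averaging their scores.
    class AverageServing extends LServing[ServeQuery, Prediction] {
      override def serve(query: ServeQuery, predictions: Seq[Prediction]): Prediction =
        Prediction(predictions.map(_.score).sum / predictions.size)
    }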

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/core/src/main/scala/io/prediction/core/package.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/io/prediction/core/package.scala b/core/src/main/scala/io/prediction/core/package.scala
deleted file mode 100644
index c7586c7..0000000
--- a/core/src/main/scala/io/prediction/core/package.scala
+++ /dev/null
@@ -1,21 +0,0 @@
-/** Copyright 2015 TappingStone, Inc.
-  *
-  * Licensed under the Apache License, Version 2.0 (the "License");
-  * you may not use this file except in compliance with the License.
-  * You may obtain a copy of the License at
-  *
-  *     http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-package io.prediction
-
-/** Core base classes of PredictionIO controller components. Engine developers
-  * should not use these directly.
-  */
-package object core {}

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/core/src/main/scala/io/prediction/package.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/io/prediction/package.scala b/core/src/main/scala/io/prediction/package.scala
deleted file mode 100644
index 3e3cc80..0000000
--- a/core/src/main/scala/io/prediction/package.scala
+++ /dev/null
@@ -1,19 +0,0 @@
-/** Copyright 2015 TappingStone, Inc.
-  *
-  * Licensed under the Apache License, Version 2.0 (the "License");
-  * you may not use this file except in compliance with the License.
-  * You may obtain a copy of the License at
-  *
-  *     http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-package io
-
-/** PredictionIO Scala API */
-package object prediction {}

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/core/src/main/scala/io/prediction/workflow/CoreWorkflow.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/io/prediction/workflow/CoreWorkflow.scala b/core/src/main/scala/io/prediction/workflow/CoreWorkflow.scala
deleted file mode 100644
index ad93b1a..0000000
--- a/core/src/main/scala/io/prediction/workflow/CoreWorkflow.scala
+++ /dev/null
@@ -1,163 +0,0 @@
-/** Copyright 2015 TappingStone, Inc.
-  *
-  * Licensed under the Apache License, Version 2.0 (the "License");
-  * you may not use this file except in compliance with the License.
-  * You may obtain a copy of the License at
-  *
-  *     http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-package io.prediction.workflow
-
-import io.prediction.controller.EngineParams
-import io.prediction.controller.Evaluation
-import io.prediction.core.BaseEngine
-import io.prediction.core.BaseEvaluator
-import io.prediction.core.BaseEvaluatorResult
-import io.prediction.data.storage.EngineInstance
-import io.prediction.data.storage.EvaluationInstance
-import io.prediction.data.storage.Model
-import io.prediction.data.storage.Storage
-
-import com.github.nscala_time.time.Imports.DateTime
-import grizzled.slf4j.Logger
-
-import scala.language.existentials
-
-/** CoreWorkflow handles PredictionIO metadata and environment variables of
-  * training and evaluation.
-  */
-object CoreWorkflow {
-  @transient lazy val logger = Logger[this.type]
-  @transient lazy val engineInstances = Storage.getMetaDataEngineInstances
-  @transient lazy val evaluationInstances =
-    Storage.getMetaDataEvaluationInstances()
-
-  def runTrain[EI, Q, P, A](
-      engine: BaseEngine[EI, Q, P, A],
-      engineParams: EngineParams,
-      engineInstance: EngineInstance,
-      env: Map[String, String] = WorkflowUtils.pioEnvVars,
-      params: WorkflowParams = WorkflowParams()) {
-    logger.debug("Starting SparkContext")
-    val mode = "training"
-    WorkflowUtils.checkUpgrade(mode, engineInstance.engineFactory)
-
-    val batch = if (params.batch.nonEmpty) {
-      s"{engineInstance.engineFactory} (${params.batch}})"
-    } else {
-      engineInstance.engineFactory
-    }
-    val sc = WorkflowContext(
-      batch,
-      env,
-      params.sparkEnv,
-      mode.capitalize)
-
-    try {
-
-      val models: Seq[Any] = engine.train(
-        sc = sc,
-        engineParams = engineParams,
-        engineInstanceId = engineInstance.id,
-        params = params
-      )
-
-      val instanceId = Storage.getMetaDataEngineInstances
-
-      val kryo = KryoInstantiator.newKryoInjection
-
-      logger.info("Inserting persistent model")
-      Storage.getModelDataModels.insert(Model(
-        id = engineInstance.id,
-        models = kryo(models)))
-
-      logger.info("Updating engine instance")
-      val engineInstances = Storage.getMetaDataEngineInstances
-      engineInstances.update(engineInstance.copy(
-        status = "COMPLETED",
-        endTime = DateTime.now
-        ))
-
-      logger.info("Training completed successfully.")
-    } catch {
-      case e @(
-          _: StopAfterReadInterruption |
-          _: StopAfterPrepareInterruption) => {
-        logger.info(s"Training interrupted by $e.")
-      }
-    } finally {
-      logger.debug("Stopping SparkContext")
-      sc.stop()
-    }
-  }
-
-  def runEvaluation[EI, Q, P, A, R <: BaseEvaluatorResult](
-      evaluation: Evaluation,
-      engine: BaseEngine[EI, Q, P, A],
-      engineParamsList: Seq[EngineParams],
-      evaluationInstance: EvaluationInstance,
-      evaluator: BaseEvaluator[EI, Q, P, A, R],
-      env: Map[String, String] = WorkflowUtils.pioEnvVars,
-      params: WorkflowParams = WorkflowParams()) {
-    logger.info("runEvaluation started")
-    logger.debug("Start SparkContext")
-
-    val mode = "evaluation"
-
-    WorkflowUtils.checkUpgrade(mode, engine.getClass.getName)
-
-    val batch = if (params.batch.nonEmpty) {
-      s"{evaluation.getClass.getName} (${params.batch}})"
-    } else {
-      evaluation.getClass.getName
-    }
-    val sc = WorkflowContext(
-      batch,
-      env,
-      params.sparkEnv,
-      mode.capitalize)
-    val evaluationInstanceId = evaluationInstances.insert(evaluationInstance)
-
-    logger.info(s"Starting evaluation instance ID: $evaluationInstanceId")
-
-    val evaluatorResult: BaseEvaluatorResult = EvaluationWorkflow.runEvaluation(
-      sc,
-      evaluation,
-      engine,
-      engineParamsList,
-      evaluator,
-      params)
-
-    if (evaluatorResult.noSave) {
-      logger.info(s"This evaluation result is not inserted into database: $evaluatorResult")
-    } else {
-      val evaluatedEvaluationInstance = evaluationInstance.copy(
-        status = "EVALCOMPLETED",
-        id = evaluationInstanceId,
-        endTime = DateTime.now,
-        evaluatorResults = evaluatorResult.toOneLiner,
-        evaluatorResultsHTML = evaluatorResult.toHTML,
-        evaluatorResultsJSON = evaluatorResult.toJSON
-      )
-
-      logger.info(s"Updating evaluation instance with result: $evaluatorResult")
-
-      evaluationInstances.update(evaluatedEvaluationInstance)
-    }
-
-    logger.debug("Stop SparkContext")
-
-    sc.stop()
-
-    logger.info("runEvaluation completed")
-  }
-}
-
-
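
runTrain above persists trained models by pushing them through a Kryo Injection (see KryoInstantiator.newKryoInjection in CreateServer.scala below, which additionally pins a class loader). A minimal round-trip sketch using chill's default injection:

    import com.twitter.chill.KryoInjection

    // Injection[Any, Array[Byte]] backed by a default Kryo instance.
    val bytes: Array[Byte] = KryoInjection(Seq("model-a", Map("rank" -> 10)))

    // invert returns a scala.util.Try; Success(...) holds the restored value.
    val restored = KryoInjection.invert(bytes) // Success(List(model-a, Map(rank -> 10)))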

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/core/src/main/scala/io/prediction/workflow/CreateServer.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/io/prediction/workflow/CreateServer.scala b/core/src/main/scala/io/prediction/workflow/CreateServer.scala
deleted file mode 100644
index a664187..0000000
--- a/core/src/main/scala/io/prediction/workflow/CreateServer.scala
+++ /dev/null
@@ -1,737 +0,0 @@
-/** Copyright 2015 TappingStone, Inc.
-  *
-  * Licensed under the Apache License, Version 2.0 (the "License");
-  * you may not use this file except in compliance with the License.
-  * You may obtain a copy of the License at
-  *
-  *     http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-package io.prediction.workflow
-
-import java.io.PrintWriter
-import java.io.Serializable
-import java.io.StringWriter
-import java.util.concurrent.TimeUnit
-
-import akka.actor._
-import akka.event.Logging
-import akka.io.IO
-import akka.pattern.ask
-import akka.util.Timeout
-import com.github.nscala_time.time.Imports.DateTime
-import com.twitter.bijection.Injection
-import com.twitter.chill.KryoBase
-import com.twitter.chill.KryoInjection
-import com.twitter.chill.ScalaKryoInstantiator
-import com.typesafe.config.ConfigFactory
-import de.javakaffee.kryoserializers.SynchronizedCollectionsSerializer
-import grizzled.slf4j.Logging
-import io.prediction.authentication.KeyAuthentication
-import io.prediction.configuration.SSLConfiguration
-import io.prediction.controller.Engine
-import io.prediction.controller.Params
-import io.prediction.controller.Utils
-import io.prediction.controller.WithPrId
-import io.prediction.core.BaseAlgorithm
-import io.prediction.core.BaseServing
-import io.prediction.core.Doer
-import io.prediction.data.storage.EngineInstance
-import io.prediction.data.storage.EngineManifest
-import io.prediction.data.storage.Storage
-import io.prediction.workflow.JsonExtractorOption.JsonExtractorOption
-import org.json4s._
-import org.json4s.native.JsonMethods._
-import org.json4s.native.Serialization.write
-import spray.can.Http
-import spray.can.server.ServerSettings
-import spray.http.MediaTypes._
-import spray.http._
-import spray.httpx.Json4sSupport
-import spray.routing._
-import spray.routing.authentication.{UserPass, BasicAuth}
-
-import scala.concurrent.ExecutionContext.Implicits.global
-import scala.concurrent.Future
-import scala.concurrent.duration._
-import scala.concurrent.future
-import scala.language.existentials
-import scala.util.Failure
-import scala.util.Random
-import scala.util.Success
-import scalaj.http.HttpOptions
-
-class KryoInstantiator(classLoader: ClassLoader) extends ScalaKryoInstantiator {
-  override def newKryo(): KryoBase = {
-    val kryo = super.newKryo()
-    kryo.setClassLoader(classLoader)
-    SynchronizedCollectionsSerializer.registerSerializers(kryo)
-    kryo
-  }
-}
-
-object KryoInstantiator extends Serializable {
-  def newKryoInjection : Injection[Any, Array[Byte]] = {
-    val kryoInstantiator = new KryoInstantiator(getClass.getClassLoader)
-    KryoInjection.instance(kryoInstantiator)
-  }
-}
-
-case class ServerConfig(
-  batch: String = "",
-  engineInstanceId: String = "",
-  engineId: Option[String] = None,
-  engineVersion: Option[String] = None,
-  engineVariant: String = "",
-  env: Option[String] = None,
-  ip: String = "0.0.0.0",
-  port: Int = 8000,
-  feedback: Boolean = false,
-  eventServerIp: String = "0.0.0.0",
-  eventServerPort: Int = 7070,
-  accessKey: Option[String] = None,
-  logUrl: Option[String] = None,
-  logPrefix: Option[String] = None,
-  logFile: Option[String] = None,
-  verbose: Boolean = false,
-  debug: Boolean = false,
-  jsonExtractor: JsonExtractorOption = JsonExtractorOption.Both)
-
-case class StartServer()
-case class BindServer()
-case class StopServer()
-case class ReloadServer()
-case class UpgradeCheck()
-
-
-object CreateServer extends Logging {
-  val actorSystem = ActorSystem("pio-server")
-  val engineInstances = Storage.getMetaDataEngineInstances
-  val engineManifests = Storage.getMetaDataEngineManifests
-  val modeldata = Storage.getModelDataModels
-
-  def main(args: Array[String]): Unit = {
-    val parser = new scopt.OptionParser[ServerConfig]("CreateServer") {
-      opt[String]("batch") action { (x, c) =>
-        c.copy(batch = x)
-      } text("Batch label of the deployment.")
-      opt[String]("engineId") action { (x, c) =>
-        c.copy(engineId = Some(x))
-      } text("Engine ID.")
-      opt[String]("engineVersion") action { (x, c) =>
-        c.copy(engineVersion = Some(x))
-      } text("Engine version.")
-      opt[String]("engine-variant") required() action { (x, c) =>
-        c.copy(engineVariant = x)
-      } text("Engine variant JSON.")
-      opt[String]("ip") action { (x, c) =>
-        c.copy(ip = x)
-      }
-      opt[String]("env") action { (x, c) =>
-        c.copy(env = Some(x))
-      } text("Comma-separated list of environmental variables (in 'FOO=BAR' " +
-        "format) to pass to the Spark execution environment.")
-      opt[Int]("port") action { (x, c) =>
-        c.copy(port = x)
-      } text("Port to bind to (default: 8000).")
-      opt[String]("engineInstanceId") required() action { (x, c) =>
-        c.copy(engineInstanceId = x)
-      } text("Engine instance ID.")
-      opt[Unit]("feedback") action { (_, c) =>
-        c.copy(feedback = true)
-      } text("Enable feedback loop to event server.")
-      opt[String]("event-server-ip") action { (x, c) =>
-        c.copy(eventServerIp = x)
-      }
-      opt[Int]("event-server-port") action { (x, c) =>
-        c.copy(eventServerPort = x)
-      } text("Event server port. Default: 7070")
-      opt[String]("accesskey") action { (x, c) =>
-        c.copy(accessKey = Some(x))
-      } text("Event server access key.")
-      opt[String]("log-url") action { (x, c) =>
-        c.copy(logUrl = Some(x))
-      }
-      opt[String]("log-prefix") action { (x, c) =>
-        c.copy(logPrefix = Some(x))
-      }
-      opt[String]("log-file") action { (x, c) =>
-        c.copy(logFile = Some(x))
-      }
-      opt[Unit]("verbose") action { (x, c) =>
-        c.copy(verbose = true)
-      } text("Enable verbose output.")
-      opt[Unit]("debug") action { (x, c) =>
-        c.copy(debug = true)
-      } text("Enable debug output.")
-      opt[String]("json-extractor") action { (x, c) =>
-        c.copy(jsonExtractor = JsonExtractorOption.withName(x))
-      }
-    }
-
-    parser.parse(args, ServerConfig()) map { sc =>
-      WorkflowUtils.modifyLogging(sc.verbose)
-      engineInstances.get(sc.engineInstanceId) map { engineInstance =>
-        val engineId = sc.engineId.getOrElse(engineInstance.engineId)
-        val engineVersion = sc.engineVersion.getOrElse(
-          engineInstance.engineVersion)
-        engineManifests.get(engineId, engineVersion) map { manifest =>
-          val engineFactoryName = engineInstance.engineFactory
-          val upgrade = actorSystem.actorOf(Props(
-            classOf[UpgradeActor],
-            engineFactoryName))
-          actorSystem.scheduler.schedule(
-            0.seconds,
-            1.days,
-            upgrade,
-            UpgradeCheck())
-          val master = actorSystem.actorOf(Props(
-            classOf[MasterActor],
-            sc,
-            engineInstance,
-            engineFactoryName,
-            manifest),
-          "master")
-          implicit val timeout = Timeout(5.seconds)
-          master ? StartServer()
-          actorSystem.awaitTermination
-        } getOrElse {
-          error(s"Invalid engine ID or version. Aborting server.")
-        }
-      } getOrElse {
-        error(s"Invalid engine instance ID. Aborting server.")
-      }
-    }
-  }
-
-  def createServerActorWithEngine[TD, EIN, PD, Q, P, A](
-    sc: ServerConfig,
-    engineInstance: EngineInstance,
-    engine: Engine[TD, EIN, PD, Q, P, A],
-    engineLanguage: EngineLanguage.Value,
-    manifest: EngineManifest): ActorRef = {
-
-    val engineParams = engine.engineInstanceToEngineParams(engineInstance, sc.jsonExtractor)
-
-    val kryo = KryoInstantiator.newKryoInjection
-
-    val modelsFromEngineInstance =
-      kryo.invert(modeldata.get(engineInstance.id).get.models).get.
-      asInstanceOf[Seq[Any]]
-
-    val batch = if (engineInstance.batch.nonEmpty) {
-      s"${engineInstance.engineFactory} (${engineInstance.batch})"
-    } else {
-      engineInstance.engineFactory
-    }
-
-    val sparkContext = WorkflowContext(
-      batch = batch,
-      executorEnv = engineInstance.env,
-      mode = "Serving",
-      sparkEnv = engineInstance.sparkConf)
-
-    val models = engine.prepareDeploy(
-      sparkContext,
-      engineParams,
-      engineInstance.id,
-      modelsFromEngineInstance,
-      params = WorkflowParams()
-    )
-
-    val algorithms = engineParams.algorithmParamsList.map { case (n, p) =>
-      Doer(engine.algorithmClassMap(n), p)
-    }
-
-    val servingParamsWithName = engineParams.servingParams
-
-    val serving = Doer(engine.servingClassMap(servingParamsWithName._1),
-      servingParamsWithName._2)
-
-    actorSystem.actorOf(
-      Props(
-        classOf[ServerActor[Q, P]],
-        sc,
-        engineInstance,
-        engine,
-        engineLanguage,
-        manifest,
-        engineParams.dataSourceParams._2,
-        engineParams.preparatorParams._2,
-        algorithms,
-        engineParams.algorithmParamsList.map(_._2),
-        models,
-        serving,
-        engineParams.servingParams._2))
-  }
-}
-
-class UpgradeActor(engineClass: String) extends Actor {
-  val log = Logging(context.system, this)
-  implicit val system = context.system
-  def receive: Actor.Receive = {
-    case x: UpgradeCheck =>
-      WorkflowUtils.checkUpgrade("deployment", engineClass)
-  }
-}
-
-class MasterActor (
-    sc: ServerConfig,
-    engineInstance: EngineInstance,
-    engineFactoryName: String,
-    manifest: EngineManifest) extends Actor with SSLConfiguration with KeyAuthentication {
-  val log = Logging(context.system, this)
-  implicit val system = context.system
-  var sprayHttpListener: Option[ActorRef] = None
-  var currentServerActor: Option[ActorRef] = None
-  var retry = 3
-
-  def undeploy(ip: String, port: Int): Unit = {
-    val serverUrl = s"https://${ip}:${port}"
-    log.info(
-      s"Undeploying any existing engine instance at $serverUrl")
-    try {
-      val code = scalaj.http.Http(s"$serverUrl/stop")
-        .option(HttpOptions.allowUnsafeSSL)
-        .param(ServerKey.param, ServerKey.get)
-        .method("POST").asString.code
-      code match {
-        case 200 => Unit
-        case 404 => log.error(
-          s"Another process is using $serverUrl. Unable to undeploy.")
-        case _ => log.error(
-          s"Another process is using $serverUrl, or an existing " +
-          s"engine server is not responding properly (HTTP $code). " +
-          "Unable to undeploy.")
-      }
-    } catch {
-      case e: java.net.ConnectException =>
-        log.warning(s"Nothing at $serverUrl")
-      case _: Throwable =>
-        log.error("Another process might be occupying " +
-          s"$ip:$port. Unable to undeploy.")
-    }
-  }
-
-  def receive: Actor.Receive = {
-    case x: StartServer =>
-      val actor = createServerActor(
-        sc,
-        engineInstance,
-        engineFactoryName,
-        manifest)
-      currentServerActor = Some(actor)
-      undeploy(sc.ip, sc.port)
-      self ! BindServer()
-    case x: BindServer =>
-      currentServerActor map { actor =>
-        val settings = ServerSettings(system)
-        IO(Http) ! Http.Bind(
-          actor,
-          interface = sc.ip,
-          port = sc.port,
-          settings = Some(settings.copy(sslEncryption = true)))
-      } getOrElse {
-        log.error("Cannot bind a non-existing server backend.")
-      }
-    case x: StopServer =>
-      log.info(s"Stop server command received.")
-      sprayHttpListener.map { l =>
-        log.info("Server is shutting down.")
-        l ! Http.Unbind(5.seconds)
-        system.shutdown
-      } getOrElse {
-        log.warning("No active server is running.")
-      }
-    case x: ReloadServer =>
-      log.info("Reload server command received.")
-      val latestEngineInstance =
-        CreateServer.engineInstances.getLatestCompleted(
-          manifest.id,
-          manifest.version,
-          engineInstance.engineVariant)
-      latestEngineInstance map { lr =>
-        val actor = createServerActor(sc, lr, engineFactoryName, manifest)
-        sprayHttpListener.map { l =>
-          l ! Http.Unbind(5.seconds)
-          val settings = ServerSettings(system)
-          IO(Http) ! Http.Bind(
-            actor,
-            interface = sc.ip,
-            port = sc.port,
-            settings = Some(settings.copy(sslEncryption = true)))
-          currentServerActor.get ! Kill
-          currentServerActor = Some(actor)
-        } getOrElse {
-          log.warning("No active server is running. Abort reloading.")
-        }
-      } getOrElse {
-        log.warning(
-          s"No latest completed engine instance for ${manifest.id} " +
-          s"${manifest.version}. Abort reloading.")
-      }
-    case x: Http.Bound =>
-      val serverUrl = s"https://${sc.ip}:${sc.port}"
-      log.info(s"Engine is deployed and running. Engine API is live at ${serverUrl}.")
-      sprayHttpListener = Some(sender)
-    case x: Http.CommandFailed =>
-      if (retry > 0) {
-        retry -= 1
-        log.error(s"Bind failed. Retrying... ($retry more trial(s))")
-        context.system.scheduler.scheduleOnce(1.seconds) {
-          self ! BindServer()
-        }
-      } else {
-        log.error("Bind failed. Shutting down.")
-        system.shutdown
-      }
-  }
-
-  def createServerActor(
-      sc: ServerConfig,
-      engineInstance: EngineInstance,
-      engineFactoryName: String,
-      manifest: EngineManifest): ActorRef = {
-    val (engineLanguage, engineFactory) =
-      WorkflowUtils.getEngine(engineFactoryName, getClass.getClassLoader)
-    val engine = engineFactory()
-
-    // EngineFactory returns a base engine, which may not be deployable.
-    if (!engine.isInstanceOf[Engine[_,_,_,_,_,_]]) {
-      throw new NoSuchMethodException(s"Engine $engine is not deployable")
-    }
-
-    val deployableEngine = engine.asInstanceOf[Engine[_,_,_,_,_,_]]
-
-    CreateServer.createServerActorWithEngine(
-      sc,
-      engineInstance,
-      // engine,
-      deployableEngine,
-      engineLanguage,
-      manifest)
-  }
-}
-
-class ServerActor[Q, P](
-    val args: ServerConfig,
-    val engineInstance: EngineInstance,
-    val engine: Engine[_, _, _, Q, P, _],
-    val engineLanguage: EngineLanguage.Value,
-    val manifest: EngineManifest,
-    val dataSourceParams: Params,
-    val preparatorParams: Params,
-    val algorithms: Seq[BaseAlgorithm[_, _, Q, P]],
-    val algorithmsParams: Seq[Params],
-    val models: Seq[Any],
-    val serving: BaseServing[Q, P],
-    val servingParams: Params) extends Actor with HttpService with KeyAuthentication {
-  val serverStartTime = DateTime.now
-  val log = Logging(context.system, this)
-
-  var requestCount: Int = 0
-  var avgServingSec: Double = 0.0
-  var lastServingSec: Double = 0.0
-
-  /** The following is required by HttpService */
-  def actorRefFactory: ActorContext = context
-
-  implicit val timeout = Timeout(5, TimeUnit.SECONDS)
-  val pluginsActorRef =
-    context.actorOf(Props(classOf[PluginsActor], args.engineVariant), "PluginsActor")
-  val pluginContext = EngineServerPluginContext(log, args.engineVariant)
-
-  def receive: Actor.Receive = runRoute(myRoute)
-
-  val feedbackEnabled = if (args.feedback) {
-    if (args.accessKey.isEmpty) {
-      log.error("Feedback loop cannot be enabled because accessKey is empty.")
-      false
-    } else {
-      true
-    }
-  } else false
-
-  def remoteLog(logUrl: String, logPrefix: String, message: String): Unit = {
-    implicit val formats = Utils.json4sDefaultFormats
-    try {
-      scalaj.http.Http(logUrl).postData(
-        logPrefix + write(Map(
-          "engineInstance" -> engineInstance,
-          "message" -> message))).asString
-    } catch {
-      case e: Throwable =>
-        log.error(s"Unable to send remote log: ${e.getMessage}")
-    }
-  }
-
-  def getStackTraceString(e: Throwable): String = {
-    val writer = new StringWriter()
-    val printWriter = new PrintWriter(writer)
-    e.printStackTrace(printWriter)
-    writer.toString
-  }
-
-  val myRoute =
-    path("") {
-      get {
-        respondWithMediaType(`text/html`) {
-          detach() {
-            complete {
-              html.index(
-                args,
-                manifest,
-                engineInstance,
-                algorithms.map(_.toString),
-                algorithmsParams.map(_.toString),
-                models.map(_.toString),
-                dataSourceParams.toString,
-                preparatorParams.toString,
-                servingParams.toString,
-                serverStartTime,
-                feedbackEnabled,
-                args.eventServerIp,
-                args.eventServerPort,
-                requestCount,
-                avgServingSec,
-                lastServingSec
-              ).toString
-            }
-          }
-        }
-      }
-    } ~
-    path("queries.json") {
-      post {
-        detach() {
-          entity(as[String]) { queryString =>
-            try {
-              val servingStartTime = DateTime.now
-              val jsonExtractorOption = args.jsonExtractor
-              val queryTime = DateTime.now
-              // Extract Query from Json
-              val query = JsonExtractor.extract(
-                jsonExtractorOption,
-                queryString,
-                algorithms.head.queryClass,
-                algorithms.head.querySerializer,
-                algorithms.head.gsonTypeAdapterFactories
-              )
-              val queryJValue = JsonExtractor.toJValue(
-                jsonExtractorOption,
-                query,
-                algorithms.head.querySerializer,
-                algorithms.head.gsonTypeAdapterFactories)
-              // Deploy logic. First call Serving.supplement, then Algo.predict,
-              // finally Serving.serve.
-              val supplementedQuery = serving.supplementBase(query)
-              // TODO: Parallelize the following.
-              val predictions = algorithms.zipWithIndex.map { case (a, ai) =>
-                a.predictBase(models(ai), supplementedQuery)
-              }
-              // Note that, by design, Serving.serve is called with the
-              // *original* query.
-              val prediction = serving.serveBase(query, predictions)
-              val predictionJValue = JsonExtractor.toJValue(
-                jsonExtractorOption,
-                prediction,
-                algorithms.head.querySerializer,
-                algorithms.head.gsonTypeAdapterFactories)
-              /** Handle feedback to Event Server
-                * Send the following back to the Event Server
-                * - appId
-                * - engineInstanceId
-                * - query
-                * - prediction
-                * - prId
-                */
-              val result = if (feedbackEnabled) {
-                implicit val formats =
-                  algorithms.headOption map { alg =>
-                    alg.querySerializer
-                  } getOrElse {
-                    Utils.json4sDefaultFormats
-                  }
-                // val genPrId = Random.alphanumeric.take(64).mkString
-                def genPrId: String = Random.alphanumeric.take(64).mkString
-                val newPrId = prediction match {
-                  case id: WithPrId =>
-                    val org = id.prId
-                    if (org.isEmpty) genPrId else org
-                  case _ => genPrId
-                }
-
-                // also save the Query's prId as the prId of this pio_pr predict event
-                val queryPrId =
-                  query match {
-                    case id: WithPrId =>
-                      Map("prId" -> id.prId)
-                    case _ =>
-                      Map()
-                  }
-                val data = Map(
-                  // "appId" -> dataSourceParams.asInstanceOf[ParamsWithAppId].appId,
-                  "event" -> "predict",
-                  "eventTime" -> queryTime.toString(),
-                  "entityType" -> "pio_pr", // prediction result
-                  "entityId" -> newPrId,
-                  "properties" -> Map(
-                    "engineInstanceId" -> engineInstance.id,
-                    "query" -> query,
-                    "prediction" -> prediction)) ++ queryPrId
-                // At this point args.accessKey should be Some(String).
-                val accessKey = args.accessKey.getOrElse("")
-                val f: Future[Int] = future {
-                  scalaj.http.Http(
-                    s"http://${args.eventServerIp}:${args.eventServerPort}/" +
-                    s"events.json?accessKey=$accessKey").postData(
-                    write(data)).header(
-                    "content-type", "application/json").asString.code
-                }
-                f onComplete {
-                  case Success(code) => {
-                    if (code != 201) {
-                      log.error(s"Feedback event failed. Status code: $code."
-                        + s"Data: ${write(data)}.")
-                    }
-                  }
-                  case Failure(t) => {
-                    log.error(s"Feedback event failed: ${t.getMessage}") }
-                }
-                // overwrite prId in predictedResult
-                // - if it is WithPrId,
-                //   then overwrite with new prId
-                // - if it is not WithPrId, no prId injection
-                if (prediction.isInstanceOf[WithPrId]) {
-                  predictionJValue merge parse(s"""{"prId" : "$newPrId"}""")
-                } else {
-                  predictionJValue
-                }
-              } else predictionJValue
-
-              val pluginResult =
-                pluginContext.outputBlockers.values.foldLeft(result) { case (r, p) =>
-                  p.process(engineInstance, queryJValue, r, pluginContext)
-                }
-
-              // Bookkeeping
-              val servingEndTime = DateTime.now
-              lastServingSec =
-                (servingEndTime.getMillis - servingStartTime.getMillis) / 1000.0
-              avgServingSec =
-                ((avgServingSec * requestCount) + lastServingSec) /
-                (requestCount + 1)
-              requestCount += 1
-
-              respondWithMediaType(`application/json`) {
-                complete(compact(render(pluginResult)))
-              }
-            } catch {
-              case e: MappingException =>
-                log.error(
-                  s"Query '$queryString' is invalid. Reason: ${e.getMessage}")
-                args.logUrl map { url =>
-                  remoteLog(
-                    url,
-                    args.logPrefix.getOrElse(""),
-                    s"Query:\n$queryString\n\nStack Trace:\n" +
-                      s"${getStackTraceString(e)}\n\n")
-                  }
-                complete(StatusCodes.BadRequest, e.getMessage)
-              case e: Throwable =>
-                val msg = s"Query:\n$queryString\n\nStack Trace:\n" +
-                  s"${getStackTraceString(e)}\n\n"
-                log.error(msg)
-                args.logUrl map { url =>
-                  remoteLog(
-                    url,
-                    args.logPrefix.getOrElse(""),
-                    msg)
-                  }
-                complete(StatusCodes.InternalServerError, msg)
-            }
-          }
-        }
-      }
-    } ~
-    path("reload") {
-      authenticate(withAccessKeyFromFile) { request =>
-        post {
-          complete {
-            context.actorSelection("/user/master") ! ReloadServer()
-            "Reloading..."
-          }
-        }
-      }
-    } ~
-    path("stop") {
-      authenticate(withAccessKeyFromFile) { request =>
-        post {
-          complete {
-            context.system.scheduler.scheduleOnce(1.seconds) {
-              context.actorSelection("/user/master") ! StopServer()
-            }
-            "Shutting down..."
-          }
-        }
-      }
-    } ~
-    pathPrefix("assets") {
-      getFromResourceDirectory("assets")
-    } ~
-    path("plugins.json") {
-      import EngineServerJson4sSupport._
-      get {
-        respondWithMediaType(MediaTypes.`application/json`) {
-          complete {
-            Map("plugins" -> Map(
-              "outputblockers" -> pluginContext.outputBlockers.map { case (n, p) =>
-                n -> Map(
-                  "name" -> p.pluginName,
-                  "description" -> p.pluginDescription,
-                  "class" -> p.getClass.getName,
-                  "params" -> pluginContext.pluginParams(p.pluginName))
-              },
-              "outputsniffers" -> pluginContext.outputSniffers.map { case (n, p) =>
-                n -> Map(
-                  "name" -> p.pluginName,
-                  "description" -> p.pluginDescription,
-                  "class" -> p.getClass.getName,
-                  "params" -> pluginContext.pluginParams(p.pluginName))
-              }
-            ))
-          }
-        }
-      }
-    } ~
-    path("plugins" / Segments) { segments =>
-      import EngineServerJson4sSupport._
-      get {
-        respondWithMediaType(MediaTypes.`application/json`) {
-          complete {
-            val pluginArgs = segments.drop(2)
-            val pluginType = segments(0)
-            val pluginName = segments(1)
-            pluginType match {
-              case EngineServerPlugin.outputSniffer =>
-                pluginsActorRef ? PluginsActor.HandleREST(
-                  pluginName = pluginName,
-                  pluginArgs = pluginArgs) map {
-                  _.asInstanceOf[String]
-                }
-            }
-          }
-        }
-      }
-    }
-}
-
-object EngineServerJson4sSupport extends Json4sSupport {
-  implicit def json4sFormats: Formats = DefaultFormats
-}
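
The queries.json route in the deleted ServerActor above boils down to a
three-step pattern: Serving.supplement on the incoming query, predictBase for
every algorithm/model pair, then Serving.serve with the original query. A
minimal, self-contained sketch of that pattern (plain Scala with illustrative
trait and method names, not the actual PredictionIO API):

    trait Serving[Q, P] {
      def supplement(q: Q): Q = q
      def serve(q: Q, predictions: Seq[P]): P
    }

    trait Algorithm[M, Q, P] {
      def predict(model: M, q: Q): P
    }

    object ServeFlowSketch {
      def serveOne[M, Q, P](
          serving: Serving[Q, P],
          algorithms: Seq[Algorithm[M, Q, P]],
          models: Seq[M],
          query: Q): P = {
        // 1. Let the serving layer supplement the query with extra context.
        val supplemented = serving.supplement(query)
        // 2. Every algorithm predicts against its own model, using the
        //    supplemented query.
        val predictions = algorithms.zip(models).map { case (a, m) =>
          a.predict(m, supplemented)
        }
        // 3. By design, serve() combines the predictions with the *original*
        //    query, not the supplemented one.
        serving.serve(query, predictions)
      }
    }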


[05/34] incubator-predictionio git commit: rename all except examples

Posted by do...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/e2/src/test/scala/io/prediction/e2/engine/CategoricalNaiveBayesTest.scala
----------------------------------------------------------------------
diff --git a/e2/src/test/scala/io/prediction/e2/engine/CategoricalNaiveBayesTest.scala b/e2/src/test/scala/io/prediction/e2/engine/CategoricalNaiveBayesTest.scala
deleted file mode 100644
index 2e3eadd..0000000
--- a/e2/src/test/scala/io/prediction/e2/engine/CategoricalNaiveBayesTest.scala
+++ /dev/null
@@ -1,132 +0,0 @@
-/** Copyright 2015 TappingStone, Inc.
-  *
-  * Licensed under the Apache License, Version 2.0 (the "License");
-  * you may not use this file except in compliance with the License.
-  * You may obtain a copy of the License at
-  *
-  * http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-package io.prediction.e2.engine
-
-import io.prediction.e2.fixture.{NaiveBayesFixture, SharedSparkContext}
-import org.scalatest.{Matchers, FlatSpec}
-
-import scala.language.reflectiveCalls
-
-class CategoricalNaiveBayesTest extends FlatSpec with Matchers
-with SharedSparkContext with NaiveBayesFixture {
-  val Tolerance = .0001
-  val labeledPoints = fruit.labeledPoints
-
-  "Model" should "have log priors and log likelihoods" in {
-    val labeledPointsRdd = sc.parallelize(labeledPoints)
-    val model = CategoricalNaiveBayes.train(labeledPointsRdd)
-
-    model.priors(fruit.Banana) should be(-.7885 +- Tolerance)
-    model.priors(fruit.Orange) should be(-1.7047 +- Tolerance)
-    model.priors(fruit.OtherFruit) should be(-1.0116 +- Tolerance)
-
-    model.likelihoods(fruit.Banana)(0)(fruit.Long) should
-      be(-.2231 +- Tolerance)
-    model.likelihoods(fruit.Banana)(0)(fruit.NotLong) should
-      be(-1.6094 +- Tolerance)
-    model.likelihoods(fruit.Banana)(1)(fruit.Sweet) should
-      be(-.2231 +- Tolerance)
-    model.likelihoods(fruit.Banana)(1)(fruit.NotSweet) should
-      be(-1.6094 +- Tolerance)
-    model.likelihoods(fruit.Banana)(2)(fruit.Yellow) should
-      be(-.2231 +- Tolerance)
-    model.likelihoods(fruit.Banana)(2)(fruit.NotYellow) should
-      be(-1.6094 +- Tolerance)
-
-    model.likelihoods(fruit.Orange)(0) should not contain key(fruit.Long)
-    model.likelihoods(fruit.Orange)(0)(fruit.NotLong) should be(0.0)
-    model.likelihoods(fruit.Orange)(1)(fruit.Sweet) should
-      be(-.6931 +- Tolerance)
-    model.likelihoods(fruit.Orange)(1)(fruit.NotSweet) should
-      be(-.6931 +- Tolerance)
-    model.likelihoods(fruit.Orange)(2)(fruit.NotYellow) should be(0.0)
-    model.likelihoods(fruit.Orange)(2) should not contain key(fruit.Yellow)
-
-    model.likelihoods(fruit.OtherFruit)(0)(fruit.Long) should
-      be(-.6931 +- Tolerance)
-    model.likelihoods(fruit.OtherFruit)(0)(fruit.NotLong) should
-      be(-.6931 +- Tolerance)
-    model.likelihoods(fruit.OtherFruit)(1)(fruit.Sweet) should
-      be(-.2877 +- Tolerance)
-    model.likelihoods(fruit.OtherFruit)(1)(fruit.NotSweet) should
-      be(-1.3863 +- Tolerance)
-    model.likelihoods(fruit.OtherFruit)(2)(fruit.Yellow) should
-      be(-1.3863 +- Tolerance)
-    model.likelihoods(fruit.OtherFruit)(2)(fruit.NotYellow) should
-      be(-.2877 +- Tolerance)
-  }
-
-  "Model's log score" should "be the log score of the given point" in {
-    val labeledPointsRdd = sc.parallelize(labeledPoints)
-    val model = CategoricalNaiveBayes.train(labeledPointsRdd)
-
-    val score = model.logScore(LabeledPoint(
-      fruit.Banana,
-      Array(fruit.Long, fruit.NotSweet, fruit.NotYellow))
-    )
-
-    score should not be None
-    score.get should be(-4.2304 +- Tolerance)
-  }
-
-  it should "be negative infinity for a point with a non-existing feature" in {
-    val labeledPointsRdd = sc.parallelize(labeledPoints)
-    val model = CategoricalNaiveBayes.train(labeledPointsRdd)
-
-    val score = model.logScore(LabeledPoint(
-      fruit.Banana,
-      Array(fruit.Long, fruit.NotSweet, "Not Exist"))
-    )
-
-    score should not be None
-    score.get should be(Double.NegativeInfinity)
-  }
-
-  it should "be none for a point with a non-existing label" in {
-    val labeledPointsRdd = sc.parallelize(labeledPoints)
-    val model = CategoricalNaiveBayes.train(labeledPointsRdd)
-
-    val score = model.logScore(LabeledPoint(
-      "Not Exist",
-      Array(fruit.Long, fruit.NotSweet, fruit.Yellow))
-    )
-
-    score should be(None)
-  }
-
-  it should "use the provided default likelihood function" in {
-    val labeledPointsRdd = sc.parallelize(labeledPoints)
-    val model = CategoricalNaiveBayes.train(labeledPointsRdd)
-
-    val score = model.logScore(
-      LabeledPoint(
-        fruit.Banana,
-        Array(fruit.Long, fruit.NotSweet, "Not Exist")
-      ),
-      ls => ls.min - math.log(2)
-    )
-
-    score should not be None
-    score.get should be(-4.9236 +- Tolerance)
-  }
-
-  "Model predict" should "return the correct label" in {
-    val labeledPointsRdd = sc.parallelize(labeledPoints)
-    val model = CategoricalNaiveBayes.train(labeledPointsRdd)
-
-    val label = model.predict(Array(fruit.Long, fruit.Sweet, fruit.Yellow))
-    label should be(fruit.Banana)
-  }
-}

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/e2/src/test/scala/io/prediction/e2/engine/MarkovChainTest.scala
----------------------------------------------------------------------
diff --git a/e2/src/test/scala/io/prediction/e2/engine/MarkovChainTest.scala b/e2/src/test/scala/io/prediction/e2/engine/MarkovChainTest.scala
deleted file mode 100644
index a33a30a..0000000
--- a/e2/src/test/scala/io/prediction/e2/engine/MarkovChainTest.scala
+++ /dev/null
@@ -1,49 +0,0 @@
-package io.prediction.e2.engine
-
-import io.prediction.e2.fixture.{MarkovChainFixture, SharedSparkContext}
-import org.apache.spark.mllib.linalg.Vectors
-import org.apache.spark.mllib.linalg.distributed.CoordinateMatrix
-import org.scalatest.{FlatSpec, Matchers}
-
-import scala.language.reflectiveCalls
-
-class MarkovChainTest extends FlatSpec with Matchers with SharedSparkContext
-with MarkovChainFixture {
-
-  "Markov chain training" should "produce a model" in {
-    val matrix =
-      new CoordinateMatrix(sc.parallelize(twoByTwoMatrix.matrixEntries))
-    val model = MarkovChain.train(matrix, 2)
-
-    model.n should be(2)
-    model.transitionVectors.collect() should contain theSameElementsAs Seq(
-      (0, Vectors.sparse(2, Array(0, 1), Array(0.3, 0.7))),
-      (1, Vectors.sparse(2, Array(0, 1), Array(0.5, 0.5)))
-    )
-  }
-
-  it should "contains probabilities of the top N only" in {
-    val matrix =
-      new CoordinateMatrix(sc.parallelize(fiveByFiveMatrix.matrixEntries))
-    val model = MarkovChain.train(matrix, 2)
-
-    model.n should be(2)
-    model.transitionVectors.collect() should contain theSameElementsAs Seq(
-      (0, Vectors.sparse(5, Array(1, 2), Array(.6, .4))),
-      (1, Vectors.sparse(5, Array(2, 4), Array(9.0 / 25, 8.0 / 25))),
-      (2, Vectors.sparse(5, Array(1, 4), Array(10.0 / 28, 10.0 / 28))),
-      (3, Vectors.sparse(5, Array(3, 4), Array(3.0 / 9, 4.0 / 9))),
-      (4, Vectors.sparse(5, Array(3, 4), Array(8.0 / 25, 0.4)))
-    )
-  }
-
-  "Model predict" should "calculate the probablities of new states" in {
-    val matrix =
-      new CoordinateMatrix(sc.parallelize(twoByTwoMatrix.matrixEntries))
-    val model = MarkovChain.train(matrix, 2)
-    val nextState = model.predict(Seq(0.4, 0.6))
-
-    nextState should contain theSameElementsInOrderAs Seq(0.42, 0.58)
-  }
-}

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/e2/src/test/scala/io/prediction/e2/evaluation/CrossValidationTest.scala
----------------------------------------------------------------------
diff --git a/e2/src/test/scala/io/prediction/e2/evaluation/CrossValidationTest.scala b/e2/src/test/scala/io/prediction/e2/evaluation/CrossValidationTest.scala
deleted file mode 100644
index ead51b2..0000000
--- a/e2/src/test/scala/io/prediction/e2/evaluation/CrossValidationTest.scala
+++ /dev/null
@@ -1,111 +0,0 @@
-package io.prediction.e2.evaluation
-
-import org.scalatest.{Matchers, Inspectors, FlatSpec}
-import org.apache.spark.rdd.RDD
-import io.prediction.e2.fixture.SharedSparkContext
-import io.prediction.e2.engine.LabeledPoint
-
-object CrossValidationTest {
-  case class TrainingData(labeledPoints: Seq[LabeledPoint])
-  case class Query(features: Array[String])
-  case class ActualResult(label: String)
-
-  case class EmptyEvaluationParams()
-
-  def toTrainingData(labeledPoints: RDD[LabeledPoint]) = TrainingData(labeledPoints.collect().toSeq)
-  def toQuery(labeledPoint: LabeledPoint) = Query(labeledPoint.features)
-  def toActualResult(labeledPoint: LabeledPoint) = ActualResult(labeledPoint.label)
-
-}
-
-
-class CrossValidationTest extends FlatSpec with Matchers with Inspectors
-with SharedSparkContext{
-
-
-  val Label1 = "l1"
-  val Label2 = "l2"
-  val Label3 = "l3"
-  val Label4 = "l4"
-  val Attribute1 = "a1"
-  val NotAttribute1 = "na1"
-  val Attribute2 = "a2"
-  val NotAttribute2 = "na2"
-
-  val labeledPoints = Seq(
-    LabeledPoint(Label1, Array(Attribute1, Attribute2)),
-    LabeledPoint(Label2, Array(NotAttribute1, Attribute2)),
-    LabeledPoint(Label3, Array(Attribute1, NotAttribute2)),
-    LabeledPoint(Label4, Array(NotAttribute1, NotAttribute2))
-  )
-
-  val dataCount = labeledPoints.size
-  val evalKs = (1 to dataCount)
-  val emptyParams = new CrossValidationTest.EmptyEvaluationParams()
-  type Fold = (
-    CrossValidationTest.TrainingData,
-    CrossValidationTest.EmptyEvaluationParams,
-    RDD[(CrossValidationTest.Query, CrossValidationTest.ActualResult)])
-
-  def toTestTrain(dataSplit: Fold): (Seq[LabeledPoint], Seq[LabeledPoint]) = {
-    val trainingData = dataSplit._1.labeledPoints
-    val queryActual = dataSplit._3
-    val testingData = queryActual.map { case (query, actual) =>
-      LabeledPoint(actual.label, query.features)
-    }
-    (trainingData, testingData.collect().toSeq)
-  }
-
-  def splitData(k: Int, labeledPointsRDD: RDD[LabeledPoint]): Seq[Fold] = {
-    CommonHelperFunctions.splitData[
-      LabeledPoint,
-      CrossValidationTest.TrainingData,
-      CrossValidationTest.EmptyEvaluationParams,
-      CrossValidationTest.Query,
-      CrossValidationTest.ActualResult](
-        k,
-        labeledPointsRDD,
-        emptyParams,
-        CrossValidationTest.toTrainingData,
-        CrossValidationTest.toQuery,
-        CrossValidationTest.toActualResult)
-  }
-
-  "Fold count" should "equal evalK" in {
-    val labeledPointsRDD = sc.parallelize(labeledPoints)
-    val lengths = evalKs.map(k => splitData(k, labeledPointsRDD).length)
-    lengths should be(evalKs)
-  }
-
-  "Testing data size" should  "be within 1 of total / evalK" in {
-    val labeledPointsRDD = sc.parallelize(labeledPoints)
-    val splits = evalKs.map(k => k -> splitData(k, labeledPointsRDD))
-    val diffs = splits.map { case (k, folds) =>
-      folds.map(fold => fold._3.count() - dataCount / k)
-    }
-    forAll(diffs) { foldDiffs => foldDiffs.max should be <= 1L }
-    diffs.map(folds => folds.sum) should be(evalKs.map(k => dataCount % k))
-  }
-
-  "Training + testing" should "equal original dataset" in {
-    val labeledPointsRDD = sc.parallelize(labeledPoints)
-    forAll(evalKs) { k =>
-      val split = splitData(k, labeledPointsRDD)
-      forAll(split) { fold =>
-        val (training, testing) = toTestTrain(fold)
-        (training ++ testing).toSet should be(labeledPoints.toSet)
-      }
-    }
-  }
-
-  "Training and testing" should "be disjoint" in {
-    val labeledPointsRDD = sc.parallelize(labeledPoints)
-    forAll(evalKs) { k =>
-      val split = splitData(k, labeledPointsRDD)
-      forAll(split) { fold =>
-        val (training, testing) = toTestTrain(fold)
-        training.toSet.intersect(testing.toSet) should be('empty)
-      }
-    }
-  }
-}

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/e2/src/test/scala/io/prediction/e2/fixture/BinaryVectorizerFixture.scala
----------------------------------------------------------------------
diff --git a/e2/src/test/scala/io/prediction/e2/fixture/BinaryVectorizerFixture.scala b/e2/src/test/scala/io/prediction/e2/fixture/BinaryVectorizerFixture.scala
deleted file mode 100644
index 56ebbd8..0000000
--- a/e2/src/test/scala/io/prediction/e2/fixture/BinaryVectorizerFixture.scala
+++ /dev/null
@@ -1,59 +0,0 @@
-/** Copyright 2015 TappingStone, Inc.
-  *
-  * Licensed under the Apache License, Version 2.0 (the "License");
-  * you may not use this file except in compliance with the License.
-  * You may obtain a copy of the License at
-  *
-  *     http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-package io.prediction.e2.fixture
-
-import scala.collection.immutable.HashMap
-import scala.collection.immutable.HashSet
-import org.apache.spark.mllib.linalg.Vector
-
-trait BinaryVectorizerFixture {
-
-  def base = {
-    new {
-      val maps : Seq[HashMap[String, String]] = Seq(
-        HashMap("food" -> "orange", "music" -> "rock", "hobby" -> "scala"),
-        HashMap("food" -> "orange", "music" -> "pop", "hobby" ->"running"),
-        HashMap("food" -> "banana", "music" -> "rock", "hobby" -> "guitar"),
-        HashMap("food" -> "banana", "music" -> "rock", "hobby" -> "guitar")
-      )
-
-      val properties = HashSet("food", "hobby")
-    }
-  }
-
-
-  def testArrays = {
-    new {
-      // Test case for checking food value not listed in base.maps, and
-      // property not in properties.
-      val one = Array(("food", "burger"), ("music", "rock"), ("hobby", "scala"))
-
-      // Test case for making sure indices are preserved.
-      val twoA = Array(("food", "orange"), ("hobby", "scala"))
-      val twoB = Array(("food", "banana"), ("hobby", "scala"))
-      val twoC = Array(("hobby", "guitar"))
-    }
-  }
-
-  def vecSum (vec1 : Vector, vec2 : Vector) : Array[Double] = {
-    (0 until vec1.size).map(
-      k => vec1(k) + vec2(k)
-    ).toArray
-  }
-
-}
-
-

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/e2/src/test/scala/io/prediction/e2/fixture/MarkovChainFixture.scala
----------------------------------------------------------------------
diff --git a/e2/src/test/scala/io/prediction/e2/fixture/MarkovChainFixture.scala b/e2/src/test/scala/io/prediction/e2/fixture/MarkovChainFixture.scala
deleted file mode 100644
index e47d49e..0000000
--- a/e2/src/test/scala/io/prediction/e2/fixture/MarkovChainFixture.scala
+++ /dev/null
@@ -1,39 +0,0 @@
-package io.prediction.e2.fixture
-
-import org.apache.spark.mllib.linalg.distributed.MatrixEntry
-
-trait MarkovChainFixture {
-  def twoByTwoMatrix = {
-    new {
-      val matrixEntries = Seq(
-        MatrixEntry(0, 0, 3),
-        MatrixEntry(0, 1, 7),
-        MatrixEntry(1, 0, 10),
-        MatrixEntry(1, 1, 10)
-      )
-    }
-  }
-  
-  def fiveByFiveMatrix = {
-    new {
-      val matrixEntries = Seq(
-        MatrixEntry(0, 1, 12),
-        MatrixEntry(0, 2, 8),
-        MatrixEntry(1, 0, 3),
-        MatrixEntry(1, 1, 3),
-        MatrixEntry(1, 2, 9),
-        MatrixEntry(1, 3, 2),
-        MatrixEntry(1, 4, 8),
-        MatrixEntry(2, 1, 10),
-        MatrixEntry(2, 2, 8),
-        MatrixEntry(2, 4, 10),
-        MatrixEntry(3, 0, 2),
-        MatrixEntry(3, 3, 3),
-        MatrixEntry(3, 4, 4),
-        MatrixEntry(4, 1, 7),
-        MatrixEntry(4, 3, 8),
-        MatrixEntry(4, 4, 10)
-      )
-    }
-  }
-}

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/e2/src/test/scala/io/prediction/e2/fixture/NaiveBayesFixture.scala
----------------------------------------------------------------------
diff --git a/e2/src/test/scala/io/prediction/e2/fixture/NaiveBayesFixture.scala b/e2/src/test/scala/io/prediction/e2/fixture/NaiveBayesFixture.scala
deleted file mode 100644
index 97dd663..0000000
--- a/e2/src/test/scala/io/prediction/e2/fixture/NaiveBayesFixture.scala
+++ /dev/null
@@ -1,48 +0,0 @@
-/** Copyright 2015 TappingStone, Inc.
-  *
-  * Licensed under the Apache License, Version 2.0 (the "License");
-  * you may not use this file except in compliance with the License.
-  * You may obtain a copy of the License at
-  *
-  *     http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-package io.prediction.e2.fixture
-
-import io.prediction.e2.engine.LabeledPoint
-
-trait NaiveBayesFixture {
-
-  def fruit = {
-    new {
-      val Banana = "Banana"
-      val Orange = "Orange"
-      val OtherFruit = "Other Fruit"
-      val NotLong = "Not Long"
-      val Long = "Long"
-      val NotSweet = "Not Sweet"
-      val Sweet = "Sweet"
-      val NotYellow = "Not Yellow"
-      val Yellow = "Yellow"
-
-      val labeledPoints = Seq(
-        LabeledPoint(Banana, Array(Long, Sweet, Yellow)),
-        LabeledPoint(Banana, Array(Long, Sweet, Yellow)),
-        LabeledPoint(Banana, Array(Long, Sweet, Yellow)),
-        LabeledPoint(Banana, Array(Long, Sweet, Yellow)),
-        LabeledPoint(Banana, Array(NotLong, NotSweet, NotYellow)),
-        LabeledPoint(Orange, Array(NotLong, Sweet, NotYellow)),
-        LabeledPoint(Orange, Array(NotLong, NotSweet, NotYellow)),
-        LabeledPoint(OtherFruit, Array(Long, Sweet, NotYellow)),
-        LabeledPoint(OtherFruit, Array(NotLong, Sweet, NotYellow)),
-        LabeledPoint(OtherFruit, Array(Long, Sweet, Yellow)),
-        LabeledPoint(OtherFruit, Array(NotLong, NotSweet, NotYellow))
-      )
-    }
-  }
-}

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/e2/src/test/scala/io/prediction/e2/fixture/SharedSparkContext.scala
----------------------------------------------------------------------
diff --git a/e2/src/test/scala/io/prediction/e2/fixture/SharedSparkContext.scala b/e2/src/test/scala/io/prediction/e2/fixture/SharedSparkContext.scala
deleted file mode 100644
index 74dd814..0000000
--- a/e2/src/test/scala/io/prediction/e2/fixture/SharedSparkContext.scala
+++ /dev/null
@@ -1,51 +0,0 @@
-/** Copyright 2015 TappingStone, Inc.
-  *
-  * Licensed under the Apache License, Version 2.0 (the "License");
-  * you may not use this file except in compliance with the License.
-  * You may obtain a copy of the License at
-  *
-  *     http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-package io.prediction.e2.fixture
-
-import org.apache.spark.{SparkConf, SparkContext}
-import org.scalatest.{BeforeAndAfterAll, Suite}
-
-trait SharedSparkContext extends BeforeAndAfterAll {
-  self: Suite =>
-  @transient private var _sc: SparkContext = _
-
-  def sc: SparkContext = _sc
-
-  var conf = new SparkConf(false)
-
-  override def beforeAll() {
-    _sc = new SparkContext("local", "test", conf)
-    super.beforeAll()
-  }
-
-  override def afterAll() {
-    LocalSparkContext.stop(_sc)
-
-    _sc = null
-    super.afterAll()
-  }
-}
-
-object LocalSparkContext {
-  def stop(sc: SparkContext) {
-    if (sc != null) {
-      sc.stop()
-    }
-    // To avoid Akka rebinding to the same port, since it doesn't unbind
-    // immediately on shutdown
-    System.clearProperty("spark.driver.port")
-  }
-}
-

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/e2/src/test/scala/org/apache/predictionio/e2/engine/BinaryVectorizerTest.scala
----------------------------------------------------------------------
diff --git a/e2/src/test/scala/org/apache/predictionio/e2/engine/BinaryVectorizerTest.scala b/e2/src/test/scala/org/apache/predictionio/e2/engine/BinaryVectorizerTest.scala
new file mode 100644
index 0000000..576b8c6
--- /dev/null
+++ b/e2/src/test/scala/org/apache/predictionio/e2/engine/BinaryVectorizerTest.scala
@@ -0,0 +1,56 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package org.apache.predictionio.e2.engine
+
+import org.apache.predictionio.e2.fixture.BinaryVectorizerFixture
+import org.apache.predictionio.e2.fixture.SharedSparkContext
+import org.apache.spark.mllib.linalg.Vectors
+import org.apache.spark.rdd.RDD
+import org.scalatest.FlatSpec
+import org.scalatest.Matchers
+import scala.collection.immutable.HashMap
+
+
+import scala.language.reflectiveCalls
+
+class BinaryVectorizerTest extends FlatSpec with Matchers with SharedSparkContext
+with BinaryVectorizerFixture{
+
+  "toBinary" should "produce the following summed values:" in {
+    val testCase = BinaryVectorizer(sc.parallelize(base.maps), base.properties)
+    val vectorTwoA = testCase.toBinary(testArrays.twoA)
+    val vectorTwoB = testCase.toBinary(testArrays.twoB)
+
+
+    // Make sure vectors produced are the same size.
+    vectorTwoA.size should be (vectorTwoB.size)
+
+    // Test case for checking a food value not listed in base.maps.
+    testCase.toBinary(testArrays.one).toArray.sum should be (1.0)
+
+    // Test cases for making sure indices are preserved.
+    val sumOne = vecSum(vectorTwoA, vectorTwoB)
+
+    exactly (1, sumOne) should be (2.0)
+    exactly (2, sumOne) should be (0.0)
+    exactly (2, sumOne) should be (1.0)
+
+    val sumTwo = vecSum(Vectors.dense(sumOne), testCase.toBinary(testArrays.twoC))
+
+    exactly (3, sumTwo) should be (1.0)
+  }
+
+}
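
The sums asserted in this test follow from the vectorizer's indexing scheme:
every (property, value) pair observed in the training maps for a whitelisted
property gets a fixed vector slot, and unknown pairs are ignored. A standalone
sketch with assumed-equivalent semantics (SketchVectorizer is illustrative,
not the e2 BinaryVectorizer implementation):

    import scala.collection.immutable.HashMap

    case class SketchVectorizer(index: Map[(String, String), Int]) {
      def toBinary(pairs: Array[(String, String)]): Array[Double] = {
        val vec = Array.fill(index.size)(0.0)
        // Unknown (property, value) pairs simply leave their slots at zero.
        pairs.foreach(p => index.get(p).foreach(i => vec(i) = 1.0))
        vec
      }
    }

    object SketchVectorizer {
      def apply(maps: Seq[HashMap[String, String]],
                properties: Set[String]): SketchVectorizer = {
        // One fixed slot per distinct (property, value) pair of a kept property.
        val pairs = maps.flatMap(_.toSeq).filter { case (k, _) => properties(k) }
        SketchVectorizer(pairs.distinct.zipWithIndex.toMap)
      }
    }

Under this scheme testArrays.one only hits the known ("hobby", "scala") slot,
hence a sum of 1.0, and twoA/twoB overlap in exactly that slot, hence a single
2.0 in their vector sum.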

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/e2/src/test/scala/org/apache/predictionio/e2/engine/CategoricalNaiveBayesTest.scala
----------------------------------------------------------------------
diff --git a/e2/src/test/scala/org/apache/predictionio/e2/engine/CategoricalNaiveBayesTest.scala b/e2/src/test/scala/org/apache/predictionio/e2/engine/CategoricalNaiveBayesTest.scala
new file mode 100644
index 0000000..4373d7d
--- /dev/null
+++ b/e2/src/test/scala/org/apache/predictionio/e2/engine/CategoricalNaiveBayesTest.scala
@@ -0,0 +1,132 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  * http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+package org.apache.predictionio.e2.engine
+
+import org.apache.predictionio.e2.fixture.{NaiveBayesFixture, SharedSparkContext}
+import org.scalatest.{Matchers, FlatSpec}
+
+import scala.language.reflectiveCalls
+
+class CategoricalNaiveBayesTest extends FlatSpec with Matchers
+with SharedSparkContext with NaiveBayesFixture {
+  val Tolerance = .0001
+  val labeledPoints = fruit.labeledPoints
+
+  "Model" should "have log priors and log likelihoods" in {
+    val labeledPointsRdd = sc.parallelize(labeledPoints)
+    val model = CategoricalNaiveBayes.train(labeledPointsRdd)
+
+    model.priors(fruit.Banana) should be(-.7885 +- Tolerance)
+    model.priors(fruit.Orange) should be(-1.7047 +- Tolerance)
+    model.priors(fruit.OtherFruit) should be(-1.0116 +- Tolerance)
+
+    model.likelihoods(fruit.Banana)(0)(fruit.Long) should
+      be(-.2231 +- Tolerance)
+    model.likelihoods(fruit.Banana)(0)(fruit.NotLong) should
+      be(-1.6094 +- Tolerance)
+    model.likelihoods(fruit.Banana)(1)(fruit.Sweet) should
+      be(-.2231 +- Tolerance)
+    model.likelihoods(fruit.Banana)(1)(fruit.NotSweet) should
+      be(-1.6094 +- Tolerance)
+    model.likelihoods(fruit.Banana)(2)(fruit.Yellow) should
+      be(-.2231 +- Tolerance)
+    model.likelihoods(fruit.Banana)(2)(fruit.NotYellow) should
+      be(-1.6094 +- Tolerance)
+
+    model.likelihoods(fruit.Orange)(0) should not contain key(fruit.Long)
+    model.likelihoods(fruit.Orange)(0)(fruit.NotLong) should be(0.0)
+    model.likelihoods(fruit.Orange)(1)(fruit.Sweet) should
+      be(-.6931 +- Tolerance)
+    model.likelihoods(fruit.Orange)(1)(fruit.NotSweet) should
+      be(-.6931 +- Tolerance)
+    model.likelihoods(fruit.Orange)(2)(fruit.NotYellow) should be(0.0)
+    model.likelihoods(fruit.Orange)(2) should not contain key(fruit.Yellow)
+
+    model.likelihoods(fruit.OtherFruit)(0)(fruit.Long) should
+      be(-.6931 +- Tolerance)
+    model.likelihoods(fruit.OtherFruit)(0)(fruit.NotLong) should
+      be(-.6931 +- Tolerance)
+    model.likelihoods(fruit.OtherFruit)(1)(fruit.Sweet) should
+      be(-.2877 +- Tolerance)
+    model.likelihoods(fruit.OtherFruit)(1)(fruit.NotSweet) should
+      be(-1.3863 +- Tolerance)
+    model.likelihoods(fruit.OtherFruit)(2)(fruit.Yellow) should
+      be(-1.3863 +- Tolerance)
+    model.likelihoods(fruit.OtherFruit)(2)(fruit.NotYellow) should
+      be(-.2877 +- Tolerance)
+  }
+
+  "Model's log score" should "be the log score of the given point" in {
+    val labeledPointsRdd = sc.parallelize(labeledPoints)
+    val model = CategoricalNaiveBayes.train(labeledPointsRdd)
+
+    val score = model.logScore(LabeledPoint(
+      fruit.Banana,
+      Array(fruit.Long, fruit.NotSweet, fruit.NotYellow))
+    )
+
+    score should not be None
+    score.get should be(-4.2304 +- Tolerance)
+  }
+
+  it should "be negative infinity for a point with a non-existing feature" in {
+    val labeledPointsRdd = sc.parallelize(labeledPoints)
+    val model = CategoricalNaiveBayes.train(labeledPointsRdd)
+
+    val score = model.logScore(LabeledPoint(
+      fruit.Banana,
+      Array(fruit.Long, fruit.NotSweet, "Not Exist"))
+    )
+
+    score should not be None
+    score.get should be(Double.NegativeInfinity)
+  }
+
+  it should "be none for a point with a non-existing label" in {
+    val labeledPointsRdd = sc.parallelize(labeledPoints)
+    val model = CategoricalNaiveBayes.train(labeledPointsRdd)
+
+    val score = model.logScore(LabeledPoint(
+      "Not Exist",
+      Array(fruit.Long, fruit.NotSweet, fruit.Yellow))
+    )
+
+    score should be(None)
+  }
+
+  it should "use the provided default likelihood function" in {
+    val labeledPointsRdd = sc.parallelize(labeledPoints)
+    val model = CategoricalNaiveBayes.train(labeledPointsRdd)
+
+    val score = model.logScore(
+      LabeledPoint(
+        fruit.Banana,
+        Array(fruit.Long, fruit.NotSweet, "Not Exist")
+      ),
+      ls => ls.min - math.log(2)
+    )
+
+    score should not be None
+    score.get should be(-4.9236 +- Tolerance)
+  }
+
+  "Model predict" should "return the correct label" in {
+    val labeledPointsRdd = sc.parallelize(labeledPoints)
+    val model = CategoricalNaiveBayes.train(labeledPointsRdd)
+
+    val label = model.predict(Array(fruit.Long, fruit.Sweet, fruit.Yellow))
+    label should be(fruit.Banana)
+  }
+}
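
The expected constants are consistent with unsmoothed maximum-likelihood
estimates over the 11-point fruit fixture (5 Banana, 2 Orange, 4 Other Fruit);
assuming that is what train() computes, they check out by hand:

    math.log(5 / 11.0)  // Banana prior       ≈ -0.7885
    math.log(2 / 11.0)  // Orange prior       ≈ -1.7047
    math.log(4 / 11.0)  // Other Fruit prior  ≈ -1.0116

    math.log(4 / 5.0)   // P(Long | Banana)     ≈ -0.2231
    math.log(1 / 5.0)   // P(NotLong | Banana)  ≈ -1.6094

    // logScore of LabeledPoint(Banana, [Long, NotSweet, NotYellow]) is the sum
    // of the prior and the per-feature log likelihoods:
    // -0.7885 + (-0.2231) + (-1.6094) + (-1.6094) ≈ -4.2304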

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/e2/src/test/scala/org/apache/predictionio/e2/engine/MarkovChainTest.scala
----------------------------------------------------------------------
diff --git a/e2/src/test/scala/org/apache/predictionio/e2/engine/MarkovChainTest.scala b/e2/src/test/scala/org/apache/predictionio/e2/engine/MarkovChainTest.scala
new file mode 100644
index 0000000..137095a
--- /dev/null
+++ b/e2/src/test/scala/org/apache/predictionio/e2/engine/MarkovChainTest.scala
@@ -0,0 +1,49 @@
+package org.apache.predictionio.e2.engine
+
+import org.apache.predictionio.e2.fixture.{MarkovChainFixture, SharedSparkContext}
+import org.apache.spark.mllib.linalg.Vectors
+import org.apache.spark.mllib.linalg.distributed.CoordinateMatrix
+import org.scalatest.{FlatSpec, Matchers}
+
+import scala.language.reflectiveCalls
+
+class MarkovChainTest extends FlatSpec with Matchers with SharedSparkContext
+with MarkovChainFixture {
+
+  "Markov chain training" should "produce a model" in {
+    val matrix =
+      new CoordinateMatrix(sc.parallelize(twoByTwoMatrix.matrixEntries))
+    val model = MarkovChain.train(matrix, 2)
+
+    model.n should be(2)
+    model.transitionVectors.collect() should contain theSameElementsAs Seq(
+      (0, Vectors.sparse(2, Array(0, 1), Array(0.3, 0.7))),
+      (1, Vectors.sparse(2, Array(0, 1), Array(0.5, 0.5)))
+    )
+  }
+
+  it should "contains probabilities of the top N only" in {
+    val matrix =
+      new CoordinateMatrix(sc.parallelize(fiveByFiveMatrix.matrixEntries))
+    val model = MarkovChain.train(matrix, 2)
+
+    model.n should be(2)
+    model.transitionVectors.collect() should contain theSameElementsAs Seq(
+      (0, Vectors.sparse(5, Array(1, 2), Array(.6, .4))),
+      (1, Vectors.sparse(5, Array(2, 4), Array(9.0 / 25, 8.0 / 25))),
+      (2, Vectors.sparse(5, Array(1, 4), Array(10.0 / 28, 10.0 / 28))),
+      (3, Vectors.sparse(5, Array(3, 4), Array(3.0 / 9, 4.0 / 9))),
+      (4, Vectors.sparse(5, Array(3, 4), Array(8.0 / 25, 0.4)))
+    )
+  }
+
+  "Model predict" should "calculate the probablities of new states" in {
+    val matrix =
+      new CoordinateMatrix(sc.parallelize(twoByTwoMatrix.matrixEntries))
+    val model = MarkovChain.train(matrix, 2)
+    val nextState = model.predict(Seq(0.4, 0.6))
+
+    nextState should contain theSameElementsInOrderAs Seq(0.42, 0.58)
+  }
+}
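
The expectations can be verified by hand: the 2x2 count rows (3, 7) and
(10, 10) row-normalise to (0.3, 0.7) and (0.5, 0.5), and predict is a left
vector-matrix multiply (a sketch, not the MarkovChain internals):

    val t = Array(Array(0.3, 0.7), Array(0.5, 0.5))  // row-normalised counts
    val current = Seq(0.4, 0.6)
    val next = (0 until 2).map(j => current(0) * t(0)(j) + current(1) * t(1)(j))
    // next == Vector(0.42, 0.58), matching the expected distribution above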

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/e2/src/test/scala/org/apache/predictionio/e2/evaluation/CrossValidationTest.scala
----------------------------------------------------------------------
diff --git a/e2/src/test/scala/org/apache/predictionio/e2/evaluation/CrossValidationTest.scala b/e2/src/test/scala/org/apache/predictionio/e2/evaluation/CrossValidationTest.scala
new file mode 100644
index 0000000..d15b927
--- /dev/null
+++ b/e2/src/test/scala/org/apache/predictionio/e2/evaluation/CrossValidationTest.scala
@@ -0,0 +1,111 @@
+package org.apache.predictionio.e2.evaluation
+
+import org.scalatest.{Matchers, Inspectors, FlatSpec}
+import org.apache.spark.rdd.RDD
+import org.apache.predictionio.e2.fixture.SharedSparkContext
+import org.apache.predictionio.e2.engine.LabeledPoint
+
+object CrossValidationTest {
+  case class TrainingData(labeledPoints: Seq[LabeledPoint])
+  case class Query(features: Array[String])
+  case class ActualResult(label: String)
+
+  case class EmptyEvaluationParams()
+
+  def toTrainingData(labeledPoints: RDD[LabeledPoint]) = TrainingData(labeledPoints.collect().toSeq)
+  def toQuery(labeledPoint: LabeledPoint) = Query(labeledPoint.features)
+  def toActualResult(labeledPoint: LabeledPoint) = ActualResult(labeledPoint.label)
+
+}
+
+
+class CrossValidationTest extends FlatSpec with Matchers with Inspectors
+with SharedSparkContext {
+
+
+  val Label1 = "l1"
+  val Label2 = "l2"
+  val Label3 = "l3"
+  val Label4 = "l4"
+  val Attribute1 = "a1"
+  val NotAttribute1 = "na1"
+  val Attribute2 = "a2"
+  val NotAttribute2 = "na2"
+
+  val labeledPoints = Seq(
+    LabeledPoint(Label1, Array(Attribute1, Attribute2)),
+    LabeledPoint(Label2, Array(NotAttribute1, Attribute2)),
+    LabeledPoint(Label3, Array(Attribute1, NotAttribute2)),
+    LabeledPoint(Label4, Array(NotAttribute1, NotAttribute2))
+  )
+
+  val dataCount = labeledPoints.size
+  val evalKs = (1 to dataCount)
+  val emptyParams = new CrossValidationTest.EmptyEvaluationParams()
+  type Fold = (
+    CrossValidationTest.TrainingData,
+    CrossValidationTest.EmptyEvaluationParams,
+    RDD[(CrossValidationTest.Query, CrossValidationTest.ActualResult)])
+
+  def toTestTrain(dataSplit: Fold): (Seq[LabeledPoint], Seq[LabeledPoint]) = {
+    val trainingData = dataSplit._1.labeledPoints
+    val queryActual = dataSplit._3
+    val testingData = queryActual.map { case (query, actual) =>
+      LabeledPoint(actual.label, query.features)
+    }
+    (trainingData, testingData.collect().toSeq)
+  }
+
+  def splitData(k: Int, labeledPointsRDD: RDD[LabeledPoint]): Seq[Fold] = {
+    CommonHelperFunctions.splitData[
+      LabeledPoint,
+      CrossValidationTest.TrainingData,
+      CrossValidationTest.EmptyEvaluationParams,
+      CrossValidationTest.Query,
+      CrossValidationTest.ActualResult](
+        k,
+        labeledPointsRDD,
+        emptyParams,
+        CrossValidationTest.toTrainingData,
+        CrossValidationTest.toQuery,
+        CrossValidationTest.toActualResult)
+  }
+
+  "Fold count" should "equal evalK" in {
+    val labeledPointsRDD = sc.parallelize(labeledPoints)
+    val lengths = evalKs.map(k => splitData(k, labeledPointsRDD).length)
+    lengths should be(evalKs)
+  }
+
+  "Testing data size" should  "be within 1 of total / evalK" in {
+    val labeledPointsRDD = sc.parallelize(labeledPoints)
+    val splits = evalKs.map(k => k -> splitData(k, labeledPointsRDD))
+    val diffs = splits.map { case (k, folds) =>
+      folds.map(fold => fold._3.count() - dataCount / k)
+    }
+    forAll(diffs) { foldDiffs => foldDiffs.max should be <= 1L }
+    diffs.map(folds => folds.sum) should be(evalKs.map(k => dataCount % k))
+  }
+
+  "Training + testing" should "equal original dataset" in {
+    val labeledPointsRDD = sc.parallelize(labeledPoints)
+    forAll(evalKs) { k =>
+      val split = splitData(k, labeledPointsRDD)
+      forAll(split) { fold =>
+        val (training, testing) = toTestTrain(fold)
+        (training ++ testing).toSet should be(labeledPoints.toSet)
+      }
+    }
+  }
+
+  "Training and testing" should "be disjoint" in {
+    val labeledPointsRDD = sc.parallelize(labeledPoints)
+    forAll(evalKs) { k =>
+      val split = splitData(k, labeledPointsRDD)
+      forAll(split) { fold =>
+        val (training, testing) = toTestTrain(fold)
+        training.toSet.intersect(testing.toSet) should be('empty)
+      }
+    }
+  }
+}
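
All four properties asserted above (fold count, near-equal test sizes,
partitioning, disjointness) hold for any splitter that routes record i to the
test set of fold i % k. A minimal sketch of that scheme (illustrative only;
the real logic lives in CommonHelperFunctions.splitData):

    def kFolds[T](data: Seq[T], k: Int): Seq[(Seq[T], Seq[T])] =
      (0 until k).map { i =>
        // Record idx tests in fold idx % k and trains in every other fold.
        val (test, train) = data.zipWithIndex.partition { case (_, idx) => idx % k == i }
        (train.map(_._1), test.map(_._1))
      }

With n records, each test fold gets either n / k or n / k + 1 of them, the k
test folds partition the data, and training ++ testing always reproduces the
full set.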

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/e2/src/test/scala/org/apache/predictionio/e2/fixture/BinaryVectorizerFixture.scala
----------------------------------------------------------------------
diff --git a/e2/src/test/scala/org/apache/predictionio/e2/fixture/BinaryVectorizerFixture.scala b/e2/src/test/scala/org/apache/predictionio/e2/fixture/BinaryVectorizerFixture.scala
new file mode 100644
index 0000000..76d8db3
--- /dev/null
+++ b/e2/src/test/scala/org/apache/predictionio/e2/fixture/BinaryVectorizerFixture.scala
@@ -0,0 +1,59 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package org.apache.predictionio.e2.fixture
+
+import scala.collection.immutable.HashMap
+import scala.collection.immutable.HashSet
+import org.apache.spark.mllib.linalg.Vector
+
+trait BinaryVectorizerFixture {
+
+  def base = {
+    new {
+      val maps : Seq[HashMap[String, String]] = Seq(
+        HashMap("food" -> "orange", "music" -> "rock", "hobby" -> "scala"),
+        HashMap("food" -> "orange", "music" -> "pop", "hobby" ->"running"),
+        HashMap("food" -> "banana", "music" -> "rock", "hobby" -> "guitar"),
+        HashMap("food" -> "banana", "music" -> "rock", "hobby" -> "guitar")
+      )
+
+      val properties = HashSet("food", "hobby")
+    }
+  }
+
+
+  def testArrays = {
+    new {
+      // Test case for checking food value not listed in base.maps, and
+      // property not in properties.
+      val one = Array(("food", "burger"), ("music", "rock"), ("hobby", "scala"))
+
+      // Test case for making sure indices are preserved.
+      val twoA = Array(("food", "orange"), ("hobby", "scala"))
+      val twoB = Array(("food", "banana"), ("hobby", "scala"))
+      val twoC = Array(("hobby", "guitar"))
+    }
+  }
+
+  def vecSum (vec1 : Vector, vec2 : Vector) : Array[Double] = {
+    (0 until vec1.size).map(
+      k => vec1(k) + vec2(k)
+    ).toArray
+  }
+
+}
+
+

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/e2/src/test/scala/org/apache/predictionio/e2/fixture/MarkovChainFixture.scala
----------------------------------------------------------------------
diff --git a/e2/src/test/scala/org/apache/predictionio/e2/fixture/MarkovChainFixture.scala b/e2/src/test/scala/org/apache/predictionio/e2/fixture/MarkovChainFixture.scala
new file mode 100644
index 0000000..a214be0
--- /dev/null
+++ b/e2/src/test/scala/org/apache/predictionio/e2/fixture/MarkovChainFixture.scala
@@ -0,0 +1,39 @@
+package org.apache.predictionio.e2.fixture
+
+import org.apache.spark.mllib.linalg.distributed.MatrixEntry
+
+trait MarkovChainFixture {
+  def twoByTwoMatrix = {
+    new {
+      val matrixEntries = Seq(
+        MatrixEntry(0, 0, 3),
+        MatrixEntry(0, 1, 7),
+        MatrixEntry(1, 0, 10),
+        MatrixEntry(1, 1, 10)
+      )
+    }
+  }
+  
+  def fiveByFiveMatrix = {
+    new {
+      val matrixEntries = Seq(
+        MatrixEntry(0, 1, 12),
+        MatrixEntry(0, 2, 8),
+        MatrixEntry(1, 0, 3),
+        MatrixEntry(1, 1, 3),
+        MatrixEntry(1, 2, 9),
+        MatrixEntry(1, 3, 2),
+        MatrixEntry(1, 4, 8),
+        MatrixEntry(2, 1, 10),
+        MatrixEntry(2, 2, 8),
+        MatrixEntry(2, 4, 10),
+        MatrixEntry(3, 0, 2),
+        MatrixEntry(3, 3, 3),
+        MatrixEntry(3, 4, 4),
+        MatrixEntry(4, 1, 7),
+        MatrixEntry(4, 3, 8),
+        MatrixEntry(4, 4, 10)
+      )
+    }
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/e2/src/test/scala/org/apache/predictionio/e2/fixture/NaiveBayesFixture.scala
----------------------------------------------------------------------
diff --git a/e2/src/test/scala/org/apache/predictionio/e2/fixture/NaiveBayesFixture.scala b/e2/src/test/scala/org/apache/predictionio/e2/fixture/NaiveBayesFixture.scala
new file mode 100644
index 0000000..483f366
--- /dev/null
+++ b/e2/src/test/scala/org/apache/predictionio/e2/fixture/NaiveBayesFixture.scala
@@ -0,0 +1,48 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+package org.apache.predictionio.e2.fixture
+
+import org.apache.predictionio.e2.engine.LabeledPoint
+
+trait NaiveBayesFixture {
+
+  def fruit = {
+    new {
+      val Banana = "Banana"
+      val Orange = "Orange"
+      val OtherFruit = "Other Fruit"
+      val NotLong = "Not Long"
+      val Long = "Long"
+      val NotSweet = "Not Sweet"
+      val Sweet = "Sweet"
+      val NotYellow = "Not Yellow"
+      val Yellow = "Yellow"
+
+      val labeledPoints = Seq(
+        LabeledPoint(Banana, Array(Long, Sweet, Yellow)),
+        LabeledPoint(Banana, Array(Long, Sweet, Yellow)),
+        LabeledPoint(Banana, Array(Long, Sweet, Yellow)),
+        LabeledPoint(Banana, Array(Long, Sweet, Yellow)),
+        LabeledPoint(Banana, Array(NotLong, NotSweet, NotYellow)),
+        LabeledPoint(Orange, Array(NotLong, Sweet, NotYellow)),
+        LabeledPoint(Orange, Array(NotLong, NotSweet, NotYellow)),
+        LabeledPoint(OtherFruit, Array(Long, Sweet, NotYellow)),
+        LabeledPoint(OtherFruit, Array(NotLong, Sweet, NotYellow)),
+        LabeledPoint(OtherFruit, Array(Long, Sweet, Yellow)),
+        LabeledPoint(OtherFruit, Array(NotLong, NotSweet, NotYellow))
+      )
+    }
+  }
+}
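
The labeled points above encode the classic banana/orange example. As a
sanity check, the class priors implied by the data can be computed directly;
the helper below is illustrative and not part of this commit (it assumes
LabeledPoint exposes the label field used above):

```
import org.apache.predictionio.e2.engine.LabeledPoint

// Hypothetical helper (not in this commit): empirical P(label), e.g.
// P("Banana") = 5/11, P("Orange") = 2/11, P("Other Fruit") = 4/11 above.
def classPriors(points: Seq[LabeledPoint]): Map[String, Double] =
  points.groupBy(_.label).map { case (label, ps) =>
    label -> ps.size.toDouble / points.size
  }
```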

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/e2/src/test/scala/org/apache/predictionio/e2/fixture/SharedSparkContext.scala
----------------------------------------------------------------------
diff --git a/e2/src/test/scala/org/apache/predictionio/e2/fixture/SharedSparkContext.scala b/e2/src/test/scala/org/apache/predictionio/e2/fixture/SharedSparkContext.scala
new file mode 100644
index 0000000..d0d762e
--- /dev/null
+++ b/e2/src/test/scala/org/apache/predictionio/e2/fixture/SharedSparkContext.scala
@@ -0,0 +1,51 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+package org.apache.predictionio.e2.fixture
+
+import org.apache.spark.{SparkConf, SparkContext}
+import org.scalatest.{BeforeAndAfterAll, Suite}
+
+trait SharedSparkContext extends BeforeAndAfterAll {
+  self: Suite =>
+  @transient private var _sc: SparkContext = _
+
+  def sc: SparkContext = _sc
+
+  var conf = new SparkConf(false)
+
+  override def beforeAll() {
+    _sc = new SparkContext("local", "test", conf)
+    super.beforeAll()
+  }
+
+  override def afterAll() {
+    LocalSparkContext.stop(_sc)
+
+    _sc = null
+    super.afterAll()
+  }
+}
+
+object LocalSparkContext {
+  def stop(sc: SparkContext) {
+    if (sc != null) {
+      sc.stop()
+    }
+    // To avoid Akka rebinding to the same port, since it doesn't unbind
+    // immediately on shutdown
+    System.clearProperty("spark.driver.port")
+  }
+}
+
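
A test suite opts into the shared context simply by mixing the trait in; a
minimal sketch (the suite and assertion are illustrative, not part of this
commit):

```
import org.scalatest.FlatSpec

// Hypothetical suite (not in this commit): sc is created once in beforeAll
// and stopped in afterAll, so every test in the suite shares one context.
class WordCountSpec extends FlatSpec with SharedSparkContext {
  "sc" should "be shared across a suite's tests" in {
    val counts = sc.parallelize(Seq("a", "b", "a")).countByValue()
    assert(counts("a") == 2L)
  }
}
```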

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/tools/src/main/scala/io/prediction/tools/RegisterEngine.scala
----------------------------------------------------------------------
diff --git a/tools/src/main/scala/io/prediction/tools/RegisterEngine.scala b/tools/src/main/scala/io/prediction/tools/RegisterEngine.scala
deleted file mode 100644
index 74324c9..0000000
--- a/tools/src/main/scala/io/prediction/tools/RegisterEngine.scala
+++ /dev/null
@@ -1,84 +0,0 @@
-/** Copyright 2015 TappingStone, Inc.
-  *
-  * Licensed under the Apache License, Version 2.0 (the "License");
-  * you may not use this file except in compliance with the License.
-  * You may obtain a copy of the License at
-  *
-  *     http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-package io.prediction.tools
-
-import java.io.File
-
-import grizzled.slf4j.Logging
-import io.prediction.data.storage.EngineManifest
-import io.prediction.data.storage.EngineManifestSerializer
-import io.prediction.data.storage.Storage
-import org.apache.hadoop.conf.Configuration
-import org.apache.hadoop.fs.FileSystem
-import org.apache.hadoop.fs.Path
-import org.json4s._
-import org.json4s.native.Serialization.read
-
-import scala.io.Source
-
-object RegisterEngine extends Logging {
-  val engineManifests = Storage.getMetaDataEngineManifests
-  implicit val formats = DefaultFormats + new EngineManifestSerializer
-
-  def registerEngine(
-      jsonManifest: File,
-      engineFiles: Seq[File],
-      copyLocal: Boolean = false): Unit = {
-    val jsonString = try {
-      Source.fromFile(jsonManifest).mkString
-    } catch {
-      case e: java.io.FileNotFoundException =>
-        error(s"Engine manifest file not found: ${e.getMessage}. Aborting.")
-        sys.exit(1)
-    }
-    val engineManifest = read[EngineManifest](jsonString)
-
-    info(s"Registering engine ${engineManifest.id} ${engineManifest.version}")
-    engineManifests.update(
-      engineManifest.copy(files = engineFiles.map(_.toURI.toString)), true)
-  }
-
-  def unregisterEngine(jsonManifest: File): Unit = {
-    val jsonString = try {
-      Source.fromFile(jsonManifest).mkString
-    } catch {
-      case e: java.io.FileNotFoundException =>
-        error(s"Engine manifest file not found: ${e.getMessage}. Aborting.")
-        sys.exit(1)
-    }
-    val fileEngineManifest = read[EngineManifest](jsonString)
-    val engineManifest = engineManifests.get(
-      fileEngineManifest.id,
-      fileEngineManifest.version)
-
-    engineManifest map { em =>
-      val conf = new Configuration
-      val fs = FileSystem.get(conf)
-
-      em.files foreach { f =>
-        val path = new Path(f)
-        info(s"Removing ${f}")
-        fs.delete(path, false)
-      }
-
-      engineManifests.delete(em.id, em.version)
-      info(s"Unregistered engine ${em.id} ${em.version}")
-    } getOrElse {
-      error(s"${fileEngineManifest.id} ${fileEngineManifest.version} is not " +
-        "registered.")
-    }
-  }
-}
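
For context, a hypothetical call site for the object deleted above, with
illustrative file paths: registration reads the manifest and records the
engine's JAR locations in metadata storage, relying on the default
copyLocal = false.

```
import java.io.File

// Illustrative only: register an engine built in the current directory.
val manifest = new File("manifest.json")
val jars = Seq(new File("target/scala-2.10/engine-assembly.jar"))
RegisterEngine.registerEngine(manifest, jars)
```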

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/tools/src/main/scala/io/prediction/tools/RunServer.scala
----------------------------------------------------------------------
diff --git a/tools/src/main/scala/io/prediction/tools/RunServer.scala b/tools/src/main/scala/io/prediction/tools/RunServer.scala
deleted file mode 100644
index eb65e87..0000000
--- a/tools/src/main/scala/io/prediction/tools/RunServer.scala
+++ /dev/null
@@ -1,178 +0,0 @@
-/** Copyright 2015 TappingStone, Inc.
-  *
-  * Licensed under the Apache License, Version 2.0 (the "License");
-  * you may not use this file except in compliance with the License.
-  * You may obtain a copy of the License at
-  *
-  *     http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-package io.prediction.tools
-
-import java.io.File
-import java.net.URI
-
-import grizzled.slf4j.Logging
-import io.prediction.data.storage.EngineManifest
-import io.prediction.tools.console.ConsoleArgs
-import io.prediction.workflow.WorkflowUtils
-
-import scala.sys.process._
-
-object RunServer extends Logging {
-  def runServer(
-      ca: ConsoleArgs,
-      core: File,
-      em: EngineManifest,
-      engineInstanceId: String): Int = {
-    val pioEnvVars = sys.env.filter(kv => kv._1.startsWith("PIO_")).map(kv =>
-      s"${kv._1}=${kv._2}"
-    ).mkString(",")
-
-    val sparkHome = ca.common.sparkHome.getOrElse(
-      sys.env.getOrElse("SPARK_HOME", "."))
-
-    val extraFiles = WorkflowUtils.thirdPartyConfFiles
-
-    val driverClassPathIndex =
-      ca.common.sparkPassThrough.indexOf("--driver-class-path")
-    val driverClassPathPrefix =
-      if (driverClassPathIndex != -1) {
-        Seq(ca.common.sparkPassThrough(driverClassPathIndex + 1))
-      } else {
-        Seq()
-      }
-    val extraClasspaths =
-      driverClassPathPrefix ++ WorkflowUtils.thirdPartyClasspaths
-
-    val deployModeIndex =
-      ca.common.sparkPassThrough.indexOf("--deploy-mode")
-    val deployMode = if (deployModeIndex != -1) {
-      ca.common.sparkPassThrough(deployModeIndex + 1)
-    } else {
-      "client"
-    }
-
-    val mainJar =
-      if (ca.build.uberJar) {
-        if (deployMode == "cluster") {
-          em.files.filter(_.startsWith("hdfs")).head
-        } else {
-          em.files.filterNot(_.startsWith("hdfs")).head
-        }
-      } else {
-        if (deployMode == "cluster") {
-          em.files.filter(_.contains("pio-assembly")).head
-        } else {
-          core.getCanonicalPath
-        }
-      }
-
-    val jarFiles = (em.files ++ Option(new File(ca.common.pioHome.get, "plugins")
-      .listFiles()).getOrElse(Array.empty[File]).map(_.getAbsolutePath)).mkString(",")
-
-    val sparkSubmit =
-      Seq(Seq(sparkHome, "bin", "spark-submit").mkString(File.separator)) ++
-      ca.common.sparkPassThrough ++
-      Seq(
-        "--class",
-        "io.prediction.workflow.CreateServer",
-        "--name",
-        s"PredictionIO Engine Instance: ${engineInstanceId}") ++
-      (if (!ca.build.uberJar) {
-        Seq("--jars", jarFiles)
-      } else Seq()) ++
-      (if (extraFiles.size > 0) {
-        Seq("--files", extraFiles.mkString(","))
-      } else {
-        Seq()
-      }) ++
-      (if (extraClasspaths.size > 0) {
-        Seq("--driver-class-path", extraClasspaths.mkString(":"))
-      } else {
-        Seq()
-      }) ++
-      (if (ca.common.sparkKryo) {
-        Seq(
-          "--conf",
-          "spark.serializer=org.apache.spark.serializer.KryoSerializer")
-      } else {
-        Seq()
-      }) ++
-      Seq(
-        mainJar,
-        "--engineInstanceId",
-        engineInstanceId,
-        "--ip",
-        ca.deploy.ip,
-        "--port",
-        ca.deploy.port.toString,
-        "--event-server-ip",
-        ca.eventServer.ip,
-        "--event-server-port",
-        ca.eventServer.port.toString) ++
-      (if (ca.accessKey.accessKey != "") {
-        Seq("--accesskey", ca.accessKey.accessKey)
-      } else {
-        Seq()
-      }) ++
-      (if (ca.eventServer.enabled) Seq("--feedback") else Seq()) ++
-      (if (ca.common.batch != "") Seq("--batch", ca.common.batch) else Seq()) ++
-      (if (ca.common.verbose) Seq("--verbose") else Seq()) ++
-      ca.deploy.logUrl.map(x => Seq("--log-url", x)).getOrElse(Seq()) ++
-      ca.deploy.logPrefix.map(x => Seq("--log-prefix", x)).getOrElse(Seq()) ++
-      Seq("--json-extractor", ca.common.jsonExtractor.toString)
-
-    info(s"Submission command: ${sparkSubmit.mkString(" ")}")
-
-    val proc =
-      Process(sparkSubmit, None, "CLASSPATH" -> "", "SPARK_YARN_USER_ENV" -> pioEnvVars).run()
-    Runtime.getRuntime.addShutdownHook(new Thread(new Runnable {
-      def run(): Unit = {
-        proc.destroy()
-      }
-    }))
-    proc.exitValue()
-  }
-
-  def newRunServer(
-    ca: ConsoleArgs,
-    em: EngineManifest,
-    engineInstanceId: String): Int = {
-    val jarFiles = em.files.map(new URI(_)) ++
-      Option(new File(ca.common.pioHome.get, "plugins").listFiles())
-        .getOrElse(Array.empty[File]).map(_.toURI)
-    val args = Seq(
-      "--engineInstanceId",
-      engineInstanceId,
-      "--engine-variant",
-      ca.common.variantJson.toURI.toString,
-      "--ip",
-      ca.deploy.ip,
-      "--port",
-      ca.deploy.port.toString,
-      "--event-server-ip",
-      ca.eventServer.ip,
-      "--event-server-port",
-      ca.eventServer.port.toString) ++
-      (if (ca.accessKey.accessKey != "") {
-        Seq("--accesskey", ca.accessKey.accessKey)
-      } else {
-        Nil
-      }) ++
-      (if (ca.eventServer.enabled) Seq("--feedback") else Nil) ++
-      (if (ca.common.batch != "") Seq("--batch", ca.common.batch) else Nil) ++
-      (if (ca.common.verbose) Seq("--verbose") else Nil) ++
-      ca.deploy.logUrl.map(x => Seq("--log-url", x)).getOrElse(Nil) ++
-      ca.deploy.logPrefix.map(x => Seq("--log-prefix", x)).getOrElse(Nil) ++
-      Seq("--json-extractor", ca.common.jsonExtractor.toString)
-
-    Runner.runOnSpark("io.prediction.workflow.CreateServer", args, ca, jarFiles)
-  }
-}
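
One detail worth noting in the deleted code above is the shutdown hook that
ties the spawned spark-submit process to the JVM. The pattern in isolation,
with a stand-in child command:

```
import scala.sys.process._

// Illustrative only: start a child process and ensure it dies with the
// JVM, so interrupting `pio deploy` also tears down spark-submit.
val proc = Process(Seq("sleep", "60")).run()
Runtime.getRuntime.addShutdownHook(new Thread(new Runnable {
  def run(): Unit = proc.destroy()
}))
```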

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/tools/src/main/scala/io/prediction/tools/RunWorkflow.scala
----------------------------------------------------------------------
diff --git a/tools/src/main/scala/io/prediction/tools/RunWorkflow.scala b/tools/src/main/scala/io/prediction/tools/RunWorkflow.scala
deleted file mode 100644
index b18690e..0000000
--- a/tools/src/main/scala/io/prediction/tools/RunWorkflow.scala
+++ /dev/null
@@ -1,212 +0,0 @@
-/** Copyright 2015 TappingStone, Inc.
-  *
-  * Licensed under the Apache License, Version 2.0 (the "License");
-  * you may not use this file except in compliance with the License.
-  * You may obtain a copy of the License at
-  *
-  *     http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-package io.prediction.tools
-
-import java.io.File
-import java.net.URI
-
-import grizzled.slf4j.Logging
-import io.prediction.data.storage.EngineManifest
-import io.prediction.tools.console.ConsoleArgs
-import io.prediction.workflow.WorkflowUtils
-import org.apache.hadoop.conf.Configuration
-import org.apache.hadoop.fs.FileSystem
-import org.apache.hadoop.fs.Path
-
-import scala.sys.process._
-
-object RunWorkflow extends Logging {
-  def runWorkflow(
-      ca: ConsoleArgs,
-      core: File,
-      em: EngineManifest,
-      variantJson: File): Int = {
-    // Collect and serialize PIO_* environmental variables
-    val pioEnvVars = sys.env.filter(kv => kv._1.startsWith("PIO_")).map(kv =>
-      s"${kv._1}=${kv._2}"
-    ).mkString(",")
-
-    val sparkHome = ca.common.sparkHome.getOrElse(
-      sys.env.getOrElse("SPARK_HOME", "."))
-
-    val hadoopConf = new Configuration
-    val hdfs = FileSystem.get(hadoopConf)
-
-    val driverClassPathIndex =
-      ca.common.sparkPassThrough.indexOf("--driver-class-path")
-    val driverClassPathPrefix =
-      if (driverClassPathIndex != -1) {
-        Seq(ca.common.sparkPassThrough(driverClassPathIndex + 1))
-      } else {
-        Seq()
-      }
-    val extraClasspaths =
-      driverClassPathPrefix ++ WorkflowUtils.thirdPartyClasspaths
-
-    val deployModeIndex =
-      ca.common.sparkPassThrough.indexOf("--deploy-mode")
-    val deployMode = if (deployModeIndex != -1) {
-      ca.common.sparkPassThrough(deployModeIndex + 1)
-    } else {
-      "client"
-    }
-
-    val extraFiles = WorkflowUtils.thirdPartyConfFiles
-
-    val mainJar =
-      if (ca.build.uberJar) {
-        if (deployMode == "cluster") {
-          em.files.filter(_.startsWith("hdfs")).head
-        } else {
-          em.files.filterNot(_.startsWith("hdfs")).head
-        }
-      } else {
-        if (deployMode == "cluster") {
-          em.files.filter(_.contains("pio-assembly")).head
-        } else {
-          core.getCanonicalPath
-        }
-      }
-
-    val workMode =
-      ca.common.evaluation.map(_ => "Evaluation").getOrElse("Training")
-
-    val engineLocation = Seq(
-      sys.env("PIO_FS_ENGINESDIR"),
-      em.id,
-      em.version)
-
-    if (deployMode == "cluster") {
-      val dstPath = new Path(engineLocation.mkString(Path.SEPARATOR))
-      info("Cluster deploy mode detected. Trying to copy " +
-        s"${variantJson.getCanonicalPath} to " +
-        s"${hdfs.makeQualified(dstPath).toString}.")
-      hdfs.copyFromLocalFile(new Path(variantJson.toURI), dstPath)
-    }
-
-    val sparkSubmit =
-      Seq(Seq(sparkHome, "bin", "spark-submit").mkString(File.separator)) ++
-      ca.common.sparkPassThrough ++
-      Seq(
-        "--class",
-        "io.prediction.workflow.CreateWorkflow",
-        "--name",
-        s"PredictionIO $workMode: ${em.id} ${em.version} (${ca.common.batch})") ++
-      (if (!ca.build.uberJar) {
-        Seq("--jars", em.files.mkString(","))
-      } else Seq()) ++
-      (if (extraFiles.size > 0) {
-        Seq("--files", extraFiles.mkString(","))
-      } else {
-        Seq()
-      }) ++
-      (if (extraClasspaths.size > 0) {
-        Seq("--driver-class-path", extraClasspaths.mkString(":"))
-      } else {
-        Seq()
-      }) ++
-      (if (ca.common.sparkKryo) {
-        Seq(
-          "--conf",
-          "spark.serializer=org.apache.spark.serializer.KryoSerializer")
-      } else {
-        Seq()
-      }) ++
-      Seq(
-        mainJar,
-        "--env",
-        pioEnvVars,
-        "--engine-id",
-        em.id,
-        "--engine-version",
-        em.version,
-        "--engine-variant",
-        if (deployMode == "cluster") {
-          hdfs.makeQualified(new Path(
-            (engineLocation :+ variantJson.getName).mkString(Path.SEPARATOR))).
-            toString
-        } else {
-          variantJson.getCanonicalPath
-        },
-        "--verbosity",
-        ca.common.verbosity.toString) ++
-      ca.common.engineFactory.map(
-        x => Seq("--engine-factory", x)).getOrElse(Seq()) ++
-      ca.common.engineParamsKey.map(
-        x => Seq("--engine-params-key", x)).getOrElse(Seq()) ++
-      (if (deployMode == "cluster") Seq("--deploy-mode", "cluster") else Seq()) ++
-      (if (ca.common.batch != "") Seq("--batch", ca.common.batch) else Seq()) ++
-      (if (ca.common.verbose) Seq("--verbose") else Seq()) ++
-      (if (ca.common.skipSanityCheck) Seq("--skip-sanity-check") else Seq()) ++
-      (if (ca.common.stopAfterRead) Seq("--stop-after-read") else Seq()) ++
-      (if (ca.common.stopAfterPrepare) {
-        Seq("--stop-after-prepare")
-      } else {
-        Seq()
-      }) ++
-      ca.common.evaluation.map(x => Seq("--evaluation-class", x)).
-        getOrElse(Seq()) ++
-      // If engineParamsGenerator is specified, it overrides the evaluation.
-      ca.common.engineParamsGenerator.orElse(ca.common.evaluation)
-        .map(x => Seq("--engine-params-generator-class", x))
-        .getOrElse(Seq()) ++
-      Seq("--json-extractor", ca.common.jsonExtractor.toString)
-
-    info(s"Submission command: ${sparkSubmit.mkString(" ")}")
-    Process(sparkSubmit, None, "CLASSPATH" -> "", "SPARK_YARN_USER_ENV" -> pioEnvVars).!
-  }
-
-  def newRunWorkflow(ca: ConsoleArgs, em: EngineManifest): Int = {
-    val jarFiles = em.files.map(new URI(_))
-    val args = Seq(
-      "--engine-id",
-      em.id,
-      "--engine-version",
-      em.version,
-      "--engine-variant",
-      ca.common.variantJson.toURI.toString,
-      "--verbosity",
-      ca.common.verbosity.toString) ++
-      ca.common.engineFactory.map(
-        x => Seq("--engine-factory", x)).getOrElse(Seq()) ++
-      ca.common.engineParamsKey.map(
-        x => Seq("--engine-params-key", x)).getOrElse(Seq()) ++
-      (if (ca.common.batch != "") Seq("--batch", ca.common.batch) else Seq()) ++
-      (if (ca.common.verbose) Seq("--verbose") else Seq()) ++
-      (if (ca.common.skipSanityCheck) Seq("--skip-sanity-check") else Seq()) ++
-      (if (ca.common.stopAfterRead) Seq("--stop-after-read") else Seq()) ++
-      (if (ca.common.stopAfterPrepare) {
-        Seq("--stop-after-prepare")
-      } else {
-        Seq()
-      }) ++
-      ca.common.evaluation.map(x => Seq("--evaluation-class", x)).
-        getOrElse(Seq()) ++
-      // If engineParamsGenerator is specified, it overrides the evaluation.
-      ca.common.engineParamsGenerator.orElse(ca.common.evaluation)
-        .map(x => Seq("--engine-params-generator-class", x))
-        .getOrElse(Seq()) ++
-      Seq("--json-extractor", ca.common.jsonExtractor.toString)
-
-    Runner.runOnSpark(
-      "io.prediction.workflow.CreateWorkflow",
-      args,
-      ca,
-      jarFiles)
-  }
-}
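
The cluster-mode branch above must make the engine variant JSON visible to a
remote driver before spark-submit runs. The staging step in isolation, with
illustrative paths:

```
import java.io.File
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.{FileSystem, Path}

// Illustrative only: copy a local engine.json into HDFS so a driver in
// cluster deploy mode can read it.
val hdfs = FileSystem.get(new Configuration())
val dst = new Path(Seq("/engines", "engine-id", "1.0.0", "engine.json")
  .mkString(Path.SEPARATOR))
hdfs.copyFromLocalFile(new Path(new File("engine.json").toURI), dst)
```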

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/tools/src/main/scala/io/prediction/tools/Runner.scala
----------------------------------------------------------------------
diff --git a/tools/src/main/scala/io/prediction/tools/Runner.scala b/tools/src/main/scala/io/prediction/tools/Runner.scala
deleted file mode 100644
index 3156660..0000000
--- a/tools/src/main/scala/io/prediction/tools/Runner.scala
+++ /dev/null
@@ -1,211 +0,0 @@
-/** Copyright 2015 TappingStone, Inc.
-  *
-  * Licensed under the Apache License, Version 2.0 (the "License");
-  * you may not use this file except in compliance with the License.
-  * You may obtain a copy of the License at
-  *
-  *     http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-package io.prediction.tools
-
-import java.io.File
-import java.net.URI
-
-import grizzled.slf4j.Logging
-import io.prediction.tools.console.ConsoleArgs
-import io.prediction.workflow.WorkflowUtils
-import org.apache.hadoop.conf.Configuration
-import org.apache.hadoop.fs.FileSystem
-import org.apache.hadoop.fs.Path
-
-import scala.sys.process._
-
-object Runner extends Logging {
-  def envStringToMap(env: String): Map[String, String] =
-    env.split(',').flatMap(p =>
-      p.split('=') match {
-        case Array(k, v) => List(k -> v)
-        case _ => Nil
-      }
-    ).toMap
-
-  def argumentValue(arguments: Seq[String], argumentName: String): Option[String] = {
-    val argumentIndex = arguments.indexOf(argumentName)
-    // Yields None when the flag is absent or has no value after it.
-    if (argumentIndex >= 0 && argumentIndex + 1 < arguments.size) {
-      Some(arguments(argumentIndex + 1))
-    } else {
-      None
-    }
-  }
-
-  def handleScratchFile(
-      fileSystem: Option[FileSystem],
-      uri: Option[URI],
-      localFile: File): String = {
-    val localFilePath = localFile.getCanonicalPath
-    (fileSystem, uri) match {
-      case (Some(fs), Some(u)) =>
-        val dest = fs.makeQualified(Path.mergePaths(
-          new Path(u),
-          new Path(localFilePath)))
-        info(s"Copying $localFile to ${dest.toString}")
-        fs.copyFromLocalFile(new Path(localFilePath), dest)
-        dest.toUri.toString
-      case _ => localFile.toURI.toString
-    }
-  }
-
-  def cleanup(fs: Option[FileSystem], uri: Option[URI]): Unit = {
-    (fs, uri) match {
-      case (Some(f), Some(_)) => f.close()
-      case _ => ()
-    }
-  }
-
-  def detectFilePaths(
-      fileSystem: Option[FileSystem],
-      uri: Option[URI],
-      args: Seq[String]): Seq[String] = {
-    args map { arg =>
-      val f = try {
-        new File(new URI(arg))
-      } catch {
-        case e: Throwable => new File(arg)
-      }
-      if (f.exists()) {
-        handleScratchFile(fileSystem, uri, f)
-      } else {
-        arg
-      }
-    }
-  }
-
-  def runOnSpark(
-      className: String,
-      classArgs: Seq[String],
-      ca: ConsoleArgs,
-      extraJars: Seq[URI]): Int = {
-    // Return error for unsupported cases
-    val deployMode =
-      argumentValue(ca.common.sparkPassThrough, "--deploy-mode").getOrElse("client")
-    val master =
-      argumentValue(ca.common.sparkPassThrough, "--master").getOrElse("local")
-
-    (ca.common.scratchUri, deployMode, master) match {
-      case (Some(_), "client", m) if m != "yarn-cluster" =>
-        error("--scratch-uri cannot be set when deploy mode is client")
-        return 1
-      case (_, "cluster", m) if m.startsWith("spark://") =>
-        error("Using cluster deploy mode with Spark standalone cluster is not supported")
-        return 1
-      case _ => ()
-    }
-
-    // Initialize HDFS API for scratch URI
-    val fs = ca.common.scratchUri map { uri =>
-      FileSystem.get(uri, new Configuration())
-    }
-
-    // Collect and serialize PIO_* environmental variables
-    val pioEnvVars = sys.env.filter(kv => kv._1.startsWith("PIO_")).map(kv =>
-      s"${kv._1}=${kv._2}"
-    ).mkString(",")
-
-    // Location of Spark
-    val sparkHome = ca.common.sparkHome.getOrElse(
-      sys.env.getOrElse("SPARK_HOME", "."))
-
-    // Local path to PredictionIO assembly JAR
-    val mainJar = handleScratchFile(
-      fs,
-      ca.common.scratchUri,
-      console.Console.coreAssembly(ca.common.pioHome.get))
-
-    // Extra JARs that are needed by the driver
-    val driverClassPathPrefix =
-      argumentValue(ca.common.sparkPassThrough, "--driver-class-path") map { v =>
-        Seq(v)
-      } getOrElse {
-        Nil
-      }
-
-    val extraClasspaths =
-      driverClassPathPrefix ++ WorkflowUtils.thirdPartyClasspaths
-
-    // Extra files that are needed to be passed to --files
-    val extraFiles = WorkflowUtils.thirdPartyConfFiles map { f =>
-      handleScratchFile(fs, ca.common.scratchUri, new File(f))
-    }
-
-    val deployedJars = extraJars map { j =>
-      handleScratchFile(fs, ca.common.scratchUri, new File(j))
-    }
-
-    val sparkSubmitCommand =
-      Seq(Seq(sparkHome, "bin", "spark-submit").mkString(File.separator))
-
-    val sparkSubmitJars = if (extraJars.nonEmpty) {
-      Seq("--jars", deployedJars.map(_.toString).mkString(","))
-    } else {
-      Nil
-    }
-
-    val sparkSubmitFiles = if (extraFiles.nonEmpty) {
-      Seq("--files", extraFiles.mkString(","))
-    } else {
-      Nil
-    }
-
-    val sparkSubmitExtraClasspaths = if (extraClasspaths.nonEmpty) {
-      Seq("--driver-class-path", extraClasspaths.mkString(":"))
-    } else {
-      Nil
-    }
-
-    val sparkSubmitKryo = if (ca.common.sparkKryo) {
-      Seq(
-        "--conf",
-        "spark.serializer=org.apache.spark.serializer.KryoSerializer")
-    } else {
-      Nil
-    }
-
-    val verbose = if (ca.common.verbose) Seq("--verbose") else Nil
-
-    val sparkSubmit = Seq(
-      sparkSubmitCommand,
-      ca.common.sparkPassThrough,
-      Seq("--class", className),
-      sparkSubmitJars,
-      sparkSubmitFiles,
-      sparkSubmitExtraClasspaths,
-      sparkSubmitKryo,
-      Seq(mainJar),
-      detectFilePaths(fs, ca.common.scratchUri, classArgs),
-      Seq("--env", pioEnvVars),
-      verbose).flatten.filter(_ != "")
-    info(s"Submission command: ${sparkSubmit.mkString(" ")}")
-    val proc = Process(
-      sparkSubmit,
-      None,
-      "CLASSPATH" -> "",
-      "SPARK_YARN_USER_ENV" -> pioEnvVars).run()
-    Runtime.getRuntime.addShutdownHook(new Thread(new Runnable {
-      def run(): Unit = {
-        cleanup(fs, ca.common.scratchUri)
-        proc.destroy()
-      }
-    }))
-    cleanup(fs, ca.common.scratchUri)
-    proc.exitValue()
-  }
-}
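
The PIO_* environment serialization used throughout these tools round-trips
through envStringToMap; a quick check with illustrative values:

```
// Illustrative only: the comma/equals format passed via SPARK_YARN_USER_ENV
// parses back into a map, and malformed fragments are silently dropped.
val env = "PIO_HOME=/opt/pio,PIO_FS_ENGINESDIR=/opt/pio/engines"
assert(Runner.envStringToMap(env) ==
  Map("PIO_HOME" -> "/opt/pio", "PIO_FS_ENGINESDIR" -> "/opt/pio/engines"))
assert(Runner.envStringToMap("garbage") == Map.empty)
```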

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/tools/src/main/scala/io/prediction/tools/admin/AdminAPI.scala
----------------------------------------------------------------------
diff --git a/tools/src/main/scala/io/prediction/tools/admin/AdminAPI.scala b/tools/src/main/scala/io/prediction/tools/admin/AdminAPI.scala
deleted file mode 100644
index c5ec913..0000000
--- a/tools/src/main/scala/io/prediction/tools/admin/AdminAPI.scala
+++ /dev/null
@@ -1,156 +0,0 @@
-/** Copyright 2015 TappingStone, Inc.
-  *
-  * Licensed under the Apache License, Version 2.0 (the "License");
-  * you may not use this file except in compliance with the License.
-  * You may obtain a copy of the License at
-  *
-  *     http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-package io.prediction.tools.admin
-
-import akka.actor.{Actor, ActorSystem, Props}
-import akka.event.Logging
-import akka.io.IO
-import akka.util.Timeout
-import io.prediction.data.api.StartServer
-import io.prediction.data.storage.Storage
-import org.json4s.{Formats, DefaultFormats}
-
-import java.util.concurrent.TimeUnit
-
-import spray.can.Http
-import spray.http.{MediaTypes, StatusCodes}
-import spray.httpx.Json4sSupport
-import spray.routing._
-
-import scala.concurrent.ExecutionContext
-
-class AdminServiceActor(val commandClient: CommandClient)
-  extends HttpServiceActor {
-
-  object Json4sProtocol extends Json4sSupport {
-    implicit def json4sFormats: Formats = DefaultFormats
-  }
-
-  import Json4sProtocol._
-
-  val log = Logging(context.system, this)
-
-  // we use the enclosing ActorContext's or ActorSystem's dispatcher for our
-  // Futures
-  implicit def executionContext: ExecutionContext = actorRefFactory.dispatcher
-  implicit val timeout: Timeout = Timeout(5, TimeUnit.SECONDS)
-
-  // for better message response
-  val rejectionHandler = RejectionHandler {
-    case MalformedRequestContentRejection(msg, _) :: _ =>
-      complete(StatusCodes.BadRequest, Map("message" -> msg))
-    case MissingQueryParamRejection(msg) :: _ =>
-      complete(StatusCodes.NotFound,
-        Map("message" -> s"missing required query parameter ${msg}."))
-    case AuthenticationFailedRejection(cause, challengeHeaders) :: _ =>
-      complete(StatusCodes.Unauthorized, challengeHeaders,
-        Map("message" -> s"Invalid accessKey."))
-  }
-
-  val jsonPath = """(.+)\.json$""".r
-
-  val route: Route =
-    pathSingleSlash {
-      get {
-        respondWithMediaType(MediaTypes.`application/json`) {
-          complete(Map("status" -> "alive"))
-        }
-      }
-    } ~
-      path("cmd" / "app" / Segment / "data") {
-        appName => {
-          delete {
-            respondWithMediaType(MediaTypes.`application/json`) {
-              complete(commandClient.futureAppDataDelete(appName))
-            }
-          }
-        }
-      } ~
-      path("cmd" / "app" / Segment) {
-        appName => {
-          delete {
-            respondWithMediaType(MediaTypes.`application/json`) {
-              complete(commandClient.futureAppDelete(appName))
-            }
-          }
-        }
-      } ~
-      path("cmd" / "app") {
-        get {
-          respondWithMediaType(MediaTypes.`application/json`) {
-            complete(commandClient.futureAppList())
-          }
-        } ~
-          post {
-            entity(as[AppRequest]) {
-              appArgs => respondWithMediaType(MediaTypes.`application/json`) {
-                complete(commandClient.futureAppNew(appArgs))
-              }
-            }
-          }
-      }
-  def receive: Actor.Receive = runRoute(route)
-}
-
-class AdminServerActor(val commandClient: CommandClient) extends Actor {
-  val log = Logging(context.system, this)
-  val child = context.actorOf(
-    Props(classOf[AdminServiceActor], commandClient),
-    "AdminServiceActor")
-
-  implicit val system = context.system
-
-  def receive: PartialFunction[Any, Unit] = {
-    case StartServer(host, portNum) =>
-      IO(Http) ! Http.Bind(child, interface = host, port = portNum)
-    case _: Http.Bound => log.info("Bound received. AdminServer is ready.")
-    case _: Http.CommandFailed => log.error("Command failed.")
-    case _ => log.error("Unknown message.")
-  }
-}
-
-case class AdminServerConfig(
-  ip: String = "localhost",
-  port: Int = 7071
-)
-
-object AdminServer {
-  def createAdminServer(config: AdminServerConfig): Unit = {
-    implicit val system = ActorSystem("AdminServerSystem")
-
-    val commandClient = new CommandClient(
-      appClient = Storage.getMetaDataApps,
-      accessKeyClient = Storage.getMetaDataAccessKeys,
-      eventClient = Storage.getLEvents()
-    )
-
-    val serverActor = system.actorOf(
-      Props(classOf[AdminServerActor], commandClient),
-      "AdminServerActor")
-    serverActor ! StartServer(config.ip, config.port)
-    system.awaitTermination()
-  }
-}
-
-object AdminRun {
-  def main(args: Array[String]): Unit = {
-    AdminServer.createAdminServer(AdminServerConfig(
-      ip = "localhost",
-      port = 7071))
-  }
-}
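
The server deleted above can be started programmatically the same way
AdminRun does, just with different bind parameters (values illustrative):

```
// Illustrative only: bind the admin HTTP service on all interfaces.
AdminServer.createAdminServer(AdminServerConfig(ip = "0.0.0.0", port = 7080))
```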

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/tools/src/main/scala/io/prediction/tools/admin/CommandClient.scala
----------------------------------------------------------------------
diff --git a/tools/src/main/scala/io/prediction/tools/admin/CommandClient.scala b/tools/src/main/scala/io/prediction/tools/admin/CommandClient.scala
deleted file mode 100644
index 924b6f0..0000000
--- a/tools/src/main/scala/io/prediction/tools/admin/CommandClient.scala
+++ /dev/null
@@ -1,160 +0,0 @@
-/** Copyright 2015 TappingStone, Inc.
-  *
-  * Licensed under the Apache License, Version 2.0 (the "License");
-  * you may not use this file except in compliance with the License.
-  * You may obtain a copy of the License at
-  *
-  *     http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-package io.prediction.tools.admin
-
-import io.prediction.data.storage._
-
-import scala.concurrent.{ExecutionContext, Future}
-
-abstract class BaseResponse()
-
-case class GeneralResponse(
-  status: Int = 0,
-  message: String = ""
-) extends BaseResponse()
-
-case class AppRequest(
-  id: Int = 0,
-  name: String = "",
-  description: String = ""
-)
-
-case class TrainRequest(
-  enginePath: String = ""
-)
-case class AppResponse(
-  id: Int = 0,
-  name: String = "",
-  keys: Seq[AccessKey]
-) extends BaseResponse()
-
-case class AppNewResponse(
-  status: Int = 0,
-  message: String = "",
-  id: Int = 0,
-  name: String = "",
-  key: String
-) extends BaseResponse()
-
-case class AppListResponse(
-  status: Int = 0,
-  message: String = "",
-  apps: Seq[AppResponse]
-) extends BaseResponse()
-
-class CommandClient(
-  val appClient: Apps,
-  val accessKeyClient: AccessKeys,
-  val eventClient: LEvents
-) {
-
-  def futureAppNew(req: AppRequest)(implicit ec: ExecutionContext): Future[BaseResponse] = Future {
-    val response = appClient.getByName(req.name) map { app =>
-      GeneralResponse(0, s"App ${req.name} already exists. Aborting.")
-    } getOrElse {
-      appClient.get(req.id) map {
-        app2 =>
-          GeneralResponse(0,
-              s"App ID ${app2.id} already exists and maps to the app '${app2.name}'. " +
-              "Aborting.")
-      } getOrElse {
-        val appid = appClient.insert(App(
-          id = Option(req.id).getOrElse(0),
-          name = req.name,
-          description = Option(req.description)))
-        appid map { id =>
-          val dbInit = eventClient.init(id)
-          val r = if (dbInit) {
-            val accessKey = AccessKey(
-              key = "",
-              appid = id,
-              events = Seq())
-            val insertedKey = accessKeyClient.insert(accessKey)
-            insertedKey map { k =>
-              new AppNewResponse(1, "App created successfully.", id, req.name, k)
-            } getOrElse {
-              GeneralResponse(0, s"Unable to create new access key.")
-            }
-          } else {
-            GeneralResponse(0, s"Unable to initialize Event Store for this app ID: ${id}.")
-          }
-          r
-        } getOrElse {
-          GeneralResponse(0, s"Unable to create new app.")
-        }
-      }
-    }
-    response
-  }
-
-  def futureAppList()(implicit ec: ExecutionContext): Future[AppListResponse] = Future {
-    val apps = appClient.getAll().sortBy(_.name)
-    val appsRes = apps.map {
-      app => {
-        new AppResponse(app.id, app.name, accessKeyClient.getByAppid(app.id))
-      }
-    }
-    new AppListResponse(1, "Successfully retrieved app list.", appsRes)
-  }
-
-  def futureAppDataDelete(appName: String)
-      (implicit ec: ExecutionContext): Future[GeneralResponse] = Future {
-    val response = appClient.getByName(appName) map { app =>
-      val data = if (eventClient.remove(app.id)) {
-        GeneralResponse(1, s"Removed Event Store for this app ID: ${app.id}")
-      } else {
-        GeneralResponse(0, s"Error removing Event Store for this app.")
-      }
-
-      val dbInit = eventClient.init(app.id)
-      val data2 = if (dbInit) {
-        GeneralResponse(1, s"Initialized Event Store for this app ID: ${app.id}.")
-      } else {
-        GeneralResponse(0, s"Unable to initialize Event Store for this appId:" +
-          s" ${app.id}.")
-      }
-      GeneralResponse(data.status * data2.status, data.message + data2.message)
-    } getOrElse {
-      GeneralResponse(0, s"App ${appName} does not exist.")
-    }
-    response
-  }
-
-  def futureAppDelete(appName: String)
-      (implicit ec: ExecutionContext): Future[GeneralResponse] = Future {
-
-    val response = appClient.getByName(appName) map { app =>
-      val data = if (eventClient.remove(app.id)) {
-        Storage.getMetaDataApps.delete(app.id)
-        GeneralResponse(1, "App successfully deleted")
-      } else {
-        GeneralResponse(0, s"Error removing Event Store for app ${app.name}.")
-      }
-      data
-    } getOrElse {
-      GeneralResponse(0, s"App ${appName} does not exist.")
-    }
-    response
-  }
-
-  def futureTrain(req: TrainRequest)
-      (implicit ec: ExecutionContext): Future[GeneralResponse] = Future {
-    null
-  }
-}
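
The futures above back the admin HTTP routes; a hypothetical direct call
site, given a CommandClient instance like the one AdminServer builds,
blocking only for illustration:

```
import scala.concurrent.Await
import scala.concurrent.duration._
import scala.concurrent.ExecutionContext.Implicits.global

// Illustrative only: list apps through the same client the admin API uses.
val listing = Await.result(commandClient.futureAppList(), 5.seconds)
println(s"${listing.apps.size} app(s): ${listing.apps.map(_.name).mkString(", ")}")
```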

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/tools/src/main/scala/io/prediction/tools/admin/README.md
----------------------------------------------------------------------
diff --git a/tools/src/main/scala/io/prediction/tools/admin/README.md b/tools/src/main/scala/io/prediction/tools/admin/README.md
deleted file mode 100644
index 475a3de..0000000
--- a/tools/src/main/scala/io/prediction/tools/admin/README.md
+++ /dev/null
@@ -1,161 +0,0 @@
-## Admin API (under development)
-
-### Start Admin HTTP Server without bin/pio (for development)
-
-NOTE: Elasticsearch and HBase must be running first.
-
-```
-$ sbt/sbt "tools/compile"
-$ set -a
-$ source conf/pio-env.sh
-$ set +a
-$ sbt/sbt "tools/run-main io.prediction.tools.admin.AdminRun"
-```
-
-### Unit test (Very minimal)
-
-```
-$ set -a
-$ source conf/pio-env.sh
-$ set +a
-$ sbt/sbt "tools/test-only io.prediction.tools.admin.AdminAPISpec"
-```
-
-### Start with pio command adminserver
-
-```
-$ pio adminserver
-```
-
-The admin server URL defaults to `http://localhost:7071`.
-
-The host and port can be overridden with the `--ip` and `--port` parameters:
-
-```
-$ pio adminserver --ip 127.0.0.1 --port 7080
-```
-
-### Current Supported Commands
-
-#### Check status
-
-```
-$ curl -i http://localhost:7071/
-
-{"status":"alive"}
-```
-
-#### Get list of apps
-
-```
-$ curl -i -X GET http://localhost:7071/cmd/app
-
-{"status":1,"message":"Successful retrieved app list.","apps":[{"id":12,"name":"scratch","keys":[{"key":"gtPgVMIr3uthus1QJWFBcIjNf6d1SNuhaOWQAgdLbOBP1eRWMNIJWl6SkHgI1OoN","appid":12,"events":[]}]},{"id":17,"name":"test-ecommercerec","keys":[{"key":"zPkr6sBwQoBwBjVHK2hsF9u26L38ARSe19QzkdYentuomCtYSuH0vXP5fq7advo4","appid":17,"events":[]}]}]}
-```
-
-#### Create a new app
-
-```
-$ curl -i -X POST http://localhost:7071/cmd/app \
--H "Content-Type: application/json" \
--d '{ "name" : "my_new_app" }'
-
-{"status":1,"message":"App created successfully.","id":19,"name":"my_new_app","keys":[{"key":"","appid":19,"events":[]}]}
-```
-
-#### Delete data of app
-
-```
-$ curl -i -X DELETE http://localhost:7071/cmd/app/my_new_app/data
-```
-
-#### Delete app
-
-```
-$ curl -i -X DELETE http://localhost:7071/cmd/app/my_new_app
-
-{"status":1,"message":"App successfully deleted"}
-```
-
-
-## API Doc (To be updated)
-
-### app list
-GET http://localhost:7071/cmd/app
-
-OK Response:
-{
-  "status": <STATUS>,
-  "message": <MESSAGE>,
-  "apps": [
-    { "name": "<APP_NAME>",
-      "id": <APP_ID>,
-      "accessKey": "<ACCESS_KEY>" },
-    { "name": "<APP_NAME>",
-      "id": <APP_ID>,
-      "accessKey": "<ACCESS_KEY>" }, ... ]
-}
-
-Error Response:
-{ "status": <STATUS>, "message": "<MESSAGE>" }
-
-### app new
-POST http://localhost:7071/cmd/app
-Request Body:
-{ "name": "<APP_NAME>", // required
-  "id": <APP_ID>, // optional
-  "description": "<DESCRIPTION>" } // optional
-
-OK Response:
-{ "status": <STATUS>,
-  "message": <MESSAGE>,
-  "app": {
-    "name": "<APP_NAME>",
-    "id": <APP_ID>,
-    "accessKey": "<ACCESS_KEY>" }
-}
-
-Error Response:
-{ "status": <STATUS>, "message": "<MESSAGE>" }
-
-### app delete
-DELETE http://localhost:7071/cmd/app/{appName}
-
-OK Response:
-{ "status": <STATUS>, "message": "<MESSAGE>" }
-
-Error Response:
-{ "status": <STATUS>, "message": "<MESSAGE>" }
-
-### app data-delete
-DELETE http://localhost:7071/cmd/app/{appName}/data
-
-OK Response:
-{ "status": <STATUS>, "message": "<MESSAGE>" }
-
-Error Response:
-{ "status": <STATUS>, "message": "<MESSAGE>" }
-
-
-### train TBD
-
-#### Training request:
-POST http://localhost:7071/cmd/train
-Request body: TBD
-
-OK Response: TBD
-
-Error Response: TBD
-
-#### Get training status:
-GET http://localhost:7071/cmd/train/{engineInstanceId}
-
-OK Response: TBD
-INIT
-TRAINING
-DONE
-ERROR
-
-Error Response: TBD
-
-### deploy TBD

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/tools/src/main/scala/io/prediction/tools/console/AccessKey.scala
----------------------------------------------------------------------
diff --git a/tools/src/main/scala/io/prediction/tools/console/AccessKey.scala b/tools/src/main/scala/io/prediction/tools/console/AccessKey.scala
deleted file mode 100644
index 85955e8..0000000
--- a/tools/src/main/scala/io/prediction/tools/console/AccessKey.scala
+++ /dev/null
@@ -1,83 +0,0 @@
-/** Copyright 2015 TappingStone, Inc.
-  *
-  * Licensed under the Apache License, Version 2.0 (the "License");
-  * you may not use this file except in compliance with the License.
-  * You may obtain a copy of the License at
-  *
-  *     http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-package io.prediction.tools.console
-
-import io.prediction.data.storage
-
-import grizzled.slf4j.Logging
-
-case class AccessKeyArgs(
-  accessKey: String = "",
-  events: Seq[String] = Seq())
-
-object AccessKey extends Logging {
-  def create(ca: ConsoleArgs): Int = {
-    val apps = storage.Storage.getMetaDataApps
-    apps.getByName(ca.app.name) map { app =>
-      val accessKeys = storage.Storage.getMetaDataAccessKeys
-      val accessKey = accessKeys.insert(storage.AccessKey(
-        key = ca.accessKey.accessKey,
-        appid = app.id,
-        events = ca.accessKey.events))
-      accessKey map { k =>
-        info(s"Created new access key: ${k}")
-        0
-      } getOrElse {
-        error(s"Unable to create new access key.")
-        1
-      }
-    } getOrElse {
-      error(s"App ${ca.app.name} does not exist. Aborting.")
-      1
-    }
-  }
-
-  def list(ca: ConsoleArgs): Int = {
-    val keys =
-      if (ca.app.name == "") {
-        storage.Storage.getMetaDataAccessKeys.getAll
-      } else {
-        val apps = storage.Storage.getMetaDataApps
-        apps.getByName(ca.app.name) map { app =>
-          storage.Storage.getMetaDataAccessKeys.getByAppid(app.id)
-        } getOrElse {
-          error(s"App ${ca.app.name} does not exist. Aborting.")
-          return 1
-        }
-      }
-    val title = "Access Key(s)"
-    info(f"$title%64s | App ID | Allowed Event(s)")
-    keys.sortBy(k => k.appid) foreach { k =>
-      val events =
-        if (k.events.size > 0) k.events.sorted.mkString(",") else "(all)"
-      info(f"${k.key}%64s | ${k.appid}%6d | $events%s")
-    }
-    info(s"Finished listing ${keys.size} access key(s).")
-    0
-  }
-
-  def delete(ca: ConsoleArgs): Int = {
-    try {
-      storage.Storage.getMetaDataAccessKeys.delete(ca.accessKey.accessKey)
-      info(s"Deleted access key ${ca.accessKey.accessKey}.")
-      0
-    } catch {
-      case e: Exception =>
-        error(s"Error deleting access key ${ca.accessKey.accessKey}.", e)
-        1
-    }
-  }
-}


[32/34] incubator-predictionio git commit: fix more private scopes

Posted by do...@apache.org.
fix more private scopes


Project: http://git-wip-us.apache.org/repos/asf/incubator-predictionio/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-predictionio/commit/64280b19
Tree: http://git-wip-us.apache.org/repos/asf/incubator-predictionio/tree/64280b19
Diff: http://git-wip-us.apache.org/repos/asf/incubator-predictionio/diff/64280b19

Branch: refs/heads/develop
Commit: 64280b195e23a910371c88105ad6b9414a404c44
Parents: d7c1417
Author: Xusen Yin <yi...@gmail.com>
Authored: Thu Jul 7 17:16:29 2016 -0700
Committer: Xusen Yin <yi...@gmail.com>
Committed: Thu Jul 7 17:16:29 2016 -0700

----------------------------------------------------------------------
 .../scala/org/apache/predictionio/controller/Deployment.scala    | 2 +-
 .../scala/org/apache/predictionio/controller/Evaluation.scala    | 4 ++--
 .../main/scala/org/apache/predictionio/controller/Metric.scala   | 4 ++--
 3 files changed, 5 insertions(+), 5 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/64280b19/core/src/main/scala/org/apache/predictionio/controller/Deployment.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/predictionio/controller/Deployment.scala b/core/src/main/scala/org/apache/predictionio/controller/Deployment.scala
index 76fe0b3..fa7f3f1 100644
--- a/core/src/main/scala/org/apache/predictionio/controller/Deployment.scala
+++ b/core/src/main/scala/org/apache/predictionio/controller/Deployment.scala
@@ -34,7 +34,7 @@ trait Deployment extends EngineFactory {
   }
 
   /** Returns the [[Engine]] contained in this [[Deployment]]. */
-  private [prediction]
+  private[predictionio]
   def engine: BaseEngine[_, _, _, _] = {
     assert(engineSet, "Engine not set")
     _engine

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/64280b19/core/src/main/scala/org/apache/predictionio/controller/Evaluation.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/predictionio/controller/Evaluation.scala b/core/src/main/scala/org/apache/predictionio/controller/Evaluation.scala
index c720c4f..79fa3a2 100644
--- a/core/src/main/scala/org/apache/predictionio/controller/Evaluation.scala
+++ b/core/src/main/scala/org/apache/predictionio/controller/Evaluation.scala
@@ -32,7 +32,7 @@ trait Evaluation extends Deployment {
   protected [this] var _evaluatorSet: Boolean = false
   protected [this] var _evaluator: BaseEvaluator[_, _, _, _, _ <: BaseEvaluatorResult] = _
 
-  private [prediction]
+  private[predictionio]
   def evaluator: BaseEvaluator[_, _, _, _, _ <: BaseEvaluatorResult] = {
     assert(_evaluatorSet, "Evaluator not set")
     _evaluator
@@ -95,7 +95,7 @@ trait Evaluation extends Deployment {
         outputPath = "best.json"))
   }
 
-  private [prediction]
+  private[predictionio]
   def engineMetrics: (BaseEngine[_, _, _, _], Metric[_, _, _, _, _]) = {
     throw new NotImplementedError("This method is to keep the compiler happy")
   }

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/64280b19/core/src/main/scala/org/apache/predictionio/controller/Metric.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/predictionio/controller/Metric.scala b/core/src/main/scala/org/apache/predictionio/controller/Metric.scala
index cc27984..712d537 100644
--- a/core/src/main/scala/org/apache/predictionio/controller/Metric.scala
+++ b/core/src/main/scala/org/apache/predictionio/controller/Metric.scala
@@ -54,7 +54,7 @@ extends Serializable {
   def compare(r0: R, r1: R): Int = rOrder.compare(r0, r1)
 }
 
-private [prediction] trait StatsMetricHelper[EI, Q, P, A] {
+private[predictionio] trait StatsMetricHelper[EI, Q, P, A] {
   def calculate(q: Q, p: P, a: A): Double
 
   def calculateStats(sc: SparkContext, evalDataSet: Seq[(EI, RDD[(Q, P, A)])])
@@ -69,7 +69,7 @@ private [prediction] trait StatsMetricHelper[EI, Q, P, A] {
   }
 }
 
-private [prediction] trait StatsOptionMetricHelper[EI, Q, P, A] {
+private[predictionio] trait StatsOptionMetricHelper[EI, Q, P, A] {
   def calculate(q: Q, p: P, a: A): Option[Double]
 
   def calculateStats(sc: SparkContext, evalDataSet: Seq[(EI, RDD[(Q, P, A)])])
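
The qualifier change is purely about the enclosing package name:
private[predictionio] keeps these members visible anywhere under
org.apache.predictionio while hiding them from user code, exactly as
private[prediction] did for the old io.prediction tree. A minimal
illustration with hypothetical names:

```
package org.apache.predictionio.example

object ScopeDemo {
  // Visible throughout org.apache.predictionio.*, invisible outside it.
  private[predictionio] val internalFlag = true
}
```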


[28/34] incubator-predictionio git commit: change all to org.apache.predictionio except docs

Posted by do...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/experimental/scala-parallel-recommendation-cat/src/main/scala/Engine.scala
----------------------------------------------------------------------
diff --git a/examples/experimental/scala-parallel-recommendation-cat/src/main/scala/Engine.scala b/examples/experimental/scala-parallel-recommendation-cat/src/main/scala/Engine.scala
index 8fe6361..d443bb4 100644
--- a/examples/experimental/scala-parallel-recommendation-cat/src/main/scala/Engine.scala
+++ b/examples/experimental/scala-parallel-recommendation-cat/src/main/scala/Engine.scala
@@ -1,7 +1,7 @@
 package org.template.recommendation
 
-import io.prediction.controller.IEngineFactory
-import io.prediction.controller.Engine
+import org.apache.predictionio.controller.IEngineFactory
+import org.apache.predictionio.controller.Engine
 
 case class Query(
   user: String,

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/experimental/scala-parallel-recommendation-cat/src/main/scala/Preparator.scala
----------------------------------------------------------------------
diff --git a/examples/experimental/scala-parallel-recommendation-cat/src/main/scala/Preparator.scala b/examples/experimental/scala-parallel-recommendation-cat/src/main/scala/Preparator.scala
index 64f3908..11717fe 100644
--- a/examples/experimental/scala-parallel-recommendation-cat/src/main/scala/Preparator.scala
+++ b/examples/experimental/scala-parallel-recommendation-cat/src/main/scala/Preparator.scala
@@ -1,6 +1,6 @@
 package org.template.recommendation
 
-import io.prediction.controller.PPreparator
+import org.apache.predictionio.controller.PPreparator
 
 import org.apache.spark.SparkContext
 import org.apache.spark.SparkContext._

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/experimental/scala-parallel-recommendation-cat/src/main/scala/Serving.scala
----------------------------------------------------------------------
diff --git a/examples/experimental/scala-parallel-recommendation-cat/src/main/scala/Serving.scala b/examples/experimental/scala-parallel-recommendation-cat/src/main/scala/Serving.scala
index 132755e..ad9058d 100644
--- a/examples/experimental/scala-parallel-recommendation-cat/src/main/scala/Serving.scala
+++ b/examples/experimental/scala-parallel-recommendation-cat/src/main/scala/Serving.scala
@@ -1,6 +1,6 @@
 package org.template.recommendation
 
-import io.prediction.controller.LServing
+import org.apache.predictionio.controller.LServing
 
 class Serving
   extends LServing[Query, PredictedResult] {

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/experimental/scala-parallel-recommendation-custom-datasource/build.sbt
----------------------------------------------------------------------
diff --git a/examples/experimental/scala-parallel-recommendation-custom-datasource/build.sbt b/examples/experimental/scala-parallel-recommendation-custom-datasource/build.sbt
index f931883..5065433 100644
--- a/examples/experimental/scala-parallel-recommendation-custom-datasource/build.sbt
+++ b/examples/experimental/scala-parallel-recommendation-custom-datasource/build.sbt
@@ -4,9 +4,9 @@ assemblySettings
 
 name := "template-scala-parallel-recommendation"
 
-organization := "io.prediction"
+organization := "org.apache.predictionio"
 
 libraryDependencies ++= Seq(
-  "io.prediction"    %% "core"          % "0.9.1" % "provided",
+  "org.apache.predictionio"    %% "core"          % "0.9.1" % "provided",
   "org.apache.spark" %% "spark-core"    % "1.2.0" % "provided",
   "org.apache.spark" %% "spark-mllib"   % "1.2.0" % "provided")

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/experimental/scala-parallel-recommendation-custom-datasource/src/main/scala/ALSAlgorithm.scala
----------------------------------------------------------------------
diff --git a/examples/experimental/scala-parallel-recommendation-custom-datasource/src/main/scala/ALSAlgorithm.scala b/examples/experimental/scala-parallel-recommendation-custom-datasource/src/main/scala/ALSAlgorithm.scala
index e324af7..6fca7b4 100644
--- a/examples/experimental/scala-parallel-recommendation-custom-datasource/src/main/scala/ALSAlgorithm.scala
+++ b/examples/experimental/scala-parallel-recommendation-custom-datasource/src/main/scala/ALSAlgorithm.scala
@@ -1,8 +1,8 @@
 package org.template.recommendation
 
-import io.prediction.controller.PAlgorithm
-import io.prediction.controller.Params
-import io.prediction.data.storage.BiMap
+import org.apache.predictionio.controller.PAlgorithm
+import org.apache.predictionio.controller.Params
+import org.apache.predictionio.data.storage.BiMap
 
 import org.apache.spark.SparkContext
 import org.apache.spark.SparkContext._

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/experimental/scala-parallel-recommendation-custom-datasource/src/main/scala/ALSModel.scala
----------------------------------------------------------------------
diff --git a/examples/experimental/scala-parallel-recommendation-custom-datasource/src/main/scala/ALSModel.scala b/examples/experimental/scala-parallel-recommendation-custom-datasource/src/main/scala/ALSModel.scala
index f0c7b7b..243c1d1 100644
--- a/examples/experimental/scala-parallel-recommendation-custom-datasource/src/main/scala/ALSModel.scala
+++ b/examples/experimental/scala-parallel-recommendation-custom-datasource/src/main/scala/ALSModel.scala
@@ -5,9 +5,9 @@ package org.apache.spark.mllib.recommendation
 
 import org.template.recommendation.ALSAlgorithmParams
 
-import io.prediction.controller.IPersistentModel
-import io.prediction.controller.IPersistentModelLoader
-import io.prediction.data.storage.BiMap
+import org.apache.predictionio.controller.IPersistentModel
+import org.apache.predictionio.controller.IPersistentModelLoader
+import org.apache.predictionio.data.storage.BiMap
 
 import org.apache.spark.SparkContext
 import org.apache.spark.SparkContext._

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/experimental/scala-parallel-recommendation-custom-datasource/src/main/scala/DataSource.scala
----------------------------------------------------------------------
diff --git a/examples/experimental/scala-parallel-recommendation-custom-datasource/src/main/scala/DataSource.scala b/examples/experimental/scala-parallel-recommendation-custom-datasource/src/main/scala/DataSource.scala
index c1c340d..0de2b47 100644
--- a/examples/experimental/scala-parallel-recommendation-custom-datasource/src/main/scala/DataSource.scala
+++ b/examples/experimental/scala-parallel-recommendation-custom-datasource/src/main/scala/DataSource.scala
@@ -1,11 +1,11 @@
 package org.template.recommendation
 
-import io.prediction.controller.PDataSource
-import io.prediction.controller.EmptyEvaluationInfo
-import io.prediction.controller.EmptyActualResult
-import io.prediction.controller.Params
-import io.prediction.data.storage.Event
-import io.prediction.data.storage.Storage
+import org.apache.predictionio.controller.PDataSource
+import org.apache.predictionio.controller.EmptyEvaluationInfo
+import org.apache.predictionio.controller.EmptyActualResult
+import org.apache.predictionio.controller.Params
+import org.apache.predictionio.data.storage.Event
+import org.apache.predictionio.data.storage.Storage
 
 import org.apache.spark.SparkContext
 import org.apache.spark.SparkContext._

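All the DataSource diffs in this series swap the same storage imports. For orientation, these imports back the event-reading pattern used by templates of this vintage; a hedged sketch (the appId and event names are placeholders):

    import org.apache.predictionio.data.storage.{Event, Storage}
    import org.apache.spark.SparkContext
    import org.apache.spark.rdd.RDD

    def readEvents(sc: SparkContext, appId: Int): RDD[Event] = {
      val eventsDb = Storage.getPEvents()
      // Read "rate" and "buy" events sent by users against items.
      eventsDb.find(
        appId = appId,
        entityType = Some("user"),
        eventNames = Some(List("rate", "buy")))(sc)
    }
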
http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/experimental/scala-parallel-recommendation-custom-datasource/src/main/scala/Engine.scala
----------------------------------------------------------------------
diff --git a/examples/experimental/scala-parallel-recommendation-custom-datasource/src/main/scala/Engine.scala b/examples/experimental/scala-parallel-recommendation-custom-datasource/src/main/scala/Engine.scala
index edb7767..4702fe3 100644
--- a/examples/experimental/scala-parallel-recommendation-custom-datasource/src/main/scala/Engine.scala
+++ b/examples/experimental/scala-parallel-recommendation-custom-datasource/src/main/scala/Engine.scala
@@ -1,7 +1,7 @@
 package org.template.recommendation
 
-import io.prediction.controller.IEngineFactory
-import io.prediction.controller.Engine
+import org.apache.predictionio.controller.IEngineFactory
+import org.apache.predictionio.controller.Engine
 
 case class Query(
   user: String,

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/experimental/scala-parallel-recommendation-custom-datasource/src/main/scala/Preparator.scala
----------------------------------------------------------------------
diff --git a/examples/experimental/scala-parallel-recommendation-custom-datasource/src/main/scala/Preparator.scala b/examples/experimental/scala-parallel-recommendation-custom-datasource/src/main/scala/Preparator.scala
index 6468e4b..8f2f7e4 100644
--- a/examples/experimental/scala-parallel-recommendation-custom-datasource/src/main/scala/Preparator.scala
+++ b/examples/experimental/scala-parallel-recommendation-custom-datasource/src/main/scala/Preparator.scala
@@ -1,6 +1,6 @@
 package org.template.recommendation
 
-import io.prediction.controller.PPreparator
+import org.apache.predictionio.controller.PPreparator
 
 import org.apache.spark.SparkContext
 import org.apache.spark.SparkContext._

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/experimental/scala-parallel-recommendation-custom-datasource/src/main/scala/Serving.scala
----------------------------------------------------------------------
diff --git a/examples/experimental/scala-parallel-recommendation-custom-datasource/src/main/scala/Serving.scala b/examples/experimental/scala-parallel-recommendation-custom-datasource/src/main/scala/Serving.scala
index 132755e..ad9058d 100644
--- a/examples/experimental/scala-parallel-recommendation-custom-datasource/src/main/scala/Serving.scala
+++ b/examples/experimental/scala-parallel-recommendation-custom-datasource/src/main/scala/Serving.scala
@@ -1,6 +1,6 @@
 package org.template.recommendation
 
-import io.prediction.controller.LServing
+import org.apache.predictionio.controller.LServing
 
 class Serving
   extends LServing[Query, PredictedResult] {

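This hunk truncates the class body, but the serving component in these templates is only a few lines; the trim-app hunk further down shows the same body verbatim. For completeness:

    import org.apache.predictionio.controller.LServing

    class Serving extends LServing[Query, PredictedResult] {
      // With a single algorithm there is exactly one result to pass through.
      override def serve(query: Query,
        predictedResults: Seq[PredictedResult]): PredictedResult = {
        predictedResults.head
      }
    }
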
http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/experimental/scala-parallel-recommendation-entitymap/build.sbt
----------------------------------------------------------------------
diff --git a/examples/experimental/scala-parallel-recommendation-entitymap/build.sbt b/examples/experimental/scala-parallel-recommendation-entitymap/build.sbt
index f931883..5065433 100644
--- a/examples/experimental/scala-parallel-recommendation-entitymap/build.sbt
+++ b/examples/experimental/scala-parallel-recommendation-entitymap/build.sbt
@@ -4,9 +4,9 @@ assemblySettings
 
 name := "template-scala-parallel-recommendation"
 
-organization := "io.prediction"
+organization := "org.apache.predictionio"
 
 libraryDependencies ++= Seq(
-  "io.prediction"    %% "core"          % "0.9.1" % "provided",
+  "org.apache.predictionio"    %% "core"          % "0.9.1" % "provided",
   "org.apache.spark" %% "spark-core"    % "1.2.0" % "provided",
   "org.apache.spark" %% "spark-mllib"   % "1.2.0" % "provided")

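Every build.sbt in this batch receives the same two-line change: the organization and the core dependency move from io.prediction to org.apache.predictionio. Assembled from the hunks above, a migrated build file reads as follows (the import line at the top is assumed from the sbt-assembly plugin of the period; the uneven column alignment is as committed):

    import AssemblyKeys._

    assemblySettings

    name := "template-scala-parallel-recommendation"

    organization := "org.apache.predictionio"

    libraryDependencies ++= Seq(
      "org.apache.predictionio"    %% "core"          % "0.9.1" % "provided",
      "org.apache.spark" %% "spark-core"    % "1.2.0" % "provided",
      "org.apache.spark" %% "spark-mllib"   % "1.2.0" % "provided")
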
http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/experimental/scala-parallel-recommendation-entitymap/src/main/scala/ALSAlgorithm.scala
----------------------------------------------------------------------
diff --git a/examples/experimental/scala-parallel-recommendation-entitymap/src/main/scala/ALSAlgorithm.scala b/examples/experimental/scala-parallel-recommendation-entitymap/src/main/scala/ALSAlgorithm.scala
index 7f50769..1773b23 100644
--- a/examples/experimental/scala-parallel-recommendation-entitymap/src/main/scala/ALSAlgorithm.scala
+++ b/examples/experimental/scala-parallel-recommendation-entitymap/src/main/scala/ALSAlgorithm.scala
@@ -1,7 +1,7 @@
 package org.template.recommendation
 
-import io.prediction.controller.PAlgorithm
-import io.prediction.controller.Params
+import org.apache.predictionio.controller.PAlgorithm
+import org.apache.predictionio.controller.Params
 
 import org.apache.spark.SparkContext
 import org.apache.spark.SparkContext._

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/experimental/scala-parallel-recommendation-entitymap/src/main/scala/ALSModel.scala
----------------------------------------------------------------------
diff --git a/examples/experimental/scala-parallel-recommendation-entitymap/src/main/scala/ALSModel.scala b/examples/experimental/scala-parallel-recommendation-entitymap/src/main/scala/ALSModel.scala
index ba7cf28..2dd2bf3 100644
--- a/examples/experimental/scala-parallel-recommendation-entitymap/src/main/scala/ALSModel.scala
+++ b/examples/experimental/scala-parallel-recommendation-entitymap/src/main/scala/ALSModel.scala
@@ -8,9 +8,9 @@ import org.template.recommendation.ALSAlgorithmParams
 import org.template.recommendation.User
 import org.template.recommendation.Item
 
-import io.prediction.controller.IPersistentModel
-import io.prediction.controller.IPersistentModelLoader
-import io.prediction.data.storage.EntityMap
+import org.apache.predictionio.controller.IPersistentModel
+import org.apache.predictionio.controller.IPersistentModelLoader
+import org.apache.predictionio.data.storage.EntityMap
 
 import org.apache.spark.SparkContext
 import org.apache.spark.SparkContext._

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/experimental/scala-parallel-recommendation-entitymap/src/main/scala/DataSource.scala
----------------------------------------------------------------------
diff --git a/examples/experimental/scala-parallel-recommendation-entitymap/src/main/scala/DataSource.scala b/examples/experimental/scala-parallel-recommendation-entitymap/src/main/scala/DataSource.scala
index 5b219eb..8427aff 100644
--- a/examples/experimental/scala-parallel-recommendation-entitymap/src/main/scala/DataSource.scala
+++ b/examples/experimental/scala-parallel-recommendation-entitymap/src/main/scala/DataSource.scala
@@ -1,12 +1,12 @@
 package org.template.recommendation
 
-import io.prediction.controller.PDataSource
-import io.prediction.controller.EmptyEvaluationInfo
-import io.prediction.controller.EmptyActualResult
-import io.prediction.controller.Params
-import io.prediction.data.storage.Event
-import io.prediction.data.storage.Storage
-import io.prediction.data.storage.EntityMap
+import org.apache.predictionio.controller.PDataSource
+import org.apache.predictionio.controller.EmptyEvaluationInfo
+import org.apache.predictionio.controller.EmptyActualResult
+import org.apache.predictionio.controller.Params
+import org.apache.predictionio.data.storage.Event
+import org.apache.predictionio.data.storage.Storage
+import org.apache.predictionio.data.storage.EntityMap
 
 import org.apache.spark.SparkContext
 import org.apache.spark.SparkContext._

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/experimental/scala-parallel-recommendation-entitymap/src/main/scala/Engine.scala
----------------------------------------------------------------------
diff --git a/examples/experimental/scala-parallel-recommendation-entitymap/src/main/scala/Engine.scala b/examples/experimental/scala-parallel-recommendation-entitymap/src/main/scala/Engine.scala
index edb7767..4702fe3 100644
--- a/examples/experimental/scala-parallel-recommendation-entitymap/src/main/scala/Engine.scala
+++ b/examples/experimental/scala-parallel-recommendation-entitymap/src/main/scala/Engine.scala
@@ -1,7 +1,7 @@
 package org.template.recommendation
 
-import io.prediction.controller.IEngineFactory
-import io.prediction.controller.Engine
+import org.apache.predictionio.controller.IEngineFactory
+import org.apache.predictionio.controller.Engine
 
 case class Query(
   user: String,

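The Engine.scala hunks all stop at the Query case class, but their imports point at the engine factory that ties the four components together. A representative sketch under the renamed package (component classes as in these recommendation examples; the object name is illustrative):

    import org.apache.predictionio.controller.{Engine, IEngineFactory}

    object RecommendationEngine extends IEngineFactory {
      def apply() = new Engine(
        classOf[DataSource],                  // reads training data
        classOf[Preparator],                  // massages it into PreparedData
        Map("als" -> classOf[ALSAlgorithm]),  // named algorithm map
        classOf[Serving])                     // post-processes predictions
    }
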
http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/experimental/scala-parallel-recommendation-entitymap/src/main/scala/Preparator.scala
----------------------------------------------------------------------
diff --git a/examples/experimental/scala-parallel-recommendation-entitymap/src/main/scala/Preparator.scala b/examples/experimental/scala-parallel-recommendation-entitymap/src/main/scala/Preparator.scala
index bf5013b..b1bf023 100644
--- a/examples/experimental/scala-parallel-recommendation-entitymap/src/main/scala/Preparator.scala
+++ b/examples/experimental/scala-parallel-recommendation-entitymap/src/main/scala/Preparator.scala
@@ -1,7 +1,7 @@
 package org.template.recommendation
 
-import io.prediction.controller.PPreparator
-import io.prediction.data.storage.EntityMap
+import org.apache.predictionio.controller.PPreparator
+import org.apache.predictionio.data.storage.EntityMap
 
 import org.apache.spark.SparkContext
 import org.apache.spark.SparkContext._

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/experimental/scala-parallel-recommendation-entitymap/src/main/scala/Serving.scala
----------------------------------------------------------------------
diff --git a/examples/experimental/scala-parallel-recommendation-entitymap/src/main/scala/Serving.scala b/examples/experimental/scala-parallel-recommendation-entitymap/src/main/scala/Serving.scala
index cc5542c..38ba8b9 100644
--- a/examples/experimental/scala-parallel-recommendation-entitymap/src/main/scala/Serving.scala
+++ b/examples/experimental/scala-parallel-recommendation-entitymap/src/main/scala/Serving.scala
@@ -1,6 +1,6 @@
 package org.template.recommendation
 
-import io.prediction.controller.LServing
+import org.apache.predictionio.controller.LServing
 
 class Serving
   extends LServing[Query, PredictedResult] {

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/experimental/scala-parallel-recommendation-mongo-datasource/build.sbt
----------------------------------------------------------------------
diff --git a/examples/experimental/scala-parallel-recommendation-mongo-datasource/build.sbt b/examples/experimental/scala-parallel-recommendation-mongo-datasource/build.sbt
index 73777fd..5f173fa 100644
--- a/examples/experimental/scala-parallel-recommendation-mongo-datasource/build.sbt
+++ b/examples/experimental/scala-parallel-recommendation-mongo-datasource/build.sbt
@@ -4,10 +4,10 @@ assemblySettings
 
 name := "template-scala-parallel-recommendation"
 
-organization := "io.prediction"
+organization := "org.apache.predictionio"
 
 libraryDependencies ++= Seq(
-  "io.prediction"    %% "core"          % "0.9.1" % "provided",
+  "org.apache.predictionio"    %% "core"          % "0.9.1" % "provided",
   "org.apache.spark" %% "spark-core"    % "1.2.0" % "provided",
   "org.apache.spark" %% "spark-mllib"   % "1.2.0" % "provided")
 

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/experimental/scala-parallel-recommendation-mongo-datasource/src/main/scala/ALSAlgorithm.scala
----------------------------------------------------------------------
diff --git a/examples/experimental/scala-parallel-recommendation-mongo-datasource/src/main/scala/ALSAlgorithm.scala b/examples/experimental/scala-parallel-recommendation-mongo-datasource/src/main/scala/ALSAlgorithm.scala
index e324af7..6fca7b4 100644
--- a/examples/experimental/scala-parallel-recommendation-mongo-datasource/src/main/scala/ALSAlgorithm.scala
+++ b/examples/experimental/scala-parallel-recommendation-mongo-datasource/src/main/scala/ALSAlgorithm.scala
@@ -1,8 +1,8 @@
 package org.template.recommendation
 
-import io.prediction.controller.PAlgorithm
-import io.prediction.controller.Params
-import io.prediction.data.storage.BiMap
+import org.apache.predictionio.controller.PAlgorithm
+import org.apache.predictionio.controller.Params
+import org.apache.predictionio.data.storage.BiMap
 
 import org.apache.spark.SparkContext
 import org.apache.spark.SparkContext._

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/experimental/scala-parallel-recommendation-mongo-datasource/src/main/scala/ALSModel.scala
----------------------------------------------------------------------
diff --git a/examples/experimental/scala-parallel-recommendation-mongo-datasource/src/main/scala/ALSModel.scala b/examples/experimental/scala-parallel-recommendation-mongo-datasource/src/main/scala/ALSModel.scala
index f0c7b7b..243c1d1 100644
--- a/examples/experimental/scala-parallel-recommendation-mongo-datasource/src/main/scala/ALSModel.scala
+++ b/examples/experimental/scala-parallel-recommendation-mongo-datasource/src/main/scala/ALSModel.scala
@@ -5,9 +5,9 @@ package org.apache.spark.mllib.recommendation
 
 import org.template.recommendation.ALSAlgorithmParams
 
-import io.prediction.controller.IPersistentModel
-import io.prediction.controller.IPersistentModelLoader
-import io.prediction.data.storage.BiMap
+import org.apache.predictionio.controller.IPersistentModel
+import org.apache.predictionio.controller.IPersistentModelLoader
+import org.apache.predictionio.data.storage.BiMap
 
 import org.apache.spark.SparkContext
 import org.apache.spark.SparkContext._

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/experimental/scala-parallel-recommendation-mongo-datasource/src/main/scala/DataSource.scala
----------------------------------------------------------------------
diff --git a/examples/experimental/scala-parallel-recommendation-mongo-datasource/src/main/scala/DataSource.scala b/examples/experimental/scala-parallel-recommendation-mongo-datasource/src/main/scala/DataSource.scala
index c3cf391..d21c452 100644
--- a/examples/experimental/scala-parallel-recommendation-mongo-datasource/src/main/scala/DataSource.scala
+++ b/examples/experimental/scala-parallel-recommendation-mongo-datasource/src/main/scala/DataSource.scala
@@ -1,11 +1,11 @@
 package org.template.recommendation
 
-import io.prediction.controller.PDataSource
-import io.prediction.controller.EmptyEvaluationInfo
-import io.prediction.controller.EmptyActualResult
-import io.prediction.controller.Params
-import io.prediction.data.storage.Event
-import io.prediction.data.storage.Storage
+import org.apache.predictionio.controller.PDataSource
+import org.apache.predictionio.controller.EmptyEvaluationInfo
+import org.apache.predictionio.controller.EmptyActualResult
+import org.apache.predictionio.controller.Params
+import org.apache.predictionio.data.storage.Event
+import org.apache.predictionio.data.storage.Storage
 
 import org.apache.spark.SparkContext
 import org.apache.spark.SparkContext._

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/experimental/scala-parallel-recommendation-mongo-datasource/src/main/scala/Engine.scala
----------------------------------------------------------------------
diff --git a/examples/experimental/scala-parallel-recommendation-mongo-datasource/src/main/scala/Engine.scala b/examples/experimental/scala-parallel-recommendation-mongo-datasource/src/main/scala/Engine.scala
index edb7767..4702fe3 100644
--- a/examples/experimental/scala-parallel-recommendation-mongo-datasource/src/main/scala/Engine.scala
+++ b/examples/experimental/scala-parallel-recommendation-mongo-datasource/src/main/scala/Engine.scala
@@ -1,7 +1,7 @@
 package org.template.recommendation
 
-import io.prediction.controller.IEngineFactory
-import io.prediction.controller.Engine
+import org.apache.predictionio.controller.IEngineFactory
+import org.apache.predictionio.controller.Engine
 
 case class Query(
   user: String,

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/experimental/scala-parallel-recommendation-mongo-datasource/src/main/scala/Preparator.scala
----------------------------------------------------------------------
diff --git a/examples/experimental/scala-parallel-recommendation-mongo-datasource/src/main/scala/Preparator.scala b/examples/experimental/scala-parallel-recommendation-mongo-datasource/src/main/scala/Preparator.scala
index 6468e4b..8f2f7e4 100644
--- a/examples/experimental/scala-parallel-recommendation-mongo-datasource/src/main/scala/Preparator.scala
+++ b/examples/experimental/scala-parallel-recommendation-mongo-datasource/src/main/scala/Preparator.scala
@@ -1,6 +1,6 @@
 package org.template.recommendation
 
-import io.prediction.controller.PPreparator
+import org.apache.predictionio.controller.PPreparator
 
 import org.apache.spark.SparkContext
 import org.apache.spark.SparkContext._

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/experimental/scala-parallel-recommendation-mongo-datasource/src/main/scala/Serving.scala
----------------------------------------------------------------------
diff --git a/examples/experimental/scala-parallel-recommendation-mongo-datasource/src/main/scala/Serving.scala b/examples/experimental/scala-parallel-recommendation-mongo-datasource/src/main/scala/Serving.scala
index 132755e..ad9058d 100644
--- a/examples/experimental/scala-parallel-recommendation-mongo-datasource/src/main/scala/Serving.scala
+++ b/examples/experimental/scala-parallel-recommendation-mongo-datasource/src/main/scala/Serving.scala
@@ -1,6 +1,6 @@
 package org.template.recommendation
 
-import io.prediction.controller.LServing
+import org.apache.predictionio.controller.LServing
 
 class Serving
   extends LServing[Query, PredictedResult] {

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/experimental/scala-parallel-regression/Run.scala
----------------------------------------------------------------------
diff --git a/examples/experimental/scala-parallel-regression/Run.scala b/examples/experimental/scala-parallel-regression/Run.scala
index 6500d66..8bee71b 100644
--- a/examples/experimental/scala-parallel-regression/Run.scala
+++ b/examples/experimental/scala-parallel-regression/Run.scala
@@ -1,16 +1,16 @@
-package io.prediction.examples.regression.parallel
+package org.apache.predictionio.examples.regression.parallel
 
-import io.prediction.controller.Engine
-import io.prediction.controller.Params
-import io.prediction.controller.PDataSource
-import io.prediction.controller.P2LAlgorithm
-import io.prediction.controller.IdentityPreparator
-import io.prediction.controller.IEngineFactory
-import io.prediction.controller.LAverageServing
-import io.prediction.controller.MeanSquareError
-import io.prediction.controller.Utils
-import io.prediction.controller.Workflow
-import io.prediction.controller.WorkflowParams
+import org.apache.predictionio.controller.Engine
+import org.apache.predictionio.controller.Params
+import org.apache.predictionio.controller.PDataSource
+import org.apache.predictionio.controller.P2LAlgorithm
+import org.apache.predictionio.controller.IdentityPreparator
+import org.apache.predictionio.controller.IEngineFactory
+import org.apache.predictionio.controller.LAverageServing
+import org.apache.predictionio.controller.MeanSquareError
+import org.apache.predictionio.controller.Utils
+import org.apache.predictionio.controller.Workflow
+import org.apache.predictionio.controller.WorkflowParams
 
 import org.apache.spark.SparkContext
 import org.apache.spark.SparkContext._

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/experimental/scala-parallel-regression/build.sbt
----------------------------------------------------------------------
diff --git a/examples/experimental/scala-parallel-regression/build.sbt b/examples/experimental/scala-parallel-regression/build.sbt
index 7c63e7d..1fa5433 100644
--- a/examples/experimental/scala-parallel-regression/build.sbt
+++ b/examples/experimental/scala-parallel-regression/build.sbt
@@ -10,7 +10,7 @@ organization := "myorg"
 version := "0.0.1-SNAPSHOT"
 
 libraryDependencies ++= Seq(
-  "io.prediction"    %% "core"          % "0.9.1" % "provided",
+  "org.apache.predictionio"    %% "core"          % "0.9.1" % "provided",
   "org.apache.spark" %% "spark-core"    % "1.2.0" % "provided",
   "org.apache.spark"  %% "spark-mllib"    % "1.2.0"
     exclude("org.apache.spark", "spark-core_2.10")

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/experimental/scala-parallel-regression/engine.json
----------------------------------------------------------------------
diff --git a/examples/experimental/scala-parallel-regression/engine.json b/examples/experimental/scala-parallel-regression/engine.json
index fec7aac..e078fad 100644
--- a/examples/experimental/scala-parallel-regression/engine.json
+++ b/examples/experimental/scala-parallel-regression/engine.json
@@ -1,7 +1,7 @@
 {
   "id": "scala-parallel-regression",
   "description": "scala-parallel-regression example",
-  "engineFactory": "io.prediction.examples.regression.parallel.RegressionEngineFactory",
+  "engineFactory": "org.apache.predictionio.examples.regression.parallel.RegressionEngineFactory",
   "datasource": {
     "params": {
       "filepath": "../data/lr_data.txt",

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/experimental/scala-parallel-similarproduct-dimsum/build.sbt
----------------------------------------------------------------------
diff --git a/examples/experimental/scala-parallel-similarproduct-dimsum/build.sbt b/examples/experimental/scala-parallel-similarproduct-dimsum/build.sbt
index fe7cf51..95c7280 100644
--- a/examples/experimental/scala-parallel-similarproduct-dimsum/build.sbt
+++ b/examples/experimental/scala-parallel-similarproduct-dimsum/build.sbt
@@ -4,9 +4,9 @@ assemblySettings
 
 name := "template-scala-parallel-similarproduct-dimsum"
 
-organization := "io.prediction"
+organization := "org.apache.predictionio"
 
 libraryDependencies ++= Seq(
-  "io.prediction"    %% "core"          % "0.9.1" % "provided",
+  "org.apache.predictionio"    %% "core"          % "0.9.1" % "provided",
   "org.apache.spark" %% "spark-core"    % "1.2.0" % "provided",
   "org.apache.spark" %% "spark-mllib"   % "1.2.0" % "provided")

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/experimental/scala-parallel-similarproduct-dimsum/src/main/scala/DIMSUMAlgorithm.scala
----------------------------------------------------------------------
diff --git a/examples/experimental/scala-parallel-similarproduct-dimsum/src/main/scala/DIMSUMAlgorithm.scala b/examples/experimental/scala-parallel-similarproduct-dimsum/src/main/scala/DIMSUMAlgorithm.scala
index 8db754e..d5cdd9a 100644
--- a/examples/experimental/scala-parallel-similarproduct-dimsum/src/main/scala/DIMSUMAlgorithm.scala
+++ b/examples/experimental/scala-parallel-similarproduct-dimsum/src/main/scala/DIMSUMAlgorithm.scala
@@ -1,10 +1,10 @@
 package org.template.similarproduct
 
-import io.prediction.controller.PAlgorithm
-import io.prediction.controller.Params
-import io.prediction.controller.IPersistentModel
-import io.prediction.controller.IPersistentModelLoader
-import io.prediction.data.storage.BiMap
+import org.apache.predictionio.controller.PAlgorithm
+import org.apache.predictionio.controller.Params
+import org.apache.predictionio.controller.IPersistentModel
+import org.apache.predictionio.controller.IPersistentModelLoader
+import org.apache.predictionio.data.storage.BiMap
 
 import org.apache.spark.SparkContext
 import org.apache.spark.SparkContext._

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/experimental/scala-parallel-similarproduct-dimsum/src/main/scala/DataSource.scala
----------------------------------------------------------------------
diff --git a/examples/experimental/scala-parallel-similarproduct-dimsum/src/main/scala/DataSource.scala b/examples/experimental/scala-parallel-similarproduct-dimsum/src/main/scala/DataSource.scala
index 4211186..b8571ea 100644
--- a/examples/experimental/scala-parallel-similarproduct-dimsum/src/main/scala/DataSource.scala
+++ b/examples/experimental/scala-parallel-similarproduct-dimsum/src/main/scala/DataSource.scala
@@ -1,11 +1,11 @@
 package org.template.similarproduct
 
-import io.prediction.controller.PDataSource
-import io.prediction.controller.EmptyEvaluationInfo
-import io.prediction.controller.EmptyActualResult
-import io.prediction.controller.Params
-import io.prediction.data.storage.Event
-import io.prediction.data.storage.Storage
+import org.apache.predictionio.controller.PDataSource
+import org.apache.predictionio.controller.EmptyEvaluationInfo
+import org.apache.predictionio.controller.EmptyActualResult
+import org.apache.predictionio.controller.Params
+import org.apache.predictionio.data.storage.Event
+import org.apache.predictionio.data.storage.Storage
 
 import org.apache.spark.SparkContext
 import org.apache.spark.SparkContext._

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/experimental/scala-parallel-similarproduct-dimsum/src/main/scala/Engine.scala
----------------------------------------------------------------------
diff --git a/examples/experimental/scala-parallel-similarproduct-dimsum/src/main/scala/Engine.scala b/examples/experimental/scala-parallel-similarproduct-dimsum/src/main/scala/Engine.scala
index 02a3212..e52cb82 100644
--- a/examples/experimental/scala-parallel-similarproduct-dimsum/src/main/scala/Engine.scala
+++ b/examples/experimental/scala-parallel-similarproduct-dimsum/src/main/scala/Engine.scala
@@ -1,7 +1,7 @@
 package org.template.similarproduct
 
-import io.prediction.controller.IEngineFactory
-import io.prediction.controller.Engine
+import org.apache.predictionio.controller.IEngineFactory
+import org.apache.predictionio.controller.Engine
 
 case class Query(
   items: List[String],

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/experimental/scala-parallel-similarproduct-dimsum/src/main/scala/Preparator.scala
----------------------------------------------------------------------
diff --git a/examples/experimental/scala-parallel-similarproduct-dimsum/src/main/scala/Preparator.scala b/examples/experimental/scala-parallel-similarproduct-dimsum/src/main/scala/Preparator.scala
index c5e42ec..6b84b01 100644
--- a/examples/experimental/scala-parallel-similarproduct-dimsum/src/main/scala/Preparator.scala
+++ b/examples/experimental/scala-parallel-similarproduct-dimsum/src/main/scala/Preparator.scala
@@ -1,6 +1,6 @@
 package org.template.similarproduct
 
-import io.prediction.controller.PPreparator
+import org.apache.predictionio.controller.PPreparator
 
 import org.apache.spark.SparkContext
 import org.apache.spark.SparkContext._

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/experimental/scala-parallel-similarproduct-dimsum/src/main/scala/Serving.scala
----------------------------------------------------------------------
diff --git a/examples/experimental/scala-parallel-similarproduct-dimsum/src/main/scala/Serving.scala b/examples/experimental/scala-parallel-similarproduct-dimsum/src/main/scala/Serving.scala
index e7dadab..0e1057c 100644
--- a/examples/experimental/scala-parallel-similarproduct-dimsum/src/main/scala/Serving.scala
+++ b/examples/experimental/scala-parallel-similarproduct-dimsum/src/main/scala/Serving.scala
@@ -1,6 +1,6 @@
 package org.template.similarproduct
 
-import io.prediction.controller.LServing
+import org.apache.predictionio.controller.LServing
 
 class Serving
   extends LServing[Query, PredictedResult] {

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/experimental/scala-parallel-similarproduct-localmodel/build.sbt
----------------------------------------------------------------------
diff --git a/examples/experimental/scala-parallel-similarproduct-localmodel/build.sbt b/examples/experimental/scala-parallel-similarproduct-localmodel/build.sbt
index e148283..2d8306e 100644
--- a/examples/experimental/scala-parallel-similarproduct-localmodel/build.sbt
+++ b/examples/experimental/scala-parallel-similarproduct-localmodel/build.sbt
@@ -4,9 +4,9 @@ assemblySettings
 
 name := "template-scala-parallel-similarproduct"
 
-organization := "io.prediction"
+organization := "org.apache.predictionio"
 
 libraryDependencies ++= Seq(
-  "io.prediction"    %% "core"          % "0.9.1" % "provided",
+  "org.apache.predictionio"    %% "core"          % "0.9.1" % "provided",
   "org.apache.spark" %% "spark-core"    % "1.2.0" % "provided",
   "org.apache.spark" %% "spark-mllib"   % "1.2.0" % "provided")

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/experimental/scala-parallel-similarproduct-localmodel/src/main/scala/ALSAlgorithm.scala
----------------------------------------------------------------------
diff --git a/examples/experimental/scala-parallel-similarproduct-localmodel/src/main/scala/ALSAlgorithm.scala b/examples/experimental/scala-parallel-similarproduct-localmodel/src/main/scala/ALSAlgorithm.scala
index d250b71..b6d54c2 100644
--- a/examples/experimental/scala-parallel-similarproduct-localmodel/src/main/scala/ALSAlgorithm.scala
+++ b/examples/experimental/scala-parallel-similarproduct-localmodel/src/main/scala/ALSAlgorithm.scala
@@ -1,10 +1,10 @@
 package org.template.similarproduct
 
-import io.prediction.controller.P2LAlgorithm // ADDED
-import io.prediction.controller.Params
-import io.prediction.controller.IPersistentModel
-import io.prediction.controller.IPersistentModelLoader
-import io.prediction.data.storage.BiMap
+import org.apache.predictionio.controller.P2LAlgorithm // ADDED
+import org.apache.predictionio.controller.Params
+import org.apache.predictionio.controller.IPersistentModel
+import org.apache.predictionio.controller.IPersistentModelLoader
+import org.apache.predictionio.data.storage.BiMap
 
 import org.apache.spark.SparkContext
 import org.apache.spark.SparkContext._

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/experimental/scala-parallel-similarproduct-localmodel/src/main/scala/DataSource.scala
----------------------------------------------------------------------
diff --git a/examples/experimental/scala-parallel-similarproduct-localmodel/src/main/scala/DataSource.scala b/examples/experimental/scala-parallel-similarproduct-localmodel/src/main/scala/DataSource.scala
index b45fe37..4dc4a73 100644
--- a/examples/experimental/scala-parallel-similarproduct-localmodel/src/main/scala/DataSource.scala
+++ b/examples/experimental/scala-parallel-similarproduct-localmodel/src/main/scala/DataSource.scala
@@ -1,11 +1,11 @@
 package org.template.similarproduct
 
-import io.prediction.controller.PDataSource
-import io.prediction.controller.EmptyEvaluationInfo
-import io.prediction.controller.EmptyActualResult
-import io.prediction.controller.Params
-import io.prediction.data.storage.Event
-import io.prediction.data.storage.Storage
+import org.apache.predictionio.controller.PDataSource
+import org.apache.predictionio.controller.EmptyEvaluationInfo
+import org.apache.predictionio.controller.EmptyActualResult
+import org.apache.predictionio.controller.Params
+import org.apache.predictionio.data.storage.Event
+import org.apache.predictionio.data.storage.Storage
 
 import org.apache.spark.SparkContext
 import org.apache.spark.SparkContext._

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/experimental/scala-parallel-similarproduct-localmodel/src/main/scala/Engine.scala
----------------------------------------------------------------------
diff --git a/examples/experimental/scala-parallel-similarproduct-localmodel/src/main/scala/Engine.scala b/examples/experimental/scala-parallel-similarproduct-localmodel/src/main/scala/Engine.scala
index 8d594fc..7975530 100644
--- a/examples/experimental/scala-parallel-similarproduct-localmodel/src/main/scala/Engine.scala
+++ b/examples/experimental/scala-parallel-similarproduct-localmodel/src/main/scala/Engine.scala
@@ -1,7 +1,7 @@
 package org.template.similarproduct
 
-import io.prediction.controller.IEngineFactory
-import io.prediction.controller.Engine
+import org.apache.predictionio.controller.IEngineFactory
+import org.apache.predictionio.controller.Engine
 
 case class Query(
   items: List[String],

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/experimental/scala-parallel-similarproduct-localmodel/src/main/scala/Preparator.scala
----------------------------------------------------------------------
diff --git a/examples/experimental/scala-parallel-similarproduct-localmodel/src/main/scala/Preparator.scala b/examples/experimental/scala-parallel-similarproduct-localmodel/src/main/scala/Preparator.scala
index c5e42ec..6b84b01 100644
--- a/examples/experimental/scala-parallel-similarproduct-localmodel/src/main/scala/Preparator.scala
+++ b/examples/experimental/scala-parallel-similarproduct-localmodel/src/main/scala/Preparator.scala
@@ -1,6 +1,6 @@
 package org.template.similarproduct
 
-import io.prediction.controller.PPreparator
+import org.apache.predictionio.controller.PPreparator
 
 import org.apache.spark.SparkContext
 import org.apache.spark.SparkContext._

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/experimental/scala-parallel-similarproduct-localmodel/src/main/scala/Serving.scala
----------------------------------------------------------------------
diff --git a/examples/experimental/scala-parallel-similarproduct-localmodel/src/main/scala/Serving.scala b/examples/experimental/scala-parallel-similarproduct-localmodel/src/main/scala/Serving.scala
index e7dadab..0e1057c 100644
--- a/examples/experimental/scala-parallel-similarproduct-localmodel/src/main/scala/Serving.scala
+++ b/examples/experimental/scala-parallel-similarproduct-localmodel/src/main/scala/Serving.scala
@@ -1,6 +1,6 @@
 package org.template.similarproduct
 
-import io.prediction.controller.LServing
+import org.apache.predictionio.controller.LServing
 
 class Serving
   extends LServing[Query, PredictedResult] {

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/experimental/scala-parallel-trim-app/build.sbt
----------------------------------------------------------------------
diff --git a/examples/experimental/scala-parallel-trim-app/build.sbt b/examples/experimental/scala-parallel-trim-app/build.sbt
index dbc4554..5ffa78f 100644
--- a/examples/experimental/scala-parallel-trim-app/build.sbt
+++ b/examples/experimental/scala-parallel-trim-app/build.sbt
@@ -4,9 +4,9 @@ assemblySettings
 
 name := "template-scala-parallel-vanilla"
 
-organization := "io.prediction"
+organization := "org.apache.predictionio"
 
 libraryDependencies ++= Seq(
-  "io.prediction"    %% "core"          % "0.9.1" % "provided",
+  "org.apache.predictionio"    %% "core"          % "0.9.1" % "provided",
   "org.apache.spark" %% "spark-core"    % "1.2.0" % "provided",
   "org.apache.spark" %% "spark-mllib"   % "1.2.0" % "provided")

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/experimental/scala-parallel-trim-app/engine.json
----------------------------------------------------------------------
diff --git a/examples/experimental/scala-parallel-trim-app/engine.json b/examples/experimental/scala-parallel-trim-app/engine.json
index 87b5c12..5695856 100644
--- a/examples/experimental/scala-parallel-trim-app/engine.json
+++ b/examples/experimental/scala-parallel-trim-app/engine.json
@@ -1,7 +1,7 @@
 {
   "id": "default",
   "description": "Default settings",
-  "engineFactory": "io.prediction.examples.experimental.trimapp.VanillaEngine",
+  "engineFactory": "org.apache.predictionio.examples.experimental.trimapp.VanillaEngine",
   "datasource": {
     "params" : {
       "srcAppId": 13,

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/experimental/scala-parallel-trim-app/src/main/scala/Algorithm.scala
----------------------------------------------------------------------
diff --git a/examples/experimental/scala-parallel-trim-app/src/main/scala/Algorithm.scala b/examples/experimental/scala-parallel-trim-app/src/main/scala/Algorithm.scala
index e110621..f8f6bd2 100644
--- a/examples/experimental/scala-parallel-trim-app/src/main/scala/Algorithm.scala
+++ b/examples/experimental/scala-parallel-trim-app/src/main/scala/Algorithm.scala
@@ -1,7 +1,7 @@
-package io.prediction.examples.experimental.trimapp
+package org.apache.predictionio.examples.experimental.trimapp
 
-import io.prediction.controller.P2LAlgorithm
-import io.prediction.controller.Params
+import org.apache.predictionio.controller.P2LAlgorithm
+import org.apache.predictionio.controller.Params
 
 import org.apache.spark.SparkContext
 import org.apache.spark.SparkContext._

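trim-app's Algorithm extends P2LAlgorithm, i.e. trained in parallel on Spark but served from a local in-memory model. As a hedged sketch of the contract these imports serve (types follow the 0.9-era vanilla template, with Query(q: Int) as in the refactor-test hunks below; 0.9.x releases varied on whether train receives the SparkContext):

    import org.apache.predictionio.controller.{P2LAlgorithm, Params}
    import org.apache.spark.SparkContext

    case class Query(q: Int)
    case class PredictedResult(p: Int)
    class PreparedData(val events: Seq[Int]) extends Serializable

    case class AlgorithmParams(mult: Int) extends Params
    class Model(val mult: Int) extends Serializable

    class Algorithm(val ap: AlgorithmParams)
      extends P2LAlgorithm[PreparedData, Model, Query, PredictedResult] {

      // Train on the cluster; the resulting Model is serialized back to the driver.
      def train(sc: SparkContext, data: PreparedData): Model = new Model(ap.mult)

      // Serve locally from the in-memory model.
      def predict(model: Model, query: Query): PredictedResult =
        PredictedResult(p = query.q * model.mult)
    }
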
http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/experimental/scala-parallel-trim-app/src/main/scala/DataSource.scala
----------------------------------------------------------------------
diff --git a/examples/experimental/scala-parallel-trim-app/src/main/scala/DataSource.scala b/examples/experimental/scala-parallel-trim-app/src/main/scala/DataSource.scala
index d4e8bfd..d2e1c2f 100644
--- a/examples/experimental/scala-parallel-trim-app/src/main/scala/DataSource.scala
+++ b/examples/experimental/scala-parallel-trim-app/src/main/scala/DataSource.scala
@@ -1,11 +1,11 @@
-package io.prediction.examples.experimental.trimapp
-
-import io.prediction.controller.PDataSource
-import io.prediction.controller.EmptyEvaluationInfo
-import io.prediction.controller.EmptyActualResult
-import io.prediction.controller.Params
-import io.prediction.data.storage.Event
-import io.prediction.data.storage.Storage
+package org.apache.predictionio.examples.experimental.trimapp
+
+import org.apache.predictionio.controller.PDataSource
+import org.apache.predictionio.controller.EmptyEvaluationInfo
+import org.apache.predictionio.controller.EmptyActualResult
+import org.apache.predictionio.controller.Params
+import org.apache.predictionio.data.storage.Event
+import org.apache.predictionio.data.storage.Storage
 
 import org.apache.spark.SparkContext
 import org.apache.spark.SparkContext._

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/experimental/scala-parallel-trim-app/src/main/scala/Engine.scala
----------------------------------------------------------------------
diff --git a/examples/experimental/scala-parallel-trim-app/src/main/scala/Engine.scala b/examples/experimental/scala-parallel-trim-app/src/main/scala/Engine.scala
index c47f9a7..4d7f813 100644
--- a/examples/experimental/scala-parallel-trim-app/src/main/scala/Engine.scala
+++ b/examples/experimental/scala-parallel-trim-app/src/main/scala/Engine.scala
@@ -1,8 +1,8 @@
-package io.prediction.examples.experimental.trimapp
+package org.apache.predictionio.examples.experimental.trimapp
 
-import io.prediction.controller.IEngineFactory
-import io.prediction.controller.Engine
-import io.prediction.controller._
+import org.apache.predictionio.controller.IEngineFactory
+import org.apache.predictionio.controller.Engine
+import org.apache.predictionio.controller._
 
 case class Query(q: String) extends Serializable
 

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/experimental/scala-parallel-trim-app/src/main/scala/Preparator.scala
----------------------------------------------------------------------
diff --git a/examples/experimental/scala-parallel-trim-app/src/main/scala/Preparator.scala b/examples/experimental/scala-parallel-trim-app/src/main/scala/Preparator.scala
index b634143..cdd3e46 100644
--- a/examples/experimental/scala-parallel-trim-app/src/main/scala/Preparator.scala
+++ b/examples/experimental/scala-parallel-trim-app/src/main/scala/Preparator.scala
@@ -1,7 +1,7 @@
-package io.prediction.examples.experimental.trimapp
+package org.apache.predictionio.examples.experimental.trimapp
 
-import io.prediction.controller.PPreparator
-import io.prediction.data.storage.Event
+import org.apache.predictionio.controller.PPreparator
+import org.apache.predictionio.data.storage.Event
 
 import org.apache.spark.SparkContext
 import org.apache.spark.SparkContext._

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/experimental/scala-parallel-trim-app/src/main/scala/Serving.scala
----------------------------------------------------------------------
diff --git a/examples/experimental/scala-parallel-trim-app/src/main/scala/Serving.scala b/examples/experimental/scala-parallel-trim-app/src/main/scala/Serving.scala
index ec1c5a8..12f2f71 100644
--- a/examples/experimental/scala-parallel-trim-app/src/main/scala/Serving.scala
+++ b/examples/experimental/scala-parallel-trim-app/src/main/scala/Serving.scala
@@ -1,6 +1,6 @@
-package io.prediction.examples.experimental.trimapp
+package org.apache.predictionio.examples.experimental.trimapp
 
-import io.prediction.controller.LServing
+import org.apache.predictionio.controller.LServing
 
 class Serving
   extends LServing[Query, PredictedResult] {
@@ -10,4 +10,4 @@ class Serving
     predictedResults: Seq[PredictedResult]): PredictedResult = {
     predictedResults.head
   }
-}
\ No newline at end of file
+}

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/experimental/scala-recommendations/build.sbt
----------------------------------------------------------------------
diff --git a/examples/experimental/scala-recommendations/build.sbt b/examples/experimental/scala-recommendations/build.sbt
index 0d68e7a..4b067bf 100644
--- a/examples/experimental/scala-recommendations/build.sbt
+++ b/examples/experimental/scala-recommendations/build.sbt
@@ -4,10 +4,10 @@ assemblySettings
 
 name := "example-scala-recommendations"
 
-organization := "io.prediction"
+organization := "org.apache.predictionio"
 
 libraryDependencies ++= Seq(
-  "io.prediction"    %% "core"          % "0.9.1" % "provided",
+  "org.apache.predictionio"    %% "core"          % "0.9.1" % "provided",
   "commons-io"        % "commons-io"    % "2.4",
   "org.apache.spark" %% "spark-core"    % "1.2.0" % "provided",
   "org.apache.spark" %% "spark-mllib"   % "1.2.0" % "provided",

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/experimental/scala-recommendations/src/main/scala/Run.scala
----------------------------------------------------------------------
diff --git a/examples/experimental/scala-recommendations/src/main/scala/Run.scala b/examples/experimental/scala-recommendations/src/main/scala/Run.scala
index 7935a8c..2cd82a6 100644
--- a/examples/experimental/scala-recommendations/src/main/scala/Run.scala
+++ b/examples/experimental/scala-recommendations/src/main/scala/Run.scala
@@ -1,17 +1,17 @@
 package org.apache.spark.mllib.recommendation.engine
 
-import io.prediction.controller.Engine
-import io.prediction.controller.IEngineFactory
-import io.prediction.controller.IPersistentModel
-import io.prediction.controller.IPersistentModelLoader
-import io.prediction.controller.PDataSource
-import io.prediction.controller.Params
-import io.prediction.controller.PAlgorithm
-import io.prediction.controller.PIdentityPreparator
-import io.prediction.controller.LFirstServing
-import io.prediction.controller.Utils
-import io.prediction.controller.Workflow
-import io.prediction.controller.WorkflowParams
+import org.apache.predictionio.controller.Engine
+import org.apache.predictionio.controller.IEngineFactory
+import org.apache.predictionio.controller.IPersistentModel
+import org.apache.predictionio.controller.IPersistentModelLoader
+import org.apache.predictionio.controller.PDataSource
+import org.apache.predictionio.controller.Params
+import org.apache.predictionio.controller.PAlgorithm
+import org.apache.predictionio.controller.PIdentityPreparator
+import org.apache.predictionio.controller.LFirstServing
+import org.apache.predictionio.controller.Utils
+import org.apache.predictionio.controller.Workflow
+import org.apache.predictionio.controller.WorkflowParams
 
 import org.apache.commons.io.FileUtils
 import org.apache.spark.SparkContext

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/experimental/scala-refactor-test/build.sbt
----------------------------------------------------------------------
diff --git a/examples/experimental/scala-refactor-test/build.sbt b/examples/experimental/scala-refactor-test/build.sbt
index 339d592..8e2b24b 100644
--- a/examples/experimental/scala-refactor-test/build.sbt
+++ b/examples/experimental/scala-refactor-test/build.sbt
@@ -4,10 +4,10 @@ assemblySettings
 
 name := "template-scala-parallel-vanilla"
 
-organization := "io.prediction"
+organization := "org.apache.predictionio"
 
 libraryDependencies ++= Seq(
-  //"io.prediction"    %% "core"          % "0.8.6" % "provided",
-  "io.prediction"    %% "core"          % "0.9.1" % "provided",
+  //"org.apache.predictionio"    %% "core"          % "0.8.6" % "provided",
+  "org.apache.predictionio"    %% "core"          % "0.9.1" % "provided",
   "org.apache.spark" %% "spark-core"    % "1.2.0" % "provided",
   "org.apache.spark" %% "spark-mllib"   % "1.2.0" % "provided")

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/experimental/scala-refactor-test/src/main/scala/Algorithm.scala
----------------------------------------------------------------------
diff --git a/examples/experimental/scala-refactor-test/src/main/scala/Algorithm.scala b/examples/experimental/scala-refactor-test/src/main/scala/Algorithm.scala
index 15131f7..8fb3cc3 100644
--- a/examples/experimental/scala-refactor-test/src/main/scala/Algorithm.scala
+++ b/examples/experimental/scala-refactor-test/src/main/scala/Algorithm.scala
@@ -1,7 +1,7 @@
 package pio.refactor
 
-import io.prediction.controller.P2LAlgorithm
-import io.prediction.controller.Params
+import org.apache.predictionio.controller.P2LAlgorithm
+import org.apache.predictionio.controller.Params
 
 import org.apache.spark.SparkContext
 import org.apache.spark.SparkContext._

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/experimental/scala-refactor-test/src/main/scala/DataSource.scala
----------------------------------------------------------------------
diff --git a/examples/experimental/scala-refactor-test/src/main/scala/DataSource.scala b/examples/experimental/scala-refactor-test/src/main/scala/DataSource.scala
index 2707304..86e1432 100644
--- a/examples/experimental/scala-refactor-test/src/main/scala/DataSource.scala
+++ b/examples/experimental/scala-refactor-test/src/main/scala/DataSource.scala
@@ -1,12 +1,12 @@
 package pio.refactor
 
-import io.prediction.controller.PDataSource
-import io.prediction.controller.EmptyEvaluationInfo
-import io.prediction.controller.EmptyActualResult
-import io.prediction.controller.Params
-import io.prediction.controller._
-import io.prediction.data.storage.Event
-import io.prediction.data.storage.Storage
+import org.apache.predictionio.controller.PDataSource
+import org.apache.predictionio.controller.EmptyEvaluationInfo
+import org.apache.predictionio.controller.EmptyActualResult
+import org.apache.predictionio.controller.Params
+import org.apache.predictionio.controller._
+import org.apache.predictionio.data.storage.Event
+import org.apache.predictionio.data.storage.Storage
 
 import org.apache.spark.SparkContext
 import org.apache.spark.SparkContext._

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/experimental/scala-refactor-test/src/main/scala/Engine.scala
----------------------------------------------------------------------
diff --git a/examples/experimental/scala-refactor-test/src/main/scala/Engine.scala b/examples/experimental/scala-refactor-test/src/main/scala/Engine.scala
index 8726c87..8b65ecb 100644
--- a/examples/experimental/scala-refactor-test/src/main/scala/Engine.scala
+++ b/examples/experimental/scala-refactor-test/src/main/scala/Engine.scala
@@ -1,9 +1,9 @@
 package pio.refactor
 
-import io.prediction.controller.IEngineFactory
-import io.prediction.controller.Engine
-import io.prediction.controller._
-//import io.prediction.workflow.CoreWorkflow
+import org.apache.predictionio.controller.IEngineFactory
+import org.apache.predictionio.controller.Engine
+import org.apache.predictionio.controller._
+//import org.apache.predictionio.workflow.CoreWorkflow
 import grizzled.slf4j.Logger
 
 case class Query(q: Int)

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/experimental/scala-refactor-test/src/main/scala/Evaluator.scala
----------------------------------------------------------------------
diff --git a/examples/experimental/scala-refactor-test/src/main/scala/Evaluator.scala b/examples/experimental/scala-refactor-test/src/main/scala/Evaluator.scala
index 7501dfe..709defd 100644
--- a/examples/experimental/scala-refactor-test/src/main/scala/Evaluator.scala
+++ b/examples/experimental/scala-refactor-test/src/main/scala/Evaluator.scala
@@ -1,8 +1,8 @@
 package pio.refactor
 
-import io.prediction.controller.IEngineFactory
-import io.prediction.controller.Engine
-import io.prediction.controller._
+import org.apache.predictionio.controller.IEngineFactory
+import org.apache.predictionio.controller.Engine
+import org.apache.predictionio.controller._
 
 class VanillaEvaluator
   extends Evaluator[EmptyEvaluationInfo, Query, PredictedResult,

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/experimental/scala-refactor-test/src/main/scala/Preparator.scala
----------------------------------------------------------------------
diff --git a/examples/experimental/scala-refactor-test/src/main/scala/Preparator.scala b/examples/experimental/scala-refactor-test/src/main/scala/Preparator.scala
index f8da129..c7edb4a 100644
--- a/examples/experimental/scala-refactor-test/src/main/scala/Preparator.scala
+++ b/examples/experimental/scala-refactor-test/src/main/scala/Preparator.scala
@@ -1,8 +1,8 @@
 /*
 package pio.refactor
 
-import io.prediction.controller.PPreparator
-import io.prediction.data.storage.Event
+import org.apache.predictionio.controller.PPreparator
+import org.apache.predictionio.data.storage.Event
 
 import org.apache.spark.SparkContext
 import org.apache.spark.SparkContext._

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/experimental/scala-refactor-test/src/main/scala/Serving.scala
----------------------------------------------------------------------
diff --git a/examples/experimental/scala-refactor-test/src/main/scala/Serving.scala b/examples/experimental/scala-refactor-test/src/main/scala/Serving.scala
index 91a37ef..d61435d 100644
--- a/examples/experimental/scala-refactor-test/src/main/scala/Serving.scala
+++ b/examples/experimental/scala-refactor-test/src/main/scala/Serving.scala
@@ -1,6 +1,6 @@
 package pio.refactor
 
-import io.prediction.controller.LServing
+import org.apache.predictionio.controller.LServing
 import grizzled.slf4j.Logger
 
 class Serving

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/experimental/scala-stock/README.md
----------------------------------------------------------------------
diff --git a/examples/experimental/scala-stock/README.md b/examples/experimental/scala-stock/README.md
index 66bda1b..49c5cfc 100644
--- a/examples/experimental/scala-stock/README.md
+++ b/examples/experimental/scala-stock/README.md
@@ -55,7 +55,7 @@ Go to PredictionIO/examples/scala-stock
 
 Now type:
 ```
-../../bin/pio run --asm io.prediction.examples.stock.YahooDataSourceRun -- --master <Your spark master address found at http:local8080> --driver-memory <4-12G>
+../../bin/pio run --asm org.apache.predictionio.examples.stock.YahooDataSourceRun -- --master <Your spark master address found at http:local8080> --driver-memory <4-12G>
 ```
 ### Step 7: Open dashboard and view results
 In PredictionIO folder
@@ -167,7 +167,7 @@ where `$PIO_HOME` is the root directory of PredictionIO's code tree.
 
 Run the fetch script.
 ```
-$ ../sbt/sbt "runMain io.prediction.examples.stock.FetchMain"
+$ ../sbt/sbt "runMain org.apache.predictionio.examples.stock.FetchMain"
 ```
 As SP500 constituents change all the time, the hardcoded list may not reflect
 the current state and the script may fail to extract delisted tickers. Whilst
@@ -314,7 +314,7 @@ evaluation, you have to specify two sets of parameters:
 You can run the evaluation with the following command.
 ```
 $ cd $PIO_HOME/examples
-$ ../bin/pio-run io.prediction.examples.stock.Demo1
+$ ../bin/pio-run org.apache.predictionio.examples.stock.Demo1
 ```
 
 You should see that we are trading from April 2005 until Dec 2007, the NAV went
@@ -334,7 +334,7 @@ backtesting class essentially divides the current NAV by the `maxPositions`. The
 demo is run the same way, by specifying the running main class.
 ```
 $ cd $PIO_HOME/examples
-$ ../bin/pio-run io.prediction.examples.stock.Demo2
+$ ../bin/pio-run org.apache.predictionio.examples.stock.Demo2
 ```
 
 The result is not as great, of course.
@@ -351,7 +351,7 @@ All you need is to change the `metrics` variable to `DailyMetrics`.
 [Demo3](Demo3.scala) shows the actual code. Try it out with:
 ```
 $ cd $PIO_HOME/examples
-$ ../bin/pio-run io.prediction.examples.stock.Demo3
+$ ../bin/pio-run org.apache.predictionio.examples.stock.Demo3
 ```
 
 ### Last Words

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/experimental/scala-stock/build.sbt
----------------------------------------------------------------------
diff --git a/examples/experimental/scala-stock/build.sbt b/examples/experimental/scala-stock/build.sbt
index d4e894d..cfc580f 100644
--- a/examples/experimental/scala-stock/build.sbt
+++ b/examples/experimental/scala-stock/build.sbt
@@ -4,11 +4,11 @@ assemblySettings
 
 name := "example-scala-stock"
 
-organization := "io.prediction"
+organization := "org.apache.predictionio"
 
 libraryDependencies ++= Seq(
-  "io.prediction"     %% "core"           % "0.9.1" % "provided",
-  "io.prediction"     %% "engines"        % "0.9.1" % "provided",
+  "org.apache.predictionio"     %% "core"           % "0.9.1" % "provided",
+  "org.apache.predictionio"     %% "engines"        % "0.9.1" % "provided",
   "com.github.scopt"  %% "scopt"          % "3.2.0",
   "commons-io"         % "commons-io"     % "2.4",
   "org.apache.commons" % "commons-math3"  % "3.3",

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/experimental/scala-stock/src/main/scala/Algorithm.scala
----------------------------------------------------------------------
diff --git a/examples/experimental/scala-stock/src/main/scala/Algorithm.scala b/examples/experimental/scala-stock/src/main/scala/Algorithm.scala
index fd24567..772c213 100644
--- a/examples/experimental/scala-stock/src/main/scala/Algorithm.scala
+++ b/examples/experimental/scala-stock/src/main/scala/Algorithm.scala
@@ -1,12 +1,12 @@
-package io.prediction.examples.stock
+package org.apache.predictionio.examples.stock
 
-import io.prediction.controller.LAlgorithm
+import org.apache.predictionio.controller.LAlgorithm
 
 import org.apache.spark.SparkContext
 import org.apache.spark.SparkContext._
 import org.apache.spark.rdd.RDD
 import org.apache.spark.broadcast.Broadcast
-import io.prediction.controller.EmptyParams
+import org.apache.predictionio.controller.EmptyParams
 import org.saddle._
 
 import scala.reflect._

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/experimental/scala-stock/src/main/scala/BackTestingMetrics.scala
----------------------------------------------------------------------
diff --git a/examples/experimental/scala-stock/src/main/scala/BackTestingMetrics.scala b/examples/experimental/scala-stock/src/main/scala/BackTestingMetrics.scala
index d691a4f..f0649a0 100644
--- a/examples/experimental/scala-stock/src/main/scala/BackTestingMetrics.scala
+++ b/examples/experimental/scala-stock/src/main/scala/BackTestingMetrics.scala
@@ -1,8 +1,8 @@
-package io.prediction.examples.stock
+package org.apache.predictionio.examples.stock
 
-import io.prediction.controller.Params
-import io.prediction.controller.Evaluator
-import io.prediction.controller.NiceRendering
+import org.apache.predictionio.controller.Params
+import org.apache.predictionio.controller.Evaluator
+import org.apache.predictionio.controller.NiceRendering
 import com.github.nscala_time.time.Imports._
 import scala.collection.mutable.{ Map => MMap, ArrayBuffer }
 
@@ -12,7 +12,7 @@ import org.json4s.native.JsonMethods._
 import org.json4s.native.Serialization
 //import org.json4s.native.Serialization.{read, write}
 
-import io.prediction.engines.util.{ EvaluatorVisualization => MV }
+import org.apache.predictionio.engines.util.{ EvaluatorVisualization => MV }
 
 import breeze.stats.{ mean, meanAndVariance, MeanAndVariance }
 

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/experimental/scala-stock/src/main/scala/Data.scala
----------------------------------------------------------------------
diff --git a/examples/experimental/scala-stock/src/main/scala/Data.scala b/examples/experimental/scala-stock/src/main/scala/Data.scala
index a57ef28..d8839ee 100644
--- a/examples/experimental/scala-stock/src/main/scala/Data.scala
+++ b/examples/experimental/scala-stock/src/main/scala/Data.scala
@@ -1,4 +1,4 @@
-package io.prediction.examples.stock
+package org.apache.predictionio.examples.stock
 
 import org.apache.spark.SparkContext
 import org.apache.spark.SparkContext._

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/experimental/scala-stock/src/main/scala/DataSource.scala
----------------------------------------------------------------------
diff --git a/examples/experimental/scala-stock/src/main/scala/DataSource.scala b/examples/experimental/scala-stock/src/main/scala/DataSource.scala
index a91f1aa..475cdc4 100644
--- a/examples/experimental/scala-stock/src/main/scala/DataSource.scala
+++ b/examples/experimental/scala-stock/src/main/scala/DataSource.scala
@@ -1,9 +1,9 @@
-package io.prediction.examples.stock
+package org.apache.predictionio.examples.stock
 
-import io.prediction.controller.Params
-import io.prediction.controller.PDataSource
-import io.prediction.controller.LDataSource
-import io.prediction.controller.EmptyParams
+import org.apache.predictionio.controller.Params
+import org.apache.predictionio.controller.PDataSource
+import org.apache.predictionio.controller.LDataSource
+import org.apache.predictionio.controller.EmptyParams
 
 import org.apache.spark.SparkContext
 import org.apache.spark.SparkContext._

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/experimental/scala-stock/src/main/scala/Indicators.scala
----------------------------------------------------------------------
diff --git a/examples/experimental/scala-stock/src/main/scala/Indicators.scala b/examples/experimental/scala-stock/src/main/scala/Indicators.scala
index 6e4fd61..a4e1d88 100644
--- a/examples/experimental/scala-stock/src/main/scala/Indicators.scala
+++ b/examples/experimental/scala-stock/src/main/scala/Indicators.scala
@@ -1,4 +1,4 @@
-package io.prediction.examples.stock
+package org.apache.predictionio.examples.stock
 
 import org.saddle._
 import org.saddle.index.IndexTime

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/experimental/scala-stock/src/main/scala/RegressionStrategy.scala
----------------------------------------------------------------------
diff --git a/examples/experimental/scala-stock/src/main/scala/RegressionStrategy.scala b/examples/experimental/scala-stock/src/main/scala/RegressionStrategy.scala
index ed3fbe8..2d446e9 100644
--- a/examples/experimental/scala-stock/src/main/scala/RegressionStrategy.scala
+++ b/examples/experimental/scala-stock/src/main/scala/RegressionStrategy.scala
@@ -1,6 +1,6 @@
-package io.prediction.examples.stock
+package org.apache.predictionio.examples.stock
 
-import io.prediction.controller.Params
+import org.apache.predictionio.controller.Params
 
 import org.saddle._
 import org.saddle.index.IndexTime

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/experimental/scala-stock/src/main/scala/Run.scala
----------------------------------------------------------------------
diff --git a/examples/experimental/scala-stock/src/main/scala/Run.scala b/examples/experimental/scala-stock/src/main/scala/Run.scala
index 2f99ade..39c419f 100644
--- a/examples/experimental/scala-stock/src/main/scala/Run.scala
+++ b/examples/experimental/scala-stock/src/main/scala/Run.scala
@@ -1,11 +1,11 @@
-package io.prediction.examples.stock
-
-import io.prediction.controller.Workflow
-import io.prediction.controller.WorkflowParams
-import io.prediction.controller.PIdentityPreparator
-import io.prediction.controller.EmptyParams
-import io.prediction.controller.LFirstServing
-import io.prediction.controller.Params
+package org.apache.predictionio.examples.stock
+
+import org.apache.predictionio.controller.Workflow
+import org.apache.predictionio.controller.WorkflowParams
+import org.apache.predictionio.controller.PIdentityPreparator
+import org.apache.predictionio.controller.EmptyParams
+import org.apache.predictionio.controller.LFirstServing
+import org.apache.predictionio.controller.Params
 import com.github.nscala_time.time.Imports._
 import scala.collection.immutable.HashMap
 import java.io.File

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/experimental/scala-stock/src/main/scala/YahooDataSource.scala
----------------------------------------------------------------------
diff --git a/examples/experimental/scala-stock/src/main/scala/YahooDataSource.scala b/examples/experimental/scala-stock/src/main/scala/YahooDataSource.scala
index 3c8d4f0..ad41020 100644
--- a/examples/experimental/scala-stock/src/main/scala/YahooDataSource.scala
+++ b/examples/experimental/scala-stock/src/main/scala/YahooDataSource.scala
@@ -1,11 +1,11 @@
-package io.prediction.examples.stock
+package org.apache.predictionio.examples.stock
 
 // YahooDataSource reads PredictionIO event store directly.
 
-import io.prediction.data.storage.Event
-import io.prediction.data.storage.Storage
-import io.prediction.data.view.LBatchView
-import io.prediction.data.storage.DataMap
+import org.apache.predictionio.data.storage.Event
+import org.apache.predictionio.data.storage.Storage
+import org.apache.predictionio.data.view.LBatchView
+import org.apache.predictionio.data.storage.DataMap
 
 import org.joda.time.DateTime
 import org.joda.time.DateTimeZone
@@ -14,8 +14,8 @@ import com.github.nscala_time.time.Imports._
 import scala.collection.mutable.{ Map => MMap }
 import scala.collection.GenMap
 
-import io.prediction.controller._
-import io.prediction.controller.{ Params => BaseParams }
+import org.apache.predictionio.controller._
+import org.apache.predictionio.controller.{ Params => BaseParams }
 
 
 import org.apache.spark.SparkContext

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/experimental/scala-stock/src/main/twirl/io/prediction/examples/stock/backtesting.scala.html
----------------------------------------------------------------------
diff --git a/examples/experimental/scala-stock/src/main/twirl/io/prediction/examples/stock/backtesting.scala.html b/examples/experimental/scala-stock/src/main/twirl/io/prediction/examples/stock/backtesting.scala.html
index 96ce113..80c796a 100644
--- a/examples/experimental/scala-stock/src/main/twirl/io/prediction/examples/stock/backtesting.scala.html
+++ b/examples/experimental/scala-stock/src/main/twirl/io/prediction/examples/stock/backtesting.scala.html
@@ -1,4 +1,4 @@
-@import io.prediction.examples.stock.BacktestingResult
+@import org.apache.predictionio.examples.stock.BacktestingResult
 <html>
   <head>
     <script type='text/javascript' src='http://www.google.com/jsapi'></script>

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/scala-parallel-classification/add-algorithm/build.sbt
----------------------------------------------------------------------
diff --git a/examples/scala-parallel-classification/add-algorithm/build.sbt b/examples/scala-parallel-classification/add-algorithm/build.sbt
index a7e7858..30d0ccc 100644
--- a/examples/scala-parallel-classification/add-algorithm/build.sbt
+++ b/examples/scala-parallel-classification/add-algorithm/build.sbt
@@ -4,9 +4,9 @@ assemblySettings
 
 name := "template-scala-parallel-classification"
 
-organization := "io.prediction"
+organization := "org.apache.predictionio"
 
 libraryDependencies ++= Seq(
-  "io.prediction"    %% "core"          % pioVersion.value % "provided",
+  "org.apache.predictionio"    %% "core"          % pioVersion.value % "provided",
   "org.apache.spark" %% "spark-core"    % "1.2.0" % "provided",
   "org.apache.spark" %% "spark-mllib"   % "1.2.0" % "provided")

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/scala-parallel-classification/add-algorithm/project/pio-build.sbt
----------------------------------------------------------------------
diff --git a/examples/scala-parallel-classification/add-algorithm/project/pio-build.sbt b/examples/scala-parallel-classification/add-algorithm/project/pio-build.sbt
index 8346a96..9aed0ee 100644
--- a/examples/scala-parallel-classification/add-algorithm/project/pio-build.sbt
+++ b/examples/scala-parallel-classification/add-algorithm/project/pio-build.sbt
@@ -1 +1 @@
-addSbtPlugin("io.prediction" % "pio-build" % "0.9.0")
+addSbtPlugin("org.apache.predictionio" % "pio-build" % "0.9.0")

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/scala-parallel-classification/add-algorithm/src/main/scala/DataSource.scala
----------------------------------------------------------------------
diff --git a/examples/scala-parallel-classification/add-algorithm/src/main/scala/DataSource.scala b/examples/scala-parallel-classification/add-algorithm/src/main/scala/DataSource.scala
index c2a422d..0561611 100644
--- a/examples/scala-parallel-classification/add-algorithm/src/main/scala/DataSource.scala
+++ b/examples/scala-parallel-classification/add-algorithm/src/main/scala/DataSource.scala
@@ -1,11 +1,11 @@
 package org.template.classification
 
-import io.prediction.controller.PDataSource
-import io.prediction.controller.EmptyEvaluationInfo
-import io.prediction.controller.EmptyActualResult
-import io.prediction.controller.Params
-import io.prediction.data.storage.Event
-import io.prediction.data.storage.Storage
+import org.apache.predictionio.controller.PDataSource
+import org.apache.predictionio.controller.EmptyEvaluationInfo
+import org.apache.predictionio.controller.EmptyActualResult
+import org.apache.predictionio.controller.Params
+import org.apache.predictionio.data.storage.Event
+import org.apache.predictionio.data.storage.Storage
 
 import org.apache.spark.SparkContext
 import org.apache.spark.SparkContext._

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/scala-parallel-classification/add-algorithm/src/main/scala/Engine.scala
----------------------------------------------------------------------
diff --git a/examples/scala-parallel-classification/add-algorithm/src/main/scala/Engine.scala b/examples/scala-parallel-classification/add-algorithm/src/main/scala/Engine.scala
index 67e36d2..4a9df1c 100644
--- a/examples/scala-parallel-classification/add-algorithm/src/main/scala/Engine.scala
+++ b/examples/scala-parallel-classification/add-algorithm/src/main/scala/Engine.scala
@@ -1,7 +1,7 @@
 package org.template.classification
 
-import io.prediction.controller.IEngineFactory
-import io.prediction.controller.Engine
+import org.apache.predictionio.controller.IEngineFactory
+import org.apache.predictionio.controller.Engine
 
 class Query(
   val features: Array[Double]

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/scala-parallel-classification/add-algorithm/src/main/scala/NaiveBayesAlgorithm.scala
----------------------------------------------------------------------
diff --git a/examples/scala-parallel-classification/add-algorithm/src/main/scala/NaiveBayesAlgorithm.scala b/examples/scala-parallel-classification/add-algorithm/src/main/scala/NaiveBayesAlgorithm.scala
index 1666ae8..9bd9e36 100644
--- a/examples/scala-parallel-classification/add-algorithm/src/main/scala/NaiveBayesAlgorithm.scala
+++ b/examples/scala-parallel-classification/add-algorithm/src/main/scala/NaiveBayesAlgorithm.scala
@@ -1,7 +1,7 @@
 package org.template.classification
 
-import io.prediction.controller.P2LAlgorithm
-import io.prediction.controller.Params
+import org.apache.predictionio.controller.P2LAlgorithm
+import org.apache.predictionio.controller.Params
 
 import org.apache.spark.SparkContext
 import org.apache.spark.mllib.classification.NaiveBayes

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/scala-parallel-classification/add-algorithm/src/main/scala/Preparator.scala
----------------------------------------------------------------------
diff --git a/examples/scala-parallel-classification/add-algorithm/src/main/scala/Preparator.scala b/examples/scala-parallel-classification/add-algorithm/src/main/scala/Preparator.scala
index 4e581ea..587eb83 100644
--- a/examples/scala-parallel-classification/add-algorithm/src/main/scala/Preparator.scala
+++ b/examples/scala-parallel-classification/add-algorithm/src/main/scala/Preparator.scala
@@ -1,6 +1,6 @@
 package org.template.classification
 
-import io.prediction.controller.PPreparator
+import org.apache.predictionio.controller.PPreparator
 
 import org.apache.spark.SparkContext
 import org.apache.spark.SparkContext._

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/scala-parallel-classification/add-algorithm/src/main/scala/RandomForestAlgorithm.scala
----------------------------------------------------------------------
diff --git a/examples/scala-parallel-classification/add-algorithm/src/main/scala/RandomForestAlgorithm.scala b/examples/scala-parallel-classification/add-algorithm/src/main/scala/RandomForestAlgorithm.scala
index 5752265..e5e1e33 100644
--- a/examples/scala-parallel-classification/add-algorithm/src/main/scala/RandomForestAlgorithm.scala
+++ b/examples/scala-parallel-classification/add-algorithm/src/main/scala/RandomForestAlgorithm.scala
@@ -1,7 +1,7 @@
 package org.template.classification
 
-import io.prediction.controller.P2LAlgorithm
-import io.prediction.controller.Params
+import org.apache.predictionio.controller.P2LAlgorithm
+import org.apache.predictionio.controller.Params
 
 import org.apache.spark.SparkContext
 import org.apache.spark.mllib.tree.RandomForest // CHANGED

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/scala-parallel-classification/add-algorithm/src/main/scala/Serving.scala
----------------------------------------------------------------------
diff --git a/examples/scala-parallel-classification/add-algorithm/src/main/scala/Serving.scala b/examples/scala-parallel-classification/add-algorithm/src/main/scala/Serving.scala
index ef06088..9722883 100644
--- a/examples/scala-parallel-classification/add-algorithm/src/main/scala/Serving.scala
+++ b/examples/scala-parallel-classification/add-algorithm/src/main/scala/Serving.scala
@@ -1,6 +1,6 @@
 package org.template.classification
 
-import io.prediction.controller.LServing
+import org.apache.predictionio.controller.LServing
 
 class Serving extends LServing[Query, PredictedResult] {
 

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/scala-parallel-classification/custom-attributes/build.sbt
----------------------------------------------------------------------
diff --git a/examples/scala-parallel-classification/custom-attributes/build.sbt b/examples/scala-parallel-classification/custom-attributes/build.sbt
index 5e80315..fc16e53 100644
--- a/examples/scala-parallel-classification/custom-attributes/build.sbt
+++ b/examples/scala-parallel-classification/custom-attributes/build.sbt
@@ -4,11 +4,11 @@ assemblySettings
 
 name := "template-scala-parallel-classification"
 
-organization := "io.prediction"
+organization := "org.apache.predictionio"
 
 libraryDependencies ++= Seq(
-  "io.prediction"    %% "core"          % "0.8.6" % "provided",
+  "org.apache.predictionio"    %% "core"          % "0.8.6" % "provided",
   "commons-io"        % "commons-io"    % "2.4",
   "org.apache.spark" %% "spark-core"    % "1.2.0" % "provided",
   "org.apache.spark" %% "spark-mllib"   % "1.2.0" % "provided",
-  "org.json4s"       %% "json4s-native" % "3.2.10")
\ No newline at end of file
+  "org.json4s"       %% "json4s-native" % "3.2.10")

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/scala-parallel-classification/custom-attributes/src/main/scala/DataSource.scala
----------------------------------------------------------------------
diff --git a/examples/scala-parallel-classification/custom-attributes/src/main/scala/DataSource.scala b/examples/scala-parallel-classification/custom-attributes/src/main/scala/DataSource.scala
index 52a8635..970e3e0 100644
--- a/examples/scala-parallel-classification/custom-attributes/src/main/scala/DataSource.scala
+++ b/examples/scala-parallel-classification/custom-attributes/src/main/scala/DataSource.scala
@@ -1,11 +1,11 @@
 package com.test1
 
-import io.prediction.controller.PDataSource
-import io.prediction.controller.EmptyEvaluationInfo
-import io.prediction.controller.EmptyActualResult
-import io.prediction.controller.Params
-import io.prediction.data.storage.Event
-import io.prediction.data.storage.Storage
+import org.apache.predictionio.controller.PDataSource
+import org.apache.predictionio.controller.EmptyEvaluationInfo
+import org.apache.predictionio.controller.EmptyActualResult
+import org.apache.predictionio.controller.Params
+import org.apache.predictionio.data.storage.Event
+import org.apache.predictionio.data.storage.Storage
 
 import org.apache.spark.SparkContext
 import org.apache.spark.SparkContext._

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/scala-parallel-classification/custom-attributes/src/main/scala/Engine.scala
----------------------------------------------------------------------
diff --git a/examples/scala-parallel-classification/custom-attributes/src/main/scala/Engine.scala b/examples/scala-parallel-classification/custom-attributes/src/main/scala/Engine.scala
index 8236808..a648cfb 100644
--- a/examples/scala-parallel-classification/custom-attributes/src/main/scala/Engine.scala
+++ b/examples/scala-parallel-classification/custom-attributes/src/main/scala/Engine.scala
@@ -1,7 +1,7 @@
 package com.test1
 
-import io.prediction.controller.IEngineFactory
-import io.prediction.controller.Engine
+import org.apache.predictionio.controller.IEngineFactory
+import org.apache.predictionio.controller.Engine
 
 class Query(
  val  gender: String,

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/scala-parallel-classification/custom-attributes/src/main/scala/Preparator.scala
----------------------------------------------------------------------
diff --git a/examples/scala-parallel-classification/custom-attributes/src/main/scala/Preparator.scala b/examples/scala-parallel-classification/custom-attributes/src/main/scala/Preparator.scala
index c49ed8d..c3cc571 100644
--- a/examples/scala-parallel-classification/custom-attributes/src/main/scala/Preparator.scala
+++ b/examples/scala-parallel-classification/custom-attributes/src/main/scala/Preparator.scala
@@ -1,6 +1,6 @@
 package com.test1
 
-import io.prediction.controller.PPreparator
+import org.apache.predictionio.controller.PPreparator
 
 import org.apache.spark.SparkContext
 import org.apache.spark.SparkContext._

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/scala-parallel-classification/custom-attributes/src/main/scala/RandomForestAlgorithm.scala
----------------------------------------------------------------------
diff --git a/examples/scala-parallel-classification/custom-attributes/src/main/scala/RandomForestAlgorithm.scala b/examples/scala-parallel-classification/custom-attributes/src/main/scala/RandomForestAlgorithm.scala
index f2b5c35..ff7a4a9 100644
--- a/examples/scala-parallel-classification/custom-attributes/src/main/scala/RandomForestAlgorithm.scala
+++ b/examples/scala-parallel-classification/custom-attributes/src/main/scala/RandomForestAlgorithm.scala
@@ -1,7 +1,7 @@
 package com.test1
 
-import io.prediction.controller.P2LAlgorithm
-import io.prediction.controller.Params
+import org.apache.predictionio.controller.P2LAlgorithm
+import org.apache.predictionio.controller.Params
 
 import org.apache.spark.mllib.tree.RandomForest // CHANGED
 import org.apache.spark.mllib.tree.model.RandomForestModel // CHANGED

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/scala-parallel-classification/custom-attributes/src/main/scala/Serving.scala
----------------------------------------------------------------------
diff --git a/examples/scala-parallel-classification/custom-attributes/src/main/scala/Serving.scala b/examples/scala-parallel-classification/custom-attributes/src/main/scala/Serving.scala
index 0df95d4..ff9692d 100644
--- a/examples/scala-parallel-classification/custom-attributes/src/main/scala/Serving.scala
+++ b/examples/scala-parallel-classification/custom-attributes/src/main/scala/Serving.scala
@@ -1,6 +1,6 @@
 package com.test1
 
-import io.prediction.controller.LServing
+import org.apache.predictionio.controller.LServing
 
 class Serving extends LServing[Query, PredictedResult] {
 

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/scala-parallel-ecommercerecommendation/train-with-rate-event/build.sbt
----------------------------------------------------------------------
diff --git a/examples/scala-parallel-ecommercerecommendation/train-with-rate-event/build.sbt b/examples/scala-parallel-ecommercerecommendation/train-with-rate-event/build.sbt
index f4d98ee..650c7c3 100644
--- a/examples/scala-parallel-ecommercerecommendation/train-with-rate-event/build.sbt
+++ b/examples/scala-parallel-ecommercerecommendation/train-with-rate-event/build.sbt
@@ -4,9 +4,9 @@ assemblySettings
 
 name := "template-scala-parallel-ecommercerecommendation"
 
-organization := "io.prediction"
+organization := "org.apache.predictionio"
 
 libraryDependencies ++= Seq(
-  "io.prediction"    %% "core"          % pioVersion.value  % "provided",
+  "org.apache.predictionio"    %% "core"          % pioVersion.value  % "provided",
   "org.apache.spark" %% "spark-core"    % "1.2.0" % "provided",
   "org.apache.spark" %% "spark-mllib"   % "1.2.0" % "provided")

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/scala-parallel-ecommercerecommendation/train-with-rate-event/project/pio-build.sbt
----------------------------------------------------------------------
diff --git a/examples/scala-parallel-ecommercerecommendation/train-with-rate-event/project/pio-build.sbt b/examples/scala-parallel-ecommercerecommendation/train-with-rate-event/project/pio-build.sbt
index 8346a96..9aed0ee 100644
--- a/examples/scala-parallel-ecommercerecommendation/train-with-rate-event/project/pio-build.sbt
+++ b/examples/scala-parallel-ecommercerecommendation/train-with-rate-event/project/pio-build.sbt
@@ -1 +1 @@
-addSbtPlugin("io.prediction" % "pio-build" % "0.9.0")
+addSbtPlugin("org.apache.predictionio" % "pio-build" % "0.9.0")

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/scala-parallel-ecommercerecommendation/train-with-rate-event/src/main/scala/ALSAlgorithm.scala
----------------------------------------------------------------------
diff --git a/examples/scala-parallel-ecommercerecommendation/train-with-rate-event/src/main/scala/ALSAlgorithm.scala b/examples/scala-parallel-ecommercerecommendation/train-with-rate-event/src/main/scala/ALSAlgorithm.scala
index 21e619e..59c95ca 100644
--- a/examples/scala-parallel-ecommercerecommendation/train-with-rate-event/src/main/scala/ALSAlgorithm.scala
+++ b/examples/scala-parallel-ecommercerecommendation/train-with-rate-event/src/main/scala/ALSAlgorithm.scala
@@ -1,10 +1,10 @@
 package org.template.ecommercerecommendation
 
-import io.prediction.controller.P2LAlgorithm
-import io.prediction.controller.Params
-import io.prediction.data.storage.BiMap
-import io.prediction.data.storage.Event
-import io.prediction.data.storage.Storage
+import org.apache.predictionio.controller.P2LAlgorithm
+import org.apache.predictionio.controller.Params
+import org.apache.predictionio.data.storage.BiMap
+import org.apache.predictionio.data.storage.Event
+import org.apache.predictionio.data.storage.Storage
 
 import org.apache.spark.SparkContext
 import org.apache.spark.SparkContext._



[16/34] incubator-predictionio git commit: rename all except examples

Posted by do...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/data/src/main/scala/io/prediction/data/storage/DateTimeJson4sSupport.scala
----------------------------------------------------------------------
diff --git a/data/src/main/scala/io/prediction/data/storage/DateTimeJson4sSupport.scala b/data/src/main/scala/io/prediction/data/storage/DateTimeJson4sSupport.scala
deleted file mode 100644
index 48f935a..0000000
--- a/data/src/main/scala/io/prediction/data/storage/DateTimeJson4sSupport.scala
+++ /dev/null
@@ -1,47 +0,0 @@
-/** Copyright 2015 TappingStone, Inc.
-  *
-  * Licensed under the Apache License, Version 2.0 (the "License");
-  * you may not use this file except in compliance with the License.
-  * You may obtain a copy of the License at
-  *
-  *     http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-package io.prediction.data.storage
-
-import io.prediction.annotation.DeveloperApi
-import io.prediction.data.{Utils => DataUtils}
-import org.joda.time.DateTime
-import org.json4s._
-
-/** :: DeveloperApi ::
-  * JSON4S serializer for Joda-Time
-  *
-  * @group Common
-  */
-@DeveloperApi
-object DateTimeJson4sSupport {
-
-  @transient lazy implicit val formats = DefaultFormats
-
-  /** Serialize DateTime to JValue */
-  def serializeToJValue: PartialFunction[Any, JValue] = {
-    case d: DateTime => JString(DataUtils.dateTimeToString(d))
-  }
-
-  /** Deserialize JValue to DateTime */
-  def deserializeFromJValue: PartialFunction[JValue, DateTime] = {
-    case jv: JValue => DataUtils.stringToDateTime(jv.extract[String])
-  }
-
-  /** Custom JSON4S serializer for Joda-Time */
-  class Serializer extends CustomSerializer[DateTime](format => (
-    deserializeFromJValue, serializeToJValue))
-
-}
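
For reference, the serializer removed here (it reappears under the new package
elsewhere in this commit) plugs into json4s in the usual way. A minimal usage
sketch, using the old package name as in the file above and only standard
json4s calls:

```
import org.joda.time.DateTime
import org.json4s._
import io.prediction.data.storage.DateTimeJson4sSupport

object DateTimeJsonSketch {
  // Register the custom Joda-Time serializer alongside the defaults.
  implicit val formats: Formats =
    DefaultFormats + new DateTimeJson4sSupport.Serializer

  // Round-trip a DateTime through JSON using the serializer above.
  def roundTrip(d: DateTime): DateTime =
    Extraction.decompose(d).extract[DateTime]
}
```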

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/data/src/main/scala/io/prediction/data/storage/EngineInstances.scala
----------------------------------------------------------------------
diff --git a/data/src/main/scala/io/prediction/data/storage/EngineInstances.scala b/data/src/main/scala/io/prediction/data/storage/EngineInstances.scala
deleted file mode 100644
index fdbb6ba..0000000
--- a/data/src/main/scala/io/prediction/data/storage/EngineInstances.scala
+++ /dev/null
@@ -1,177 +0,0 @@
-/** Copyright 2015 TappingStone, Inc.
-  *
-  * Licensed under the Apache License, Version 2.0 (the "License");
-  * you may not use this file except in compliance with the License.
-  * You may obtain a copy of the License at
-  *
-  *     http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-package io.prediction.data.storage
-
-import com.github.nscala_time.time.Imports._
-import io.prediction.annotation.DeveloperApi
-import org.json4s._
-
-/** :: DeveloperApi ::
-  * Stores parameters, model, and other information for each engine instance
-  *
-  * @param id Engine instance ID.
-  * @param status Status of the engine instance.
-  * @param startTime Start time of the training/evaluation.
-  * @param endTime End time of the training/evaluation.
-  * @param engineId Engine ID of the instance.
-  * @param engineVersion Engine version of the instance.
-  * @param engineVariant Engine variant ID of the instance.
-  * @param engineFactory Engine factory class for the instance.
-  * @param batch A batch label of the engine instance.
-  * @param env The environment in which the instance was created.
-  * @param sparkConf Custom Spark configuration of the instance.
-  * @param dataSourceParams Data source parameters of the instance.
-  * @param preparatorParams Preparator parameters of the instance.
-  * @param algorithmsParams Algorithms parameters of the instance.
-  * @param servingParams Serving parameters of the instance.
-  * @group Meta Data
-  */
-@DeveloperApi
-case class EngineInstance(
-  id: String,
-  status: String,
-  startTime: DateTime,
-  endTime: DateTime,
-  engineId: String,
-  engineVersion: String,
-  engineVariant: String,
-  engineFactory: String,
-  batch: String,
-  env: Map[String, String],
-  sparkConf: Map[String, String],
-  dataSourceParams: String,
-  preparatorParams: String,
-  algorithmsParams: String,
-  servingParams: String)
-
-/** :: DeveloperApi ::
-  * Base trait of the [[EngineInstance]] data access object
-  *
-  * @group Meta Data
-  */
-@DeveloperApi
-trait EngineInstances {
-  /** Insert a new [[EngineInstance]] */
-  def insert(i: EngineInstance): String
-
-  /** Get an [[EngineInstance]] by ID */
-  def get(id: String): Option[EngineInstance]
-
-  /** Get all [[EngineInstance]]s */
-  def getAll(): Seq[EngineInstance]
-
-  /** Get the most recently started instance that has trained to
-    * completion
-    */
-  def getLatestCompleted(
-      engineId: String,
-      engineVersion: String,
-      engineVariant: String): Option[EngineInstance]
-
-  /** Get all instances that have trained to completion */
-  def getCompleted(
-    engineId: String,
-    engineVersion: String,
-    engineVariant: String): Seq[EngineInstance]
-
-  /** Update an [[EngineInstance]] */
-  def update(i: EngineInstance): Unit
-
-  /** Delete an [[EngineInstance]] */
-  def delete(id: String): Unit
-}
-
-/** :: DeveloperApi ::
-  * JSON4S serializer for [[EngineInstance]]
-  *
-  * @group Meta Data
-  */
-@DeveloperApi
-class EngineInstanceSerializer
-    extends CustomSerializer[EngineInstance](
-  format => ({
-    case JObject(fields) =>
-      implicit val formats = DefaultFormats
-      val seed = EngineInstance(
-          id = "",
-          status = "",
-          startTime = DateTime.now,
-          endTime = DateTime.now,
-          engineId = "",
-          engineVersion = "",
-          engineVariant = "",
-          engineFactory = "",
-          batch = "",
-          env = Map(),
-          sparkConf = Map(),
-          dataSourceParams = "",
-          preparatorParams = "",
-          algorithmsParams = "",
-          servingParams = "")
-      fields.foldLeft(seed) { case (i, field) =>
-        field match {
-          case JField("id", JString(id)) => i.copy(id = id)
-          case JField("status", JString(status)) => i.copy(status = status)
-          case JField("startTime", JString(startTime)) =>
-            i.copy(startTime = Utils.stringToDateTime(startTime))
-          case JField("endTime", JString(endTime)) =>
-            i.copy(endTime = Utils.stringToDateTime(endTime))
-          case JField("engineId", JString(engineId)) =>
-            i.copy(engineId = engineId)
-          case JField("engineVersion", JString(engineVersion)) =>
-            i.copy(engineVersion = engineVersion)
-          case JField("engineVariant", JString(engineVariant)) =>
-            i.copy(engineVariant = engineVariant)
-          case JField("engineFactory", JString(engineFactory)) =>
-            i.copy(engineFactory = engineFactory)
-          case JField("batch", JString(batch)) => i.copy(batch = batch)
-          case JField("env", env) =>
-            i.copy(env = Extraction.extract[Map[String, String]](env))
-          case JField("sparkConf", sparkConf) =>
-            i.copy(sparkConf = Extraction.extract[Map[String, String]](sparkConf))
-          case JField("dataSourceParams", JString(dataSourceParams)) =>
-            i.copy(dataSourceParams = dataSourceParams)
-          case JField("preparatorParams", JString(preparatorParams)) =>
-            i.copy(preparatorParams = preparatorParams)
-          case JField("algorithmsParams", JString(algorithmsParams)) =>
-            i.copy(algorithmsParams = algorithmsParams)
-          case JField("servingParams", JString(servingParams)) =>
-            i.copy(servingParams = servingParams)
-          case _ => i
-        }
-      }
-  },
-  {
-    case i: EngineInstance =>
-      JObject(
-        JField("id", JString(i.id)) ::
-        JField("status", JString(i.status)) ::
-        JField("startTime", JString(i.startTime.toString)) ::
-        JField("endTime", JString(i.endTime.toString)) ::
-        JField("engineId", JString(i.engineId)) ::
-        JField("engineVersion", JString(i.engineVersion)) ::
-        JField("engineVariant", JString(i.engineVariant)) ::
-        JField("engineFactory", JString(i.engineFactory)) ::
-        JField("batch", JString(i.batch)) ::
-        JField("env", Extraction.decompose(i.env)(DefaultFormats)) ::
-        JField("sparkConf", Extraction.decompose(i.sparkConf)(DefaultFormats)) ::
-        JField("dataSourceParams", JString(i.dataSourceParams)) ::
-        JField("preparatorParams", JString(i.preparatorParams)) ::
-        JField("algorithmsParams", JString(i.algorithmsParams)) ::
-        JField("servingParams", JString(i.servingParams)) ::
-        Nil)
-  }
-))
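
The custom serializer deleted above is meant to be registered with a json4s
Formats instance. A minimal round-trip sketch under that assumption, again
using the old package name:

```
import org.json4s._
import org.json4s.native.Serialization
import io.prediction.data.storage.{EngineInstance, EngineInstanceSerializer}

object EngineInstanceJsonSketch {
  // Formats with the custom serializer registered.
  implicit val formats: Formats = DefaultFormats + new EngineInstanceSerializer

  // Write an instance to a JSON string and parse it back.
  def roundTrip(i: EngineInstance): EngineInstance =
    Serialization.read[EngineInstance](Serialization.write(i))
}
```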

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/data/src/main/scala/io/prediction/data/storage/EngineManifests.scala
----------------------------------------------------------------------
diff --git a/data/src/main/scala/io/prediction/data/storage/EngineManifests.scala b/data/src/main/scala/io/prediction/data/storage/EngineManifests.scala
deleted file mode 100644
index d69ceae..0000000
--- a/data/src/main/scala/io/prediction/data/storage/EngineManifests.scala
+++ /dev/null
@@ -1,117 +0,0 @@
-/** Copyright 2015 TappingStone, Inc.
-  *
-  * Licensed under the Apache License, Version 2.0 (the "License");
-  * you may not use this file except in compliance with the License.
-  * You may obtain a copy of the License at
-  *
-  *     http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-package io.prediction.data.storage
-
-import io.prediction.annotation.DeveloperApi
-import org.json4s._
-
-/** :: DeveloperApi ::
-  * Provides a way to discover engines by ID and version in a distributed
-  * environment
-  *
-  * @param id Unique identifier of an engine.
-  * @param version Engine version string.
-  * @param name A short and descriptive name for the engine.
-  * @param description A long description of the engine.
-  * @param files Paths to engine files.
-  * @param engineFactory Engine's factory class name.
-  * @group Meta Data
-  */
-@DeveloperApi
-case class EngineManifest(
-  id: String,
-  version: String,
-  name: String,
-  description: Option[String],
-  files: Seq[String],
-  engineFactory: String)
-
-/** :: DeveloperApi ::
-  * Base trait of the [[EngineManifest]] data access object
-  *
-  * @group Meta Data
-  */
-@DeveloperApi
-trait EngineManifests {
-  /** Inserts an [[EngineManifest]] */
-  def insert(engineManifest: EngineManifest): Unit
-
-  /** Get an [[EngineManifest]] by its ID */
-  def get(id: String, version: String): Option[EngineManifest]
-
-  /** Get all [[EngineManifest]]s */
-  def getAll(): Seq[EngineManifest]
-
-  /** Updates an [[EngineManifest]] */
-  def update(engineInfo: EngineManifest, upsert: Boolean = false): Unit
-
-  /** Delete an [[EngineManifest]] by its ID and version */
-  def delete(id: String, version: String): Unit
-}
-
-/** :: DeveloperApi ::
-  * JSON4S serializer for [[EngineManifest]]
-  *
-  * @group Meta Data
-  */
-@DeveloperApi
-class EngineManifestSerializer
-    extends CustomSerializer[EngineManifest](format => (
-  {
-    case JObject(fields) =>
-      val seed = EngineManifest(
-        id = "",
-        version = "",
-        name = "",
-        description = None,
-        files = Nil,
-        engineFactory = "")
-      fields.foldLeft(seed) { case (enginemanifest, field) =>
-        field match {
-          case JField("id", JString(id)) => enginemanifest.copy(id = id)
-          case JField("version", JString(version)) =>
-            enginemanifest.copy(version = version)
-          case JField("name", JString(name)) => enginemanifest.copy(name = name)
-          case JField("description", JString(description)) =>
-            enginemanifest.copy(description = Some(description))
-          case JField("files", JArray(s)) =>
-            enginemanifest.copy(files = s.map(t =>
-              t match {
-                case JString(file) => file
-                case _ => ""
-              }
-            ))
-          case JField("engineFactory", JString(engineFactory)) =>
-            enginemanifest.copy(engineFactory = engineFactory)
-          case _ => enginemanifest
-        }
-      }
-  },
-  {
-    case enginemanifest: EngineManifest =>
-      JObject(
-        JField("id", JString(enginemanifest.id)) ::
-        JField("version", JString(enginemanifest.version)) ::
-        JField("name", JString(enginemanifest.name)) ::
-        JField("description",
-          enginemanifest.description.map(
-            x => JString(x)).getOrElse(JNothing)) ::
-        JField("files",
-          JArray(enginemanifest.files.map(x => JString(x)).toList)) ::
-        JField("engineFactory", JString(enginemanifest.engineFactory)) ::
-        Nil)
-  }
-))

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/data/src/main/scala/io/prediction/data/storage/EntityMap.scala
----------------------------------------------------------------------
diff --git a/data/src/main/scala/io/prediction/data/storage/EntityMap.scala b/data/src/main/scala/io/prediction/data/storage/EntityMap.scala
deleted file mode 100644
index d9cd4c8..0000000
--- a/data/src/main/scala/io/prediction/data/storage/EntityMap.scala
+++ /dev/null
@@ -1,98 +0,0 @@
-/** Copyright 2015 TappingStone, Inc.
-  *
-  * Licensed under the Apache License, Version 2.0 (the "License");
-  * you may not use this file except in compliance with the License.
-  * You may obtain a copy of the License at
-  *
-  *     http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-package io.prediction.data.storage
-
-import io.prediction.annotation.Experimental
-
-import org.apache.spark.SparkContext
-import org.apache.spark.SparkContext._
-import org.apache.spark.rdd.RDD
-
-/**
- * :: Experimental ::
- */
-@Experimental
-class EntityIdIxMap(val idToIx: BiMap[String, Long]) extends Serializable {
-
-  val ixToId: BiMap[Long, String] = idToIx.inverse
-
-  def apply(id: String): Long = idToIx(id)
-
-  def apply(ix: Long): String = ixToId(ix)
-
-  def contains(id: String): Boolean = idToIx.contains(id)
-
-  def contains(ix: Long): Boolean = ixToId.contains(ix)
-
-  def get(id: String): Option[Long] = idToIx.get(id)
-
-  def get(ix: Long): Option[String] = ixToId.get(ix)
-
-  def getOrElse(id: String, default: => Long): Long =
-    idToIx.getOrElse(id, default)
-
-  def getOrElse(ix: Long, default: => String): String =
-    ixToId.getOrElse(ix, default)
-
-  def toMap: Map[String, Long] = idToIx.toMap
-
-  def size: Long = idToIx.size
-
-  def take(n: Int): EntityIdIxMap = new EntityIdIxMap(idToIx.take(n))
-
-  override def toString: String = idToIx.toString
-}
-
-/** :: Experimental :: */
-@Experimental
-object EntityIdIxMap {
-  def apply(keys: RDD[String]): EntityIdIxMap = {
-    new EntityIdIxMap(BiMap.stringLong(keys))
-  }
-}
-
-/** :: Experimental :: */
-@Experimental
-class EntityMap[A](val idToData: Map[String, A],
-  override val idToIx: BiMap[String, Long]) extends EntityIdIxMap(idToIx) {
-
-  def this(idToData: Map[String, A]) = this(
-    idToData,
-    BiMap.stringLong(idToData.keySet)
-  )
-
-  def data(id: String): A = idToData(id)
-
-  def data(ix: Long): A = idToData(ixToId(ix))
-
-  def getData(id: String): Option[A] = idToData.get(id)
-
-  def getData(ix: Long): Option[A] = idToData.get(ixToId(ix))
-
-  def getOrElseData(id: String, default: => A): A =
-    getData(id).getOrElse(default)
-
-  def getOrElseData(ix: Long, default: => A): A =
-    getData(ix).getOrElse(default)
-
-  override def take(n: Int): EntityMap[A] = {
-    val newIdToIx = idToIx.take(n)
-    new EntityMap[A](idToData.filterKeys(newIdToIx.contains(_)), newIdToIx)
-  }
-
-  override def toString: String = {
-    s"idToData: ${idToData.toString} " + s"idToix: ${idToIx.toString}"
-  }
-}
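
A small usage sketch for the EntityMap removed above (old package name); it
exercises only members visible in the file:

```
import io.prediction.data.storage.EntityMap

object EntityMapSketch {
  // Payloads keyed by entity ID; Long indices are assigned internally
  // via BiMap.stringLong over the key set.
  val users = new EntityMap(Map("u1" -> 25, "u2" -> 31))

  val ix: Long = users("u1")      // entity ID -> index
  val id: String = users(ix)      // index -> entity ID
  val age: Int = users.data("u2") // payload lookup by entity ID
  val one = users.take(1)         // restricted copy with the same semantics
}
```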

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/data/src/main/scala/io/prediction/data/storage/EvaluationInstances.scala
----------------------------------------------------------------------
diff --git a/data/src/main/scala/io/prediction/data/storage/EvaluationInstances.scala b/data/src/main/scala/io/prediction/data/storage/EvaluationInstances.scala
deleted file mode 100644
index 0a7d502..0000000
--- a/data/src/main/scala/io/prediction/data/storage/EvaluationInstances.scala
+++ /dev/null
@@ -1,135 +0,0 @@
-/** Copyright 2015 TappingStone, Inc.
-  *
-  * Licensed under the Apache License, Version 2.0 (the "License");
-  * you may not use this file except in compliance with the License.
-  * You may obtain a copy of the License at
-  *
-  *     http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-package io.prediction.data.storage
-
-import com.github.nscala_time.time.Imports._
-import io.prediction.annotation.DeveloperApi
-import org.json4s._
-
-/** :: DeveloperApi ::
-  * Stores meta information for each evaluation instance.
-  *
-  * @param id Instance ID.
-  * @param status Status of this instance.
-  * @param startTime Start time of this instance.
-  * @param endTime End time of this instance.
-  * @param evaluationClass Evaluation class name of this instance.
-  * @param engineParamsGeneratorClass Engine parameters generator class name of this instance.
-  * @param batch Batch label of this instance.
-  * @param env The environment in which this instance was created.
-  * @param evaluatorResults Results of the evaluator.
-  * @param evaluatorResultsHTML HTML results of the evaluator.
-  * @param evaluatorResultsJSON JSON results of the evaluator.
-  * @group Meta Data
-  */
-@DeveloperApi
-case class EvaluationInstance(
-  id: String = "",
-  status: String = "",
-  startTime: DateTime = DateTime.now,
-  endTime: DateTime = DateTime.now,
-  evaluationClass: String = "",
-  engineParamsGeneratorClass: String = "",
-  batch: String = "",
-  env: Map[String, String] = Map(),
-  sparkConf: Map[String, String] = Map(),
-  evaluatorResults: String = "",
-  evaluatorResultsHTML: String = "",
-  evaluatorResultsJSON: String = "")
-
-/** :: DeveloperApi ::
-  * Base trait of the [[EvaluationInstance]] data access object
-  *
-  * @group Meta Data
-  */
-@DeveloperApi
-trait EvaluationInstances {
-  /** Insert a new [[EvaluationInstance]] */
-  def insert(i: EvaluationInstance): String
-
-  /** Get an [[EvaluationInstance]] by ID */
-  def get(id: String): Option[EvaluationInstance]
-
-  /** Get all [[EvaluationInstance]]s */
-  def getAll: Seq[EvaluationInstance]
-
-  /** Get instances that are produced by evaluation and have run to completion,
-    * reverse sorted by the start time
-    */
-  def getCompleted: Seq[EvaluationInstance]
-
-  /** Update an [[EvaluationInstance]] */
-  def update(i: EvaluationInstance): Unit
-
-  /** Delete an [[EvaluationInstance]] */
-  def delete(id: String): Unit
-}
-
-/** :: DeveloperApi ::
-  * JSON4S serializer for [[EvaluationInstance]]
-  *
-  * @group Meta Data
-  */
-class EvaluationInstanceSerializer extends CustomSerializer[EvaluationInstance](
-  format => ({
-    case JObject(fields) =>
-      implicit val formats = DefaultFormats
-      fields.foldLeft(EvaluationInstance()) { case (i, field) =>
-        field match {
-          case JField("id", JString(id)) => i.copy(id = id)
-          case JField("status", JString(status)) => i.copy(status = status)
-          case JField("startTime", JString(startTime)) =>
-            i.copy(startTime = Utils.stringToDateTime(startTime))
-          case JField("endTime", JString(endTime)) =>
-            i.copy(endTime = Utils.stringToDateTime(endTime))
-          case JField("evaluationClass", JString(evaluationClass)) =>
-            i.copy(evaluationClass = evaluationClass)
-          case JField("engineParamsGeneratorClass", JString(engineParamsGeneratorClass)) =>
-            i.copy(engineParamsGeneratorClass = engineParamsGeneratorClass)
-          case JField("batch", JString(batch)) => i.copy(batch = batch)
-          case JField("env", env) =>
-            i.copy(env = Extraction.extract[Map[String, String]](env))
-          case JField("sparkConf", sparkConf) =>
-            i.copy(sparkConf = Extraction.extract[Map[String, String]](sparkConf))
-          case JField("evaluatorResults", JString(evaluatorResults)) =>
-            i.copy(evaluatorResults = evaluatorResults)
-          case JField("evaluatorResultsHTML", JString(evaluatorResultsHTML)) =>
-            i.copy(evaluatorResultsHTML = evaluatorResultsHTML)
-          case JField("evaluatorResultsJSON", JString(evaluatorResultsJSON)) =>
-            i.copy(evaluatorResultsJSON = evaluatorResultsJSON)
-          case _ => i
-        }
-      }
-  }, {
-    case i: EvaluationInstance =>
-      JObject(
-        JField("id", JString(i.id)) ::
-          JField("status", JString(i.status)) ::
-          JField("startTime", JString(i.startTime.toString)) ::
-          JField("endTime", JString(i.endTime.toString)) ::
-          JField("evaluationClass", JString(i.evaluationClass)) ::
-          JField("engineParamsGeneratorClass", JString(i.engineParamsGeneratorClass)) ::
-          JField("batch", JString(i.batch)) ::
-          JField("env", Extraction.decompose(i.env)(DefaultFormats)) ::
-          JField("sparkConf", Extraction.decompose(i.sparkConf)(DefaultFormats)) ::
-          JField("evaluatorResults", JString(i.evaluatorResults)) ::
-          JField("evaluatorResultsHTML", JString(i.evaluatorResultsHTML)) ::
-          JField("evaluatorResultsJSON", JString(i.evaluatorResultsJSON)) ::
-          Nil
-      )
-  }
-  )
-)
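
The trait above defines the full data-access surface. A hedged sketch of how a
caller might drive a concrete implementation; how such an implementation is
obtained is backend-specific and left out, and the status strings here are
illustrative, not canonical:

```
import io.prediction.data.storage.{EvaluationInstance, EvaluationInstances}

object EvaluationInstancesSketch {
  // `dao` stands for a backend-specific implementation of the trait above.
  def recordRun(dao: EvaluationInstances): Option[EvaluationInstance] = {
    // Insert a fresh instance; defaults fill in the remaining fields.
    val id = dao.insert(EvaluationInstance(status = "INIT", batch = "demo"))
    // Mark it completed (status string assumed for illustration).
    dao.get(id).foreach(i => dao.update(i.copy(status = "COMPLETED")))
    dao.getCompleted.headOption // reverse-sorted by start time per the docs
  }
}
```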

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/data/src/main/scala/io/prediction/data/storage/Event.scala
----------------------------------------------------------------------
diff --git a/data/src/main/scala/io/prediction/data/storage/Event.scala b/data/src/main/scala/io/prediction/data/storage/Event.scala
deleted file mode 100644
index abc16b9..0000000
--- a/data/src/main/scala/io/prediction/data/storage/Event.scala
+++ /dev/null
@@ -1,164 +0,0 @@
-/** Copyright 2015 TappingStone, Inc.
-  *
-  * Licensed under the Apache License, Version 2.0 (the "License");
-  * you may not use this file except in compliance with the License.
-  * You may obtain a copy of the License at
-  *
-  *     http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-package io.prediction.data.storage
-
-import io.prediction.annotation.DeveloperApi
-import org.joda.time.DateTime
-import org.joda.time.DateTimeZone
-
-/** Each event in the Event Store can be represented by fields in this case
-  * class.
-  *
-  * @param eventId Unique ID of this event.
-  * @param event Name of this event.
-  * @param entityType Type of the entity associated with this event.
-  * @param entityId ID of the entity associated with this event.
-  * @param targetEntityType Type of the target entity associated with this
-  *                         event.
-  * @param targetEntityId ID of the target entity associated with this event.
-  * @param properties Properties associated with this event.
-  * @param eventTime Time at which this event happened.
-  * @param tags Tags of this event.
-  * @param prId PredictedResultId of this event.
-  * @param creationTime Time at which this event was created in the system.
-  * @group Event Data
-  */
-case class Event(
-  val eventId: Option[String] = None,
-  val event: String,
-  val entityType: String,
-  val entityId: String,
-  val targetEntityType: Option[String] = None,
-  val targetEntityId: Option[String] = None,
-  val properties: DataMap = DataMap(), // default empty
-  val eventTime: DateTime = DateTime.now,
-  val tags: Seq[String] = Nil,
-  val prId: Option[String] = None,
-  val creationTime: DateTime = DateTime.now
-) {
-  override def toString(): String = {
-    s"Event(id=$eventId,event=$event,eType=$entityType,eId=$entityId," +
-    s"tType=$targetEntityType,tId=$targetEntityId,p=$properties,t=$eventTime," +
-    s"tags=$tags,pKey=$prId,ct=$creationTime)"
-  }
-}
-
-/** :: DeveloperApi ::
-  * Utilities for validating [[Event]]s
-  *
-  * @group Event Data
-  */
-@DeveloperApi
-object EventValidation {
-  /** Default time zone is set to UTC */
-  val defaultTimeZone = DateTimeZone.UTC
-
-  /** Checks whether an event name contains a reserved prefix
-    *
-    * @param name Event name
-    * @return true if event name starts with \$ or pio_, false otherwise
-    */
-  def isReservedPrefix(name: String): Boolean = name.startsWith("$") ||
-    name.startsWith("pio_")
-
-  /** PredictionIO reserves some single entity event names. They are currently
-    * \$set, \$unset, and \$delete.
-    */
-  val specialEvents = Set("$set", "$unset", "$delete")
-
-  /** Checks whether an event name is a special PredictionIO event name
-    *
-    * @param name Event name
-    * @return true if the name is a special event, false otherwise
-    */
-  def isSpecialEvents(name: String): Boolean = specialEvents.contains(name)
-
-  /** Validate an [[Event]], throwing exceptions when the candidate violates any
-    * of the following:
-    *
-    *  - event name must not be empty
-    *  - entityType must not be empty
-    *  - entityId must not be empty
-    *  - targetEntityType must not be Some of empty
-    *  - targetEntityId must not be Some of empty
-    *  - targetEntityType and targetEntityId must be both Some or None
-    *  - properties must not be empty when event is \$unset
-    *  - event name must be a special event if it has a reserved prefix
-    *  - targetEntityType and targetEntityId must be None if the event name has
-    *    a reserved prefix
-    *  - entityType must be a built-in entity type if entityType has a
-    *    reserved prefix
-    *  - targetEntityType must be a built-in entity type if targetEntityType is
-    *    Some and has a reserved prefix
-    *
-    * @param e Event to be validated
-    */
-  def validate(e: Event): Unit = {
-
-    require(!e.event.isEmpty, "event must not be empty.")
-    require(!e.entityType.isEmpty, "entityType must not be empty string.")
-    require(!e.entityId.isEmpty, "entityId must not be empty string.")
-    require(e.targetEntityType.map(!_.isEmpty).getOrElse(true),
-      "targetEntityType must not be empty string")
-    require(e.targetEntityId.map(!_.isEmpty).getOrElse(true),
-      "targetEntityId must not be empty string.")
-    require(!((e.targetEntityType != None) && (e.targetEntityId == None)),
-      "targetEntityType and targetEntityId must be specified together.")
-    require(!((e.targetEntityType == None) && (e.targetEntityId != None)),
-      "targetEntityType and targetEntityId must be specified together.")
-    require(!((e.event == "$unset") && e.properties.isEmpty),
-      "properties cannot be empty for $unset event")
-    require(!isReservedPrefix(e.event) || isSpecialEvents(e.event),
-      s"${e.event} is not a supported reserved event name.")
-    require(!isSpecialEvents(e.event) ||
-      ((e.targetEntityType == None) && (e.targetEntityId == None)),
-      s"Reserved event ${e.event} cannot have targetEntity")
-    require(!isReservedPrefix(e.entityType) ||
-      isBuiltinEntityTypes(e.entityType),
-      s"The entityType ${e.entityType} is not allowed. " +
-        s"'pio_' is a reserved name prefix.")
-    require(e.targetEntityType.map{ t =>
-      (!isReservedPrefix(t) || isBuiltinEntityTypes(t))}.getOrElse(true),
-      s"The targetEntityType ${e.targetEntityType.get} is not allowed. " +
-        s"'pio_' is a reserved name prefix.")
-    validateProperties(e)
-  }
-
-  /** Defines built-in entity types. The current built-in type is pio_pr. */
-  val builtinEntityTypes: Set[String] = Set("pio_pr")
-
-  /** Defines built-in properties. This is currently empty. */
-  val builtinProperties: Set[String] = Set()
-
-  /** Checks whether an entity type is a built-in entity type */
-  def isBuiltinEntityTypes(name: String): Boolean = builtinEntityTypes.contains(name)
-
-  /** Validate event properties, throwing exceptions when the candidate violates
-    * any of the following:
-    *
-    *  - property name must not contain a reserved prefix
-    *
-    * @param e Event to be validated
-    */
-  def validateProperties(e: Event): Unit = {
-    e.properties.keySet.foreach { k =>
-      require(!isReservedPrefix(k) || builtinProperties.contains(k),
-        s"The property ${k} is not allowed. " +
-          s"'pio_' is a reserved name prefix.")
-    }
-  }
-
-}
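
To make the validation rules above concrete, a short sketch (old package name)
with one event that passes and one that would be rejected:

```
import io.prediction.data.storage.{Event, EventValidation}

object EventValidationSketch {
  // Passes: a reserved special event with no target entity.
  val ok = Event(event = "$set", entityType = "user", entityId = "u1")
  EventValidation.validate(ok)

  // Would be rejected: a target entity type without a target entity ID
  // violates the "specified together" rule above.
  val bad = ok.copy(event = "view", targetEntityType = Some("item"))
  // EventValidation.validate(bad) // throws IllegalArgumentException
}
```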

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/data/src/main/scala/io/prediction/data/storage/EventJson4sSupport.scala
----------------------------------------------------------------------
diff --git a/data/src/main/scala/io/prediction/data/storage/EventJson4sSupport.scala b/data/src/main/scala/io/prediction/data/storage/EventJson4sSupport.scala
deleted file mode 100644
index 22243c2..0000000
--- a/data/src/main/scala/io/prediction/data/storage/EventJson4sSupport.scala
+++ /dev/null
@@ -1,236 +0,0 @@
-/** Copyright 2015 TappingStone, Inc.
-  *
-  * Licensed under the Apache License, Version 2.0 (the "License");
-  * you may not use this file except in compliance with the License.
-  * You may obtain a copy of the License at
-  *
-  *     http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-package io.prediction.data.storage
-
-import io.prediction.annotation.DeveloperApi
-import io.prediction.data.{Utils => DataUtils}
-import org.joda.time.DateTime
-import org.json4s._
-import scala.util.{Try, Success, Failure}
-
-/** :: DeveloperApi ::
-  * Support library for dealing with [[Event]] and JSON4S
-  *
-  * @group Event Data
-  */
-@DeveloperApi
-object EventJson4sSupport {
-  /** This is set to org.json4s.DefaultFormats. Do not use JSON4S to serialize
-    * or deserialize Joda-Time DateTime because it has issues with time zones
-    * (as of version 3.2.10).
-    */
-  implicit val formats = DefaultFormats
-
-  /** :: DeveloperApi ::
-    * Convert JSON from Event Server to [[Event]]
-    *
-    * @return deserialization routine used by [[APISerializer]]
-    */
-  @DeveloperApi
-  def readJson: PartialFunction[JValue, Event] = {
-    case JObject(x) => {
-      val fields = new DataMap(x.toMap)
-      // use get() if required in json
-      // use getOpt() if not required in json
-      try {
-        val event = fields.get[String]("event")
-        val entityType = fields.get[String]("entityType")
-        val entityId = fields.get[String]("entityId")
-        val targetEntityType = fields.getOpt[String]("targetEntityType")
-        val targetEntityId = fields.getOpt[String]("targetEntityId")
-        val properties = fields.getOrElse[Map[String, JValue]](
-          "properties", Map())
-        // default currentTime expressed as UTC timezone
-        lazy val currentTime = DateTime.now(EventValidation.defaultTimeZone)
-        val eventTime = fields.getOpt[String]("eventTime")
-          .map{ s =>
-            try {
-              DataUtils.stringToDateTime(s)
-            } catch {
-              case _: Exception =>
-                throw new MappingException(s"Failed to extract eventTime ${s}")
-            }
-          }.getOrElse(currentTime)
-
-        // disable tags from API for now.
-        val tags = List()
-      // val tags = fields.getOpt[Seq[String]]("tags").getOrElse(List())
-
-        val prId = fields.getOpt[String]("prId")
-
-        // don't allow user set creationTime from API for now.
-        val creationTime = currentTime
-      // val creationTime = fields.getOpt[String]("creationTime")
-      //   .map{ s =>
-      //     try {
-      //       DataUtils.stringToDateTime(s)
-      //     } catch {
-      //       case _: Exception =>
-      //         throw new MappingException(s"Fail to extract creationTime ${s}")
-      //     }
-      //   }.getOrElse(currentTime)
-
-
-        val newEvent = Event(
-          event = event,
-          entityType = entityType,
-          entityId = entityId,
-          targetEntityType = targetEntityType,
-          targetEntityId = targetEntityId,
-          properties = DataMap(properties),
-          eventTime = eventTime,
-          prId = prId,
-          creationTime = creationTime
-        )
-        EventValidation.validate(newEvent)
-        newEvent
-      } catch {
-        case e: Exception => throw new MappingException(e.toString, e)
-      }
-    }
-  }
-
-  /** :: DeveloperApi ::
-    * Convert [[Event]] to JSON for use by the Event Server
-    *
-    * @return serialization routine used by [[APISerializer]]
-    */
-  @DeveloperApi
-  def writeJson: PartialFunction[Any, JValue] = {
-    case d: Event => {
-      JObject(
-        JField("eventId",
-          d.eventId.map( eid => JString(eid)).getOrElse(JNothing)) ::
-        JField("event", JString(d.event)) ::
-        JField("entityType", JString(d.entityType)) ::
-        JField("entityId", JString(d.entityId)) ::
-        JField("targetEntityType",
-          d.targetEntityType.map(JString(_)).getOrElse(JNothing)) ::
-        JField("targetEntityId",
-          d.targetEntityId.map(JString(_)).getOrElse(JNothing)) ::
-        JField("properties", d.properties.toJObject) ::
-        JField("eventTime", JString(DataUtils.dateTimeToString(d.eventTime))) ::
-        // disable tags from API for now
-        // JField("tags", JArray(d.tags.toList.map(JString(_)))) ::
-        JField("prId",
-          d.prId.map(JString(_)).getOrElse(JNothing)) ::
-        // don't show creationTime for now
-        JField("creationTime",
-          JString(DataUtils.dateTimeToString(d.creationTime))) ::
-        Nil)
-    }
-  }
-
-  /** :: DeveloperApi ::
-    * Convert JSON4S JValue to [[Event]]
-    *
-    * @return deserialization routine used by [[DBSerializer]]
-    */
-  @DeveloperApi
-  def deserializeFromJValue: PartialFunction[JValue, Event] = {
-    case jv: JValue => {
-      val event = (jv \ "event").extract[String]
-      val entityType = (jv \ "entityType").extract[String]
-      val entityId = (jv \ "entityId").extract[String]
-      val targetEntityType = (jv \ "targetEntityType").extract[Option[String]]
-      val targetEntityId = (jv \ "targetEntityId").extract[Option[String]]
-      val properties = (jv \ "properties").extract[JObject]
-      val eventTime = DataUtils.stringToDateTime(
-        (jv \ "eventTime").extract[String])
-      val tags = (jv \ "tags").extract[Seq[String]]
-      val prId = (jv \ "prId").extract[Option[String]]
-      val creationTime = DataUtils.stringToDateTime(
-        (jv \ "creationTime").extract[String])
-      Event(
-        event = event,
-        entityType = entityType,
-        entityId = entityId,
-        targetEntityType = targetEntityType,
-        targetEntityId = targetEntityId,
-        properties = DataMap(properties),
-        eventTime = eventTime,
-        tags = tags,
-        prId = prId,
-        creationTime = creationTime)
-    }
-  }
-
-  /** :: DeveloperApi ::
-    * Convert [[Event]] to JSON4S JValue
-    *
-    * @return serialization routine used by [[DBSerializer]]
-    */
-  @DeveloperApi
-  def serializeToJValue: PartialFunction[Any, JValue] = {
-    case d: Event => {
-      JObject(
-        JField("event", JString(d.event)) ::
-        JField("entityType", JString(d.entityType)) ::
-        JField("entityId", JString(d.entityId)) ::
-        JField("targetEntityType",
-          d.targetEntityType.map(JString(_)).getOrElse(JNothing)) ::
-        JField("targetEntityId",
-          d.targetEntityId.map(JString(_)).getOrElse(JNothing)) ::
-        JField("properties", d.properties.toJObject) ::
-        JField("eventTime", JString(DataUtils.dateTimeToString(d.eventTime))) ::
-        JField("tags", JArray(d.tags.toList.map(JString(_)))) ::
-        JField("prId",
-          d.prId.map(JString(_)).getOrElse(JNothing)) ::
-        JField("creationTime",
-          JString(DataUtils.dateTimeToString(d.creationTime))) ::
-        Nil)
-    }
-  }
-
-  /** :: DeveloperApi ::
-    * Custom JSON4S serializer for [[Event]] intended to be used by database
-    * access, or anywhere that demands serdes of [[Event]] to/from JSON4S JValue
-    */
-  @DeveloperApi
-  class DBSerializer extends CustomSerializer[Event](format => (
-    deserializeFromJValue, serializeToJValue))
-
-  /** :: DeveloperApi ::
-    * Custom JSON4S serializer for [[Event]] intended to be used by the Event
-    * Server, or anywhere that demands serdes of [[Event]] to/from JSON
-    */
-  @DeveloperApi
-  class APISerializer extends CustomSerializer[Event](format => (
-    readJson, writeJson))
-}
-
-
-@DeveloperApi
-object BatchEventsJson4sSupport {
-  implicit val formats = DefaultFormats
-
-  @DeveloperApi
-  def readJson: PartialFunction[JValue, Seq[Try[Event]]] = {
-    case JArray(events) => {
-      events.map { event =>
-        try {
-          Success(EventJson4sSupport.readJson(event))
-        } catch {
-          case e: Exception => Failure(e)
-        }
-      }
-    }
-  }
-
-  @DeveloperApi
-  class APISerializer extends CustomSerializer[Seq[Try[Event]]](format => (readJson, Map.empty))
-}
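
For reference, a sketch of wiring the serializers above into JSON4S
(read/write are from org.json4s.native.Serialization; the event value is
hypothetical):

    import org.json4s.DefaultFormats
    import org.json4s.native.Serialization.{read, write}

    implicit val formats = DefaultFormats + new EventJson4sSupport.APISerializer

    val event: Event = ???                    // some Event instance
    val json: String = write(event)           // serialized via writeJson
    val roundTrip: Event = read[Event](json)  // parsed and validated via readJson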

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/data/src/main/scala/io/prediction/data/storage/LEventAggregator.scala
----------------------------------------------------------------------
diff --git a/data/src/main/scala/io/prediction/data/storage/LEventAggregator.scala b/data/src/main/scala/io/prediction/data/storage/LEventAggregator.scala
deleted file mode 100644
index f3c4b11..0000000
--- a/data/src/main/scala/io/prediction/data/storage/LEventAggregator.scala
+++ /dev/null
@@ -1,145 +0,0 @@
-/** Copyright 2015 TappingStone, Inc.
-  *
-  * Licensed under the Apache License, Version 2.0 (the "License");
-  * you may not use this file except in compliance with the License.
-  * You may obtain a copy of the License at
-  *
-  *     http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-package io.prediction.data.storage
-
-import io.prediction.annotation.DeveloperApi
-import org.joda.time.DateTime
-
-/** :: DeveloperApi ::
-  * Provides aggregation support of [[Event]]s to [[LEvents]]. Engine developers
-  * should use [[io.prediction.data.store.LEventStore]] instead of using this
-  * directly.
-  *
-  * @group Event Data
-  */
-@DeveloperApi
-object LEventAggregator {
-  /** :: DeveloperApi ::
-    * Aggregate properties from an iterator of [[Event]]s, grouped by entity
-    * ID, keeping the latest property values together with their first and
-    * last updated times
-    *
-    * @param events An iterator of [[Event]]s whose properties will be aggregated
-    * @return A map of entity ID to [[PropertyMap]]
-    */
-  @DeveloperApi
-  def aggregateProperties(events: Iterator[Event]): Map[String, PropertyMap] = {
-    events.toList
-      .groupBy(_.entityId)
-      .mapValues(_.sortBy(_.eventTime.getMillis)
-        .foldLeft[Prop](Prop())(propAggregator))
-      .filter{ case (k, v) => v.dm.isDefined }
-      .mapValues{ v =>
-        require(v.firstUpdated.isDefined,
-          "Unexpected Error: firstUpdated cannot be None.")
-        require(v.lastUpdated.isDefined,
-          "Unexpected Error: lastUpdated cannot be None.")
-
-        PropertyMap(
-          fields = v.dm.get.fields,
-          firstUpdated = v.firstUpdated.get,
-          lastUpdated = v.lastUpdated.get
-        )
-      }
-  }
-
-  /** :: DeveloperApi ::
-    * Aggregate properties from an iterator of [[Event]]s, keeping the latest
-    * property values together with their first and last updated times
-    *
-    * @param events An iterator of [[Event]]s whose properties will be aggregated
-    * @return An optional [[PropertyMap]]
-    */
-  @DeveloperApi
-  def aggregatePropertiesSingle(events: Iterator[Event])
-  : Option[PropertyMap] = {
-    val prop = events.toList
-      .sortBy(_.eventTime.getMillis)
-      .foldLeft[Prop](Prop())(propAggregator)
-
-    prop.dm.map{ d =>
-      require(prop.firstUpdated.isDefined,
-        "Unexpected Error: firstUpdated cannot be None.")
-      require(prop.lastUpdated.isDefined,
-        "Unexpected Error: lastUpdated cannot be None.")
-
-      PropertyMap(
-        fields = d.fields,
-        firstUpdated = prop.firstUpdated.get,
-        lastUpdated = prop.lastUpdated.get
-      )
-    }
-  }
-
-  /** Event names that control aggregation: \$set, \$unset, and \$delete */
-  val eventNames = List("$set", "$unset", "$delete")
-
-  private
-  def dataMapAggregator: ((Option[DataMap], Event) => Option[DataMap]) = {
-    (p, e) => {
-      e.event match {
-        case "$set" => {
-          if (p == None) {
-            Some(e.properties)
-          } else {
-            p.map(_ ++ e.properties)
-          }
-        }
-        case "$unset" => {
-          if (p == None) {
-            None
-          } else {
-            p.map(_ -- e.properties.keySet)
-          }
-        }
-        case "$delete" => None
-        case _ => p // do nothing for others
-      }
-    }
-  }
-
-  private
-  def propAggregator: ((Prop, Event) => Prop) = {
-    (p, e) => {
-      e.event match {
-        case "$set" | "$unset" | "$delete" => {
-          Prop(
-            dm = dataMapAggregator(p.dm, e),
-            firstUpdated = p.firstUpdated.map { t =>
-              first(t, e.eventTime)
-            }.orElse(Some(e.eventTime)),
-            lastUpdated = p.lastUpdated.map { t =>
-              last(t, e.eventTime)
-            }.orElse(Some(e.eventTime))
-          )
-        }
-        case _ => p // do nothing for others
-      }
-    }
-  }
-
-  private
-  def first(a: DateTime, b: DateTime): DateTime = if (b.isBefore(a)) b else a
-
-  private
-  def last(a: DateTime, b: DateTime): DateTime = if (b.isAfter(a)) b else a
-
-  private case class Prop(
-    dm: Option[DataMap] = None,
-    firstUpdated: Option[DateTime] = None,
-    lastUpdated: Option[DateTime] = None
-  )
-}
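
A short sketch of the aggregation semantics above (the events are
hypothetical; later $set values win, $unset removes keys, and $delete
drops the entity entirely):

    val setE, unsetE: Event = ???  // hypothetical "$set" / "$unset" events

    // one PropertyMap per entity ID
    val byEntityId: Map[String, PropertyMap] =
      LEventAggregator.aggregateProperties(Iterator(setE, unsetE))

    // aggregate a single entity's event stream
    val single: Option[PropertyMap] =
      LEventAggregator.aggregatePropertiesSingle(Iterator(setE, unsetE))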

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/data/src/main/scala/io/prediction/data/storage/LEvents.scala
----------------------------------------------------------------------
diff --git a/data/src/main/scala/io/prediction/data/storage/LEvents.scala b/data/src/main/scala/io/prediction/data/storage/LEvents.scala
deleted file mode 100644
index 411f3a4..0000000
--- a/data/src/main/scala/io/prediction/data/storage/LEvents.scala
+++ /dev/null
@@ -1,489 +0,0 @@
-/** Copyright 2015 TappingStone, Inc.
-  *
-  * Licensed under the Apache License, Version 2.0 (the "License");
-  * you may not use this file except in compliance with the License.
-  * You may obtain a copy of the License at
-  *
-  *     http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-package io.prediction.data.storage
-
-import io.prediction.annotation.DeveloperApi
-import io.prediction.annotation.Experimental
-
-import scala.concurrent.Future
-import scala.concurrent.Await
-import scala.concurrent.duration.Duration
-import scala.concurrent.ExecutionContext
-import scala.concurrent.TimeoutException
-
-import org.joda.time.DateTime
-
-/** :: DeveloperApi ::
-  * Base trait of a data access object that directly returns [[Event]] without
-  * going through Spark's parallelization. Engine developers should use
-  * [[io.prediction.data.store.LEventStore]] instead of using this directly.
-  *
-  * @group Event Data
-  */
-@DeveloperApi
-trait LEvents {
-  /** Default timeout for asynchronous operations, set to 1 minute */
-  val defaultTimeout = Duration(60, "seconds")
-
-  /** :: DeveloperApi ::
-    * Initialize Event Store for an app ID and optionally a channel ID.
-    * This routine is to be called when an app is first created.
-    *
-    * @param appId App ID
-    * @param channelId Optional channel ID
-    * @return true if initialization was successful; false otherwise.
-    */
-  @DeveloperApi
-  def init(appId: Int, channelId: Option[Int] = None): Boolean
-
-  /** :: DeveloperApi ::
-    * Remove Event Store for an app ID and optional channel ID.
-    *
-    * @param appId App ID
-    * @param channelId Optional channel ID
-    * @return true if removal was successful; false otherwise.
-    */
-  @DeveloperApi
-  def remove(appId: Int, channelId: Option[Int] = None): Boolean
-
-  /** :: DeveloperApi ::
-    * Close this Event Store interface object, e.g. close connection, release
-    * resources, etc.
-    */
-  @DeveloperApi
-  def close(): Unit
-
-  /** :: DeveloperApi ::
-    * Insert an [[Event]] in a non-blocking fashion.
-    *
-    * @param event An [[Event]] to be inserted
-    * @param appId App ID that the [[Event]] will be inserted into
-    */
-  @DeveloperApi
-  def futureInsert(event: Event, appId: Int)(implicit ec: ExecutionContext):
-    Future[String] = futureInsert(event, appId, None)
-
-  /** :: DeveloperApi ::
-    * Insert an [[Event]] in a non-blocking fashion.
-    *
-    * @param event An [[Event]] to be inserted
-    * @param appId App ID that the [[Event]] will be inserted into
-    * @param channelId Optional channel ID that the [[Event]] will be inserted into
-    */
-  @DeveloperApi
-  def futureInsert(
-    event: Event, appId: Int, channelId: Option[Int])(implicit ec: ExecutionContext): Future[String]
-
-  /** :: DeveloperApi ::
-    * Get an [[Event]] in a non-blocking fashion.
-    *
-    * @param eventId ID of the [[Event]]
-    * @param appId ID of the app that contains the [[Event]]
-    */
-  @DeveloperApi
-  def futureGet(eventId: String, appId: Int)(implicit ec: ExecutionContext):
-    Future[Option[Event]] = futureGet(eventId, appId, None)
-
-  /** :: DeveloperApi ::
-    * Get an [[Event]] in a non-blocking fashion.
-    *
-    * @param eventId ID of the [[Event]]
-    * @param appId ID of the app that contains the [[Event]]
-    * @param channelId Optional channel ID that contains the [[Event]]
-    */
-  @DeveloperApi
-  def futureGet(
-      eventId: String,
-      appId: Int,
-      channelId: Option[Int]
-    )(implicit ec: ExecutionContext): Future[Option[Event]]
-
-  /** :: DeveloperApi ::
-    * Delete an [[Event]] in a non-blocking fashion.
-    *
-    * @param eventId ID of the [[Event]]
-    * @param appId ID of the app that contains the [[Event]]
-    */
-  @DeveloperApi
-  def futureDelete(eventId: String, appId: Int)(implicit ec: ExecutionContext):
-    Future[Boolean] = futureDelete(eventId, appId, None)
-
-  /** :: DeveloperApi ::
-    * Delete an [[Event]] in a non-blocking fashion.
-    *
-    * @param eventId ID of the [[Event]]
-    * @param appId ID of the app that contains the [[Event]]
-    * @param channelId Optional channel ID that contains the [[Event]]
-    */
-  @DeveloperApi
-  def futureDelete(
-      eventId: String,
-      appId: Int,
-      channelId: Option[Int]
-    )(implicit ec: ExecutionContext): Future[Boolean]
-
-  /** :: DeveloperApi ::
-    * Reads from the database and returns a Future of an Iterator of [[Event]]s.
-    *
-    * @param appId return events of this app ID
-    * @param channelId return events of this channel ID (default channel if it's None)
-    * @param startTime return events with eventTime >= startTime
-    * @param untilTime return events with eventTime < untilTime
-    * @param entityType return events of this entityType
-    * @param entityId return events of this entityId
-    * @param eventNames return events with any of these event names.
-    * @param targetEntityType return events of this targetEntityType:
-    *   - None means no restriction on targetEntityType
-    *   - Some(None) means no targetEntityType for this event
-    *   - Some(Some(x)) means targetEntityType should match x.
-    * @param targetEntityId return events of this targetEntityId
-    *   - None means no restriction on targetEntityId
-    *   - Some(None) means no targetEntityId for this event
-    *   - Some(Some(x)) means targetEntityId should match x.
-    * @param limit Limit number of events. Get all events if None or Some(-1)
-    * @param reversed Reverse the order.
-    *   - return oldest events first if None or Some(false) (default)
-    *   - return latest events first if Some(true)
-    * @param ec ExecutionContext
-    * @return Future[Iterator[Event]]
-    */
-  @DeveloperApi
-  def futureFind(
-      appId: Int,
-      channelId: Option[Int] = None,
-      startTime: Option[DateTime] = None,
-      untilTime: Option[DateTime] = None,
-      entityType: Option[String] = None,
-      entityId: Option[String] = None,
-      eventNames: Option[Seq[String]] = None,
-      targetEntityType: Option[Option[String]] = None,
-      targetEntityId: Option[Option[String]] = None,
-      limit: Option[Int] = None,
-      reversed: Option[Boolean] = None
-    )(implicit ec: ExecutionContext): Future[Iterator[Event]]
-
-  /** Aggregate properties of entities based on the special events \$set,
-    * \$unset, and \$delete, and return a Future of a Map of entityId to
-    * properties.
-    *
-    * @param appId use events of this app ID
-    * @param channelId use events of this channel ID (default channel if it's None)
-    * @param entityType aggregate properties of the entities of this entityType
-    * @param startTime use events with eventTime >= startTime
-    * @param untilTime use events with eventTime < untilTime
-    * @param required only keep entities with these required properties defined
-    * @param ec ExecutionContext
-    * @return Future[Map[String, PropertyMap]]
-    */
-  private[prediction] def futureAggregateProperties(
-    appId: Int,
-    channelId: Option[Int] = None,
-    entityType: String,
-    startTime: Option[DateTime] = None,
-    untilTime: Option[DateTime] = None,
-    required: Option[Seq[String]] = None)(implicit ec: ExecutionContext):
-    Future[Map[String, PropertyMap]] = {
-      futureFind(
-        appId = appId,
-        channelId = channelId,
-        startTime = startTime,
-        untilTime = untilTime,
-        entityType = Some(entityType),
-        eventNames = Some(LEventAggregator.eventNames)
-      ).map{ eventIt =>
-        val dm = LEventAggregator.aggregateProperties(eventIt)
-        if (required.isDefined) {
-          dm.filter { case (k, v) =>
-            required.get.map(v.contains(_)).reduce(_ && _)
-          }
-        } else dm
-      }
-    }
-
-  /**
-    * :: Experimental ::
-    *
-    * Aggregate properties of the specified entity (entityType + entityId)
-    * based on the special events \$set, \$unset, and \$delete, and
-    * return a Future of Option[PropertyMap].
-    *
-    * @param appId use events of this app ID
-    * @param channelId use events of this channel ID (default channel if it's None)
-    * @param entityType the entityType
-    * @param entityId the entityId
-    * @param startTime use events with eventTime >= startTime
-    * @param untilTime use events with eventTime < untilTime
-    * @param ec ExecutionContext
-    * @return Future[Option[PropertyMap]]
-    */
-  @Experimental
-  private[prediction] def futureAggregatePropertiesOfEntity(
-    appId: Int,
-    channelId: Option[Int] = None,
-    entityType: String,
-    entityId: String,
-    startTime: Option[DateTime] = None,
-    untilTime: Option[DateTime] = None)(implicit ec: ExecutionContext):
-    Future[Option[PropertyMap]] = {
-      futureFind(
-        appId = appId,
-        channelId = channelId,
-        startTime = startTime,
-        untilTime = untilTime,
-        entityType = Some(entityType),
-        entityId = Some(entityId),
-        eventNames = Some(LEventAggregator.eventNames)
-      ).map{ eventIt =>
-        LEventAggregator.aggregatePropertiesSingle(eventIt)
-      }
-    }
-
-  // following is blocking
-  private[prediction] def insert(event: Event, appId: Int,
-    channelId: Option[Int] = None,
-    timeout: Duration = defaultTimeout)(implicit ec: ExecutionContext):
-    String = {
-    Await.result(futureInsert(event, appId, channelId), timeout)
-  }
-
-  private[prediction] def get(eventId: String, appId: Int,
-    channelId: Option[Int] = None,
-    timeout: Duration = defaultTimeout)(implicit ec: ExecutionContext):
-    Option[Event] = {
-    Await.result(futureGet(eventId, appId, channelId), timeout)
-  }
-
-  private[prediction] def delete(eventId: String, appId: Int,
-    channelId: Option[Int] = None,
-    timeout: Duration = defaultTimeout)(implicit ec: ExecutionContext):
-    Boolean = {
-    Await.result(futureDelete(eventId, appId, channelId), timeout)
-  }
-
-  /** Reads from the database and returns an iterator of events.
-    *
-    * @param appId return events of this app ID
-    * @param channelId return events of this channel ID (default channel if it's None)
-    * @param startTime return events with eventTime >= startTime
-    * @param untilTime return events with eventTime < untilTime
-    * @param entityType return events of this entityType
-    * @param entityId return events of this entityId
-    * @param eventNames return events with any of these event names.
-    * @param targetEntityType return events of this targetEntityType:
-    *   - None means no restriction on targetEntityType
-    *   - Some(None) means no targetEntityType for this event
-    *   - Some(Some(x)) means targetEntityType should match x.
-    * @param targetEntityId return events of this targetEntityId
-    *   - None means no restriction on targetEntityId
-    *   - Some(None) means no targetEntityId for this event
-    *   - Some(Some(x)) means targetEntityId should match x.
-    * @param limit Limit number of events. Get all events if None or Some(-1)
-    * @param reversed Reverse the order (should be used with both
-    *   targetEntityType and targetEntityId specified)
-    *   - return oldest events first if None or Some(false) (default)
-    *   - return latest events first if Some(true)
-    * @param ec ExecutionContext
-    * @return Iterator[Event]
-    */
-  private[prediction] def find(
-    appId: Int,
-    channelId: Option[Int] = None,
-    startTime: Option[DateTime] = None,
-    untilTime: Option[DateTime] = None,
-    entityType: Option[String] = None,
-    entityId: Option[String] = None,
-    eventNames: Option[Seq[String]] = None,
-    targetEntityType: Option[Option[String]] = None,
-    targetEntityId: Option[Option[String]] = None,
-    limit: Option[Int] = None,
-    reversed: Option[Boolean] = None,
-    timeout: Duration = defaultTimeout)(implicit ec: ExecutionContext):
-    Iterator[Event] = {
-      Await.result(futureFind(
-        appId = appId,
-        channelId = channelId,
-        startTime = startTime,
-        untilTime = untilTime,
-        entityType = entityType,
-        entityId = entityId,
-        eventNames = eventNames,
-        targetEntityType = targetEntityType,
-        targetEntityId = targetEntityId,
-        limit = limit,
-        reversed = reversed), timeout)
-  }
-
-  // NOTE: remove in next release
-  @deprecated("Use find() instead.", "0.9.2")
-  private[prediction] def findLegacy(
-    appId: Int,
-    channelId: Option[Int] = None,
-    startTime: Option[DateTime] = None,
-    untilTime: Option[DateTime] = None,
-    entityType: Option[String] = None,
-    entityId: Option[String] = None,
-    eventNames: Option[Seq[String]] = None,
-    targetEntityType: Option[Option[String]] = None,
-    targetEntityId: Option[Option[String]] = None,
-    limit: Option[Int] = None,
-    reversed: Option[Boolean] = None,
-    timeout: Duration = defaultTimeout)(implicit ec: ExecutionContext):
-    Either[StorageError, Iterator[Event]] = {
-      try {
-        // return Either for legacy usage
-        Right(Await.result(futureFind(
-          appId = appId,
-          channelId = channelId,
-          startTime = startTime,
-          untilTime = untilTime,
-          entityType = entityType,
-          entityId = entityId,
-          eventNames = eventNames,
-          targetEntityType = targetEntityType,
-          targetEntityId = targetEntityId,
-          limit = limit,
-          reversed = reversed), timeout))
-      } catch {
-        case e: TimeoutException => Left(StorageError(s"${e}"))
-        case e: Exception => Left(StorageError(s"${e}"))
-      }
-  }
-
-  /** Reads events of the specified entity.
-    *
-    * @param appId return events of this app ID
-    * @param channelId return events of this channel ID (default channel if it's None)
-    * @param entityType return events of this entityType
-    * @param entityId return events of this entityId
-    * @param eventNames return events with any of these event names.
-    * @param targetEntityType return events of this targetEntityType:
-    *   - None means no restriction on targetEntityType
-    *   - Some(None) means no targetEntityType for this event
-    *   - Some(Some(x)) means targetEntityType should match x.
-    * @param targetEntityId return events of this targetEntityId
-    *   - None means no restriction on targetEntityId
-    *   - Some(None) means no targetEntityId for this event
-    *   - Some(Some(x)) means targetEntityId should match x.
-    * @param startTime return events with eventTime >= startTime
-    * @param untilTime return events with eventTime < untilTime
-    * @param limit Limit number of events. Get all events if None or Some(-1)
-    * @param latest Return latest event first (default true)
-    * @param ec ExecutionContext
-    * @return Either[StorageError, Iterator[Event]]
-    */
-  // NOTE: remove this function in next release
-  @deprecated("Use LEventStore.findByEntity() instead.", "0.9.2")
-  def findSingleEntity(
-    appId: Int,
-    channelId: Option[Int] = None,
-    entityType: String,
-    entityId: String,
-    eventNames: Option[Seq[String]] = None,
-    targetEntityType: Option[Option[String]] = None,
-    targetEntityId: Option[Option[String]] = None,
-    startTime: Option[DateTime] = None,
-    untilTime: Option[DateTime] = None,
-    limit: Option[Int] = None,
-    latest: Boolean = true,
-    timeout: Duration = defaultTimeout)(implicit ec: ExecutionContext):
-    Either[StorageError, Iterator[Event]] = {
-
-    findLegacy(
-      appId = appId,
-      channelId = channelId,
-      startTime = startTime,
-      untilTime = untilTime,
-      entityType = Some(entityType),
-      entityId = Some(entityId),
-      eventNames = eventNames,
-      targetEntityType = targetEntityType,
-      targetEntityId = targetEntityId,
-      limit = limit,
-      reversed = Some(latest),
-      timeout = timeout)
-
-  }
-
-  /** Aggregate properties of entities based on the special events \$set,
-    * \$unset, and \$delete, and return a Map of entityId to properties.
-    *
-    * @param appId use events of this app ID
-    * @param channelId use events of this channel ID (default channel if it's None)
-    * @param entityType aggregate properties of the entities of this entityType
-    * @param startTime use events with eventTime >= startTime
-    * @param untilTime use events with eventTime < untilTime
-    * @param required only keep entities with these required properties defined
-    * @param ec ExecutionContext
-    * @return Map[String, PropertyMap]
-    */
-  private[prediction] def aggregateProperties(
-    appId: Int,
-    channelId: Option[Int] = None,
-    entityType: String,
-    startTime: Option[DateTime] = None,
-    untilTime: Option[DateTime] = None,
-    required: Option[Seq[String]] = None,
-    timeout: Duration = defaultTimeout)(implicit ec: ExecutionContext):
-    Map[String, PropertyMap] = {
-    Await.result(futureAggregateProperties(
-      appId = appId,
-      channelId = channelId,
-      entityType = entityType,
-      startTime = startTime,
-      untilTime = untilTime,
-      required = required), timeout)
-  }
-
-  /**
-    * :: Experimental ::
-    *
-    * Aggregate properties of the specified entity (entityType + entityId)
-    * based on the special events \$set, \$unset, and \$delete, and
-    * return an Option[PropertyMap].
-    *
-    * @param appId use events of this app ID
-    * @param channelId use events of this channel ID
-    * @param entityType the entityType
-    * @param entityId the entityId
-    * @param startTime use events with eventTime >= startTime
-    * @param untilTime use events with eventTime < untilTime
-    * @param ec ExecutionContext
-    * @return Option[PropertyMap]
-    */
-  @Experimental
-  private[prediction] def aggregatePropertiesOfEntity(
-    appId: Int,
-    channelId: Option[Int] = None,
-    entityType: String,
-    entityId: String,
-    startTime: Option[DateTime] = None,
-    untilTime: Option[DateTime] = None,
-    timeout: Duration = defaultTimeout)(implicit ec: ExecutionContext):
-    Option[PropertyMap] = {
-
-    Await.result(futureAggregatePropertiesOfEntity(
-      appId = appId,
-      channelId = channelId,
-      entityType = entityType,
-      entityId = entityId,
-      startTime = startTime,
-      untilTime = untilTime), timeout)
-  }
-
-}
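
A sketch of typical non-blocking use of the trait above (the LEvents
instance is assumed to come from the storage layer; app ID and filters
are hypothetical):

    import scala.concurrent.Await
    import scala.concurrent.ExecutionContext.Implicits.global

    val lEvents: LEvents = ???  // e.g. obtained from the storage factory
    val futureEvents = lEvents.futureFind(
      appId = 1,
      entityType = Some("user"),
      eventNames = Some(Seq("$set")))
    val events: Iterator[Event] =
      Await.result(futureEvents, lEvents.defaultTimeout)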

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/data/src/main/scala/io/prediction/data/storage/Models.scala
----------------------------------------------------------------------
diff --git a/data/src/main/scala/io/prediction/data/storage/Models.scala b/data/src/main/scala/io/prediction/data/storage/Models.scala
deleted file mode 100644
index 53a76ff..0000000
--- a/data/src/main/scala/io/prediction/data/storage/Models.scala
+++ /dev/null
@@ -1,80 +0,0 @@
-/** Copyright 2015 TappingStone, Inc.
-  *
-  * Licensed under the Apache License, Version 2.0 (the "License");
-  * you may not use this file except in compliance with the License.
-  * You may obtain a copy of the License at
-  *
-  *     http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-package io.prediction.data.storage
-
-import com.google.common.io.BaseEncoding
-import io.prediction.annotation.DeveloperApi
-import org.json4s._
-
-/** :: DeveloperApi ::
-  * Stores model for each engine instance
-  *
-  * @param id ID of the model, which should be the same as engine instance ID
-  * @param models Trained models of all algorithms
-  * @group Model Data
-  */
-@DeveloperApi
-case class Model(
-  id: String,
-  models: Array[Byte])
-
-/** :: DeveloperApi ::
-  * Base trait of the [[Model]] data access object
-  *
-  * @group Model Data
-  */
-@DeveloperApi
-trait Models {
-  /** Insert a new [[Model]] */
-  def insert(i: Model): Unit
-
-  /** Get a [[Model]] by ID */
-  def get(id: String): Option[Model]
-
-  /** Delete a [[Model]] */
-  def delete(id: String): Unit
-}
-
-/** :: DeveloperApi ::
-  * JSON4S serializer for [[Model]]
-  *
-  * @group Model Data
-  */
-@DeveloperApi
-class ModelSerializer extends CustomSerializer[Model](
-  format => ({
-    case JObject(fields) =>
-      implicit val formats = DefaultFormats
-      val seed = Model(
-          id = "",
-          models = Array[Byte]())
-      fields.foldLeft(seed) { case (i, field) =>
-        field match {
-          case JField("id", JString(id)) => i.copy(id = id)
-          case JField("models", JString(models)) =>
-            i.copy(models = BaseEncoding.base64.decode(models))
-          case _ => i
-        }
-      }
-  },
-  {
-    case i: Model =>
-      JObject(
-        JField("id", JString(i.id)) ::
-        JField("models", JString(BaseEncoding.base64.encode(i.models))) ::
-        Nil)
-  }
-))
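
A sketch of a round trip through ModelSerializer (hypothetical payload;
"AQID" is the base64 encoding of bytes 1, 2, 3):

    import org.json4s.DefaultFormats
    import org.json4s.native.Serialization.{read, write}

    implicit val formats = DefaultFormats + new ModelSerializer

    val m = Model(id = "engine-instance-1", models = Array[Byte](1, 2, 3))
    val js = write(m)           // {"id":"engine-instance-1","models":"AQID"}
    val back = read[Model](js)  // models decoded back to Array(1, 2, 3)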

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/data/src/main/scala/io/prediction/data/storage/PEventAggregator.scala
----------------------------------------------------------------------
diff --git a/data/src/main/scala/io/prediction/data/storage/PEventAggregator.scala b/data/src/main/scala/io/prediction/data/storage/PEventAggregator.scala
deleted file mode 100644
index 2430df9..0000000
--- a/data/src/main/scala/io/prediction/data/storage/PEventAggregator.scala
+++ /dev/null
@@ -1,209 +0,0 @@
-/** Copyright 2015 TappingStone, Inc.
-  *
-  * Licensed under the Apache License, Version 2.0 (the "License");
-  * you may not use this file except in compliance with the License.
-  * You may obtain a copy of the License at
-  *
-  *     http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-package io.prediction.data.storage
-
-import org.joda.time.DateTime
-
-import org.json4s.JValue
-
-import org.apache.spark.SparkContext
-import org.apache.spark.SparkContext._
-import org.apache.spark.rdd.RDD
-
-// each JValue data associated with the time it is set
-private[prediction] case class PropTime(val d: JValue, val t: Long)
-    extends Serializable
-
-private[prediction] case class SetProp (
-  val fields: Map[String, PropTime],
-  // last set time. Note: fields could be empty with valid set time
-  val t: Long) extends Serializable {
-
-  def ++ (that: SetProp): SetProp = {
-    val commonKeys = fields.keySet.intersect(that.fields.keySet)
-
-    val common: Map[String, PropTime] = commonKeys.map { k =>
-      val thisData = this.fields(k)
-      val thatData = that.fields(k)
-      // only keep the value with latest time
-      val v = if (thisData.t > thatData.t) thisData else thatData
-      (k, v)
-    }.toMap
-
-    val combinedFields = common ++
-      (this.fields -- commonKeys) ++ (that.fields -- commonKeys)
-
-    // keep the latest set time
-    val combinedT = if (this.t > that.t) this.t else that.t
-
-    SetProp(
-      fields = combinedFields,
-      t = combinedT
-    )
-  }
-}
-
-private[prediction] case class UnsetProp (fields: Map[String, Long])
-    extends Serializable {
-  def ++ (that: UnsetProp): UnsetProp = {
-    val commonKeys = fields.keySet.intersect(that.fields.keySet)
-
-    val common: Map[String, Long] = commonKeys.map { k =>
-      val thisData = this.fields(k)
-      val thatData = that.fields(k)
-      // only keep the value with latest time
-      val v = if (thisData > thatData) thisData else thatData
-      (k, v)
-    }.toMap
-
-    val combinedFields = common ++
-      (this.fields -- commonKeys) ++ (that.fields -- commonKeys)
-
-    UnsetProp(
-      fields = combinedFields
-    )
-  }
-}
-
-private[prediction] case class DeleteEntity (t: Long) extends Serializable {
-  def ++ (that: DeleteEntity): DeleteEntity = {
-    if (this.t > that.t) this else that
-  }
-}
-
-private[prediction] case class EventOp (
-  val setProp: Option[SetProp] = None,
-  val unsetProp: Option[UnsetProp] = None,
-  val deleteEntity: Option[DeleteEntity] = None,
-  val firstUpdated: Option[DateTime] = None,
-  val lastUpdated: Option[DateTime] = None
-) extends Serializable {
-
-  def ++ (that: EventOp): EventOp = {
-    val firstUp = (this.firstUpdated ++ that.firstUpdated).reduceOption{
-      (a, b) => if (b.getMillis < a.getMillis) b else a
-    }
-    val lastUp = (this.lastUpdated ++ that.lastUpdated).reduceOption {
-      (a, b) => if (b.getMillis > a.getMillis) b else a
-    }
-
-    EventOp(
-      setProp = (setProp ++ that.setProp).reduceOption(_ ++ _),
-      unsetProp = (unsetProp ++ that.unsetProp).reduceOption(_ ++ _),
-      deleteEntity = (deleteEntity ++ that.deleteEntity).reduceOption(_ ++ _),
-      firstUpdated = firstUp,
-      lastUpdated = lastUp
-    )
-  }
-
-  def toPropertyMap(): Option[PropertyMap] = {
-    setProp.flatMap { set =>
-
-      val unsetKeys: Set[String] = unsetProp.map( unset =>
-        unset.fields.filter{ case (k, v) => (v >= set.fields(k).t) }.keySet
-      ).getOrElse(Set())
-
-      val combinedFields = deleteEntity.map { delete =>
-        if (delete.t >= set.t) {
-          None
-        } else {
-          val deleteKeys: Set[String] = set.fields
-            .filter { case (k, PropTime(kv, t)) =>
-              (delete.t >= t)
-            }.keySet
-          Some(set.fields -- unsetKeys -- deleteKeys)
-        }
-      }.getOrElse{
-        Some(set.fields -- unsetKeys)
-      }
-
-      // Note: mapValues() doesn't return concrete Map and causes
-      // NotSerializableException issue. Use map(identity) to work around this.
-      // see https://issues.scala-lang.org/browse/SI-7005
-      combinedFields.map{ f =>
-        require(firstUpdated.isDefined,
-          "Unexpected Error: firstUpdated cannot be None.")
-        require(lastUpdated.isDefined,
-          "Unexpected Error: lastUpdated cannot be None.")
-        PropertyMap(
-          fields = f.mapValues(_.d).map(identity),
-          firstUpdated = firstUpdated.get,
-          lastUpdated = lastUpdated.get
-        )
-      }
-    }
-  }
-
-}
-
-private[prediction] object EventOp {
-  // create EventOp from Event object
-  def apply(e: Event): EventOp = {
-    val t = e.eventTime.getMillis
-    e.event match {
-      case "$set" => {
-        val fields = e.properties.fields.mapValues(jv =>
-          PropTime(jv, t)
-        ).map(identity)
-
-        EventOp(
-          setProp = Some(SetProp(fields = fields, t = t)),
-          firstUpdated = Some(e.eventTime),
-          lastUpdated = Some(e.eventTime)
-        )
-      }
-      case "$unset" => {
-        val fields = e.properties.fields.mapValues(jv => t).map(identity)
-        EventOp(
-          unsetProp = Some(UnsetProp(fields = fields)),
-          firstUpdated = Some(e.eventTime),
-          lastUpdated = Some(e.eventTime)
-        )
-      }
-      case "$delete" => {
-        EventOp(
-          deleteEntity = Some(DeleteEntity(t)),
-          firstUpdated = Some(e.eventTime),
-          lastUpdated = Some(e.eventTime)
-        )
-      }
-      case _ => {
-        EventOp()
-      }
-    }
-  }
-}
-
-
-private[prediction] object PEventAggregator {
-
-  val eventNames = List("$set", "$unset", "$delete")
-
-  def aggregateProperties(eventsRDD: RDD[Event]): RDD[(String, PropertyMap)] = {
-    eventsRDD
-      .map( e => (e.entityId, EventOp(e) ))
-      .aggregateByKey[EventOp](EventOp())(
-        // within same partition
-        seqOp = { case (u, v) => u ++ v },
-        // across partition
-        combOp = { case (accu, u) => accu ++ u }
-      )
-      .mapValues(_.toPropertyMap)
-      .filter{ case (k, v) => v.isDefined }
-      .map{ case (k, v) => (k, v.get) }
-  }
-
-}
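
A sketch of the RDD-based aggregation above (internal API shown for
illustration only; engine code goes through PEventStore, and eventsRDD is
hypothetical):

    val eventsRDD: RDD[Event] = ???  // $set / $unset / $delete events
    val propsByEntityId: RDD[(String, PropertyMap)] =
      PEventAggregator.aggregateProperties(eventsRDD)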

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/data/src/main/scala/io/prediction/data/storage/PEvents.scala
----------------------------------------------------------------------
diff --git a/data/src/main/scala/io/prediction/data/storage/PEvents.scala b/data/src/main/scala/io/prediction/data/storage/PEvents.scala
deleted file mode 100644
index 96a11b8..0000000
--- a/data/src/main/scala/io/prediction/data/storage/PEvents.scala
+++ /dev/null
@@ -1,182 +0,0 @@
-/** Copyright 2015 TappingStone, Inc.
-  *
-  * Licensed under the Apache License, Version 2.0 (the "License");
-  * you may not use this file except in compliance with the License.
-  * You may obtain a copy of the License at
-  *
-  *     http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-package io.prediction.data.storage
-
-import grizzled.slf4j.Logger
-import io.prediction.annotation.DeveloperApi
-import io.prediction.annotation.Experimental
-import org.apache.spark.SparkContext
-import org.apache.spark.rdd.RDD
-import org.joda.time.DateTime
-
-import scala.reflect.ClassTag
-
-/** :: DeveloperApi ::
-  * Base trait of a data access object that returns [[Event]] related RDD data
-  * structure. Engine developers should use
-  * [[io.prediction.data.store.PEventStore]] instead of using this directly.
-  *
-  * @group Event Data
-  */
-@DeveloperApi
-trait PEvents extends Serializable {
-  @transient protected lazy val logger = Logger[this.type]
-  @deprecated("Use PEventStore.find() instead.", "0.9.2")
-  def getByAppIdAndTimeAndEntity(appId: Int,
-    startTime: Option[DateTime],
-    untilTime: Option[DateTime],
-    entityType: Option[String],
-    entityId: Option[String])(sc: SparkContext): RDD[Event] = {
-      find(
-        appId = appId,
-        startTime = startTime,
-        untilTime = untilTime,
-        entityType = entityType,
-        entityId = entityId,
-        eventNames = None
-      )(sc)
-    }
-
-  /** :: DeveloperApi ::
-    * Read from the database and return the events. The deprecation here is
-    * intended for engine developers only.
-    *
-    * @param appId return events of this app ID
-    * @param channelId return events of this channel ID (default channel if it's None)
-    * @param startTime return events with eventTime >= startTime
-    * @param untilTime return events with eventTime < untilTime
-    * @param entityType return events of this entityType
-    * @param entityId return events of this entityId
-    * @param eventNames return events with any of these event names.
-    * @param targetEntityType return events of this targetEntityType:
-    *   - None means no restriction on targetEntityType
-    *   - Some(None) means no targetEntityType for this event
-    *   - Some(Some(x)) means targetEntityType should match x.
-    * @param targetEntityId return events of this targetEntityId
-    *   - None means no restriction on targetEntityId
-    *   - Some(None) means no targetEntityId for this event
-    *   - Some(Some(x)) means targetEntityId should match x.
-    * @param sc Spark context
-    * @return RDD[Event]
-    */
-  @deprecated("Use PEventStore.find() instead.", "0.9.2")
-  @DeveloperApi
-  def find(
-    appId: Int,
-    channelId: Option[Int] = None,
-    startTime: Option[DateTime] = None,
-    untilTime: Option[DateTime] = None,
-    entityType: Option[String] = None,
-    entityId: Option[String] = None,
-    eventNames: Option[Seq[String]] = None,
-    targetEntityType: Option[Option[String]] = None,
-    targetEntityId: Option[Option[String]] = None)(sc: SparkContext): RDD[Event]
-
-  /** Aggregate properties of entities based on the special events \$set,
-    * \$unset, and \$delete. The deprecation here is intended for engine
-    * developers only.
-    *
-    * @param appId use events of this app ID
-    * @param channelId use events of this channel ID (default channel if it's None)
-    * @param entityType aggregate properties of the entities of this entityType
-    * @param startTime use events with eventTime >= startTime
-    * @param untilTime use events with eventTime < untilTime
-    * @param required only keep entities with these required properties defined
-    * @param sc Spark context
-    * @return RDD[(String, PropertyMap)] RDD of entityId and PropertyMap pair
-    */
-  @deprecated("Use PEventStore.aggregateProperties() instead.", "0.9.2")
-  def aggregateProperties(
-    appId: Int,
-    channelId: Option[Int] = None,
-    entityType: String,
-    startTime: Option[DateTime] = None,
-    untilTime: Option[DateTime] = None,
-    required: Option[Seq[String]] = None)
-    (sc: SparkContext): RDD[(String, PropertyMap)] = {
-    val eventRDD = find(
-      appId = appId,
-      channelId = channelId,
-      startTime = startTime,
-      untilTime = untilTime,
-      entityType = Some(entityType),
-      eventNames = Some(PEventAggregator.eventNames))(sc)
-
-    val dmRDD = PEventAggregator.aggregateProperties(eventRDD)
-
-    required map { r =>
-      dmRDD.filter { case (k, v) =>
-        r.map(v.contains(_)).reduce(_ && _)
-      }
-    } getOrElse dmRDD
-  }
-
-  /** :: Experimental ::
-    * Extract an EntityMap[A] from events of the given entityType.
-    * NOTE: the returned EntityMap[A] is a local (non-distributed) map.
-    */
-  @deprecated("Use PEventStore.aggregateProperties() instead.", "0.9.2")
-  @Experimental
-  def extractEntityMap[A: ClassTag](
-    appId: Int,
-    entityType: String,
-    startTime: Option[DateTime] = None,
-    untilTime: Option[DateTime] = None,
-    required: Option[Seq[String]] = None)
-    (sc: SparkContext)(extract: DataMap => A): EntityMap[A] = {
-    val idToData: Map[String, A] = aggregateProperties(
-      appId = appId,
-      entityType = entityType,
-      startTime = startTime,
-      untilTime = untilTime,
-      required = required
-    )(sc).map{ case (id, dm) =>
-      try {
-        (id, extract(dm))
-      } catch {
-        case e: Exception => {
-          logger.error(s"Failed to extract entity from DataMap $dm of " +
-            s"entityId $id.", e)
-          throw e
-        }
-      }
-    }.collectAsMap.toMap
-
-    new EntityMap(idToData)
-  }
-
-  /** :: DeveloperApi ::
-    * Write events to database
-    *
-    * @param events RDD of Event
-    * @param appId the app ID
-    * @param sc Spark Context
-    */
-  @DeveloperApi
-  def write(events: RDD[Event], appId: Int)(sc: SparkContext): Unit =
-    write(events, appId, None)(sc)
-
-  /** :: DeveloperApi ::
-    * Write events to database
-    *
-    * @param events RDD of Event
-    * @param appId the app ID
-    * @param channelId  channel ID (default channel if it's None)
-    * @param sc Spark Context
-    */
-  @DeveloperApi
-  def write(events: RDD[Event], appId: Int, channelId: Option[Int])(sc: SparkContext): Unit
-}
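
The nested Option parameters of find() deserve a concrete sketch
(hypothetical app ID, event names, and PEvents instance; sc is a
SparkContext assumed to be in scope):

    // targetEntityType semantics:
    //   None               -> no restriction on the target entity type
    //   Some(None)         -> only events that have no target entity
    //   Some(Some("item")) -> the target entity type must be "item"
    val pEvents: PEvents = ???
    val viewedItems: RDD[Event] = pEvents.find(
      appId = 1,
      eventNames = Some(Seq("view")),
      targetEntityType = Some(Some("item")))(sc)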

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/data/src/main/scala/io/prediction/data/storage/PropertyMap.scala
----------------------------------------------------------------------
diff --git a/data/src/main/scala/io/prediction/data/storage/PropertyMap.scala b/data/src/main/scala/io/prediction/data/storage/PropertyMap.scala
deleted file mode 100644
index bc55fd3..0000000
--- a/data/src/main/scala/io/prediction/data/storage/PropertyMap.scala
+++ /dev/null
@@ -1,96 +0,0 @@
-/** Copyright 2015 TappingStone, Inc.
-  *
-  * Licensed under the Apache License, Version 2.0 (the "License");
-  * you may not use this file except in compliance with the License.
-  * You may obtain a copy of the License at
-  *
-  *     http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-package io.prediction.data.storage
-
-import org.joda.time.DateTime
-
-import org.json4s.JValue
-import org.json4s.JObject
-import org.json4s.native.JsonMethods.parse
-
-/** A PropertyMap stores the aggregated properties of an entity.
-  * Internally it is a Map whose keys are property names and whose values are
-  * the corresponding JSON values. Use the get() method to retrieve the value
-  * of a mandatory property, or getOpt() to retrieve the value of an optional
-  * property.
-  *
-  * @param fields Map of property name to JValue
-  * @param firstUpdated first updated time of this PropertyMap
-  * @param lastUpdated last updated time of this PropertyMap
-  */
-class PropertyMap(
-  fields: Map[String, JValue],
-  val firstUpdated: DateTime,
-  val lastUpdated: DateTime
-) extends DataMap(fields) {
-
-  override
-  def toString: String = s"PropertyMap(${fields}, ${firstUpdated}, ${lastUpdated})"
-
-  override
-  def hashCode: Int =
-    41 * (
-      41 * (
-        41 + fields.hashCode
-      ) + firstUpdated.hashCode
-    ) + lastUpdated.hashCode
-
-  override
-  def equals(other: Any): Boolean = other match {
-    case that: PropertyMap => {
-      (that.canEqual(this)) &&
-      (super.equals(that)) &&
-      (this.firstUpdated.equals(that.firstUpdated)) &&
-      (this.lastUpdated.equals(that.lastUpdated))
-    }
-    case that: DataMap => { // for testing purpose
-      super.equals(that)
-    }
-    case _ => false
-  }
-
-  override
-  def canEqual(other: Any): Boolean = other.isInstanceOf[PropertyMap]
-}
-
-/** Companion object of the [[PropertyMap]] class. */
-object PropertyMap {
-
-  /** Create a PropertyMap from a Map of String to JValue,
-    * firstUpdated and lastUpdated time.
-    *
-    * @param fields a Map of String to JValue
-    * @param firstUpdated First updated time
-    * @param lastUpdated Last updated time
-    * @return a new PropertyMap
-    */
-  def apply(fields: Map[String, JValue],
-    firstUpdated: DateTime, lastUpdated: DateTime): PropertyMap =
-    new PropertyMap(fields, firstUpdated, lastUpdated)
-
-  /** Create a PropertyMap from a JSON String together with firstUpdated and
-    * lastUpdated times.
-    * @param js JSON String. eg """{ "a": 1, "b": "foo" }"""
-    * @param firstUpdated First updated time
-    * @param lastUpdated Last updated time
-    * @return a new PropertyMap
-    */
-  def apply(js: String, firstUpdated: DateTime, lastUpdated: DateTime)
-  : PropertyMap = apply(
-      fields = parse(js).asInstanceOf[JObject].obj.toMap,
-      firstUpdated = firstUpdated,
-      lastUpdated = lastUpdated
-    )
-}
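
A sketch of the two factory methods above (hypothetical values; get() and
getOpt() are inherited from DataMap):

    import org.joda.time.DateTime

    val t = DateTime.now()
    val pm = PropertyMap("""{"a": 1, "b": "foo"}""",
      firstUpdated = t, lastUpdated = t)

    val a: Int = pm.get[Int]("a")                   // mandatory property
    val c: Option[String] = pm.getOpt[String]("c")  // optional; None here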


[21/34] incubator-predictionio git commit: rename all except examples

Posted by do...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/core/src/main/scala/org/apache/predictionio/controller/Metric.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/predictionio/controller/Metric.scala b/core/src/main/scala/org/apache/predictionio/controller/Metric.scala
new file mode 100644
index 0000000..cc27984
--- /dev/null
+++ b/core/src/main/scala/org/apache/predictionio/controller/Metric.scala
@@ -0,0 +1,266 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package org.apache.predictionio.controller
+
+import _root_.org.apache.predictionio.controller.java.SerializableComparator
+import org.apache.predictionio.core.BaseEngine
+import org.apache.spark.SparkContext
+import org.apache.spark.rdd.RDD
+import org.apache.spark.util.StatCounter
+
+import scala.Numeric.Implicits._
+import scala.reflect._
+
+/** Base class of a [[Metric]].
+  *
+  * @tparam EI Evaluation information
+  * @tparam Q Query
+  * @tparam P Predicted result
+  * @tparam A Actual result
+  * @tparam R Metric result
+  * @group Evaluation
+  */
+abstract class Metric[EI, Q, P, A, R](implicit rOrder: Ordering[R])
+extends Serializable {
+  /** Java-friendly constructor.
+    *
+    * @param comparator A serializable comparator for sorting the metric results.
+    */
+  def this(comparator: SerializableComparator[R]) = {
+    this()(Ordering.comparatorToOrdering(comparator))
+  }
+
+  /** Class name of this [[Metric]]. */
+  def header: String = this.getClass.getSimpleName
+
+  /** Calculates the result of this [[Metric]]. */
+  def calculate(sc: SparkContext, evalDataSet: Seq[(EI, RDD[(Q, P, A)])]): R
+
+  /** Comparison function for R's ordering. */
+  def compare(r0: R, r1: R): Int = rOrder.compare(r0, r1)
+}
+
+private[prediction] trait StatsMetricHelper[EI, Q, P, A] {
+  def calculate(q: Q, p: P, a: A): Double
+
+  def calculateStats(sc: SparkContext, evalDataSet: Seq[(EI, RDD[(Q, P, A)])])
+  : StatCounter = {
+    val doubleRDD = sc.union(
+      evalDataSet.map { case (_, qpaRDD) =>
+        qpaRDD.map { case (q, p, a) => calculate(q, p, a) }
+      }
+    )
+
+    doubleRDD.stats()
+  }
+}
+
+private[prediction] trait StatsOptionMetricHelper[EI, Q, P, A] {
+  def calculate(q: Q, p: P, a: A): Option[Double]
+
+  def calculateStats(sc: SparkContext, evalDataSet: Seq[(EI, RDD[(Q, P, A)])])
+  : StatCounter = {
+    val doubleRDD = sc.union(
+      evalDataSet.map { case (_, qpaRDD) =>
+        qpaRDD.flatMap { case (q, p, a) => calculate(q, p, a) }
+      }
+    )
+
+    doubleRDD.stats()
+  }
+}
+
+/** Returns the global average of the score returned by the calculate method.
+  *
+  * @tparam EI Evaluation information
+  * @tparam Q Query
+  * @tparam P Predicted result
+  * @tparam A Actual result
+  *
+  * @group Evaluation
+  */
+abstract class AverageMetric[EI, Q, P, A]
+    extends Metric[EI, Q, P, A, Double]
+    with StatsMetricHelper[EI, Q, P, A]
+    with QPAMetric[Q, P, A, Double] {
+  /** Implement this method to return a score that will be used for averaging
+    * across all QPA tuples.
+    */
+  def calculate(q: Q, p: P, a: A): Double
+
+  def calculate(sc: SparkContext, evalDataSet: Seq[(EI, RDD[(Q, P, A)])])
+  : Double = {
+    calculateStats(sc, evalDataSet).mean
+  }
+}
+
+/** Returns the global average of the non-None score returned by the calculate
+  * method.
+  *
+  * @tparam EI Evaluation information
+  * @tparam Q Query
+  * @tparam P Predicted result
+  * @tparam A Actual result
+  *
+  * @group Evaluation
+  */
+abstract class OptionAverageMetric[EI, Q, P, A]
+    extends Metric[EI, Q, P, A, Double]
+    with StatsOptionMetricHelper[EI, Q, P, A]
+    with QPAMetric[Q, P, A, Option[Double]] {
+  /** Implement this method to return a score that will be used for averaging
+    * across all QPA tuples.
+    */
+  def calculate(q: Q, p: P, a: A): Option[Double]
+
+  def calculate(sc: SparkContext, evalDataSet: Seq[(EI, RDD[(Q, P, A)])])
+  : Double = {
+    calculateStats(sc, evalDataSet).mean
+  }
+}
+
+/** Returns the global standard deviation of the score returned by the
+  * calculate method.
+  *
+  * This class uses org.apache.spark.util.StatCounter, which computes the
+  * standard deviation in a single pass over the data.
+  *
+  * @tparam EI Evaluation information
+  * @tparam Q Query
+  * @tparam P Predicted result
+  * @tparam A Actual result
+  *
+  * @group Evaluation
+  */
+abstract class StdevMetric[EI, Q, P, A]
+    extends Metric[EI, Q, P, A, Double]
+    with StatsMetricHelper[EI, Q, P, A]
+    with QPAMetric[Q, P, A, Double] {
+  /** Implement this method to return a score that will be used for
+    * calculating the standard deviation across all QPA tuples.
+    */
+  def calculate(q: Q, p: P, a: A): Double
+
+  def calculate(sc: SparkContext, evalDataSet: Seq[(EI, RDD[(Q, P, A)])])
+  : Double = {
+    calculateStats(sc, evalDataSet).stdev
+  }
+}
+
+/** Returns the global standard deviation of the non-None score returned by
+  * the calculate method.
+  *
+  * This class uses org.apache.spark.util.StatCounter, which computes the
+  * standard deviation in a single pass over the data.
+  *
+  * @tparam EI Evaluation information
+  * @tparam Q Query
+  * @tparam P Predicted result
+  * @tparam A Actual result
+  *
+  * @group Evaluation
+  */
+abstract class OptionStdevMetric[EI, Q, P, A]
+    extends Metric[EI, Q, P, A, Double]
+    with StatsOptionMetricHelper[EI, Q, P, A]
+    with QPAMetric[Q, P, A, Option[Double]] {
+  /** Implement this method to return a score that will be used for
+    * calculating the standard deviation across all QPA tuples.
+    */
+  def calculate(q: Q, p: P, a: A): Option[Double]
+
+  def calculate(sc: SparkContext, evalDataSet: Seq[(EI, RDD[(Q, P, A)])])
+  : Double = {
+    calculateStats(sc, evalDataSet).stdev
+  }
+}
+
+/** Returns the sum of the score returned by the calculate method.
+  *
+  * @tparam EI Evaluation information
+  * @tparam Q Query
+  * @tparam P Predicted result
+  * @tparam A Actual result
+  * @tparam R Result, output of the function calculate, must be Numeric
+  *
+  * @group Evaluation
+  */
+abstract class SumMetric[EI, Q, P, A, R: ClassTag](implicit num: Numeric[R])
+    extends Metric[EI, Q, P, A, R]()(num)
+    with QPAMetric[Q, P, A, R] {
+  /** Implement this method to return a score that will be used for summing
+    * across all QPA tuples.
+    */
+  def calculate(q: Q, p: P, a: A): R
+
+  def calculate(sc: SparkContext, evalDataSet: Seq[(EI, RDD[(Q, P, A)])])
+  : R = {
+    val union: RDD[R] = sc.union(
+      evalDataSet.map { case (_, qpaRDD) =>
+        qpaRDD.map { case (q, p, a) => calculate(q, p, a) }
+      }
+    )
+
+    union.aggregate[R](num.zero)(_ + _, _ + _)
+  }
+}
+
+/** Returns zero. Useful as a placeholder during evaluation development when not all components are
+  * implemented.
+  * @tparam EI Evaluation information
+  * @tparam Q Query
+  * @tparam P Predicted result
+  * @tparam A Actual result
+  *
+  * @group Evaluation
+  */
+class ZeroMetric[EI, Q, P, A] extends Metric[EI, Q, P, A, Double]() {
+  def calculate(sc: SparkContext, evalDataSet: Seq[(EI, RDD[(Q, P, A)])]): Double = 0.0
+}
+
+/** Companion object of [[ZeroMetric]]
+  *
+  * @group Evaluation
+  */
+object ZeroMetric {
+  /** Returns a ZeroMetric instance using Engine's type parameters. */
+  def apply[EI, Q, P, A](engine: BaseEngine[EI, Q, P, A]): ZeroMetric[EI, Q, P, A] = {
+    new ZeroMetric[EI, Q, P, A]()
+  }
+}
+
+
+/** Trait for metric which returns a score based on Query, PredictedResult,
+  * and ActualResult
+  *
+  * @tparam Q Query class
+  * @tparam P Predicted result class
+  * @tparam A Actual result class
+  * @tparam R Metric result class
+  * @group Evaluation
+  */
+trait QPAMetric[Q, P, A, R] {
+  /** Calculate a metric result based on query, predicted result, and actual
+    * result
+    *
+    * @param q Query
+    * @param p Predicted result
+    * @param a Actual result
+    * @return Metric result
+    */
+  def calculate(q: Q, p: P, a: A): R
+}

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/core/src/main/scala/org/apache/predictionio/controller/MetricEvaluator.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/predictionio/controller/MetricEvaluator.scala b/core/src/main/scala/org/apache/predictionio/controller/MetricEvaluator.scala
new file mode 100644
index 0000000..b707041
--- /dev/null
+++ b/core/src/main/scala/org/apache/predictionio/controller/MetricEvaluator.scala
@@ -0,0 +1,260 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package org.apache.predictionio.controller
+
+import _root_.java.io.File
+import _root_.java.io.PrintWriter
+
+import com.github.nscala_time.time.Imports.DateTime
+import grizzled.slf4j.Logger
+import org.apache.predictionio.annotation.DeveloperApi
+import org.apache.predictionio.core.BaseEvaluator
+import org.apache.predictionio.core.BaseEvaluatorResult
+import org.apache.predictionio.data.storage.Storage
+import org.apache.predictionio.workflow.JsonExtractor
+import org.apache.predictionio.workflow.JsonExtractorOption.Both
+import org.apache.predictionio.workflow.NameParamsSerializer
+import org.apache.predictionio.workflow.WorkflowParams
+import org.apache.spark.SparkContext
+import org.apache.spark.rdd.RDD
+import org.json4s.native.Serialization.write
+import org.json4s.native.Serialization.writePretty
+
+import scala.language.existentials
+
+/** Case class storing a primary score, and other scores
+  *
+  * @param score Primary metric score
+  * @param otherScores Other scores this metric might have
+  * @tparam R Type of the primary metric score
+  * @group Evaluation
+  */
+case class MetricScores[R](
+  score: R,
+  otherScores: Seq[Any])
+
+/** Contains all results of a [[MetricEvaluator]]
+  *
+  * @param bestScore The best score among all iterations
+  * @param bestEngineParams The set of engine parameters that yielded the best score
+  * @param bestIdx The index of iteration that yielded the best score
+  * @param metricHeader Brief description of the primary metric score
+  * @param otherMetricHeaders Brief descriptions of other metric scores
+  * @param engineParamsScores All sets of engine parameters and corresponding metric scores
+  * @param outputPath An optional output path where scores are saved
+  * @tparam R Type of the primary metric score
+  * @group Evaluation
+  */
+case class MetricEvaluatorResult[R](
+  bestScore: MetricScores[R],
+  bestEngineParams: EngineParams,
+  bestIdx: Int,
+  metricHeader: String,
+  otherMetricHeaders: Seq[String],
+  engineParamsScores: Seq[(EngineParams, MetricScores[R])],
+  outputPath: Option[String])
+extends BaseEvaluatorResult {
+
+  override def toOneLiner(): String = {
+    val idx = engineParamsScores.map(_._1).indexOf(bestEngineParams)
+    s"Best Params Index: $idx Score: ${bestScore.score}"
+  }
+
+  override def toJSON(): String = {
+    implicit lazy val formats = Utils.json4sDefaultFormats +
+      new NameParamsSerializer
+    write(this)
+  }
+
+  override def toHTML(): String = html.metric_evaluator().toString()
+
+  override def toString: String = {
+    implicit lazy val formats = Utils.json4sDefaultFormats +
+      new NameParamsSerializer
+
+    val bestEPStr = JsonExtractor.engineParamstoPrettyJson(Both, bestEngineParams)
+
+    val strings = Seq(
+      "MetricEvaluatorResult:",
+      s"  # engine params evaluated: ${engineParamsScores.size}") ++
+      Seq(
+        "Optimal Engine Params:",
+        s"  $bestEPStr",
+        "Metrics:",
+        s"  $metricHeader: ${bestScore.score}") ++
+      otherMetricHeaders.zip(bestScore.otherScores).map {
+        case (h, s) => s"  $h: $s"
+      } ++
+      outputPath.toSeq.map {
+        p => s"The best variant params can be found in $p"
+      }
+
+    strings.mkString("\n")
+  }
+}
+
+/** Companion object of [[MetricEvaluator]]
+  *
+  * @group Evaluation
+  */
+object MetricEvaluator {
+  def apply[EI, Q, P, A, R](
+    metric: Metric[EI, Q, P, A, R],
+    otherMetrics: Seq[Metric[EI, Q, P, A, _]],
+    outputPath: String): MetricEvaluator[EI, Q, P, A, R] = {
+    new MetricEvaluator[EI, Q, P, A, R](
+      metric,
+      otherMetrics,
+      Some(outputPath))
+  }
+
+  def apply[EI, Q, P, A, R](
+    metric: Metric[EI, Q, P, A, R],
+    otherMetrics: Seq[Metric[EI, Q, P, A, _]])
+  : MetricEvaluator[EI, Q, P, A, R] = {
+    new MetricEvaluator[EI, Q, P, A, R](
+      metric,
+      otherMetrics,
+      None)
+  }
+
+  def apply[EI, Q, P, A, R](metric: Metric[EI, Q, P, A, R])
+  : MetricEvaluator[EI, Q, P, A, R] = {
+    new MetricEvaluator[EI, Q, P, A, R](
+      metric,
+      Seq[Metric[EI, Q, P, A, _]](),
+      None)
+  }
+
+  case class NameParams(name: String, params: Params) {
+    def this(np: (String, Params)) = this(np._1, np._2)
+  }
+
+  case class EngineVariant(
+    id: String,
+    description: String,
+    engineFactory: String,
+    datasource: NameParams,
+    preparator: NameParams,
+    algorithms: Seq[NameParams],
+    serving: NameParams) {
+
+    def this(evaluation: Evaluation, engineParams: EngineParams) = this(
+      id = "",
+      description = "",
+      engineFactory = evaluation.getClass.getName,
+      datasource = new NameParams(engineParams.dataSourceParams),
+      preparator = new NameParams(engineParams.preparatorParams),
+      algorithms = engineParams.algorithmParamsList.map(np => new NameParams(np)),
+      serving = new NameParams(engineParams.servingParams))
+  }
+}
+
+/** :: DeveloperApi ::
+  * Do not use this directly. Use [[MetricEvaluator$]] instead. This is an
+  * implementation of [[org.apache.predictionio.core.BaseEvaluator]] that evaluates
+  * prediction performance based on metric scores.
+  *
+  * @param metric Primary metric
+  * @param otherMetrics Other metrics
+  * @param outputPath Optional output path to save evaluation results
+  * @tparam EI Evaluation information type
+  * @tparam Q Query class
+  * @tparam P Predicted result class
+  * @tparam A Actual result class
+  * @tparam R Metric result class
+  * @group Evaluation
+  */
+@DeveloperApi
+class MetricEvaluator[EI, Q, P, A, R] (
+  val metric: Metric[EI, Q, P, A, R],
+  val otherMetrics: Seq[Metric[EI, Q, P, A, _]],
+  val outputPath: Option[String])
+  extends BaseEvaluator[EI, Q, P, A, MetricEvaluatorResult[R]] {
+  @transient lazy val logger = Logger[this.type]
+  @transient val engineInstances = Storage.getMetaDataEngineInstances()
+
+  def saveEngineJson(
+    evaluation: Evaluation,
+    engineParams: EngineParams,
+    outputPath: String) {
+
+    val now = DateTime.now
+    val evalClassName = evaluation.getClass.getName
+
+    val variant = MetricEvaluator.EngineVariant(
+      id = s"$evalClassName $now",
+      description = "",
+      engineFactory = evalClassName,
+      datasource = new MetricEvaluator.NameParams(engineParams.dataSourceParams),
+      preparator = new MetricEvaluator.NameParams(engineParams.preparatorParams),
+      algorithms = engineParams.algorithmParamsList.map(np => new MetricEvaluator.NameParams(np)),
+      serving = new MetricEvaluator.NameParams(engineParams.servingParams))
+
+    implicit lazy val formats = Utils.json4sDefaultFormats
+
+    logger.info(s"Writing best variant params to disk ($outputPath)...")
+    val writer = new PrintWriter(new File(outputPath))
+    writer.write(writePretty(variant))
+    writer.close()
+  }
+
+  def evaluateBase(
+    sc: SparkContext,
+    evaluation: Evaluation,
+    engineEvalDataSet: Seq[(EngineParams, Seq[(EI, RDD[(Q, P, A)])])],
+    params: WorkflowParams): MetricEvaluatorResult[R] = {
+
+    val evalResultList: Seq[(EngineParams, MetricScores[R])] = engineEvalDataSet
+    .zipWithIndex
+    .par
+    .map { case ((engineParams, evalDataSet), idx) =>
+      val metricScores = MetricScores[R](
+        metric.calculate(sc, evalDataSet),
+        otherMetrics.map(_.calculate(sc, evalDataSet)))
+      (engineParams, metricScores)
+    }
+    .seq
+
+    implicit lazy val formats = Utils.json4sDefaultFormats +
+      new NameParamsSerializer
+
+    evalResultList.zipWithIndex.foreach { case ((ep, r), idx) =>
+      logger.info(s"Iteration $idx")
+      logger.info(s"EngineParams: ${JsonExtractor.engineParamsToJson(Both, ep)}")
+      logger.info(s"Result: $r")
+    }
+
+    // Use the maximum score, comparing with the implicit ordering from Metric.
+    val ((bestEngineParams, bestScore), bestIdx) = evalResultList
+    .zipWithIndex
+    .reduce { (x, y) =>
+      if (metric.compare(x._1._2.score, y._1._2.score) >= 0) x else y
+    }
+
+    // Save the best engine params if an output path is set.
+    outputPath.foreach { path => saveEngineJson(evaluation, bestEngineParams, path) }
+
+    MetricEvaluatorResult(
+      bestScore = bestScore,
+      bestEngineParams = bestEngineParams,
+      bestIdx = bestIdx,
+      metricHeader = metric.header,
+      otherMetricHeaders = otherMetrics.map(_.header),
+      engineParamsScores = evalResultList,
+      outputPath = outputPath)
+  }
+}
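
As a usage sketch (hypothetical: RecommendationEngine is an assumed engine
factory, and ExactMatchAccuracy is the metric sketched after Metric.scala
above), the companion object's apply methods are typically wired into an
Evaluation like this:

import org.apache.predictionio.controller.{Evaluation, MetricEvaluator, ZeroMetric}

object AccuracyEvaluation extends Evaluation {
  // RecommendationEngine() is an assumed factory returning a
  // BaseEngine[EmptyEvaluationInfo, Query, PredictedResult, ActualResult].
  val engine = RecommendationEngine()

  // The primary metric picks the best engine params; a ZeroMetric placeholder
  // stands in for additional metrics. Best variant params go to best.json.
  engineEvaluator = (
    engine,
    MetricEvaluator(
      metric = new ExactMatchAccuracy(),
      otherMetrics = Seq(ZeroMetric(engine)),
      outputPath = "best.json"))
}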

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/core/src/main/scala/org/apache/predictionio/controller/P2LAlgorithm.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/predictionio/controller/P2LAlgorithm.scala b/core/src/main/scala/org/apache/predictionio/controller/P2LAlgorithm.scala
new file mode 100644
index 0000000..cb9f7c4
--- /dev/null
+++ b/core/src/main/scala/org/apache/predictionio/controller/P2LAlgorithm.scala
@@ -0,0 +1,121 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package org.apache.predictionio.controller
+
+import _root_.org.apache.predictionio.annotation.DeveloperApi
+import org.apache.predictionio.core.BaseAlgorithm
+import org.apache.predictionio.workflow.PersistentModelManifest
+import org.apache.spark.SparkContext
+import org.apache.spark.SparkContext._
+import org.apache.spark.rdd.RDD
+
+import scala.reflect._
+
+/** Base class of a parallel-to-local algorithm.
+  *
+  * A parallel-to-local algorithm can be run in parallel on a cluster and
+  * produces a model that can fit within a single machine.
+  *
+  * If your input query class requires custom JSON4S serialization, the most
+  * idiomatic way is to implement a trait that extends [[CustomQuerySerializer]],
+  * and mix that into your algorithm class, instead of overriding
+  * [[querySerializer]] directly.
+  *
+  * @tparam PD Prepared data class.
+  * @tparam M Trained model class.
+  * @tparam Q Input query class.
+  * @tparam P Output prediction class.
+  * @group Algorithm
+  */
+abstract class P2LAlgorithm[PD, M: ClassTag, Q: ClassTag, P]
+  extends BaseAlgorithm[PD, M, Q, P] {
+
+  def trainBase(sc: SparkContext, pd: PD): M = train(sc, pd)
+
+  /** Implement this method to produce a model from prepared data.
+    *
+    * @param pd Prepared data for model training.
+    * @return Trained model.
+    */
+  def train(sc: SparkContext, pd: PD): M
+
+  def batchPredictBase(sc: SparkContext, bm: Any, qs: RDD[(Long, Q)])
+  : RDD[(Long, P)] = batchPredict(bm.asInstanceOf[M], qs)
+
+  /** This is a default implementation to perform batch prediction. Override
+    * this method for a custom implementation.
+    *
+    * @param m A model
+    * @param qs An RDD of index-query tuples. The index is used to keep track of
+    *           predicted results with corresponding queries.
+    * @return Batch of predicted results
+    */
+  def batchPredict(m: M, qs: RDD[(Long, Q)]): RDD[(Long, P)] = {
+    qs.mapValues { q => predict(m, q) }
+  }
+
+  def predictBase(bm: Any, q: Q): P = predict(bm.asInstanceOf[M], q)
+
+  /** Implement this method to produce a prediction from a query and trained
+    * model.
+    *
+    * @param model Trained model produced by [[train]].
+    * @param query An input query.
+    * @return A prediction.
+    */
+  def predict(model: M, query: Q): P
+
+  /** :: DeveloperApi ::
+    * Engine developers should not use this directly (read on to see how
+    * parallel-to-local algorithm models are persisted).
+    *
+    * Parallel-to-local algorithms produce local models. By default, models will be
+    * serialized and stored automatically. Engine developers can override this behavior by
+    * mixing the [[PersistentModel]] trait into the model class, and
+    * PredictionIO will call [[PersistentModel.save]] instead. If it returns
+    * true, a [[org.apache.predictionio.workflow.PersistentModelManifest]] will be
+    * returned so that during deployment, PredictionIO will use
+    * [[PersistentModelLoader]] to retrieve the model. Otherwise, Unit will be
+    * returned and the model will be re-trained on-the-fly.
+    *
+    * @param sc Spark context
+    * @param modelId Model ID
+    * @param algoParams Algorithm parameters that trained this model
+    * @param bm Model
+    * @return The model itself for automatic persistence, an instance of
+    *         [[org.apache.predictionio.workflow.PersistentModelManifest]] for manual
+    *         persistence, or Unit for re-training on deployment
+    */
+  @DeveloperApi
+  override
+  def makePersistentModel(
+    sc: SparkContext,
+    modelId: String,
+    algoParams: Params,
+    bm: Any): Any = {
+    val m = bm.asInstanceOf[M]
+    if (m.isInstanceOf[PersistentModel[_]]) {
+      if (m.asInstanceOf[PersistentModel[Params]].save(
+        modelId, algoParams, sc)) {
+        PersistentModelManifest(className = m.getClass.getName)
+      } else {
+        Unit
+      }
+    } else {
+      m
+    }
+  }
+}
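
A minimal parallel-to-local algorithm might look like the following sketch
(hypothetical: the PreparedData, Query, and PredictedResult classes are
assumed). Training runs on the cluster, while the resulting model is a small
local object.

import org.apache.predictionio.controller.{P2LAlgorithm, Params}
import org.apache.spark.SparkContext
import org.apache.spark.SparkContext._
import org.apache.spark.rdd.RDD

// Assumed example classes, not part of this commit.
case class PreparedData(labels: RDD[Double])
case class Query(id: String)
case class PredictedResult(score: Double)
case class MeanModel(mean: Double)

case class AlgorithmParams(multiplier: Double) extends Params

class MeanAlgorithm(val ap: AlgorithmParams)
  extends P2LAlgorithm[PreparedData, MeanModel, Query, PredictedResult] {

  // Training aggregates over a distributed RDD on the cluster...
  def train(sc: SparkContext, pd: PreparedData): MeanModel =
    MeanModel(pd.labels.mean())

  // ...while prediction uses the small local model.
  def predict(model: MeanModel, query: Query): PredictedResult =
    PredictedResult(model.mean * ap.multiplier)
}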

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/core/src/main/scala/org/apache/predictionio/controller/PAlgorithm.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/predictionio/controller/PAlgorithm.scala b/core/src/main/scala/org/apache/predictionio/controller/PAlgorithm.scala
new file mode 100644
index 0000000..d2123f3
--- /dev/null
+++ b/core/src/main/scala/org/apache/predictionio/controller/PAlgorithm.scala
@@ -0,0 +1,126 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package org.apache.predictionio.controller
+
+import org.apache.predictionio.annotation.DeveloperApi
+import org.apache.predictionio.core.BaseAlgorithm
+import org.apache.predictionio.workflow.PersistentModelManifest
+import org.apache.spark.SparkContext
+import org.apache.spark.rdd.RDD
+
+/** Base class of a parallel algorithm.
+  *
+  * A parallel algorithm can be run in parallel on a cluster and produces a
+  * model that can also be distributed across a cluster.
+  *
+  * If your input query class requires custom JSON4S serialization, the most
+  * idiomatic way is to implement a trait that extends [[CustomQuerySerializer]],
+  * and mix that into your algorithm class, instead of overriding
+  * [[querySerializer]] directly.
+  *
+  * To support evaluation, one must override and implement the
+  * [[batchPredict]] method. Otherwise, an exception will be thrown when
+  * `pio eval` is used.
+  *
+  * @tparam PD Prepared data class.
+  * @tparam M Trained model class.
+  * @tparam Q Input query class.
+  * @tparam P Output prediction class.
+  * @group Algorithm
+  */
+abstract class PAlgorithm[PD, M, Q, P]
+  extends BaseAlgorithm[PD, M, Q, P] {
+
+  def trainBase(sc: SparkContext, pd: PD): M = train(sc, pd)
+
+  /** Implement this method to produce a model from prepared data.
+    *
+    * @param pd Prepared data for model training.
+    * @return Trained model.
+    */
+  def train(sc: SparkContext, pd: PD): M
+
+  def batchPredictBase(sc: SparkContext, bm: Any, qs: RDD[(Long, Q)])
+  : RDD[(Long, P)] = batchPredict(bm.asInstanceOf[M], qs)
+
+  /** To support evaluation, one must override and implement this method to
+    * generate predictions in batch. Otherwise, an exception will be thrown
+    * when `pio eval` is used.
+    *
+    * The default implementation throws an exception.
+    *
+    * @param m Trained model produced by [[train]].
+    * @param qs An RDD of index-query tuples. The index is used to keep track of
+    *           predicted results with corresponding queries.
+    */
+  def batchPredict(m: M, qs: RDD[(Long, Q)]): RDD[(Long, P)] =
+    throw new NotImplementedError("batchPredict not implemented")
+
+  def predictBase(baseModel: Any, query: Q): P = {
+    predict(baseModel.asInstanceOf[M], query)
+  }
+
+  /** Implement this method to produce a prediction from a query and trained
+    * model.
+    *
+    * @param model Trained model produced by [[train]].
+    * @param query An input query.
+    * @return A prediction.
+    */
+  def predict(model: M, query: Q): P
+
+  /** :: DeveloperApi ::
+    * Engine developers should not use this directly (read on to see how parallel
+    * algorithm models are persisted).
+    *
+    * In general, parallel models may contain multiple RDDs. It is not easy to
+    * infer and persist them programmatically since these RDDs are potentially
+    * huge. To persist such models, engine developers need to mix the
+    * [[PersistentModel]] trait into the model class and implement
+    * [[PersistentModel.save]]. If it returns true, a
+    * [[org.apache.predictionio.workflow.PersistentModelManifest]] will be
+    * returned so that during deployment, PredictionIO will use
+    * [[PersistentModelLoader]] to retrieve the model. Otherwise, Unit will be
+    * returned and the model will be re-trained on-the-fly.
+    *
+    * @param sc Spark context
+    * @param modelId Model ID
+    * @param algoParams Algorithm parameters that trained this model
+    * @param bm Model
+    * @return The model itself for automatic persistence, an instance of
+    *         [[org.apache.predictionio.workflow.PersistentModelManifest]] for manual
+    *         persistence, or Unit for re-training on deployment
+    */
+  @DeveloperApi
+  override
+  def makePersistentModel(
+    sc: SparkContext,
+    modelId: String,
+    algoParams: Params,
+    bm: Any): Any = {
+    val m = bm.asInstanceOf[M]
+    if (m.isInstanceOf[PersistentModel[_]]) {
+      if (m.asInstanceOf[PersistentModel[Params]].save(
+        modelId, algoParams, sc)) {
+        PersistentModelManifest(className = m.getClass.getName)
+      } else {
+        Unit
+      }
+    } else {
+      Unit
+    }
+  }
+}
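
A sketch of a parallel algorithm whose model is itself an RDD (hypothetical
item-to-score model). Overriding batchPredict, instead of relying on the
default that throws, is what makes the algorithm usable with `pio eval`.

import org.apache.predictionio.controller.PAlgorithm
import org.apache.spark.SparkContext
import org.apache.spark.SparkContext._
import org.apache.spark.rdd.RDD

class ItemScoreAlgorithm
  extends PAlgorithm[RDD[(String, Double)], RDD[(String, Double)], String, Double] {

  // The "model" stays distributed; training here is a pass-through.
  def train(sc: SparkContext, pd: RDD[(String, Double)]): RDD[(String, Double)] = pd

  // Join indexed queries against the distributed model by item ID so that
  // evaluation can run over many queries at once.
  override def batchPredict(
    m: RDD[(String, Double)],
    qs: RDD[(Long, String)]): RDD[(Long, Double)] =
    qs.map(_.swap).join(m).map { case (_, (idx, score)) => (idx, score) }

  def predict(model: RDD[(String, Double)], query: String): Double =
    model.lookup(query).headOption.getOrElse(0.0)
}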

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/core/src/main/scala/org/apache/predictionio/controller/PDataSource.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/predictionio/controller/PDataSource.scala b/core/src/main/scala/org/apache/predictionio/controller/PDataSource.scala
new file mode 100644
index 0000000..e595dca
--- /dev/null
+++ b/core/src/main/scala/org/apache/predictionio/controller/PDataSource.scala
@@ -0,0 +1,57 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package org.apache.predictionio.controller
+
+import org.apache.predictionio.core.BaseDataSource
+import org.apache.spark.SparkContext
+import org.apache.spark.rdd.RDD
+
+/** Base class of a parallel data source.
+  *
+  * A parallel data source runs locally within a single machine, or in parallel
+  * on a cluster, to return data that is distributed across a cluster.
+  *
+  * @tparam TD Training data class.
+  * @tparam EI Evaluation Info class.
+  * @tparam Q Input query class.
+  * @tparam A Actual value class.
+  * @group Data Source
+  */
+abstract class PDataSource[TD, EI, Q, A]
+  extends BaseDataSource[TD, EI, Q, A] {
+
+  def readTrainingBase(sc: SparkContext): TD = readTraining(sc)
+
+  /** Implement this method to only return training data from a data source */
+  def readTraining(sc: SparkContext): TD
+
+  def readEvalBase(sc: SparkContext): Seq[(TD, EI, RDD[(Q, A)])] = readEval(sc)
+
+  /** To support evaluation of your engine, you must override this method to
+    * return data for evaluation from a data source. Returned data can
+    * optionally include a sequence of query and actual value pairs for
+    * evaluation purposes.
+    *
+    * The default implementation returns an empty sequence as a stub, so that
+    * an engine can be compiled without implementing evaluation.
+    */
+  def readEval(sc: SparkContext): Seq[(TD, EI, RDD[(Q, A)])] =
+    Seq[(TD, EI, RDD[(Q, A)])]()
+
+  @deprecated("Use readEval() instead.", "0.9.0")
+  def read(sc: SparkContext): Seq[(TD, EI, RDD[(Q, A)])] = readEval(sc)
+}
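
A minimal data source sketch (hypothetical): readTraining returns distributed
training data, and readEval is left at its default so the engine compiles
without evaluation support. A real engine would read from the event store
rather than an in-memory collection.

import org.apache.predictionio.controller.{EmptyEvaluationInfo, PDataSource}
import org.apache.spark.SparkContext
import org.apache.spark.rdd.RDD

// Assumed example class, not part of this commit.
case class TrainingData(ratings: RDD[(String, String, Double)])

class ToyDataSource
  extends PDataSource[TrainingData, EmptyEvaluationInfo, String, Double] {

  def readTraining(sc: SparkContext): TrainingData =
    TrainingData(sc.parallelize(Seq(("u1", "i1", 4.0), ("u2", "i1", 2.0))))
}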

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/core/src/main/scala/org/apache/predictionio/controller/PPreparator.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/predictionio/controller/PPreparator.scala b/core/src/main/scala/org/apache/predictionio/controller/PPreparator.scala
new file mode 100644
index 0000000..66f51e4
--- /dev/null
+++ b/core/src/main/scala/org/apache/predictionio/controller/PPreparator.scala
@@ -0,0 +1,44 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package org.apache.predictionio.controller
+
+import org.apache.predictionio.core.BasePreparator
+import org.apache.spark.SparkContext
+
+/** Base class of a parallel preparator.
+  *
+  * A parallel preparator can be run in parallel on a cluster and produces
+  * prepared data that is distributed across a cluster.
+  *
+  * @tparam TD Training data class.
+  * @tparam PD Prepared data class.
+  * @group Preparator
+  */
+abstract class PPreparator[TD, PD]
+  extends BasePreparator[TD, PD] {
+
+  def prepareBase(sc: SparkContext, td: TD): PD = {
+    prepare(sc, td)
+  }
+
+  /** Implement this method to produce prepared data that is ready for model
+    * training.
+    *
+    * @param sc An Apache Spark context.
+    * @param trainingData Training data to be prepared.
+    */
+  def prepare(sc: SparkContext, trainingData: TD): PD
+}
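
A pass-through preparator is a common starting point before real feature
engineering is added. A sketch, reusing the hypothetical TrainingData class
from the data source sketch above:

import org.apache.predictionio.controller.PPreparator
import org.apache.spark.SparkContext

class PassThroughPreparator extends PPreparator[TrainingData, TrainingData] {
  // Forward the training data unchanged; transformations would go here.
  def prepare(sc: SparkContext, trainingData: TrainingData): TrainingData =
    trainingData
}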

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/core/src/main/scala/org/apache/predictionio/controller/Params.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/predictionio/controller/Params.scala b/core/src/main/scala/org/apache/predictionio/controller/Params.scala
new file mode 100644
index 0000000..bdb3f7e
--- /dev/null
+++ b/core/src/main/scala/org/apache/predictionio/controller/Params.scala
@@ -0,0 +1,31 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package org.apache.predictionio.controller
+
+/** Base trait for all kinds of parameters that will be passed to constructors
+  * of different controller classes.
+  *
+  * @group Helper
+  */
+trait Params extends Serializable {}
+
+/** A concrete implementation of [[Params]] representing empty parameters.
+  *
+  * @group Helper
+  */
+case class EmptyParams() extends Params {
+  override def toString(): String = "Empty"
+}

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/core/src/main/scala/org/apache/predictionio/controller/PersistentModel.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/predictionio/controller/PersistentModel.scala b/core/src/main/scala/org/apache/predictionio/controller/PersistentModel.scala
new file mode 100644
index 0000000..fb8d57b
--- /dev/null
+++ b/core/src/main/scala/org/apache/predictionio/controller/PersistentModel.scala
@@ -0,0 +1,112 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package org.apache.predictionio.controller
+
+import org.apache.spark.SparkContext
+
+/** Mix in and implement this trait if your model cannot be persisted by
+  * PredictionIO automatically. A companion object extending
+  * [[PersistentModelLoader]] is required for PredictionIO to load the
+  * persisted model automatically during deployment.
+  *
+  * Note that models generated by [[PAlgorithm]] inherently cannot be
+  * persisted automatically and must implement these traits if model
+  * persistence is desired.
+  *
+  * {{{
+  * class MyModel extends PersistentModel[MyParams] {
+  *   def save(id: String, params: MyParams, sc: SparkContext): Boolean = {
+  *     ...
+  *   }
+  * }
+  *
+  * object MyModel extends PersistentModelLoader[MyParams, MyModel] {
+  *   def apply(id: String, params: MyParams, sc: Option[SparkContext]): MyModel = {
+  *     ...
+  *   }
+  * }
+  * }}}
+  *
+  * In Java, all you need to do is to implement this interface, and add a static
+  * method with 3 arguments of type String, [[Params]], and SparkContext.
+  *
+  * {{{
+  * public class MyModel implements PersistentModel<MyParams>, Serializable {
+  *   ...
+  *   public boolean save(String id, MyParams params, SparkContext sc) {
+  *     ...
+  *   }
+  *
+  *   public static MyModel load(String id, Params params, SparkContext sc) {
+  *     ...
+  *   }
+  *   ...
+  * }
+  * }}}
+  *
+  * @tparam AP Algorithm parameters class.
+  * @see [[PersistentModelLoader]]
+  * @group Algorithm
+  */
+trait PersistentModel[AP <: Params] {
+  /** Save the model to some persistent storage.
+    *
+    * This method should return true if the model has been saved successfully so
+    * that PredictionIO knows that it can be restored later during deployment.
+    * This method should return false if the model cannot be saved (or should
+    * not be saved due to configuration) so that PredictionIO will re-train the
+    * model during deployment. All arguments of this method are provided
+    * automatically by PredictionIO.
+    *
+    * @param id ID of the run that trained this model.
+    * @param params Algorithm parameters that were used to train this model.
+    * @param sc An Apache Spark context.
+    */
+  def save(id: String, params: AP, sc: SparkContext): Boolean
+}
+
+/** Implement an object that extends this trait for PredictionIO to support
+  * loading a persisted model during serving deployment.
+  *
+  * @tparam AP Algorithm parameters class.
+  * @tparam M Model class.
+  * @see [[PersistentModel]]
+  * @group Algorithm
+  */
+trait PersistentModelLoader[AP <: Params, M] {
+  /** Implement this method to restore a persisted model that extends the
+    * [[PersistentModel]] trait. All arguments of this method are provided
+    * automatically by PredictionIO.
+    *
+    * @param id ID of the run that trained this model.
+    * @param params Algorithm parameters that were used to train this model.
+    * @param sc An optional Apache Spark context. This will be injected if the
+    *           model was generated by a [[PAlgorithm]].
+    */
+  def apply(id: String, params: AP, sc: Option[SparkContext]): M
+}
+
+/** DEPRECATED. Use [[PersistentModel]] instead.
+  *
+  * @group Algorithm */
+@deprecated("Use PersistentModel instead.", "0.9.2")
+trait IPersistentModel[AP <: Params] extends PersistentModel[AP]
+
+/** DEPRECATED. Use [[PersistentModelLoader]] instead.
+  *
+  * @group Algorithm */
+@deprecated("Use PersistentModelLoader instead.", "0.9.2")
+trait IPersistentModelLoader[AP <: Params, M] extends PersistentModelLoader[AP, M]
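
Filling in the skeleton above, one plausible sketch persists an RDD-backed
model with Spark object files (hypothetical: MyParams and the storage path
are assumptions for the example):

import org.apache.predictionio.controller.{Params, PersistentModel, PersistentModelLoader}
import org.apache.spark.SparkContext
import org.apache.spark.rdd.RDD

case class MyParams(path: String) extends Params

class MyModel(val scores: RDD[(String, Double)])
  extends PersistentModel[MyParams] {
  def save(id: String, params: MyParams, sc: SparkContext): Boolean = {
    scores.saveAsObjectFile(s"${params.path}/$id")
    true  // true tells PredictionIO the model can be restored at deploy time
  }
}

object MyModel extends PersistentModelLoader[MyParams, MyModel] {
  def apply(id: String, params: MyParams, sc: Option[SparkContext]): MyModel =
    // sc is injected for models generated by a PAlgorithm
    new MyModel(sc.get.objectFile[(String, Double)](s"${params.path}/$id"))
}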

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/core/src/main/scala/org/apache/predictionio/controller/SanityCheck.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/predictionio/controller/SanityCheck.scala b/core/src/main/scala/org/apache/predictionio/controller/SanityCheck.scala
new file mode 100644
index 0000000..8449a71
--- /dev/null
+++ b/core/src/main/scala/org/apache/predictionio/controller/SanityCheck.scala
@@ -0,0 +1,30 @@
+/** Copyright 2015 TappingStone, Inc.
+*
+* Licensed under the Apache License, Version 2.0 (the "License");
+* you may not use this file except in compliance with the License.
+* You may obtain a copy of the License at
+*
+*     http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
+
+package org.apache.predictionio.controller
+
+/** Extend a data class with this trait if you want PredictionIO to
+  * automatically perform a sanity check on your data classes during training.
+  * This is very useful when you need to debug your engine.
+  *
+  * @group Helper
+  */
+trait SanityCheck {
+  /** Implement this method to perform checks on your data. This method should
+    * contain assertions that throw exceptions when your data does not meet
+    * your pre-defined requirement.
+    */
+  def sanityCheck(): Unit
+}
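
For example (a hypothetical sketch), a training data class can fail fast
during training when the data set is empty:

import org.apache.predictionio.controller.SanityCheck
import org.apache.spark.rdd.RDD

class TrainingData(val ratings: RDD[(String, String, Double)])
  extends Serializable with SanityCheck {
  // Called automatically during training; throws if the requirement fails.
  override def sanityCheck(): Unit =
    require(ratings.take(1).nonEmpty, "ratings cannot be empty")
}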

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/core/src/main/scala/org/apache/predictionio/controller/Utils.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/predictionio/controller/Utils.scala b/core/src/main/scala/org/apache/predictionio/controller/Utils.scala
new file mode 100644
index 0000000..e74fe4b
--- /dev/null
+++ b/core/src/main/scala/org/apache/predictionio/controller/Utils.scala
@@ -0,0 +1,69 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package org.apache.predictionio.controller
+
+import org.apache.predictionio.workflow.KryoInstantiator
+
+import org.json4s._
+import org.json4s.ext.JodaTimeSerializers
+
+import scala.io.Source
+
+import _root_.java.io.File
+import _root_.java.io.FileOutputStream
+
+/** Controller utilities.
+  *
+  * @group Helper
+  */
+object Utils {
+  /** Default JSON4S serializers for PredictionIO controllers. */
+  val json4sDefaultFormats = DefaultFormats.lossless ++ JodaTimeSerializers.all
+
+  /** Save a model object as a file to a temporary location on the local
+    * filesystem. It will first try to use the location indicated by the
+    * environment variable PIO_FS_TMPDIR, then fall back to the java.io.tmpdir
+    * property.
+    *
+    * @param id Used as the filename of the file.
+    * @param model Model object.
+    */
+  def save(id: String, model: Any): Unit = {
+    val tmpdir = sys.env.getOrElse("PIO_FS_TMPDIR", System.getProperty("java.io.tmpdir"))
+    val modelFile = tmpdir + File.separator + id
+    (new File(tmpdir)).mkdirs
+    val fos = new FileOutputStream(modelFile)
+    val kryo = KryoInstantiator.newKryoInjection
+    fos.write(kryo(model))
+    fos.close
+  }
+
+  /** Load a model object from a file in a temporary location on the local
+    * filesystem. It will first try to use the location indicated by the
+    * environment variable PIO_FS_TMPDIR, then fall back to the java.io.tmpdir
+    * property.
+    *
+    * @param id Used as the filename of the file.
+    */
+  def load(id: String): Any = {
+    val tmpdir = sys.env.getOrElse("PIO_FS_TMPDIR", System.getProperty("java.io.tmpdir"))
+    val modelFile = tmpdir + File.separator + id
+    val src = Source.fromFile(modelFile)(scala.io.Codec.ISO8859)
+    val kryo = KryoInstantiator.newKryoInjection
+    val m = kryo.invert(src.map(_.toByte).toArray).get
+    src.close
+    m
+  }
+}
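
A round trip through these helpers looks like the following sketch
(hypothetical model class and ID):

import org.apache.predictionio.controller.Utils

case class Centroids(values: Array[Double])

object UtilsRoundTrip extends App {
  // Serialize with Kryo into PIO_FS_TMPDIR (or java.io.tmpdir)...
  Utils.save("example-model-id", Centroids(Array(0.1, 0.5, 0.9)))
  // ...and restore; the caller is responsible for the cast.
  val restored = Utils.load("example-model-id").asInstanceOf[Centroids]
  println(restored.values.mkString(", "))
}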

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/core/src/main/scala/org/apache/predictionio/controller/java/JavaEngineParamsGenerator.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/predictionio/controller/java/JavaEngineParamsGenerator.scala b/core/src/main/scala/org/apache/predictionio/controller/java/JavaEngineParamsGenerator.scala
new file mode 100644
index 0000000..6ab5382
--- /dev/null
+++ b/core/src/main/scala/org/apache/predictionio/controller/java/JavaEngineParamsGenerator.scala
@@ -0,0 +1,39 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package org.apache.predictionio.controller.java
+
+import org.apache.predictionio.controller.EngineParams
+import org.apache.predictionio.controller.EngineParamsGenerator
+
+import scala.collection.JavaConversions.asScalaBuffer
+
+/** Define an engine parameter generator in Java
+  *
+  * Implementations of this abstract class can be supplied to "pio eval" as the second
+  * command line argument.
+  *
+  * @group Evaluation
+  */
+abstract class JavaEngineParamsGenerator extends EngineParamsGenerator {
+
+  /** Set the list of [[EngineParams]].
+    *
+    * @param engineParams A list of engine params
+    */
+  def setEngineParamsList(engineParams: java.util.List[_ <: EngineParams]) {
+    engineParamsList = asScalaBuffer(engineParams)
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/core/src/main/scala/org/apache/predictionio/controller/java/JavaEvaluation.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/predictionio/controller/java/JavaEvaluation.scala b/core/src/main/scala/org/apache/predictionio/controller/java/JavaEvaluation.scala
new file mode 100644
index 0000000..7c9c984
--- /dev/null
+++ b/core/src/main/scala/org/apache/predictionio/controller/java/JavaEvaluation.scala
@@ -0,0 +1,66 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package org.apache.predictionio.controller.java
+
+import org.apache.predictionio.controller.Evaluation
+import org.apache.predictionio.controller.Metric
+import org.apache.predictionio.core.BaseEngine
+
+import scala.collection.JavaConversions.asScalaBuffer
+
+/** Define an evaluation in Java.
+  *
+  * Implementations of this abstract class can be supplied to "pio eval" as the first
+  * argument.
+  *
+  * @group Evaluation
+  */
+abstract class JavaEvaluation extends Evaluation {
+  /** Set the [[BaseEngine]] and [[Metric]] for this [[Evaluation]]
+    *
+    * @param baseEngine [[BaseEngine]] for this [[JavaEvaluation]]
+    * @param metric [[Metric]] for this [[JavaEvaluation]]
+    * @tparam EI Evaluation information class
+    * @tparam Q Query class
+    * @tparam P Predicted result class
+    * @tparam A Actual result class
+    */
+  def setEngineMetric[EI, Q, P, A](
+    baseEngine: BaseEngine[EI, Q, P, A],
+    metric: Metric[EI, Q, P, A, _]) {
+
+    engineMetric = (baseEngine, metric)
+  }
+
+  /** Set the [[BaseEngine]] and [[Metric]]s for this [[JavaEvaluation]]
+    *
+    * @param baseEngine [[BaseEngine]] for this [[JavaEvaluation]]
+    * @param metric Primary [[Metric]] for this [[JavaEvaluation]]
+    * @param metrics Other [[Metric]]s for this [[JavaEvaluation]]
+    * @tparam EI Evaluation information class
+    * @tparam Q Query class
+    * @tparam P Predicted result class
+    * @tparam A Actual result class
+    */
+  def setEngineMetrics[EI, Q, P, A](
+    baseEngine: BaseEngine[EI, Q, P, A],
+    metric: Metric[EI, Q, P, A, _],
+    metrics: java.util.List[_ <: Metric[EI, Q, P, A, _]]) {
+
+    engineMetrics = (baseEngine, metric, asScalaBuffer(metrics))
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/core/src/main/scala/org/apache/predictionio/controller/java/LJavaAlgorithm.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/predictionio/controller/java/LJavaAlgorithm.scala b/core/src/main/scala/org/apache/predictionio/controller/java/LJavaAlgorithm.scala
new file mode 100644
index 0000000..41cbbff
--- /dev/null
+++ b/core/src/main/scala/org/apache/predictionio/controller/java/LJavaAlgorithm.scala
@@ -0,0 +1,31 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package org.apache.predictionio.controller.java
+
+import org.apache.predictionio.controller.LAlgorithm
+
+import scala.reflect.ClassTag
+
+/** Base class of a Java local algorithm. Refer to [[LAlgorithm]] for documentation.
+  *
+  * @tparam PD Prepared data class.
+  * @tparam M Trained model class.
+  * @tparam Q Input query class.
+  * @tparam P Output prediction class.
+  * @group Algorithm
+  */
+abstract class LJavaAlgorithm[PD, M, Q, P]
+  extends LAlgorithm[PD, M, Q, P]()(ClassTag.AnyRef.asInstanceOf[ClassTag[M]])

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/core/src/main/scala/org/apache/predictionio/controller/java/LJavaDataSource.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/predictionio/controller/java/LJavaDataSource.scala b/core/src/main/scala/org/apache/predictionio/controller/java/LJavaDataSource.scala
new file mode 100644
index 0000000..aca2ce6
--- /dev/null
+++ b/core/src/main/scala/org/apache/predictionio/controller/java/LJavaDataSource.scala
@@ -0,0 +1,31 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package org.apache.predictionio.controller.java
+
+import org.apache.predictionio.controller.LDataSource
+
+import scala.reflect.ClassTag
+
+/** Base class of a Java local data source. Refer to [[LDataSource]] for documentation.
+  *
+  * @tparam TD Training data class.
+  * @tparam EI Evaluation Info class.
+  * @tparam Q Input query class.
+  * @tparam A Actual value class.
+  * @group Data Source
+  */
+abstract class LJavaDataSource[TD, EI, Q, A]
+  extends LDataSource[TD, EI, Q, A]()(ClassTag.AnyRef.asInstanceOf[ClassTag[TD]])

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/core/src/main/scala/org/apache/predictionio/controller/java/LJavaPreparator.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/predictionio/controller/java/LJavaPreparator.scala b/core/src/main/scala/org/apache/predictionio/controller/java/LJavaPreparator.scala
new file mode 100644
index 0000000..8def08b
--- /dev/null
+++ b/core/src/main/scala/org/apache/predictionio/controller/java/LJavaPreparator.scala
@@ -0,0 +1,29 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package org.apache.predictionio.controller.java
+
+import org.apache.predictionio.controller.LPreparator
+
+import scala.reflect.ClassTag
+
+/** Base class of a Java local preparator. Refer to [[LPreparator]] for documentation.
+  *
+  * @tparam TD Training data class.
+  * @tparam PD Prepared data class.
+  * @group Preparator
+  */
+abstract class LJavaPreparator[TD, PD]
+  extends LPreparator[TD, PD]()(ClassTag.AnyRef.asInstanceOf[ClassTag[PD]])

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/core/src/main/scala/org/apache/predictionio/controller/java/LJavaServing.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/predictionio/controller/java/LJavaServing.scala b/core/src/main/scala/org/apache/predictionio/controller/java/LJavaServing.scala
new file mode 100644
index 0000000..ee380c3
--- /dev/null
+++ b/core/src/main/scala/org/apache/predictionio/controller/java/LJavaServing.scala
@@ -0,0 +1,26 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package org.apache.predictionio.controller.java
+
+import org.apache.predictionio.controller.LServing
+
+/** Base class of Java local serving. Refer to [[LServing]] for documentation.
+  *
+  * @tparam Q Input query class.
+  * @tparam P Output prediction class.
+  * @group Serving
+  */
+abstract class LJavaServing[Q, P] extends LServing[Q, P]

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/core/src/main/scala/org/apache/predictionio/controller/java/P2LJavaAlgorithm.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/predictionio/controller/java/P2LJavaAlgorithm.scala b/core/src/main/scala/org/apache/predictionio/controller/java/P2LJavaAlgorithm.scala
new file mode 100644
index 0000000..f41903d
--- /dev/null
+++ b/core/src/main/scala/org/apache/predictionio/controller/java/P2LJavaAlgorithm.scala
@@ -0,0 +1,33 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package org.apache.predictionio.controller.java
+
+import org.apache.predictionio.controller.P2LAlgorithm
+
+import scala.reflect.ClassTag
+
+/** Base class of a Java parallel-to-local algorithm. Refer to [[P2LAlgorithm]] for documentation.
+  *
+  * @tparam PD Prepared data class.
+  * @tparam M Trained model class.
+  * @tparam Q Input query class.
+  * @tparam P Output prediction class.
+  * @group Algorithm
+  */
+abstract class P2LJavaAlgorithm[PD, M, Q, P]
+  extends P2LAlgorithm[PD, M, Q, P]()(
+    ClassTag.AnyRef.asInstanceOf[ClassTag[M]],
+    ClassTag.AnyRef.asInstanceOf[ClassTag[Q]])

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/core/src/main/scala/org/apache/predictionio/controller/java/PJavaAlgorithm.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/predictionio/controller/java/PJavaAlgorithm.scala b/core/src/main/scala/org/apache/predictionio/controller/java/PJavaAlgorithm.scala
new file mode 100644
index 0000000..38eaa70
--- /dev/null
+++ b/core/src/main/scala/org/apache/predictionio/controller/java/PJavaAlgorithm.scala
@@ -0,0 +1,28 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package org.apache.predictionio.controller.java
+
+import org.apache.predictionio.controller.PAlgorithm
+
+/** Base class of a Java parallel algorithm. Refer to [[PAlgorithm]] for documentation.
+  *
+  * @tparam PD Prepared data class.
+  * @tparam M Trained model class.
+  * @tparam Q Input query class.
+  * @tparam P Output prediction class.
+  * @group Algorithm
+  */
+abstract class PJavaAlgorithm[PD, M, Q, P] extends PAlgorithm[PD, M, Q, P]

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/core/src/main/scala/org/apache/predictionio/controller/java/PJavaDataSource.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/predictionio/controller/java/PJavaDataSource.scala b/core/src/main/scala/org/apache/predictionio/controller/java/PJavaDataSource.scala
new file mode 100644
index 0000000..cb04da6
--- /dev/null
+++ b/core/src/main/scala/org/apache/predictionio/controller/java/PJavaDataSource.scala
@@ -0,0 +1,28 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package org.apache.predictionio.controller.java
+
+import org.apache.predictionio.controller.PDataSource
+
+/** Base class of a Java parallel data source. Refer to [[PDataSource]] for documentation.
+  *
+  * @tparam TD Training data class.
+  * @tparam EI Evaluation Info class.
+  * @tparam Q Input query class.
+  * @tparam A Actual value class.
+  * @group Data Source
+  */
+abstract class PJavaDataSource[TD, EI, Q, A] extends PDataSource[TD, EI, Q, A]

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/core/src/main/scala/org/apache/predictionio/controller/java/PJavaPreparator.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/predictionio/controller/java/PJavaPreparator.scala b/core/src/main/scala/org/apache/predictionio/controller/java/PJavaPreparator.scala
new file mode 100644
index 0000000..5fb953f
--- /dev/null
+++ b/core/src/main/scala/org/apache/predictionio/controller/java/PJavaPreparator.scala
@@ -0,0 +1,26 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package org.apache.predictionio.controller.java
+
+import org.apache.predictionio.controller.PPreparator
+
+/** Base class of a Java parallel preparator. Refer to [[PPreparator]] for documentation.
+  *
+  * @tparam TD Training data class.
+  * @tparam PD Prepared data class.
+  * @group Preparator
+  */
+abstract class PJavaPreparator[TD, PD] extends PPreparator[TD, PD]

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/core/src/main/scala/org/apache/predictionio/controller/java/SerializableComparator.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/predictionio/controller/java/SerializableComparator.scala b/core/src/main/scala/org/apache/predictionio/controller/java/SerializableComparator.scala
new file mode 100644
index 0000000..970cd6c
--- /dev/null
+++ b/core/src/main/scala/org/apache/predictionio/controller/java/SerializableComparator.scala
@@ -0,0 +1,20 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package org.apache.predictionio.controller.java
+
+import java.util.Comparator
+
+trait SerializableComparator[T] extends Comparator[T] with java.io.Serializable
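
SerializableComparator exists because java.util.Comparator does not itself
extend java.io.Serializable, yet comparators captured in Spark closures must
be serializable. A minimal sketch of a conforming comparator (byLength below
is illustrative, not part of this commit):

```
import org.apache.predictionio.controller.java.SerializableComparator

// A string comparator that survives Java serialization, e.g. when it is
// captured inside a Spark closure.
val byLength = new SerializableComparator[String] {
  def compare(a: String, b: String): Int = a.length - b.length
}
```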

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/core/src/main/scala/org/apache/predictionio/controller/package.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/predictionio/controller/package.scala b/core/src/main/scala/org/apache/predictionio/controller/package.scala
new file mode 100644
index 0000000..35b1e81
--- /dev/null
+++ b/core/src/main/scala/org/apache/predictionio/controller/package.scala
@@ -0,0 +1,168 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package org.apache.predictionio
+
+/** Provides building blocks for writing a complete prediction engine
+  * consisting of DataSource, Preparator, Algorithm, Serving, and Evaluation.
+  *
+  * == Start Building an Engine ==
+  * The starting point of a prediction engine is the [[Engine]] class.
+  *
+  * == The DASE Paradigm ==
+  * The building blocks together form the DASE paradigm. Learn more about DASE
+  * [[http://docs.prediction.io/customize/ here]].
+  *
+  * == Types of Building Blocks ==
+  * Depending on the problem you are solving, you would need to pick appropriate
+  * flavors of building blocks.
+  *
+  * === Engines ===
+  * There are 3 typical engine configurations:
+  *
+  *  1. [[PDataSource]], [[PPreparator]], [[P2LAlgorithm]], [[LServing]]
+  *  2. [[PDataSource]], [[PPreparator]], [[PAlgorithm]], [[LServing]]
+  *  3. [[LDataSource]], [[LPreparator]], [[LAlgorithm]], [[LServing]]
+  *
+  * In both configurations 1 and 2, data is sourced and prepared in a
+  * parallelized fashion, with data type as RDD.
+  *
+  * The difference between configurations 1 and 2 comes at the algorithm stage.
+  * In configuration 1, the algorithm operates on potentially large data as RDDs
+  * in the Spark cluster, and eventually outputs a model that is small enough to
+  * fit in a single machine.
+  *
+  * On the other hand, configuration 2 outputs a model that is potentially too
+  * large to fit in a single machine, and must reside in the Spark cluster as
+  * RDD(s).
+  *
+  * With configuration 1 ([[P2LAlgorithm]]), PredictionIO will automatically
+  * try to persist the model to local disk or HDFS if the model is serializable.
+  *
+  * With configuration 2 ([[PAlgorithm]]), PredictionIO will not automatically
+  * try to persist the model, unless the model implements the [[PersistentModel]]
+  * trait.
+  *
+  * In special circumstances where both the data and the model are small,
+  * configuration 3 may be used. Beware that RDDs cannot be used with
+  * configuration 3.
+  *
+  * === Data Source ===
+  * [[PDataSource]] is probably the most used data source base class with the
+  * ability to process RDD-based data. [[LDataSource]] '''cannot''' handle
+  * RDD-based data. Use it only when you have a special requirement.
+  *
+  * === Preparator ===
+  * With [[PDataSource]], you must pick [[PPreparator]]. The same applies to
+  * [[LDataSource]] and [[LPreparator]].
+  *
+  * === Algorithm ===
+  * The workhorse of the engine comes in 3 different flavors.
+  *
+  * ==== P2LAlgorithm ====
+  * Produces a model that is small enough to fit in a single machine from
+  * [[PDataSource]] and [[PPreparator]]. The model '''cannot''' contain any RDD.
+  * If the produced model is serializable, PredictionIO will try to
+  * automatically persist it. In addition, P2LAlgorithm.batchPredict is
+  * already implemented for [[Evaluation]] purposes.
+  *
+  * ==== PAlgorithm ====
+  * Produces a model that could contain RDDs from [[PDataSource]] and
+  * [[PPreparator]]. PredictionIO will not try to persist it automatically
+  * unless the model implements [[PersistentModel]]. [[PAlgorithm.batchPredict]]
+  * must be implemented for [[Evaluation]].
+  *
+  * ==== LAlgorithm ====
+  * Produces a model that is small enough to fit in a single machine from
+  * [[LDataSource]] and [[LPreparator]]. The model '''cannot''' contain any RDD.
+  * If the produced model is serializable, PredictionIO will try to
+  * automatically persist it. In addition, LAlgorithm.batchPredict is
+  * already implemented for [[Evaluation]] purposes.
+  *
+  * === Serving ===
+  * The serving component comes in only one flavor: [[LServing]]. At the
+  * serving stage, it is assumed that the result being served is already at a
+  * human-consumable size.
+  *
+  * == Model Persistence ==
+  * PredictionIO tries its best to persist trained models automatically. Please
+  * refer to [[LAlgorithm.makePersistentModel]],
+  * [[P2LAlgorithm.makePersistentModel]], and [[PAlgorithm.makePersistentModel]]
+  * for descriptions on different strategies.
+  */
+package object controller {
+
+  /** Base class of several helper types that represent emptiness
+    *
+    * @group Helper
+    */
+  class SerializableClass() extends Serializable
+
+  /** Empty data source parameters.
+    * @group Helper
+    */
+  type EmptyDataSourceParams = EmptyParams
+
+  /** Empty data parameters.
+    * @group Helper
+    */
+  type EmptyDataParams = EmptyParams
+
+  /** Empty evaluation info.
+    * @group Helper
+    */
+  type EmptyEvaluationInfo = SerializableClass
+
+  /** Empty preparator parameters.
+    * @group Helper
+    */
+  type EmptyPreparatorParams = EmptyParams
+
+  /** Empty algorithm parameters.
+    * @group Helper
+    */
+  type EmptyAlgorithmParams = EmptyParams
+
+  /** Empty serving parameters.
+    * @group Helper
+    */
+  type EmptyServingParams = EmptyParams
+
+  /** Empty metrics parameters.
+    * @group Helper
+    */
+  type EmptyMetricsParams = EmptyParams
+
+  /** Empty training data.
+    * @group Helper
+    */
+  type EmptyTrainingData = SerializableClass
+
+  /** Empty prepared data.
+    * @group Helper
+    */
+  type EmptyPreparedData = SerializableClass
+
+  /** Empty model.
+    * @group Helper
+    */
+  type EmptyModel = SerializableClass
+
+  /** Empty actual result.
+    * @group Helper
+    */
+  type EmptyActualResult = SerializableClass
+
+}
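
To make the package documentation above concrete, a typical engine factory
wires the four DASE components together. The sketch below assumes engine
configuration 1 (PDataSource, PPreparator, P2LAlgorithm, LServing); all My*
class names are hypothetical placeholders, not part of this commit:

```
import org.apache.predictionio.controller.{Engine, EngineFactory}

object MyEngineFactory extends EngineFactory {
  def apply() = {
    new Engine(
      classOf[MyDataSource],               // extends PDataSource
      classOf[MyPreparator],               // extends PPreparator
      Map("algo" -> classOf[MyAlgorithm]), // extends P2LAlgorithm
      classOf[MyServing])                  // extends LServing
  }
}
```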

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/core/src/main/scala/org/apache/predictionio/core/AbstractDoer.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/predictionio/core/AbstractDoer.scala b/core/src/main/scala/org/apache/predictionio/core/AbstractDoer.scala
new file mode 100644
index 0000000..5f90d99
--- /dev/null
+++ b/core/src/main/scala/org/apache/predictionio/core/AbstractDoer.scala
@@ -0,0 +1,66 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package org.apache.predictionio.core
+
+import grizzled.slf4j.Logging
+import org.apache.predictionio.annotation.DeveloperApi
+import org.apache.predictionio.controller.Params
+
+/** :: DeveloperApi ::
+  * Base class for all controllers
+  */
+@DeveloperApi
+abstract class AbstractDoer extends Serializable
+
+/** :: DeveloperApi ::
+  * Provides facility to instantiate controller classes
+  */
+@DeveloperApi
+object Doer extends Logging {
+  /** :: DeveloperApi ::
+    * Instantiates a controller class using supplied controller parameters as
+    * constructor parameters
+    *
+    * @param cls Class of the controller class
+    * @param params Parameters of the controller class
+    * @tparam C Controller class
+    * @return An instance of the controller class
+    */
+  @DeveloperApi
+  def apply[C <: AbstractDoer] (
+    cls: Class[_ <: C], params: Params): C = {
+
+    // Subclasses only allow two kinds of constructors:
+    // 1. a constructor taking a single P <: Params argument, and
+    // 2. an empty constructor.
+    // Try (1) first; if that fails, fall back to (2).
+    try {
+      val constr = cls.getConstructor(params.getClass)
+      constr.newInstance(params)
+    } catch {
+      case e: NoSuchMethodException => try {
+        val zeroConstr = cls.getConstructor()
+        zeroConstr.newInstance()
+      } catch {
+        case e: NoSuchMethodException =>
+          error(s"${params.getClass.getName} was used as the constructor " +
+            s"argument to ${e.getMessage}, but no constructor can handle it. " +
+            "Aborting.")
+          sys.exit(1)
+      }
+    }
+  }
+}
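
The two constructor shapes accepted by Doer can be illustrated with a short
sketch (MyParams and MyAlgo are hypothetical names):

```
import org.apache.predictionio.controller.Params
import org.apache.predictionio.core.{AbstractDoer, Doer}

case class MyParams(rank: Int) extends Params

// Constructor shape 1: a single argument whose type is a Params subclass.
class MyAlgo(params: MyParams) extends AbstractDoer

// Doer reflectively matches MyParams against MyAlgo's constructor; had
// MyAlgo only a zero-argument constructor, shape 2 would be used instead.
val algo: MyAlgo = Doer(classOf[MyAlgo], MyParams(rank = 10))
```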

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/core/src/main/scala/org/apache/predictionio/core/BaseAlgorithm.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/predictionio/core/BaseAlgorithm.scala b/core/src/main/scala/org/apache/predictionio/core/BaseAlgorithm.scala
new file mode 100644
index 0000000..7774861
--- /dev/null
+++ b/core/src/main/scala/org/apache/predictionio/core/BaseAlgorithm.scala
@@ -0,0 +1,123 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package org.apache.predictionio.core
+
+import com.google.gson.TypeAdapterFactory
+import org.apache.predictionio.annotation.DeveloperApi
+import org.apache.predictionio.controller.Params
+import org.apache.predictionio.controller.Utils
+import net.jodah.typetools.TypeResolver
+import org.apache.spark.SparkContext
+import org.apache.spark.rdd.RDD
+
+/** :: DeveloperApi ::
+  * Base trait with default custom query serializer, exposed to engine developer
+  * via [[org.apache.predictionio.controller.CustomQuerySerializer]]
+  */
+@DeveloperApi
+trait BaseQuerySerializer {
+  /** :: DeveloperApi ::
+    * Serializer for Scala query classes using
+    * [[org.apache.predictionio.controller.Utils.json4sDefaultFormats]]
+    */
+  @DeveloperApi
+  @transient lazy val querySerializer = Utils.json4sDefaultFormats
+
+  /** :: DeveloperApi ::
+    * Serializer for Java query classes using Gson
+    */
+  @DeveloperApi
+  @transient lazy val gsonTypeAdapterFactories = Seq.empty[TypeAdapterFactory]
+}
+
+/** :: DeveloperApi ::
+  * Base class of all algorithm controllers
+  *
+  * @tparam PD Prepared data class
+  * @tparam M Model class
+  * @tparam Q Query class
+  * @tparam P Predicted result class
+  */
+@DeveloperApi
+abstract class BaseAlgorithm[PD, M, Q, P]
+  extends AbstractDoer with BaseQuerySerializer {
+  /** :: DeveloperApi ::
+    * Engine developers should not use this directly. This is called by workflow
+    * to train a model.
+    *
+    * @param sc Spark context
+    * @param pd Prepared data
+    * @return Trained model
+    */
+  @DeveloperApi
+  def trainBase(sc: SparkContext, pd: PD): M
+
+  /** :: DeveloperApi ::
+    * Engine developers should not use this directly. This is called by
+    * evaluation workflow to perform batch prediction.
+    *
+    * @param sc Spark context
+    * @param bm Model
+    * @param qs Batch of queries
+    * @return Batch of predicted results
+    */
+  @DeveloperApi
+  def batchPredictBase(sc: SparkContext, bm: Any, qs: RDD[(Long, Q)])
+  : RDD[(Long, P)]
+
+  /** :: DeveloperApi ::
+    * Engine developers should not use this directly. Called by serving to
+    * perform a single prediction.
+    *
+    * @param bm Model
+    * @param q Query
+    * @return Predicted result
+    */
+  @DeveloperApi
+  def predictBase(bm: Any, q: Q): P
+
+  /** :: DeveloperApi ::
+    * Engine developers should not use this directly. Prepare a model for
+    * persistence in the downstream consumer. PredictionIO supports 3 types of
+    * model persistence: automatic persistence, manual persistence, and
+    * re-training on deployment. This method provides a way for downstream
+    * modules to determine in which mode the model should be persisted.
+    *
+    * @param sc Spark context
+    * @param modelId Model ID
+    * @param algoParams Algorithm parameters that trained this model
+    * @param bm Model
+    * @return The model itself for automatic persistence, an instance of
+    *         [[org.apache.predictionio.workflow.PersistentModelManifest]] for manual
+    *         persistence, or Unit for re-training on deployment
+    */
+  @DeveloperApi
+  def makePersistentModel(
+    sc: SparkContext,
+    modelId: String,
+    algoParams: Params,
+    bm: Any): Any = Unit
+
+  /** :: DeveloperApi ::
+    * Obtains the type signature of query for this algorithm
+    *
+    * @return Type signature of query
+    */
+  def queryClass: Class[Q] = {
+    val types = TypeResolver.resolveRawArguments(classOf[BaseAlgorithm[PD, M, Q, P]], getClass)
+    types(2).asInstanceOf[Class[Q]]
+  }
+}
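
The three prediction entry points relate as follows: batchPredictBase is the
evaluation-time path, predictBase is the serving-time path, and both consume
the model produced by trainBase. A purely illustrative subclass follows
(EchoAlgorithm is hypothetical and sidesteps real training):

```
import org.apache.predictionio.core.BaseAlgorithm
import org.apache.spark.SparkContext
import org.apache.spark.rdd.RDD

class EchoAlgorithm extends BaseAlgorithm[Unit, String, String, String] {
  // "Trains" a trivial model.
  def trainBase(sc: SparkContext, pd: Unit): String = "echo-model"

  // Batch prediction delegates to the single-query path.
  def batchPredictBase(sc: SparkContext, bm: Any, qs: RDD[(Long, String)])
  : RDD[(Long, String)] = qs.mapValues(q => predictBase(bm, q))

  // Single prediction: echo the query back, prefixed by the model.
  def predictBase(bm: Any, q: String): String = s"$bm: $q"
}
```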

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/core/src/main/scala/org/apache/predictionio/core/BaseDataSource.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/predictionio/core/BaseDataSource.scala b/core/src/main/scala/org/apache/predictionio/core/BaseDataSource.scala
new file mode 100644
index 0000000..96e2548
--- /dev/null
+++ b/core/src/main/scala/org/apache/predictionio/core/BaseDataSource.scala
@@ -0,0 +1,52 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package org.apache.predictionio.core
+
+import org.apache.predictionio.annotation.DeveloperApi
+import org.apache.spark.SparkContext
+import org.apache.spark.rdd.RDD
+
+/** :: DeveloperApi ::
+  * Base class of all data source controllers
+  *
+  * @tparam TD Training data class
+  * @tparam EI Evaluation information class
+  * @tparam Q Query class
+  * @tparam A Actual result class
+  */
+@DeveloperApi
+abstract class BaseDataSource[TD, EI, Q, A] extends AbstractDoer {
+  /** :: DeveloperApi ::
+    * Engine developers should not use this directly. This is called by workflow
+    * to read training data.
+    *
+    * @param sc Spark context
+    * @return Training data
+    */
+  @DeveloperApi
+  def readTrainingBase(sc: SparkContext): TD
+
+  /** :: DeveloperApi ::
+    * Engine developers should not use this directly. This is called by
+    * evaluation workflow to read training and validation data.
+    *
+    * @param sc Spark context
+    * @return Sets of training data, evaluation information, queries, and actual
+    *         results
+    */
+  @DeveloperApi
+  def readEvalBase(sc: SparkContext): Seq[(TD, EI, RDD[(Q, A)])]
+}
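
A toy sketch of the two read paths (ToyDataSource is hypothetical):
readTrainingBase feeds deployment-time training, while readEvalBase yields one
or more (training data, evaluation info, query/actual) splits:

```
import org.apache.predictionio.core.BaseDataSource
import org.apache.spark.SparkContext
import org.apache.spark.rdd.RDD

class ToyDataSource extends BaseDataSource[Seq[Int], Unit, Int, Int] {
  def readTrainingBase(sc: SparkContext): Seq[Int] = Seq(1, 2, 3, 4)

  // One evaluation fold: train on the first three points, hold out the last
  // as a (query, actual) pair.
  def readEvalBase(sc: SparkContext): Seq[(Seq[Int], Unit, RDD[(Int, Int)])] =
    Seq((Seq(1, 2, 3), (), sc.parallelize(Seq((4, 4)))))
}
```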

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/core/src/main/scala/org/apache/predictionio/core/BaseEngine.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/predictionio/core/BaseEngine.scala b/core/src/main/scala/org/apache/predictionio/core/BaseEngine.scala
new file mode 100644
index 0000000..6dfce7d
--- /dev/null
+++ b/core/src/main/scala/org/apache/predictionio/core/BaseEngine.scala
@@ -0,0 +1,100 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package org.apache.predictionio.core
+
+import org.apache.predictionio.annotation.DeveloperApi
+import org.apache.predictionio.controller.EngineParams
+import org.apache.predictionio.workflow.JsonExtractorOption.JsonExtractorOption
+import org.apache.predictionio.workflow.WorkflowParams
+import org.apache.spark.SparkContext
+import org.apache.spark.rdd.RDD
+import org.json4s.JValue
+
+/** :: DeveloperApi ::
+  * Base class of all engine controller classes
+  *
+  * @tparam EI Evaluation information class
+  * @tparam Q Query class
+  * @tparam P Predicted result class
+  * @tparam A Actual result class
+  */
+@DeveloperApi
+abstract class BaseEngine[EI, Q, P, A] extends Serializable {
+  /** :: DeveloperApi ::
+    * Implement this method so that training this engine would return a list of
+    * models.
+    *
+    * @param sc An instance of SparkContext.
+    * @param engineParams An instance of [[EngineParams]] for running a single training.
+    * @param params An instance of [[WorkflowParams]] that controls the workflow.
+    * @return A list of models.
+    */
+  @DeveloperApi
+  def train(
+    sc: SparkContext,
+    engineParams: EngineParams,
+    engineInstanceId: String,
+    params: WorkflowParams): Seq[Any]
+
+  /** :: DeveloperApi ::
+    * Implement this method so that [[org.apache.predictionio.controller.Evaluation]] can
+    * use this method to generate inputs for [[org.apache.predictionio.controller.Metric]].
+    *
+    * @param sc An instance of SparkContext.
+    * @param engineParams An instance of [[EngineParams]] for running a single evaluation.
+    * @param params An instance of [[WorkflowParams]] that controls the workflow.
+    * @return A list of evaluation information and RDD of query, predicted
+    *         result, and actual result tuples.
+    */
+  @DeveloperApi
+  def eval(
+    sc: SparkContext,
+    engineParams: EngineParams,
+    params: WorkflowParams): Seq[(EI, RDD[(Q, P, A)])]
+
+  /** :: DeveloperApi ::
+    * Override this method to further optimize the process that runs multiple
+    * evaluations (during tuning, for example). By default, this method calls
+    * [[eval]] for each element in the engine parameters list.
+    *
+    * @param sc An instance of SparkContext.
+    * @param engineParamsList A list of [[EngineParams]] for running batch evaluation.
+    * @param params An instance of [[WorkflowParams]] that controls the workflow.
+    * @return A list of engine parameters and evaluation result (from [[eval]]) tuples.
+    */
+  @DeveloperApi
+  def batchEval(
+    sc: SparkContext,
+    engineParamsList: Seq[EngineParams],
+    params: WorkflowParams)
+  : Seq[(EngineParams, Seq[(EI, RDD[(Q, P, A)])])] = {
+    engineParamsList.map { engineParams =>
+      (engineParams, eval(sc, engineParams, params))
+    }
+  }
+
+  /** :: DeveloperApi ::
+    * Implement this method to convert a JValue (read from an engine variant
+    * JSON file) to an instance of [[EngineParams]].
+    *
+    * @param variantJson Content of the engine variant JSON as JValue.
+    * @param jsonExtractor The JSON extractor option that controls how the JSON is parsed.
+    * @return An instance of [[EngineParams]] converted from JSON.
+    */
+  @DeveloperApi
+  def jValueToEngineParams(variantJson: JValue, jsonExtractor: JsonExtractorOption): EngineParams =
+    throw new NotImplementedError("JSON to EngineParams is not implemented.")
+}
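
Given the default batchEval above, parameter tuning amounts to mapping eval
over a grid of engine parameters. A hedged sketch (engine, sc, and the
all-default EngineParams instances are placeholders, not part of this commit):

```
import org.apache.predictionio.controller.EngineParams
import org.apache.predictionio.workflow.WorkflowParams

// Hypothetical: two candidate parameter sets for the same engine.
// engine: BaseEngine[_, _, _, _] and sc: SparkContext are assumed in scope.
val grid: Seq[EngineParams] = Seq(new EngineParams(), new EngineParams())

val results = engine.batchEval(sc, grid, WorkflowParams())
results.foreach { case (ep, evalSets) =>
  println(s"evaluated ${evalSets.size} fold(s) for $ep")
}
```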

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/core/src/main/scala/org/apache/predictionio/core/BaseEvaluator.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/predictionio/core/BaseEvaluator.scala b/core/src/main/scala/org/apache/predictionio/core/BaseEvaluator.scala
new file mode 100644
index 0000000..71a086d
--- /dev/null
+++ b/core/src/main/scala/org/apache/predictionio/core/BaseEvaluator.scala
@@ -0,0 +1,72 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package org.apache.predictionio.core
+
+import org.apache.predictionio.annotation.DeveloperApi
+import org.apache.predictionio.annotation.Experimental
+import org.apache.predictionio.controller.EngineParams
+import org.apache.predictionio.controller.Evaluation
+import org.apache.predictionio.workflow.WorkflowParams
+import org.apache.spark.SparkContext
+import org.apache.spark.rdd.RDD
+
+/** :: DeveloperApi ::
+  * Base class of all evaluator controller classes
+  *
+  * @tparam EI Evaluation information class
+  * @tparam Q Query class
+  * @tparam P Predicted result class
+  * @tparam A Actual result class
+  * @tparam ER Evaluation result class
+  */
+@DeveloperApi
+abstract class BaseEvaluator[EI, Q, P, A, ER <: BaseEvaluatorResult]
+  extends AbstractDoer {
+  /** :: DeveloperApi ::
+    * Engine developers should not use this directly. This is called by
+    * evaluation workflow to perform evaluation.
+    *
+    * @param sc Spark context
+    * @param evaluation Evaluation to run
+    * @param engineEvalDataSet Sets of engine parameters and data for evaluation
+    * @param params Evaluation workflow parameters
+    * @return Evaluation result
+    */
+  @DeveloperApi
+  def evaluateBase(
+    sc: SparkContext,
+    evaluation: Evaluation,
+    engineEvalDataSet: Seq[(EngineParams, Seq[(EI, RDD[(Q, P, A)])])],
+    params: WorkflowParams): ER
+}
+
+/** Base trait of evaluator result */
+trait BaseEvaluatorResult extends Serializable {
+  /** A short description of the result */
+  def toOneLiner(): String = ""
+
+  /** HTML portion of the rendered evaluator results */
+  def toHTML(): String = ""
+
+  /** JSON portion of the rendered evaluator results */
+  def toJSON(): String = ""
+
+  /** :: Experimental ::
+    * When true, this result will not be saved to the database
+    */
+  @Experimental
+  val noSave: Boolean = false
+}
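
A compact illustrative result type implementing the three rendering hooks
(AccuracyResult is hypothetical):

```
import org.apache.predictionio.core.BaseEvaluatorResult

case class AccuracyResult(score: Double) extends BaseEvaluatorResult {
  override def toOneLiner(): String = f"accuracy = $score%.4f"
  override def toHTML(): String = s"<strong>accuracy</strong>: $score"
  override def toJSON(): String = s"""{"accuracy": $score}"""
}
```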


[30/34] incubator-predictionio git commit: change all to org.apache.predictionio except docs

Posted by do...@apache.org.
change all to org.apache.predictionio except docs


Project: http://git-wip-us.apache.org/repos/asf/incubator-predictionio/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-predictionio/commit/6d160b6b
Tree: http://git-wip-us.apache.org/repos/asf/incubator-predictionio/tree/6d160b6b
Diff: http://git-wip-us.apache.org/repos/asf/incubator-predictionio/diff/6d160b6b

Branch: refs/heads/develop
Commit: 6d160b6b5a3bf2870427cf8e27d00ece026055e2
Parents: 4f03388
Author: Xusen Yin <yi...@gmail.com>
Authored: Thu Jul 7 16:41:51 2016 -0700
Committer: Xusen Yin <yi...@gmail.com>
Committed: Thu Jul 7 16:41:51 2016 -0700

----------------------------------------------------------------------
 bin/pio                                         |  2 +-
 bin/pio-daemon                                  |  2 +-
 build.sbt                                       | 44 ++++++++++----------
 .../authentication/KeyAuthentication.scala      |  2 +-
 .../configuration/SSLConfiguration.scala        |  6 +--
 conf/server.conf                                |  6 +--
 .../predictionio/workflow/index.scala.html      |  6 +--
 data/README.md                                  | 18 ++++----
 .../data/storage/hbase/upgrade/Upgrade.scala    | 10 ++---
 .../storage/hbase/upgrade/Upgrade_0_8_3.scala   | 14 +++----
 data/src/test/resources/application.conf        |  2 +-
 .../data/api/SegmentIOAuthSpec.scala            |  4 +-
 .../java-local-helloworld/MyAlgorithm.java      |  2 +-
 .../java-local-helloworld/MyDataSource.java     |  2 +-
 .../java-local-helloworld/MyEngineFactory.java  |  2 +-
 .../java-local-helloworld/build.sbt             |  2 +-
 .../java-local-regression/README.md             |  2 +-
 .../java-local-regression/build.sbt             |  2 +-
 .../java-local-regression/engine.json           |  2 +-
 .../src/main/java/DataSource.java               |  4 +-
 .../src/main/java/DataSourceParams.java         |  4 +-
 .../src/main/java/DefaultAlgorithm.java         |  4 +-
 .../src/main/java/DefaultAlgorithmParams.java   |  4 +-
 .../src/main/java/EngineFactory.java            |  8 ++--
 .../src/main/java/MeanSquareEvaluator.java      |  6 +--
 .../src/main/java/OLSAlgorithm.java             |  6 +--
 .../src/main/java/Preparator.java               |  4 +-
 .../src/main/java/PreparatorParams.java         |  4 +-
 .../src/main/java/Run.java                      | 22 +++++-----
 .../src/main/java/Serving.java                  |  6 +--
 .../src/main/java/TrainingData.java             |  2 +-
 .../src/main/java/examples/manifest.json        |  4 +-
 .../experimental/java-local-tutorial/build.sbt  |  6 +--
 .../recommendations/tutorial1/AlgoParams.java   |  4 +-
 .../recommendations/tutorial1/Algorithm.java    |  4 +-
 .../recommendations/tutorial1/DataSource.java   |  4 +-
 .../tutorial1/DataSourceParams.java             |  4 +-
 .../tutorial1/EngineFactory.java                |  8 ++--
 .../java/recommendations/tutorial1/Model.java   |  2 +-
 .../java/recommendations/tutorial1/Query.java   |  2 +-
 .../recommendations/tutorial1/TrainingData.java |  2 +-
 .../java/recommendations/tutorial1/engine.json  |  4 +-
 .../java/recommendations/tutorial2/Runner1.java | 26 ++++++------
 .../java/recommendations/tutorial2/Runner2.java | 36 ++++++++--------
 .../recommendations/tutorial3/DataSource.java   | 10 ++---
 .../tutorial3/EngineFactory.java                | 14 +++----
 .../recommendations/tutorial3/Evaluator.java    |  8 ++--
 .../java/recommendations/tutorial3/Runner3.java | 22 +++++-----
 .../CollaborativeFilteringAlgorithm.java        |  4 +-
 .../CollaborativeFilteringAlgorithmParams.java  |  4 +-
 .../tutorial4/CollaborativeFilteringModel.java  |  2 +-
 .../recommendations/tutorial4/DataSource.java   |  6 +--
 .../tutorial4/DataSourceParams.java             |  4 +-
 .../tutorial4/EngineFactory.java                | 10 ++---
 .../tutorial4/FeatureBasedAlgorithm.java        |  4 +-
 .../tutorial4/FeatureBasedAlgorithmParams.java  |  4 +-
 .../tutorial4/FeatureBasedModel.java            |  2 +-
 .../recommendations/tutorial4/Preparator.java   |  6 +--
 .../recommendations/tutorial4/PreparedData.java |  2 +-
 .../java/recommendations/tutorial4/Query.java   |  2 +-
 .../recommendations/tutorial4/Runner4a.java     | 20 ++++-----
 .../recommendations/tutorial4/Runner4b.java     | 20 ++++-----
 .../recommendations/tutorial4/Runner4c.java     | 24 +++++------
 .../recommendations/tutorial4/Runner4d.java     | 24 +++++------
 .../java/recommendations/tutorial4/Serving.java |  6 +--
 .../tutorial4/SingleEngineFactory.java          | 12 +++---
 .../recommendations/tutorial4/TrainingData.java |  2 +-
 .../tutorial4/multiple-algo-engine.json         |  4 +-
 .../tutorial4/single-algo-engine.json           |  4 +-
 .../tutorial5/EngineFactory.java                | 14 +++----
 .../tutorial5/MahoutAlgoModel.java              |  2 +-
 .../tutorial5/MahoutAlgoParams.java             |  4 +-
 .../tutorial5/MahoutAlgorithm.java              | 12 +++---
 .../java/recommendations/tutorial5/Runner5.java | 22 +++++-----
 .../recommendations/tutorial5/manifest.json     |  4 +-
 .../java-parallel-helloworld/build.sbt          |  2 +-
 .../java-parallel-helloworld/engine.json        |  2 +-
 .../src/main/java/parallel/Algorithm.java       |  6 +--
 .../src/main/java/parallel/DataSource.java      |  6 +--
 .../src/main/java/parallel/EngineFactory.java   | 10 ++---
 .../src/main/java/parallel/Model.java           |  2 +-
 .../src/main/java/parallel/Preparator.java      |  6 +--
 .../src/main/java/parallel/Query.java           |  2 +-
 .../src/main/java/parallel/Runner.java          | 20 ++++-----
 .../src/main/java/parallel/Serving.java         |  6 +--
 .../src/main/java/parallel/build.sbt            |  6 +--
 .../experimental/scala-cleanup-app/build.sbt    |  4 +-
 .../experimental/scala-cleanup-app/engine.json  |  2 +-
 .../src/main/scala/Algorithm.scala              |  6 +--
 .../src/main/scala/DataSource.scala             | 16 +++----
 .../src/main/scala/Engine.scala                 |  8 ++--
 .../src/main/scala/Preparator.scala             |  6 +--
 .../src/main/scala/Serving.scala                |  6 +--
 .../scala-local-friend-recommendation/build.sbt |  6 +--
 .../keyword_similarity_engine.json              |  4 +-
 .../random_engine.json                          |  4 +-
 .../scala/FriendRecommendationAlgoParams.scala  |  4 +-
 .../scala/FriendRecommendationDataSource.scala  |  4 +-
 .../FriendRecommendationDataSourceParams.scala  |  4 +-
 .../scala/FriendRecommendationPrediction.scala  |  2 +-
 .../main/scala/FriendRecommendationQuery.scala  |  2 +-
 .../FriendRecommendationTrainingData.scala      |  2 +-
 .../main/scala/KeywordSimilarityAlgorithm.scala |  4 +-
 .../scala/KeywordSimilarityEngineFactory.scala  |  4 +-
 .../src/main/scala/KeywordSimilarityModel.scala |  2 +-
 .../src/main/scala/RandomAlgorithm.scala        |  4 +-
 .../src/main/scala/RandomEngineFactory.scala    |  4 +-
 .../src/main/scala/RandomModel.scala            |  2 +-
 .../scala-local-helloworld/HelloWorld.scala     |  2 +-
 .../scala-local-helloworld/build.sbt            |  4 +-
 .../scala-local-movielens-evaluation/build.sbt  |  4 +-
 .../src/main/scala/Evaluation.scala             | 28 ++++++-------
 .../src/main/scala/ItemRecEvaluation.scala      | 28 ++++++-------
 .../scala-local-movielens-filtering/build.sbt   |  4 +-
 .../src/main/scala/Engine.scala                 | 10 ++---
 .../src/main/scala/Filtering.scala              |  8 ++--
 .../scala-local-regression/README.md            |  4 +-
 .../scala-local-regression/Run.scala            | 30 ++++++-------
 .../scala-local-regression/build.sbt            |  4 +-
 .../scala-local-regression/engine.json          |  2 +-
 .../README.md                                   |  2 +-
 .../build.sbt                                   |  4 +-
 .../engine-forest.json                          |  2 +-
 .../engine.json                                 |  2 +-
 .../src/main/scala/DataSource.scala             | 14 +++----
 .../src/main/scala/DeltaSimRankRDD.scala        |  2 +-
 .../src/main/scala/Engine.scala                 |  6 +--
 .../src/main/scala/Preparator.scala             | 10 ++---
 .../src/main/scala/Sampling.scala               |  2 +-
 .../src/main/scala/Serving.scala                |  6 +--
 .../src/main/scala/SimRankAlgorithm.scala       |  6 +--
 .../scala-parallel-recommendation-cat/build.sbt |  4 +-
 .../src/main/scala/ALSAlgorithm.scala           | 10 ++---
 .../src/main/scala/DataSource.scala             | 12 +++---
 .../src/main/scala/Engine.scala                 |  4 +-
 .../src/main/scala/Preparator.scala             |  2 +-
 .../src/main/scala/Serving.scala                |  2 +-
 .../build.sbt                                   |  4 +-
 .../src/main/scala/ALSAlgorithm.scala           |  6 +--
 .../src/main/scala/ALSModel.scala               |  6 +--
 .../src/main/scala/DataSource.scala             | 12 +++---
 .../src/main/scala/Engine.scala                 |  4 +-
 .../src/main/scala/Preparator.scala             |  2 +-
 .../src/main/scala/Serving.scala                |  2 +-
 .../build.sbt                                   |  4 +-
 .../src/main/scala/ALSAlgorithm.scala           |  4 +-
 .../src/main/scala/ALSModel.scala               |  6 +--
 .../src/main/scala/DataSource.scala             | 14 +++----
 .../src/main/scala/Engine.scala                 |  4 +-
 .../src/main/scala/Preparator.scala             |  4 +-
 .../src/main/scala/Serving.scala                |  2 +-
 .../build.sbt                                   |  4 +-
 .../src/main/scala/ALSAlgorithm.scala           |  6 +--
 .../src/main/scala/ALSModel.scala               |  6 +--
 .../src/main/scala/DataSource.scala             | 12 +++---
 .../src/main/scala/Engine.scala                 |  4 +-
 .../src/main/scala/Preparator.scala             |  2 +-
 .../src/main/scala/Serving.scala                |  2 +-
 .../scala-parallel-regression/Run.scala         | 24 +++++------
 .../scala-parallel-regression/build.sbt         |  2 +-
 .../scala-parallel-regression/engine.json       |  2 +-
 .../build.sbt                                   |  4 +-
 .../src/main/scala/DIMSUMAlgorithm.scala        | 10 ++---
 .../src/main/scala/DataSource.scala             | 12 +++---
 .../src/main/scala/Engine.scala                 |  4 +-
 .../src/main/scala/Preparator.scala             |  2 +-
 .../src/main/scala/Serving.scala                |  2 +-
 .../build.sbt                                   |  4 +-
 .../src/main/scala/ALSAlgorithm.scala           | 10 ++---
 .../src/main/scala/DataSource.scala             | 12 +++---
 .../src/main/scala/Engine.scala                 |  4 +-
 .../src/main/scala/Preparator.scala             |  2 +-
 .../src/main/scala/Serving.scala                |  2 +-
 .../scala-parallel-trim-app/build.sbt           |  4 +-
 .../scala-parallel-trim-app/engine.json         |  2 +-
 .../src/main/scala/Algorithm.scala              |  6 +--
 .../src/main/scala/DataSource.scala             | 16 +++----
 .../src/main/scala/Engine.scala                 |  8 ++--
 .../src/main/scala/Preparator.scala             |  6 +--
 .../src/main/scala/Serving.scala                |  6 +--
 .../scala-recommendations/build.sbt             |  4 +-
 .../src/main/scala/Run.scala                    | 24 +++++------
 .../experimental/scala-refactor-test/build.sbt  |  6 +--
 .../src/main/scala/Algorithm.scala              |  4 +-
 .../src/main/scala/DataSource.scala             | 14 +++----
 .../src/main/scala/Engine.scala                 |  8 ++--
 .../src/main/scala/Evaluator.scala              |  6 +--
 .../src/main/scala/Preparator.scala             |  4 +-
 .../src/main/scala/Serving.scala                |  2 +-
 examples/experimental/scala-stock/README.md     | 10 ++---
 examples/experimental/scala-stock/build.sbt     |  6 +--
 .../scala-stock/src/main/scala/Algorithm.scala  |  6 +--
 .../src/main/scala/BackTestingMetrics.scala     | 10 ++---
 .../scala-stock/src/main/scala/Data.scala       |  2 +-
 .../scala-stock/src/main/scala/DataSource.scala | 10 ++---
 .../scala-stock/src/main/scala/Indicators.scala |  2 +-
 .../src/main/scala/RegressionStrategy.scala     |  4 +-
 .../scala-stock/src/main/scala/Run.scala        | 16 +++----
 .../src/main/scala/YahooDataSource.scala        | 14 +++----
 .../examples/stock/backtesting.scala.html       |  2 +-
 .../add-algorithm/build.sbt                     |  4 +-
 .../add-algorithm/project/pio-build.sbt         |  2 +-
 .../src/main/scala/DataSource.scala             | 12 +++---
 .../add-algorithm/src/main/scala/Engine.scala   |  4 +-
 .../src/main/scala/NaiveBayesAlgorithm.scala    |  4 +-
 .../src/main/scala/Preparator.scala             |  2 +-
 .../src/main/scala/RandomForestAlgorithm.scala  |  4 +-
 .../add-algorithm/src/main/scala/Serving.scala  |  2 +-
 .../custom-attributes/build.sbt                 |  6 +--
 .../src/main/scala/DataSource.scala             | 12 +++---
 .../src/main/scala/Engine.scala                 |  4 +-
 .../src/main/scala/Preparator.scala             |  2 +-
 .../src/main/scala/RandomForestAlgorithm.scala  |  4 +-
 .../src/main/scala/Serving.scala                |  2 +-
 .../train-with-rate-event/build.sbt             |  4 +-
 .../train-with-rate-event/project/pio-build.sbt |  2 +-
 .../src/main/scala/ALSAlgorithm.scala           | 10 ++---
 .../src/main/scala/DataSource.scala             | 12 +++---
 .../src/main/scala/Engine.scala                 |  4 +-
 .../src/main/scala/Preparator.scala             |  2 +-
 .../src/main/scala/Serving.scala                |  2 +-
 .../weighted-items/build.sbt                    |  4 +-
 .../weighted-items/project/pio-build.sbt        |  2 +-
 .../src/main/scala/ALSAlgorithm.scala           | 10 ++---
 .../src/main/scala/DataSource.scala             | 12 +++---
 .../weighted-items/src/main/scala/Engine.scala  |  4 +-
 .../src/main/scala/Preparator.scala             |  2 +-
 .../weighted-items/src/main/scala/Serving.scala |  2 +-
 .../custom-prepartor/build.sbt                  |  4 +-
 .../custom-prepartor/project/pio-build.sbt      |  2 +-
 .../src/main/scala/ALSAlgorithm.scala           |  6 +--
 .../src/main/scala/ALSModel.scala               |  6 +--
 .../src/main/scala/DataSource.scala             | 12 +++---
 .../src/main/scala/Engine.scala                 |  4 +-
 .../src/main/scala/Preparator.scala             |  4 +-
 .../src/main/scala/Serving.scala                |  2 +-
 .../custom-query/build.sbt                      |  4 +-
 .../custom-query/data/build.sbt                 |  2 +-
 .../recommendation/ImportDataScript.scala       |  2 +-
 .../src/main/scala/ALSAlgorithm.scala           |  6 +--
 .../custom-query/src/main/scala/ALSModel.scala  |  6 +--
 .../src/main/scala/DataSource.scala             | 10 ++---
 .../custom-query/src/main/scala/Engine.scala    |  4 +-
 .../src/main/scala/Preparator.scala             |  2 +-
 .../custom-query/src/main/scala/Serving.scala   |  2 +-
 .../custom-serving/build.sbt                    |  4 +-
 .../custom-serving/project/pio-build.sbt        |  2 +-
 .../src/main/scala/ALSAlgorithm.scala           |  6 +--
 .../src/main/scala/ALSModel.scala               |  6 +--
 .../src/main/scala/DataSource.scala             | 12 +++---
 .../custom-serving/src/main/scala/Engine.scala  |  4 +-
 .../src/main/scala/Preparator.scala             |  2 +-
 .../custom-serving/src/main/scala/Serving.scala |  4 +-
 .../filter-by-category/build.sbt                |  6 +--
 .../filter-by-category/project/pio-build.sbt    |  2 +-
 .../src/main/scala/ALSAlgorithm.scala           |  6 +--
 .../src/main/scala/ALSModel.scala               |  6 +--
 .../src/main/scala/DataSource.scala             | 12 +++---
 .../src/main/scala/Engine.scala                 |  4 +-
 .../src/main/scala/Preparator.scala             |  2 +-
 .../src/main/scala/Serving.scala                |  2 +-
 .../add-and-return-item-properties/build.sbt    |  6 +--
 .../project/pio-build.sbt                       |  2 +-
 .../src/main/scala/ALSAlgorithm.scala           |  6 +--
 .../src/main/scala/DataSource.scala             | 12 +++---
 .../src/main/scala/Engine.scala                 |  4 +-
 .../src/main/scala/Preparator.scala             |  2 +-
 .../src/main/scala/Serving.scala                |  2 +-
 .../add-rateevent/build.sbt                     |  4 +-
 .../add-rateevent/project/pio-build.sbt         |  2 +-
 .../src/main/scala/ALSAlgorithm.scala           |  6 +--
 .../src/main/scala/DataSource.scala             | 12 +++---
 .../add-rateevent/src/main/scala/Engine.scala   |  4 +-
 .../src/main/scala/Preparator.scala             |  2 +-
 .../add-rateevent/src/main/scala/Serving.scala  |  2 +-
 .../filterbyyear/build.sbt                      |  6 +--
 .../src/main/scala/ALSAlgorithm.scala           |  6 +--
 .../src/main/scala/DataSource.scala             | 12 +++---
 .../filterbyyear/src/main/scala/Engine.scala    |  4 +-
 .../src/main/scala/Preparator.scala             |  2 +-
 .../filterbyyear/src/main/scala/Serving.scala   |  2 +-
 .../multi/build.sbt                             |  4 +-
 .../multi/project/pio-build.sbt                 |  2 +-
 .../multi/src/main/scala/ALSAlgorithm.scala     | 10 ++---
 .../multi/src/main/scala/DataSource.scala       | 12 +++---
 .../multi/src/main/scala/Engine.scala           |  4 +-
 .../multi/src/main/scala/LikeAlgorithm.scala    |  2 +-
 .../multi/src/main/scala/Preparator.scala       |  2 +-
 .../multi/src/main/scala/Serving.scala          |  2 +-
 .../no-set-user/build.sbt                       |  4 +-
 .../no-set-user/project/pio-build.sbt           |  2 +-
 .../src/main/scala/ALSAlgorithm.scala           |  6 +--
 .../no-set-user/src/main/scala/DataSource.scala | 12 +++---
 .../no-set-user/src/main/scala/Engine.scala     |  4 +-
 .../no-set-user/src/main/scala/Preparator.scala |  2 +-
 .../no-set-user/src/main/scala/Serving.scala    |  2 +-
 .../recommended-user/build.sbt                  |  4 +-
 .../recommended-user/project/pio-build.sbt      |  2 +-
 .../src/main/scala/ALSAlgorithm.scala           |  4 +-
 .../src/main/scala/DataSource.scala             |  4 +-
 .../src/main/scala/Engine.scala                 |  4 +-
 .../src/main/scala/Preparator.scala             |  2 +-
 .../src/main/scala/Serving.scala                |  2 +-
 .../apache/predictionio/tools/admin/README.md   |  4 +-
 .../tools/templates/scala/buildSbt.scala.txt    |  2 +-
 305 files changed, 936 insertions(+), 936 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/bin/pio
----------------------------------------------------------------------
diff --git a/bin/pio b/bin/pio
index f2ca006..4a030c0 100755
--- a/bin/pio
+++ b/bin/pio
@@ -39,4 +39,4 @@ FIRST_HALF="${@:1:$FIRST_SEP}"
 
 SECOND_HALF="${@:$FIRST_SEP+1}"
 
-exec ${PIO_HOME}/bin/pio-class io.prediction.tools.console.Console ${FIRST_HALF} --pio-home ${PIO_HOME} ${SECOND_HALF}
+exec ${PIO_HOME}/bin/pio-class org.apache.predictionio.tools.console.Console ${FIRST_HALF} --pio-home ${PIO_HOME} ${SECOND_HALF}

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/bin/pio-daemon
----------------------------------------------------------------------
diff --git a/bin/pio-daemon b/bin/pio-daemon
index b63ddb6..18aaa60 100755
--- a/bin/pio-daemon
+++ b/bin/pio-daemon
@@ -43,6 +43,6 @@ FIRST_HALF="${@:1:$FIRST_SEP}"
 
 SECOND_HALF="${@:$FIRST_SEP+1}"
 
-exec nohup ${PIO_HOME}/bin/pio-class io.prediction.tools.console.Console ${FIRST_HALF} --pio-home ${PIO_HOME} ${SECOND_HALF} <&- > /dev/null 2>&1 &
+exec nohup ${PIO_HOME}/bin/pio-class org.apache.predictionio.tools.console.Console ${FIRST_HALF} --pio-home ${PIO_HOME} ${SECOND_HALF} <&- > /dev/null 2>&1 &
 
 echo $! > ${PIDFILE}

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/build.sbt
----------------------------------------------------------------------
diff --git a/build.sbt b/build.sbt
index 74daaad..9342be0 100644
--- a/build.sbt
+++ b/build.sbt
@@ -18,7 +18,7 @@ name := "pio"
 
 version in ThisBuild := "0.9.7-SNAPSHOT"
 
-organization in ThisBuild := "io.prediction"
+organization in ThisBuild := "org.apache.predictionio"
 
 scalaVersion in ThisBuild := "2.10.5"
 
@@ -45,7 +45,7 @@ lazy val pioBuildInfoSettings = buildInfoSettings ++ Seq(
     scalaVersion,
     sbtVersion,
     sparkVersion),
-  buildInfoPackage := "io.prediction.core")
+  buildInfoPackage := "org.apache.predictionio.core")
 
 lazy val conf = file(".") / "conf"
 
@@ -97,12 +97,12 @@ scalacOptions in (ScalaUnidoc, unidoc) ++= Seq(
     "akka",
     "breeze",
     "html",
-    "io.prediction.annotation",
-    "io.prediction.controller.html",
-    "io.prediction.data.api",
-    "io.prediction.data.view",
-    "io.prediction.workflow",
-    "io.prediction.tools",
+    "org.apache.predictionio.annotation",
+    "org.apache.predictionio.controller.html",
+    "org.apache.predictionio.data.api",
+    "org.apache.predictionio.data.view",
+    "org.apache.predictionio.workflow",
+    "org.apache.predictionio.tools",
     "org",
     "scalikejdbc").mkString(":"),
   "-doc-title",
@@ -114,31 +114,31 @@ scalacOptions in (ScalaUnidoc, unidoc) ++= Seq(
 
 javacOptions in (JavaUnidoc, unidoc) := Seq(
   "-subpackages",
-  "io.prediction",
+  "org.apache.predictionio",
   "-exclude",
   Seq(
-    "io.prediction.controller.html",
-    "io.prediction.data.api",
-    "io.prediction.data.view",
-    "io.prediction.data.webhooks.*",
-    "io.prediction.workflow",
-    "io.prediction.tools",
+    "org.apache.predictionio.controller.html",
+    "org.apache.predictionio.data.api",
+    "org.apache.predictionio.data.view",
+    "org.apache.predictionio.data.webhooks.*",
+    "org.apache.predictionio.workflow",
+    "org.apache.predictionio.tools",
     "org.apache.hadoop").mkString(":"),
   "-windowtitle",
   "PredictionIO Javadoc " + version.value,
   "-group",
   "Java Controllers",
   Seq(
-    "io.prediction.controller.java",
-    "io.prediction.data.store.java").mkString(":"),
+    "org.apache.predictionio.controller.java",
+    "org.apache.predictionio.data.store.java").mkString(":"),
   "-group",
   "Scala Base Classes",
   Seq(
-    "io.prediction.controller",
-    "io.prediction.core",
-    "io.prediction.data.storage",
-    "io.prediction.data.storage.*",
-    "io.prediction.data.store").mkString(":"),
+    "org.apache.predictionio.controller",
+    "org.apache.predictionio.core",
+    "org.apache.predictionio.data.storage",
+    "org.apache.predictionio.data.storage.*",
+    "org.apache.predictionio.data.store").mkString(":"),
   "-overview",
   "docs/javadoc/javadoc-overview.html",
   "-noqualifier",

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/common/src/main/scala/org/apache/predictionio/authentication/KeyAuthentication.scala
----------------------------------------------------------------------
diff --git a/common/src/main/scala/org/apache/predictionio/authentication/KeyAuthentication.scala b/common/src/main/scala/org/apache/predictionio/authentication/KeyAuthentication.scala
index 0553952..6b5995d 100644
--- a/common/src/main/scala/org/apache/predictionio/authentication/KeyAuthentication.scala
+++ b/common/src/main/scala/org/apache/predictionio/authentication/KeyAuthentication.scala
@@ -32,7 +32,7 @@ trait KeyAuthentication {
 
   object ServerKey {
     private val config = ConfigFactory.load("server.conf")
-    val get = config.getString("io.prediction.server.accessKey")
+    val get = config.getString("org.apache.predictionio.server.accessKey")
     val param = "accessKey"
   }
 

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/common/src/main/scala/org/apache/predictionio/configuration/SSLConfiguration.scala
----------------------------------------------------------------------
diff --git a/common/src/main/scala/org/apache/predictionio/configuration/SSLConfiguration.scala b/common/src/main/scala/org/apache/predictionio/configuration/SSLConfiguration.scala
index 2a9344d..311802f 100644
--- a/common/src/main/scala/org/apache/predictionio/configuration/SSLConfiguration.scala
+++ b/common/src/main/scala/org/apache/predictionio/configuration/SSLConfiguration.scala
@@ -31,9 +31,9 @@ trait SSLConfiguration {
   private val serverConfig = ConfigFactory.load("server.conf")
 
   private val keyStoreResource =
-    serverConfig.getString("io.prediction.server.ssl-keystore-resource")
-  private val password = serverConfig.getString("io.prediction.server.ssl-keystore-pass")
-  private val keyAlias = serverConfig.getString("io.prediction.server.ssl-key-alias")
+    serverConfig.getString("org.apache.predictionio.server.ssl-keystore-resource")
+  private val password = serverConfig.getString("org.apache.predictionio.server.ssl-keystore-pass")
+  private val keyAlias = serverConfig.getString("org.apache.predictionio.server.ssl-key-alias")
 
   private val keyStore = {
 

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/conf/server.conf
----------------------------------------------------------------------
diff --git a/conf/server.conf b/conf/server.conf
index 2d2f628..93ff6e1 100644
--- a/conf/server.conf
+++ b/conf/server.conf
@@ -1,12 +1,12 @@
 # Engine and dashboard Server related configurations
-io.prediction.server {
+org.apache.predictionio.server {
 
-  # This access key is used by io.prediction.authentication.KeyAuthentication
+  # This access key is used by org.apache.predictionio.authentication.KeyAuthentication
   # to authenticate the Evaluation Dashboard, and Engine Server /stop and /reload endpoints
   # Should be passed as a query string param
   accessKey = ""
 
-  # configs used by io.prediction.configuration.SSLConfiguration
+  # configs used by org.apache.predictionio.configuration.SSLConfiguration
   ssl-keystore-resource = "keystore.jks"
   ssl-keystore-pass = "pioserver"
   ssl-key-alias = "selfsigned"

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/core/src/main/twirl/org/apache/predictionio/workflow/index.scala.html
----------------------------------------------------------------------
diff --git a/core/src/main/twirl/org/apache/predictionio/workflow/index.scala.html b/core/src/main/twirl/org/apache/predictionio/workflow/index.scala.html
index 4e0707b..5a3894f 100644
--- a/core/src/main/twirl/org/apache/predictionio/workflow/index.scala.html
+++ b/core/src/main/twirl/org/apache/predictionio/workflow/index.scala.html
@@ -1,6 +1,6 @@
-@import io.prediction.data.storage.EngineInstance
-@import io.prediction.data.storage.EngineManifest
-@import io.prediction.workflow.ServerConfig
+@import org.apache.predictionio.data.storage.EngineInstance
+@import org.apache.predictionio.data.storage.EngineManifest
+@import org.apache.predictionio.workflow.ServerConfig
 @import org.joda.time.DateTime
 @import org.joda.time.format.DateTimeFormat
 @(args: ServerConfig,

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/data/README.md
----------------------------------------------------------------------
diff --git a/data/README.md b/data/README.md
index cead614..8f4e121 100644
--- a/data/README.md
+++ b/data/README.md
@@ -11,7 +11,7 @@ $ sbt/sbt "data/compile"
 $ set -a
 $ source conf/pio-env.sh
 $ set +a
-$ sbt/sbt "data/run-main io.prediction.data.api.Run"
+$ sbt/sbt "data/run-main org.apache.predictionio.data.api.Run"
 ```
 
 ### Very simple test
@@ -32,20 +32,20 @@ $ sbt/sbt "data/test"
 - test for EventService
 
 ```
-$ sbt/sbt "data/test-only io.prediction.data.api.EventServiceSpec"
+$ sbt/sbt "data/test-only org.apache.predictionio.data.api.EventServiceSpec"
 ```
 
 - test for LEvents
 
 ```
-$ sbt/sbt "data/test-only io.prediction.data.storage.LEventsSpec"
+$ sbt/sbt "data/test-only org.apache.predictionio.data.storage.LEventsSpec"
 ```
 
 - test for ExampleJson and ExampleForm webhooks
 
 ```
-$ sbt/sbt "data/test-only io.prediction.data.webhooks.examplejson.ExampleJsonConnectorSpec"
-$ sbt/sbt "data/test-only io.prediction.data.webhooks.exampleform.ExampleFormConnectorSpec"
+$ sbt/sbt "data/test-only org.apache.predictionio.data.webhooks.examplejson.ExampleJsonConnectorSpec"
+$ sbt/sbt "data/test-only org.apache.predictionio.data.webhooks.exampleform.ExampleFormConnectorSpec"
 ```
 
 ### Upgrade from 0.8.0/0.8.1 to 0.8.2
@@ -63,7 +63,7 @@ Replace <to app ID> by the returned app ID:
 $ set -a
 $ source conf/pio-env.sh
 $ set +a
-$ sbt/sbt "data/run-main io.prediction.data.storage.hbase.upgrade.Upgrade <from app ID>" "<to app ID>"
+$ sbt/sbt "data/run-main org.apache.predictionio.data.storage.hbase.upgrade.Upgrade <from app ID>" "<to app ID>"
 ```
 
 ### Upgrade from 0.8.2 to 0.8.3
@@ -91,7 +91,7 @@ $ bin/pio app new NewApp
 The App with `<new_app_id>` must be empty before you upgrade. You can check the status of this newly created app using:
 
 ```
-$ sbt/sbt "data/run-main io.prediction.data.storage.hbase.upgrade.CheckDistribution <new_app_id>"
+$ sbt/sbt "data/run-main org.apache.predictionio.data.storage.hbase.upgrade.CheckDistribution <new_app_id>"
 ```
 
If it shows that the app is non-empty, you can clean it with
@@ -103,12 +103,12 @@ $ bin/pio app data-delete <new_app_name>
 #### 2. Run the following to migrate from <old_app_id> to <new_app_id>
 
 ```
-$ sbt/sbt "data/run-main io.prediction.data.storage.hbase.upgrade.Upgrade_0_8_3 <old_app_id> <new_app_id>"
+$ sbt/sbt "data/run-main org.apache.predictionio.data.storage.hbase.upgrade.Upgrade_0_8_3 <old_app_id> <new_app_id>"
 ... Done.
 ```
 
You can use the following to check the <new_app_id> again. It should display the number of records that were migrated:
 
 ```
-$ sbt/sbt "data/run-main io.prediction.data.storage.hbase.upgrade.CheckDistribution <new_app_id>"
+$ sbt/sbt "data/run-main org.apache.predictionio.data.storage.hbase.upgrade.CheckDistribution <new_app_id>"
 ```

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/data/src/main/scala/org/apache/predictionio/data/storage/hbase/upgrade/Upgrade.scala
----------------------------------------------------------------------
diff --git a/data/src/main/scala/org/apache/predictionio/data/storage/hbase/upgrade/Upgrade.scala b/data/src/main/scala/org/apache/predictionio/data/storage/hbase/upgrade/Upgrade.scala
index 7ef5305..dfbf415 100644
--- a/data/src/main/scala/org/apache/predictionio/data/storage/hbase/upgrade/Upgrade.scala
+++ b/data/src/main/scala/org/apache/predictionio/data/storage/hbase/upgrade/Upgrade.scala
@@ -13,13 +13,13 @@
   * limitations under the License.
   */
 
-package io.prediction.data.storage.hbase.upgrade
+package org.apache.predictionio.data.storage.hbase.upgrade
 
-import io.prediction.annotation.Experimental
+import org.apache.predictionio.annotation.Experimental
 
-import io.prediction.data.storage.Storage
-import io.prediction.data.storage.hbase.HBLEvents
-import io.prediction.data.storage.hbase.HBEventsUtil
+import org.apache.predictionio.data.storage.Storage
+import org.apache.predictionio.data.storage.hbase.HBLEvents
+import org.apache.predictionio.data.storage.hbase.HBEventsUtil
 
 import scala.collection.JavaConversions._
 

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/data/src/main/scala/org/apache/predictionio/data/storage/hbase/upgrade/Upgrade_0_8_3.scala
----------------------------------------------------------------------
diff --git a/data/src/main/scala/org/apache/predictionio/data/storage/hbase/upgrade/Upgrade_0_8_3.scala b/data/src/main/scala/org/apache/predictionio/data/storage/hbase/upgrade/Upgrade_0_8_3.scala
index 8b80b83..78859d3 100644
--- a/data/src/main/scala/org/apache/predictionio/data/storage/hbase/upgrade/Upgrade_0_8_3.scala
+++ b/data/src/main/scala/org/apache/predictionio/data/storage/hbase/upgrade/Upgrade_0_8_3.scala
@@ -13,21 +13,21 @@
   * limitations under the License.
   */
 
-package io.prediction.data.storage.hbase.upgrade
+package org.apache.predictionio.data.storage.hbase.upgrade
 
-import io.prediction.annotation.Experimental
+import org.apache.predictionio.annotation.Experimental
 
 import grizzled.slf4j.Logger
-import io.prediction.data.storage.Storage
-import io.prediction.data.storage.DataMap
-import io.prediction.data.storage.hbase.HBLEvents
-import io.prediction.data.storage.hbase.HBEventsUtil
+import org.apache.predictionio.data.storage.Storage
+import org.apache.predictionio.data.storage.DataMap
+import org.apache.predictionio.data.storage.hbase.HBLEvents
+import org.apache.predictionio.data.storage.hbase.HBEventsUtil
 
 import scala.collection.JavaConversions._
 
 import scala.concurrent._
 import ExecutionContext.Implicits.global
-import io.prediction.data.storage.LEvents
+import org.apache.predictionio.data.storage.LEvents
 import scala.concurrent.Await
 import scala.concurrent.duration.Duration
 import java.lang.Thread

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/data/src/test/resources/application.conf
----------------------------------------------------------------------
diff --git a/data/src/test/resources/application.conf b/data/src/test/resources/application.conf
index 2e1229d..eecae44 100644
--- a/data/src/test/resources/application.conf
+++ b/data/src/test/resources/application.conf
@@ -1,4 +1,4 @@
-io.prediction.data.storage {
+org.apache.predictionio.data.storage {
   sources {
     mongodb {
       type = mongodb

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/data/src/test/scala/org/apache/predictionio/data/api/SegmentIOAuthSpec.scala
----------------------------------------------------------------------
diff --git a/data/src/test/scala/org/apache/predictionio/data/api/SegmentIOAuthSpec.scala b/data/src/test/scala/org/apache/predictionio/data/api/SegmentIOAuthSpec.scala
index bae0f0b..31fe1a6 100644
--- a/data/src/test/scala/org/apache/predictionio/data/api/SegmentIOAuthSpec.scala
+++ b/data/src/test/scala/org/apache/predictionio/data/api/SegmentIOAuthSpec.scala
@@ -1,8 +1,8 @@
-package io.prediction.data.api
+package org.apache.predictionio.data.api
 
 import akka.actor.{ActorSystem, Props}
 import akka.testkit.TestProbe
-import io.prediction.data.storage._
+import org.apache.predictionio.data.storage._
 import org.joda.time.DateTime
 import org.specs2.mutable.Specification
 import spray.http.HttpHeaders.RawHeader

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/experimental/java-local-helloworld/MyAlgorithm.java
----------------------------------------------------------------------
diff --git a/examples/experimental/java-local-helloworld/MyAlgorithm.java b/examples/experimental/java-local-helloworld/MyAlgorithm.java
index 2294791..be68077 100644
--- a/examples/experimental/java-local-helloworld/MyAlgorithm.java
+++ b/examples/experimental/java-local-helloworld/MyAlgorithm.java
@@ -1,6 +1,6 @@
 package org.sample.java.helloworld;
 
-import io.prediction.controller.java.*;
+import org.apache.predictionio.controller.java.*;
 
 import java.util.Map;
 import java.util.HashMap;

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/experimental/java-local-helloworld/MyDataSource.java
----------------------------------------------------------------------
diff --git a/examples/experimental/java-local-helloworld/MyDataSource.java b/examples/experimental/java-local-helloworld/MyDataSource.java
index 3fe9510..c0c4ce5 100644
--- a/examples/experimental/java-local-helloworld/MyDataSource.java
+++ b/examples/experimental/java-local-helloworld/MyDataSource.java
@@ -1,6 +1,6 @@
 package org.sample.java.helloworld;
 
-import io.prediction.controller.java.*;
+import org.apache.predictionio.controller.java.*;
 
 import java.util.List;
 import java.util.ArrayList;

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/experimental/java-local-helloworld/MyEngineFactory.java
----------------------------------------------------------------------
diff --git a/examples/experimental/java-local-helloworld/MyEngineFactory.java b/examples/experimental/java-local-helloworld/MyEngineFactory.java
index a545c82..6faae88 100644
--- a/examples/experimental/java-local-helloworld/MyEngineFactory.java
+++ b/examples/experimental/java-local-helloworld/MyEngineFactory.java
@@ -1,6 +1,6 @@
 package org.sample.java.helloworld;
 
-import io.prediction.controller.java.*;
+import org.apache.predictionio.controller.java.*;
 
 public class MyEngineFactory implements IJavaEngineFactory {
   public JavaSimpleEngine<MyTrainingData, EmptyDataParams, MyQuery,

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/experimental/java-local-helloworld/build.sbt
----------------------------------------------------------------------
diff --git a/examples/experimental/java-local-helloworld/build.sbt b/examples/experimental/java-local-helloworld/build.sbt
index d233429..198a6d9 100644
--- a/examples/experimental/java-local-helloworld/build.sbt
+++ b/examples/experimental/java-local-helloworld/build.sbt
@@ -7,5 +7,5 @@ name := "example-java-local-helloworld"
 organization := "org.sample"
 
 libraryDependencies ++= Seq(
-  "io.prediction" %% "core" % "0.9.1" % "provided",
+  "org.apache.predictionio" %% "core" % "0.9.1" % "provided",
   "org.apache.spark" %% "spark-core" % "1.2.0" % "provided")

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/experimental/java-local-regression/README.md
----------------------------------------------------------------------
diff --git a/examples/experimental/java-local-regression/README.md b/examples/experimental/java-local-regression/README.md
index 86c7e48..2a4be0b 100644
--- a/examples/experimental/java-local-regression/README.md
+++ b/examples/experimental/java-local-regression/README.md
@@ -65,7 +65,7 @@ $ curl -X POST http://localhost:9997/queries.json -d \
 You can evaluate the engine instance too.
 ```
 $  pio eval --batch JavaRegressionEval \
---metrics-class io.prediction.examples.java.regression.MeanSquareMetrics
+--metrics-class org.apache.predictionio.examples.java.regression.MeanSquareMetrics
 
 ...
 2014-09-24 03:23:07,170 INFO  spark.SparkContext - Job finished: collect at Workflow.scala:695, took 0.092829 s
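
For intuition, a mean-square metric such as the one named above averages the squared
difference between each prediction and the actual value. A self-contained Scala
sketch follows; the data points are made up, and this is not the MeanSquareMetrics
source:

    object MeanSquareSketch {
      def main(args: Array[String]): Unit = {
        // (predicted, actual) pairs; values invented for illustration.
        val pairs = Seq((1.0, 1.2), (2.0, 1.8), (3.0, 3.5))
        val mse = pairs.map { case (p, a) => math.pow(p - a, 2) }.sum / pairs.size
        println(f"mean square error = $mse%.4f")
      }
    }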

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/experimental/java-local-regression/build.sbt
----------------------------------------------------------------------
diff --git a/examples/experimental/java-local-regression/build.sbt b/examples/experimental/java-local-regression/build.sbt
index 2a95cdd..dc4100b 100644
--- a/examples/experimental/java-local-regression/build.sbt
+++ b/examples/experimental/java-local-regression/build.sbt
@@ -10,5 +10,5 @@ organization := "myorg"
 version := "0.0.1-SNAPSHOT"
 
 libraryDependencies ++= Seq(
-  "io.prediction"    %% "core"          % "0.9.1" % "provided",
+  "org.apache.predictionio"    %% "core"          % "0.9.1" % "provided",
   "org.apache.spark" %% "spark-core"    % "1.2.0" % "provided")

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/experimental/java-local-regression/engine.json
----------------------------------------------------------------------
diff --git a/examples/experimental/java-local-regression/engine.json b/examples/experimental/java-local-regression/engine.json
index 9cb6c16..a80ef7d 100644
--- a/examples/experimental/java-local-regression/engine.json
+++ b/examples/experimental/java-local-regression/engine.json
@@ -1,7 +1,7 @@
 {
   "id": "java-local-regression",
   "description": "java-local-regression",
-  "engineFactory": "io.prediction.examples.java.regression.EngineFactory",
+  "engineFactory": "org.apache.predictionio.examples.java.regression.EngineFactory",
   "datasource": {
     "params": {
       "filepath": "PATH-TO-lr_data.txt"

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/experimental/java-local-regression/src/main/java/DataSource.java
----------------------------------------------------------------------
diff --git a/examples/experimental/java-local-regression/src/main/java/DataSource.java b/examples/experimental/java-local-regression/src/main/java/DataSource.java
index 82a4365..ad36904 100644
--- a/examples/experimental/java-local-regression/src/main/java/DataSource.java
+++ b/examples/experimental/java-local-regression/src/main/java/DataSource.java
@@ -1,6 +1,6 @@
-package io.prediction.examples.java.regression;
+package org.apache.predictionio.examples.java.regression;
 
-import io.prediction.controller.java.LJavaDataSource;
+import org.apache.predictionio.controller.java.LJavaDataSource;
 
 import scala.Tuple2;
 import scala.Tuple3;

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/experimental/java-local-regression/src/main/java/DataSourceParams.java
----------------------------------------------------------------------
diff --git a/examples/experimental/java-local-regression/src/main/java/DataSourceParams.java b/examples/experimental/java-local-regression/src/main/java/DataSourceParams.java
index a7fb887..324ab7f 100644
--- a/examples/experimental/java-local-regression/src/main/java/DataSourceParams.java
+++ b/examples/experimental/java-local-regression/src/main/java/DataSourceParams.java
@@ -1,6 +1,6 @@
-package io.prediction.examples.java.regression;
+package org.apache.predictionio.examples.java.regression;
 
-import io.prediction.controller.java.JavaParams;
+import org.apache.predictionio.controller.java.JavaParams;
 
 public class DataSourceParams implements JavaParams {
   public final String filepath;

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/experimental/java-local-regression/src/main/java/DefaultAlgorithm.java
----------------------------------------------------------------------
diff --git a/examples/experimental/java-local-regression/src/main/java/DefaultAlgorithm.java b/examples/experimental/java-local-regression/src/main/java/DefaultAlgorithm.java
index 64b150c..6fb7e61 100644
--- a/examples/experimental/java-local-regression/src/main/java/DefaultAlgorithm.java
+++ b/examples/experimental/java-local-regression/src/main/java/DefaultAlgorithm.java
@@ -1,6 +1,6 @@
-package io.prediction.examples.java.regression;
+package org.apache.predictionio.examples.java.regression;
 
-import io.prediction.controller.java.LJavaAlgorithm;
+import org.apache.predictionio.controller.java.LJavaAlgorithm;
 
 // This algorithm is for illustration only. It returns a constant.
 public class DefaultAlgorithm

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/experimental/java-local-regression/src/main/java/DefaultAlgorithmParams.java
----------------------------------------------------------------------
diff --git a/examples/experimental/java-local-regression/src/main/java/DefaultAlgorithmParams.java b/examples/experimental/java-local-regression/src/main/java/DefaultAlgorithmParams.java
index c20cdd7..a62a08c 100644
--- a/examples/experimental/java-local-regression/src/main/java/DefaultAlgorithmParams.java
+++ b/examples/experimental/java-local-regression/src/main/java/DefaultAlgorithmParams.java
@@ -1,6 +1,6 @@
-package io.prediction.examples.java.regression;
+package org.apache.predictionio.examples.java.regression;
 
-import io.prediction.controller.java.JavaParams;
+import org.apache.predictionio.controller.java.JavaParams;
 
 public class DefaultAlgorithmParams implements JavaParams {
   public final double v;

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/experimental/java-local-regression/src/main/java/EngineFactory.java
----------------------------------------------------------------------
diff --git a/examples/experimental/java-local-regression/src/main/java/EngineFactory.java b/examples/experimental/java-local-regression/src/main/java/EngineFactory.java
index 1b61141..09886a4 100644
--- a/examples/experimental/java-local-regression/src/main/java/EngineFactory.java
+++ b/examples/experimental/java-local-regression/src/main/java/EngineFactory.java
@@ -1,8 +1,8 @@
-package io.prediction.examples.java.regression;
+package org.apache.predictionio.examples.java.regression;
 
-import io.prediction.controller.java.IJavaEngineFactory;
-import io.prediction.controller.java.JavaEngine;
-import io.prediction.controller.java.JavaEngineBuilder;
+import org.apache.predictionio.controller.java.IJavaEngineFactory;
+import org.apache.predictionio.controller.java.JavaEngine;
+import org.apache.predictionio.controller.java.JavaEngineBuilder;
 
 public class EngineFactory implements IJavaEngineFactory {
   public JavaEngine<TrainingData, Integer, TrainingData, Double[], Double, Double> apply() {

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/experimental/java-local-regression/src/main/java/MeanSquareEvaluator.java
----------------------------------------------------------------------
diff --git a/examples/experimental/java-local-regression/src/main/java/MeanSquareEvaluator.java b/examples/experimental/java-local-regression/src/main/java/MeanSquareEvaluator.java
index 263e5dc..a556adc 100644
--- a/examples/experimental/java-local-regression/src/main/java/MeanSquareEvaluator.java
+++ b/examples/experimental/java-local-regression/src/main/java/MeanSquareEvaluator.java
@@ -1,8 +1,8 @@
-package io.prediction.examples.java.regression;
+package org.apache.predictionio.examples.java.regression;
 
-import io.prediction.controller.java.JavaEvaluator;
+import org.apache.predictionio.controller.java.JavaEvaluator;
 import java.lang.Iterable;
-import io.prediction.controller.java.EmptyParams;
+import org.apache.predictionio.controller.java.EmptyParams;
 import scala.Tuple2;
 import java.util.List;
 import java.util.ArrayList;

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/experimental/java-local-regression/src/main/java/OLSAlgorithm.java
----------------------------------------------------------------------
diff --git a/examples/experimental/java-local-regression/src/main/java/OLSAlgorithm.java b/examples/experimental/java-local-regression/src/main/java/OLSAlgorithm.java
index 72c47a1..86d047a 100644
--- a/examples/experimental/java-local-regression/src/main/java/OLSAlgorithm.java
+++ b/examples/experimental/java-local-regression/src/main/java/OLSAlgorithm.java
@@ -1,11 +1,11 @@
-package io.prediction.examples.java.regression;
+package org.apache.predictionio.examples.java.regression;
 
 import org.apache.commons.math3.stat.regression.OLSMultipleLinearRegression;
 import org.apache.commons.math3.linear.RealVector;
 import java.util.Arrays;
-import io.prediction.controller.java.EmptyParams;
+import org.apache.predictionio.controller.java.EmptyParams;
 
-import io.prediction.controller.java.LJavaAlgorithm;
+import org.apache.predictionio.controller.java.LJavaAlgorithm;
 
 public class OLSAlgorithm
   extends LJavaAlgorithm<EmptyParams, TrainingData, Double[], Double[], Double> {

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/experimental/java-local-regression/src/main/java/Preparator.java
----------------------------------------------------------------------
diff --git a/examples/experimental/java-local-regression/src/main/java/Preparator.java b/examples/experimental/java-local-regression/src/main/java/Preparator.java
index 8015d6e..b3e8173 100644
--- a/examples/experimental/java-local-regression/src/main/java/Preparator.java
+++ b/examples/experimental/java-local-regression/src/main/java/Preparator.java
@@ -1,6 +1,6 @@
-package io.prediction.examples.java.regression;
+package org.apache.predictionio.examples.java.regression;
 
-import io.prediction.controller.java.LJavaPreparator;
+import org.apache.predictionio.controller.java.LJavaPreparator;
 
 // This Preparator is just a proof-of-concept. It removes a fraction of the
 // training data to make training more "efficient".

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/experimental/java-local-regression/src/main/java/PreparatorParams.java
----------------------------------------------------------------------
diff --git a/examples/experimental/java-local-regression/src/main/java/PreparatorParams.java b/examples/experimental/java-local-regression/src/main/java/PreparatorParams.java
index 6b38e53..38028c4 100644
--- a/examples/experimental/java-local-regression/src/main/java/PreparatorParams.java
+++ b/examples/experimental/java-local-regression/src/main/java/PreparatorParams.java
@@ -1,6 +1,6 @@
-package io.prediction.examples.java.regression;
+package org.apache.predictionio.examples.java.regression;
 
-import io.prediction.controller.java.JavaParams;
+import org.apache.predictionio.controller.java.JavaParams;
 
 public class PreparatorParams implements JavaParams {
   // Take the r-fraction of data in training.
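
A rough Scala sketch of what taking the r-fraction of training data can look like;
the function name and the take-the-first-rows truncation strategy are assumptions,
not the Preparator's actual implementation:

    object FractionSketch {
      // Keep roughly the first r-fraction of the rows, where 0.0 <= r <= 1.0.
      def takeFraction[A](rows: Seq[A], r: Double): Seq[A] =
        rows.take((rows.size * r).toInt)

      def main(args: Array[String]): Unit = {
        println(takeFraction(1 to 10, 0.3)) // the range truncated to its first 3 elements
      }
    }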

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/experimental/java-local-regression/src/main/java/Run.java
----------------------------------------------------------------------
diff --git a/examples/experimental/java-local-regression/src/main/java/Run.java b/examples/experimental/java-local-regression/src/main/java/Run.java
index 356fb9a..b0edaf0 100644
--- a/examples/experimental/java-local-regression/src/main/java/Run.java
+++ b/examples/experimental/java-local-regression/src/main/java/Run.java
@@ -1,15 +1,15 @@
-package io.prediction.examples.java.regression;
+package org.apache.predictionio.examples.java.regression;
 
-import io.prediction.controller.java.EmptyParams;
-import io.prediction.controller.java.IJavaEngineFactory;
-import io.prediction.controller.java.JavaParams;
-import io.prediction.controller.java.JavaEngine;
-import io.prediction.controller.java.JavaEngineBuilder;
-import io.prediction.controller.java.JavaEngineParams;
-import io.prediction.controller.java.JavaEngineParamsBuilder;
-import io.prediction.controller.java.LJavaAlgorithm;
-import io.prediction.controller.java.JavaWorkflow;
-import io.prediction.controller.java.WorkflowParamsBuilder;
+import org.apache.predictionio.controller.java.EmptyParams;
+import org.apache.predictionio.controller.java.IJavaEngineFactory;
+import org.apache.predictionio.controller.java.JavaParams;
+import org.apache.predictionio.controller.java.JavaEngine;
+import org.apache.predictionio.controller.java.JavaEngineBuilder;
+import org.apache.predictionio.controller.java.JavaEngineParams;
+import org.apache.predictionio.controller.java.JavaEngineParamsBuilder;
+import org.apache.predictionio.controller.java.LJavaAlgorithm;
+import org.apache.predictionio.controller.java.JavaWorkflow;
+import org.apache.predictionio.controller.java.WorkflowParamsBuilder;
 import java.io.File;
 import java.io.IOException;
 import java.util.ArrayList;

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/experimental/java-local-regression/src/main/java/Serving.java
----------------------------------------------------------------------
diff --git a/examples/experimental/java-local-regression/src/main/java/Serving.java b/examples/experimental/java-local-regression/src/main/java/Serving.java
index f6cfecd..2fa0bb4 100644
--- a/examples/experimental/java-local-regression/src/main/java/Serving.java
+++ b/examples/experimental/java-local-regression/src/main/java/Serving.java
@@ -1,7 +1,7 @@
-package io.prediction.examples.java.regression;
+package org.apache.predictionio.examples.java.regression;
 
-import io.prediction.controller.java.LJavaServing;
-import io.prediction.controller.java.EmptyParams;
+import org.apache.predictionio.controller.java.LJavaServing;
+import org.apache.predictionio.controller.java.EmptyParams;
 
 import java.lang.Iterable;
 

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/experimental/java-local-regression/src/main/java/TrainingData.java
----------------------------------------------------------------------
diff --git a/examples/experimental/java-local-regression/src/main/java/TrainingData.java b/examples/experimental/java-local-regression/src/main/java/TrainingData.java
index ec8a11f..3666c6c 100644
--- a/examples/experimental/java-local-regression/src/main/java/TrainingData.java
+++ b/examples/experimental/java-local-regression/src/main/java/TrainingData.java
@@ -1,4 +1,4 @@
-package io.prediction.examples.java.regression;
+package org.apache.predictionio.examples.java.regression;
 
 import java.io.Serializable;
 import java.util.Arrays;

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/experimental/java-local-regression/src/main/java/examples/manifest.json
----------------------------------------------------------------------
diff --git a/examples/experimental/java-local-regression/src/main/java/examples/manifest.json b/examples/experimental/java-local-regression/src/main/java/examples/manifest.json
index 886baca..191de43 100644
--- a/examples/experimental/java-local-regression/src/main/java/examples/manifest.json
+++ b/examples/experimental/java-local-regression/src/main/java/examples/manifest.json
@@ -1,6 +1,6 @@
 {
-  "id": "io.prediction.examples.java.regression",
+  "id": "org.apache.predictionio.examples.java.regression",
   "version": "0.8.1-SNAPSHOT",
   "name": "Apache Java Regression Engine",
-  "engineFactory": "io.prediction.examples.java.regression.EngineFactory"
+  "engineFactory": "org.apache.predictionio.examples.java.regression.EngineFactory"
 }

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/experimental/java-local-tutorial/build.sbt
----------------------------------------------------------------------
diff --git a/examples/experimental/java-local-tutorial/build.sbt b/examples/experimental/java-local-tutorial/build.sbt
index 0f92e55..b7a4ccd 100644
--- a/examples/experimental/java-local-tutorial/build.sbt
+++ b/examples/experimental/java-local-tutorial/build.sbt
@@ -5,12 +5,12 @@ assemblySettings
 
 name := "java-local-tutorial"
 
-organization := "io.prediction"
+organization := "org.apache.predictionio"
 
 version := "0.9.1"
 
 libraryDependencies ++= Seq(
-  "io.prediction"    %% "core"          % "0.9.1" % "provided",
-  "io.prediction"    %% "engines"       % "0.9.1" % "provided",
+  "org.apache.predictionio"    %% "core"          % "0.9.1" % "provided",
+  "org.apache.predictionio"    %% "engines"       % "0.9.1" % "provided",
   "org.apache.mahout" % "mahout-core"   % "0.9",
   "org.apache.spark" %% "spark-core"    % "1.2.0" % "provided")

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/experimental/java-local-tutorial/src/main/java/recommendations/tutorial1/AlgoParams.java
----------------------------------------------------------------------
diff --git a/examples/experimental/java-local-tutorial/src/main/java/recommendations/tutorial1/AlgoParams.java b/examples/experimental/java-local-tutorial/src/main/java/recommendations/tutorial1/AlgoParams.java
index dbf6779..a687a1c 100644
--- a/examples/experimental/java-local-tutorial/src/main/java/recommendations/tutorial1/AlgoParams.java
+++ b/examples/experimental/java-local-tutorial/src/main/java/recommendations/tutorial1/AlgoParams.java
@@ -1,6 +1,6 @@
-package io.prediction.examples.java.recommendations.tutorial1;
+package org.apache.predictionio.examples.java.recommendations.tutorial1;
 
-import io.prediction.controller.java.JavaParams;
+import org.apache.predictionio.controller.java.JavaParams;
 
 public class AlgoParams implements JavaParams {
   public double threshold;

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/experimental/java-local-tutorial/src/main/java/recommendations/tutorial1/Algorithm.java
----------------------------------------------------------------------
diff --git a/examples/experimental/java-local-tutorial/src/main/java/recommendations/tutorial1/Algorithm.java b/examples/experimental/java-local-tutorial/src/main/java/recommendations/tutorial1/Algorithm.java
index 0c2ab4d..616d014 100644
--- a/examples/experimental/java-local-tutorial/src/main/java/recommendations/tutorial1/Algorithm.java
+++ b/examples/experimental/java-local-tutorial/src/main/java/recommendations/tutorial1/Algorithm.java
@@ -1,6 +1,6 @@
-package io.prediction.examples.java.recommendations.tutorial1;
+package org.apache.predictionio.examples.java.recommendations.tutorial1;
 
-import io.prediction.controller.java.LJavaAlgorithm;
+import org.apache.predictionio.controller.java.LJavaAlgorithm;
 import org.apache.commons.math3.linear.RealVector;
 import org.apache.commons.math3.linear.ArrayRealVector;
 import org.apache.commons.math3.linear.OpenMapRealVector;

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/experimental/java-local-tutorial/src/main/java/recommendations/tutorial1/DataSource.java
----------------------------------------------------------------------
diff --git a/examples/experimental/java-local-tutorial/src/main/java/recommendations/tutorial1/DataSource.java b/examples/experimental/java-local-tutorial/src/main/java/recommendations/tutorial1/DataSource.java
index edc6642..3f7b356 100644
--- a/examples/experimental/java-local-tutorial/src/main/java/recommendations/tutorial1/DataSource.java
+++ b/examples/experimental/java-local-tutorial/src/main/java/recommendations/tutorial1/DataSource.java
@@ -1,6 +1,6 @@
-package io.prediction.examples.java.recommendations.tutorial1;
+package org.apache.predictionio.examples.java.recommendations.tutorial1;
 
-import io.prediction.controller.java.LJavaDataSource;
+import org.apache.predictionio.controller.java.LJavaDataSource;
 import scala.Tuple2;
 import scala.Tuple3;
 import java.io.File;

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/experimental/java-local-tutorial/src/main/java/recommendations/tutorial1/DataSourceParams.java
----------------------------------------------------------------------
diff --git a/examples/experimental/java-local-tutorial/src/main/java/recommendations/tutorial1/DataSourceParams.java b/examples/experimental/java-local-tutorial/src/main/java/recommendations/tutorial1/DataSourceParams.java
index ee4471c..411cf88 100644
--- a/examples/experimental/java-local-tutorial/src/main/java/recommendations/tutorial1/DataSourceParams.java
+++ b/examples/experimental/java-local-tutorial/src/main/java/recommendations/tutorial1/DataSourceParams.java
@@ -1,6 +1,6 @@
-package io.prediction.examples.java.recommendations.tutorial1;
+package org.apache.predictionio.examples.java.recommendations.tutorial1;
 
-import io.prediction.controller.java.JavaParams;
+import org.apache.predictionio.controller.java.JavaParams;
 
 public class DataSourceParams implements JavaParams {
   public String filePath; // file path

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/experimental/java-local-tutorial/src/main/java/recommendations/tutorial1/EngineFactory.java
----------------------------------------------------------------------
diff --git a/examples/experimental/java-local-tutorial/src/main/java/recommendations/tutorial1/EngineFactory.java b/examples/experimental/java-local-tutorial/src/main/java/recommendations/tutorial1/EngineFactory.java
index c791ccb..56881de 100644
--- a/examples/experimental/java-local-tutorial/src/main/java/recommendations/tutorial1/EngineFactory.java
+++ b/examples/experimental/java-local-tutorial/src/main/java/recommendations/tutorial1/EngineFactory.java
@@ -1,8 +1,8 @@
-package io.prediction.examples.java.recommendations.tutorial1;
+package org.apache.predictionio.examples.java.recommendations.tutorial1;
 
-import io.prediction.controller.java.IJavaEngineFactory;
-import io.prediction.controller.java.JavaSimpleEngine;
-import io.prediction.controller.java.JavaSimpleEngineBuilder;
+import org.apache.predictionio.controller.java.IJavaEngineFactory;
+import org.apache.predictionio.controller.java.JavaSimpleEngine;
+import org.apache.predictionio.controller.java.JavaSimpleEngineBuilder;
 
 public class EngineFactory implements IJavaEngineFactory {
   public JavaSimpleEngine<TrainingData, Object, Query, Float, Object> apply() {

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/experimental/java-local-tutorial/src/main/java/recommendations/tutorial1/Model.java
----------------------------------------------------------------------
diff --git a/examples/experimental/java-local-tutorial/src/main/java/recommendations/tutorial1/Model.java b/examples/experimental/java-local-tutorial/src/main/java/recommendations/tutorial1/Model.java
index 842bc34..a839010 100644
--- a/examples/experimental/java-local-tutorial/src/main/java/recommendations/tutorial1/Model.java
+++ b/examples/experimental/java-local-tutorial/src/main/java/recommendations/tutorial1/Model.java
@@ -1,4 +1,4 @@
-package io.prediction.examples.java.recommendations.tutorial1;
+package org.apache.predictionio.examples.java.recommendations.tutorial1;
 
 import java.io.Serializable;
 import java.util.Map;

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/experimental/java-local-tutorial/src/main/java/recommendations/tutorial1/Query.java
----------------------------------------------------------------------
diff --git a/examples/experimental/java-local-tutorial/src/main/java/recommendations/tutorial1/Query.java b/examples/experimental/java-local-tutorial/src/main/java/recommendations/tutorial1/Query.java
index ef09d41..bfca90c 100644
--- a/examples/experimental/java-local-tutorial/src/main/java/recommendations/tutorial1/Query.java
+++ b/examples/experimental/java-local-tutorial/src/main/java/recommendations/tutorial1/Query.java
@@ -1,4 +1,4 @@
-package io.prediction.examples.java.recommendations.tutorial1;
+package org.apache.predictionio.examples.java.recommendations.tutorial1;
 
 import java.io.Serializable;
 

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/experimental/java-local-tutorial/src/main/java/recommendations/tutorial1/TrainingData.java
----------------------------------------------------------------------
diff --git a/examples/experimental/java-local-tutorial/src/main/java/recommendations/tutorial1/TrainingData.java b/examples/experimental/java-local-tutorial/src/main/java/recommendations/tutorial1/TrainingData.java
index e92af22..1f015d9 100644
--- a/examples/experimental/java-local-tutorial/src/main/java/recommendations/tutorial1/TrainingData.java
+++ b/examples/experimental/java-local-tutorial/src/main/java/recommendations/tutorial1/TrainingData.java
@@ -1,4 +1,4 @@
-package io.prediction.examples.java.recommendations.tutorial1;
+package org.apache.predictionio.examples.java.recommendations.tutorial1;
 
 import java.io.Serializable;
 import java.util.List;

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/experimental/java-local-tutorial/src/main/java/recommendations/tutorial1/engine.json
----------------------------------------------------------------------
diff --git a/examples/experimental/java-local-tutorial/src/main/java/recommendations/tutorial1/engine.json b/examples/experimental/java-local-tutorial/src/main/java/recommendations/tutorial1/engine.json
index c43174e..12c3927 100644
--- a/examples/experimental/java-local-tutorial/src/main/java/recommendations/tutorial1/engine.json
+++ b/examples/experimental/java-local-tutorial/src/main/java/recommendations/tutorial1/engine.json
@@ -1,6 +1,6 @@
 {
-  "id": "io.prediction.examples.java.recommendations.tutorial1.EngineFactory",
+  "id": "org.apache.predictionio.examples.java.recommendations.tutorial1.EngineFactory",
   "version": "0.9.1",
   "name": "Simple Recommendations Engine",
-  "engineFactory": "io.prediction.examples.java.recommendations.tutorial1.EngineFactory"
+  "engineFactory": "org.apache.predictionio.examples.java.recommendations.tutorial1.EngineFactory"
 }

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/experimental/java-local-tutorial/src/main/java/recommendations/tutorial2/Runner1.java
----------------------------------------------------------------------
diff --git a/examples/experimental/java-local-tutorial/src/main/java/recommendations/tutorial2/Runner1.java b/examples/experimental/java-local-tutorial/src/main/java/recommendations/tutorial2/Runner1.java
index 6efc798..b121b37 100644
--- a/examples/experimental/java-local-tutorial/src/main/java/recommendations/tutorial2/Runner1.java
+++ b/examples/experimental/java-local-tutorial/src/main/java/recommendations/tutorial2/Runner1.java
@@ -1,18 +1,18 @@
-package io.prediction.examples.java.recommendations.tutorial2;
+package org.apache.predictionio.examples.java.recommendations.tutorial2;
 
-import io.prediction.examples.java.recommendations.tutorial1.TrainingData;
-import io.prediction.examples.java.recommendations.tutorial1.Query;
-import io.prediction.examples.java.recommendations.tutorial1.DataSource;
-import io.prediction.examples.java.recommendations.tutorial1.DataSourceParams;
+import org.apache.predictionio.examples.java.recommendations.tutorial1.TrainingData;
+import org.apache.predictionio.examples.java.recommendations.tutorial1.Query;
+import org.apache.predictionio.examples.java.recommendations.tutorial1.DataSource;
+import org.apache.predictionio.examples.java.recommendations.tutorial1.DataSourceParams;
 
-import io.prediction.controller.java.EmptyParams;
-import io.prediction.controller.java.IJavaEngineFactory;
-import io.prediction.controller.java.JavaSimpleEngine;
-import io.prediction.controller.java.JavaSimpleEngineBuilder;
-import io.prediction.controller.java.JavaEngineParams;
-import io.prediction.controller.java.JavaEngineParamsBuilder;
-import io.prediction.controller.java.JavaWorkflow;
-import io.prediction.controller.java.WorkflowParamsBuilder;
+import org.apache.predictionio.controller.java.EmptyParams;
+import org.apache.predictionio.controller.java.IJavaEngineFactory;
+import org.apache.predictionio.controller.java.JavaSimpleEngine;
+import org.apache.predictionio.controller.java.JavaSimpleEngineBuilder;
+import org.apache.predictionio.controller.java.JavaEngineParams;
+import org.apache.predictionio.controller.java.JavaEngineParamsBuilder;
+import org.apache.predictionio.controller.java.JavaWorkflow;
+import org.apache.predictionio.controller.java.WorkflowParamsBuilder;
 import java.util.HashMap;
 
 public class Runner1 {

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/experimental/java-local-tutorial/src/main/java/recommendations/tutorial2/Runner2.java
----------------------------------------------------------------------
diff --git a/examples/experimental/java-local-tutorial/src/main/java/recommendations/tutorial2/Runner2.java b/examples/experimental/java-local-tutorial/src/main/java/recommendations/tutorial2/Runner2.java
index a8838da..3a12b35 100644
--- a/examples/experimental/java-local-tutorial/src/main/java/recommendations/tutorial2/Runner2.java
+++ b/examples/experimental/java-local-tutorial/src/main/java/recommendations/tutorial2/Runner2.java
@@ -1,24 +1,24 @@
-package io.prediction.examples.java.recommendations.tutorial2;
-
-import io.prediction.examples.java.recommendations.tutorial1.TrainingData;
-import io.prediction.examples.java.recommendations.tutorial1.Query;
-import io.prediction.examples.java.recommendations.tutorial1.DataSource;
-import io.prediction.examples.java.recommendations.tutorial1.DataSourceParams;
-import io.prediction.examples.java.recommendations.tutorial1.Algorithm;
-import io.prediction.examples.java.recommendations.tutorial1.AlgoParams;
-
-import io.prediction.controller.java.EmptyParams;
-import io.prediction.controller.java.IJavaEngineFactory;
-import io.prediction.controller.java.JavaSimpleEngine;
-import io.prediction.controller.java.JavaSimpleEngineBuilder;
-import io.prediction.controller.java.JavaEngineParams;
-import io.prediction.controller.java.JavaEngineParamsBuilder;
-import io.prediction.controller.java.JavaWorkflow;
-import io.prediction.controller.java.WorkflowParamsBuilder;
+package org.apache.predictionio.examples.java.recommendations.tutorial2;
+
+import org.apache.predictionio.examples.java.recommendations.tutorial1.TrainingData;
+import org.apache.predictionio.examples.java.recommendations.tutorial1.Query;
+import org.apache.predictionio.examples.java.recommendations.tutorial1.DataSource;
+import org.apache.predictionio.examples.java.recommendations.tutorial1.DataSourceParams;
+import org.apache.predictionio.examples.java.recommendations.tutorial1.Algorithm;
+import org.apache.predictionio.examples.java.recommendations.tutorial1.AlgoParams;
+
+import org.apache.predictionio.controller.java.EmptyParams;
+import org.apache.predictionio.controller.java.IJavaEngineFactory;
+import org.apache.predictionio.controller.java.JavaSimpleEngine;
+import org.apache.predictionio.controller.java.JavaSimpleEngineBuilder;
+import org.apache.predictionio.controller.java.JavaEngineParams;
+import org.apache.predictionio.controller.java.JavaEngineParamsBuilder;
+import org.apache.predictionio.controller.java.JavaWorkflow;
+import org.apache.predictionio.controller.java.WorkflowParamsBuilder;
 
 import java.util.HashMap;
 
-import io.prediction.controller.IdentityPreparator;
+import org.apache.predictionio.controller.IdentityPreparator;
 
 public class Runner2 {
 

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/experimental/java-local-tutorial/src/main/java/recommendations/tutorial3/DataSource.java
----------------------------------------------------------------------
diff --git a/examples/experimental/java-local-tutorial/src/main/java/recommendations/tutorial3/DataSource.java b/examples/experimental/java-local-tutorial/src/main/java/recommendations/tutorial3/DataSource.java
index ee4c8e2..426e628 100644
--- a/examples/experimental/java-local-tutorial/src/main/java/recommendations/tutorial3/DataSource.java
+++ b/examples/experimental/java-local-tutorial/src/main/java/recommendations/tutorial3/DataSource.java
@@ -1,10 +1,10 @@
-package io.prediction.examples.java.recommendations.tutorial3;
+package org.apache.predictionio.examples.java.recommendations.tutorial3;
 
-import io.prediction.examples.java.recommendations.tutorial1.TrainingData;
-import io.prediction.examples.java.recommendations.tutorial1.Query;
-import io.prediction.examples.java.recommendations.tutorial1.DataSourceParams;
+import org.apache.predictionio.examples.java.recommendations.tutorial1.TrainingData;
+import org.apache.predictionio.examples.java.recommendations.tutorial1.Query;
+import org.apache.predictionio.examples.java.recommendations.tutorial1.DataSourceParams;
 
-import io.prediction.controller.java.LJavaDataSource;
+import org.apache.predictionio.controller.java.LJavaDataSource;
 import scala.Tuple2;
 import scala.Tuple3;
 import java.io.File;

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/experimental/java-local-tutorial/src/main/java/recommendations/tutorial3/EngineFactory.java
----------------------------------------------------------------------
diff --git a/examples/experimental/java-local-tutorial/src/main/java/recommendations/tutorial3/EngineFactory.java b/examples/experimental/java-local-tutorial/src/main/java/recommendations/tutorial3/EngineFactory.java
index 9c74c77..7e7add7 100644
--- a/examples/experimental/java-local-tutorial/src/main/java/recommendations/tutorial3/EngineFactory.java
+++ b/examples/experimental/java-local-tutorial/src/main/java/recommendations/tutorial3/EngineFactory.java
@@ -1,12 +1,12 @@
-package io.prediction.examples.java.recommendations.tutorial3;
+package org.apache.predictionio.examples.java.recommendations.tutorial3;
 
-import io.prediction.examples.java.recommendations.tutorial1.Algorithm;
-import io.prediction.examples.java.recommendations.tutorial1.TrainingData;
-import io.prediction.examples.java.recommendations.tutorial1.Query;
+import org.apache.predictionio.examples.java.recommendations.tutorial1.Algorithm;
+import org.apache.predictionio.examples.java.recommendations.tutorial1.TrainingData;
+import org.apache.predictionio.examples.java.recommendations.tutorial1.Query;
 
-import io.prediction.controller.java.IJavaEngineFactory;
-import io.prediction.controller.java.JavaSimpleEngine;
-import io.prediction.controller.java.JavaSimpleEngineBuilder;
+import org.apache.predictionio.controller.java.IJavaEngineFactory;
+import org.apache.predictionio.controller.java.JavaSimpleEngine;
+import org.apache.predictionio.controller.java.JavaSimpleEngineBuilder;
 
 public class EngineFactory implements IJavaEngineFactory {
   public JavaSimpleEngine<TrainingData, Object, Query, Float, Float> apply() {

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/experimental/java-local-tutorial/src/main/java/recommendations/tutorial3/Evaluator.java
----------------------------------------------------------------------
diff --git a/examples/experimental/java-local-tutorial/src/main/java/recommendations/tutorial3/Evaluator.java b/examples/experimental/java-local-tutorial/src/main/java/recommendations/tutorial3/Evaluator.java
index 547eaf0..98042b0 100644
--- a/examples/experimental/java-local-tutorial/src/main/java/recommendations/tutorial3/Evaluator.java
+++ b/examples/experimental/java-local-tutorial/src/main/java/recommendations/tutorial3/Evaluator.java
@@ -1,8 +1,8 @@
-package io.prediction.examples.java.recommendations.tutorial3;
+package org.apache.predictionio.examples.java.recommendations.tutorial3;
 
-import io.prediction.examples.java.recommendations.tutorial1.Query;
-import io.prediction.controller.java.JavaEvaluator;
-import io.prediction.controller.java.EmptyParams;
+import org.apache.predictionio.examples.java.recommendations.tutorial1.Query;
+import org.apache.predictionio.controller.java.JavaEvaluator;
+import org.apache.predictionio.controller.java.EmptyParams;
 
 import scala.Tuple2;
 import java.util.Arrays;

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/experimental/java-local-tutorial/src/main/java/recommendations/tutorial3/Runner3.java
----------------------------------------------------------------------
diff --git a/examples/experimental/java-local-tutorial/src/main/java/recommendations/tutorial3/Runner3.java b/examples/experimental/java-local-tutorial/src/main/java/recommendations/tutorial3/Runner3.java
index 7b4f581..5fba5aa 100644
--- a/examples/experimental/java-local-tutorial/src/main/java/recommendations/tutorial3/Runner3.java
+++ b/examples/experimental/java-local-tutorial/src/main/java/recommendations/tutorial3/Runner3.java
@@ -1,16 +1,16 @@
-package io.prediction.examples.java.recommendations.tutorial3;
+package org.apache.predictionio.examples.java.recommendations.tutorial3;
 
-import io.prediction.examples.java.recommendations.tutorial1.DataSourceParams;
-import io.prediction.examples.java.recommendations.tutorial1.AlgoParams;
+import org.apache.predictionio.examples.java.recommendations.tutorial1.DataSourceParams;
+import org.apache.predictionio.examples.java.recommendations.tutorial1.AlgoParams;
 
-import io.prediction.controller.java.EmptyParams;
-import io.prediction.controller.java.IJavaEngineFactory;
-import io.prediction.controller.java.JavaSimpleEngine;
-import io.prediction.controller.java.JavaSimpleEngineBuilder;
-import io.prediction.controller.java.JavaEngineParams;
-import io.prediction.controller.java.JavaEngineParamsBuilder;
-import io.prediction.controller.java.JavaWorkflow;
-import io.prediction.controller.java.WorkflowParamsBuilder;
+import org.apache.predictionio.controller.java.EmptyParams;
+import org.apache.predictionio.controller.java.IJavaEngineFactory;
+import org.apache.predictionio.controller.java.JavaSimpleEngine;
+import org.apache.predictionio.controller.java.JavaSimpleEngineBuilder;
+import org.apache.predictionio.controller.java.JavaEngineParams;
+import org.apache.predictionio.controller.java.JavaEngineParamsBuilder;
+import org.apache.predictionio.controller.java.JavaWorkflow;
+import org.apache.predictionio.controller.java.WorkflowParamsBuilder;
 
 import java.util.HashMap;
 

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/experimental/java-local-tutorial/src/main/java/recommendations/tutorial4/CollaborativeFilteringAlgorithm.java
----------------------------------------------------------------------
diff --git a/examples/experimental/java-local-tutorial/src/main/java/recommendations/tutorial4/CollaborativeFilteringAlgorithm.java b/examples/experimental/java-local-tutorial/src/main/java/recommendations/tutorial4/CollaborativeFilteringAlgorithm.java
index 35ecc7e..0aa12f3 100644
--- a/examples/experimental/java-local-tutorial/src/main/java/recommendations/tutorial4/CollaborativeFilteringAlgorithm.java
+++ b/examples/experimental/java-local-tutorial/src/main/java/recommendations/tutorial4/CollaborativeFilteringAlgorithm.java
@@ -1,6 +1,6 @@
-package io.prediction.examples.java.recommendations.tutorial4;
+package org.apache.predictionio.examples.java.recommendations.tutorial4;
 
-import io.prediction.controller.java.LJavaAlgorithm;
+import org.apache.predictionio.controller.java.LJavaAlgorithm;
 import org.apache.commons.math3.linear.RealVector;
 import org.apache.commons.math3.linear.ArrayRealVector;
 import org.apache.commons.math3.linear.OpenMapRealVector;

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/experimental/java-local-tutorial/src/main/java/recommendations/tutorial4/CollaborativeFilteringAlgorithmParams.java
----------------------------------------------------------------------
diff --git a/examples/experimental/java-local-tutorial/src/main/java/recommendations/tutorial4/CollaborativeFilteringAlgorithmParams.java b/examples/experimental/java-local-tutorial/src/main/java/recommendations/tutorial4/CollaborativeFilteringAlgorithmParams.java
index 6da986d..ca9dc2f 100644
--- a/examples/experimental/java-local-tutorial/src/main/java/recommendations/tutorial4/CollaborativeFilteringAlgorithmParams.java
+++ b/examples/experimental/java-local-tutorial/src/main/java/recommendations/tutorial4/CollaborativeFilteringAlgorithmParams.java
@@ -1,6 +1,6 @@
-package io.prediction.examples.java.recommendations.tutorial4;
+package org.apache.predictionio.examples.java.recommendations.tutorial4;
 
-import io.prediction.controller.java.JavaParams;
+import org.apache.predictionio.controller.java.JavaParams;
 
 public class CollaborativeFilteringAlgorithmParams implements JavaParams {
   public double threshold;

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/experimental/java-local-tutorial/src/main/java/recommendations/tutorial4/CollaborativeFilteringModel.java
----------------------------------------------------------------------
diff --git a/examples/experimental/java-local-tutorial/src/main/java/recommendations/tutorial4/CollaborativeFilteringModel.java b/examples/experimental/java-local-tutorial/src/main/java/recommendations/tutorial4/CollaborativeFilteringModel.java
index 62c1b75..523f44a 100644
--- a/examples/experimental/java-local-tutorial/src/main/java/recommendations/tutorial4/CollaborativeFilteringModel.java
+++ b/examples/experimental/java-local-tutorial/src/main/java/recommendations/tutorial4/CollaborativeFilteringModel.java
@@ -1,4 +1,4 @@
-package io.prediction.examples.java.recommendations.tutorial4;
+package org.apache.predictionio.examples.java.recommendations.tutorial4;
 
 import java.io.Serializable;
 import java.util.Map;

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/experimental/java-local-tutorial/src/main/java/recommendations/tutorial4/DataSource.java
----------------------------------------------------------------------
diff --git a/examples/experimental/java-local-tutorial/src/main/java/recommendations/tutorial4/DataSource.java b/examples/experimental/java-local-tutorial/src/main/java/recommendations/tutorial4/DataSource.java
index 6853a32..f3938f2 100644
--- a/examples/experimental/java-local-tutorial/src/main/java/recommendations/tutorial4/DataSource.java
+++ b/examples/experimental/java-local-tutorial/src/main/java/recommendations/tutorial4/DataSource.java
@@ -1,8 +1,8 @@
-package io.prediction.examples.java.recommendations.tutorial4;
+package org.apache.predictionio.examples.java.recommendations.tutorial4;
 
 import java.util.Arrays;
-import io.prediction.controller.java.LJavaDataSource;
-import io.prediction.controller.java.EmptyParams;
+import org.apache.predictionio.controller.java.LJavaDataSource;
+import org.apache.predictionio.controller.java.EmptyParams;
 import scala.Tuple2;
 import scala.Tuple3;
 import java.io.File;

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/experimental/java-local-tutorial/src/main/java/recommendations/tutorial4/DataSourceParams.java
----------------------------------------------------------------------
diff --git a/examples/experimental/java-local-tutorial/src/main/java/recommendations/tutorial4/DataSourceParams.java b/examples/experimental/java-local-tutorial/src/main/java/recommendations/tutorial4/DataSourceParams.java
index c31b757..322bb68 100644
--- a/examples/experimental/java-local-tutorial/src/main/java/recommendations/tutorial4/DataSourceParams.java
+++ b/examples/experimental/java-local-tutorial/src/main/java/recommendations/tutorial4/DataSourceParams.java
@@ -1,6 +1,6 @@
-package io.prediction.examples.java.recommendations.tutorial4;
+package org.apache.predictionio.examples.java.recommendations.tutorial4;
 
-import io.prediction.controller.java.JavaParams;
+import org.apache.predictionio.controller.java.JavaParams;
 
 public class DataSourceParams implements JavaParams {
   public String dir;

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/experimental/java-local-tutorial/src/main/java/recommendations/tutorial4/EngineFactory.java
----------------------------------------------------------------------
diff --git a/examples/experimental/java-local-tutorial/src/main/java/recommendations/tutorial4/EngineFactory.java b/examples/experimental/java-local-tutorial/src/main/java/recommendations/tutorial4/EngineFactory.java
index b06b0a2..e88fa83 100644
--- a/examples/experimental/java-local-tutorial/src/main/java/recommendations/tutorial4/EngineFactory.java
+++ b/examples/experimental/java-local-tutorial/src/main/java/recommendations/tutorial4/EngineFactory.java
@@ -1,9 +1,9 @@
-package io.prediction.examples.java.recommendations.tutorial4;
+package org.apache.predictionio.examples.java.recommendations.tutorial4;
 
-import io.prediction.controller.java.EmptyParams;
-import io.prediction.controller.java.IJavaEngineFactory;
-import io.prediction.controller.java.JavaEngine;
-import io.prediction.controller.java.JavaEngineBuilder;
+import org.apache.predictionio.controller.java.EmptyParams;
+import org.apache.predictionio.controller.java.IJavaEngineFactory;
+import org.apache.predictionio.controller.java.JavaEngine;
+import org.apache.predictionio.controller.java.JavaEngineBuilder;
 
 public class EngineFactory implements IJavaEngineFactory {
   public JavaEngine<TrainingData, EmptyParams, PreparedData, Query, Float, Object> apply() {

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/experimental/java-local-tutorial/src/main/java/recommendations/tutorial4/FeatureBasedAlgorithm.java
----------------------------------------------------------------------
diff --git a/examples/experimental/java-local-tutorial/src/main/java/recommendations/tutorial4/FeatureBasedAlgorithm.java b/examples/experimental/java-local-tutorial/src/main/java/recommendations/tutorial4/FeatureBasedAlgorithm.java
index cf35237..e51d821 100644
--- a/examples/experimental/java-local-tutorial/src/main/java/recommendations/tutorial4/FeatureBasedAlgorithm.java
+++ b/examples/experimental/java-local-tutorial/src/main/java/recommendations/tutorial4/FeatureBasedAlgorithm.java
@@ -1,6 +1,6 @@
-package io.prediction.examples.java.recommendations.tutorial4;
+package org.apache.predictionio.examples.java.recommendations.tutorial4;
 
-import io.prediction.controller.java.LJavaAlgorithm;
+import org.apache.predictionio.controller.java.LJavaAlgorithm;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import java.util.Map;

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/experimental/java-local-tutorial/src/main/java/recommendations/tutorial4/FeatureBasedAlgorithmParams.java
----------------------------------------------------------------------
diff --git a/examples/experimental/java-local-tutorial/src/main/java/recommendations/tutorial4/FeatureBasedAlgorithmParams.java b/examples/experimental/java-local-tutorial/src/main/java/recommendations/tutorial4/FeatureBasedAlgorithmParams.java
index 300f95e..4f31fa6 100644
--- a/examples/experimental/java-local-tutorial/src/main/java/recommendations/tutorial4/FeatureBasedAlgorithmParams.java
+++ b/examples/experimental/java-local-tutorial/src/main/java/recommendations/tutorial4/FeatureBasedAlgorithmParams.java
@@ -1,6 +1,6 @@
-package io.prediction.examples.java.recommendations.tutorial4;
+package org.apache.predictionio.examples.java.recommendations.tutorial4;
 
-import io.prediction.controller.java.JavaParams;
+import org.apache.predictionio.controller.java.JavaParams;
 
 // actual score = (rating - drift) / scale, applied when min <= rating <= max;
 // a rating outside [min, max] is not scored at all.
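
The two comment lines above describe the rating normalization used by this
tutorial's feature-based algorithm. As an illustrative sketch only (the
parameter names mirror the comment; the actual FeatureBasedAlgorithmParams
fields may differ), the rule amounts to:

    // Scala sketch of the scoring rule: a rating inside [min, max] is
    // shifted by `drift` and divided by `scale`; anything outside the
    // range yields no score.
    def score(rating: Double, min: Double, max: Double,
              drift: Double, scale: Double): Option[Double] =
      if (rating >= min && rating <= max) Some((rating - drift) / scale)
      else None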

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/experimental/java-local-tutorial/src/main/java/recommendations/tutorial4/FeatureBasedModel.java
----------------------------------------------------------------------
diff --git a/examples/experimental/java-local-tutorial/src/main/java/recommendations/tutorial4/FeatureBasedModel.java b/examples/experimental/java-local-tutorial/src/main/java/recommendations/tutorial4/FeatureBasedModel.java
index 7d13974..b598c38 100644
--- a/examples/experimental/java-local-tutorial/src/main/java/recommendations/tutorial4/FeatureBasedModel.java
+++ b/examples/experimental/java-local-tutorial/src/main/java/recommendations/tutorial4/FeatureBasedModel.java
@@ -1,4 +1,4 @@
-package io.prediction.examples.java.recommendations.tutorial4;
+package org.apache.predictionio.examples.java.recommendations.tutorial4;
 
 import java.io.Serializable;
 import java.util.Map;

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/experimental/java-local-tutorial/src/main/java/recommendations/tutorial4/Preparator.java
----------------------------------------------------------------------
diff --git a/examples/experimental/java-local-tutorial/src/main/java/recommendations/tutorial4/Preparator.java b/examples/experimental/java-local-tutorial/src/main/java/recommendations/tutorial4/Preparator.java
index 9ff4119..3ccce53 100644
--- a/examples/experimental/java-local-tutorial/src/main/java/recommendations/tutorial4/Preparator.java
+++ b/examples/experimental/java-local-tutorial/src/main/java/recommendations/tutorial4/Preparator.java
@@ -1,7 +1,7 @@
-package io.prediction.examples.java.recommendations.tutorial4;
+package org.apache.predictionio.examples.java.recommendations.tutorial4;
 
-import io.prediction.controller.java.LJavaPreparator;
-import io.prediction.controller.java.EmptyParams;
+import org.apache.predictionio.controller.java.LJavaPreparator;
+import org.apache.predictionio.controller.java.EmptyParams;
 
 import java.util.Arrays;
 import java.util.Map;

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/6d160b6b/examples/experimental/java-local-tutorial/src/main/java/recommendations/tutorial4/PreparedData.java
----------------------------------------------------------------------
diff --git a/examples/experimental/java-local-tutorial/src/main/java/recommendations/tutorial4/PreparedData.java b/examples/experimental/java-local-tutorial/src/main/java/recommendations/tutorial4/PreparedData.java
index c416eaf..35c46ea 100644
--- a/examples/experimental/java-local-tutorial/src/main/java/recommendations/tutorial4/PreparedData.java
+++ b/examples/experimental/java-local-tutorial/src/main/java/recommendations/tutorial4/PreparedData.java
@@ -1,4 +1,4 @@
-package io.prediction.examples.java.recommendations.tutorial4;
+package org.apache.predictionio.examples.java.recommendations.tutorial4;
 
 import java.util.Map;
 import org.apache.commons.math3.linear.RealVector;



[34/34] incubator-predictionio git commit: Merge branch 'yinxusen/pio-2' into develop

Posted by do...@apache.org.
Merge branch 'yinxusen/pio-2' into develop


Project: http://git-wip-us.apache.org/repos/asf/incubator-predictionio/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-predictionio/commit/02a5655f
Tree: http://git-wip-us.apache.org/repos/asf/incubator-predictionio/tree/02a5655f
Diff: http://git-wip-us.apache.org/repos/asf/incubator-predictionio/diff/02a5655f

Branch: refs/heads/develop
Commit: 02a5655fcf4b9df52b60a94410b745a454b00b4b
Parents: 23c21d6 ed9d62d
Author: Donald Szeto <do...@apache.org>
Authored: Mon Jul 18 13:17:10 2016 -0700
Committer: Donald Szeto <do...@apache.org>
Committed: Mon Jul 18 13:17:10 2016 -0700

----------------------------------------------------------------------
 bin/pio                                         |    2 +-
 bin/pio-daemon                                  |    2 +-
 build.sbt                                       |   44 +-
 .../io/prediction/annotation/DeveloperApi.java  |   34 -
 .../io/prediction/annotation/Experimental.java  |   35 -
 .../authentication/KeyAuthentication.scala      |   55 -
 .../configuration/SSLConfiguration.scala        |   71 -
 .../predictionio/annotation/DeveloperApi.java   |   34 +
 .../predictionio/annotation/Experimental.java   |   35 +
 .../authentication/KeyAuthentication.scala      |   55 +
 .../configuration/SSLConfiguration.scala        |   71 +
 conf/server.conf                                |    6 +-
 .../controller/CustomQuerySerializer.scala      |   37 -
 .../io/prediction/controller/Deployment.scala   |   56 -
 .../scala/io/prediction/controller/Engine.scala |  829 ------------
 .../prediction/controller/EngineFactory.scala   |   41 -
 .../io/prediction/controller/EngineParams.scala |  149 --
 .../controller/EngineParamsGenerator.scala      |   43 -
 .../io/prediction/controller/Evaluation.scala   |  122 --
 .../prediction/controller/FastEvalEngine.scala  |  343 -----
 .../controller/IdentityPreparator.scala         |   92 --
 .../io/prediction/controller/LAlgorithm.scala   |  130 --
 .../prediction/controller/LAverageServing.scala |   41 -
 .../io/prediction/controller/LDataSource.scala  |   67 -
 .../prediction/controller/LFirstServing.scala   |   39 -
 .../io/prediction/controller/LPreparator.scala  |   46 -
 .../io/prediction/controller/LServing.scala     |   52 -
 .../LocalFileSystemPersistentModel.scala        |   74 -
 .../scala/io/prediction/controller/Metric.scala |  266 ----
 .../prediction/controller/MetricEvaluator.scala |  260 ----
 .../io/prediction/controller/P2LAlgorithm.scala |  121 --
 .../io/prediction/controller/PAlgorithm.scala   |  126 --
 .../io/prediction/controller/PDataSource.scala  |   57 -
 .../io/prediction/controller/PPreparator.scala  |   44 -
 .../scala/io/prediction/controller/Params.scala |   31 -
 .../prediction/controller/PersistentModel.scala |  112 --
 .../io/prediction/controller/SanityCheck.scala  |   30 -
 .../scala/io/prediction/controller/Utils.scala  |   69 -
 .../java/JavaEngineParamsGenerator.scala        |   39 -
 .../controller/java/JavaEvaluation.scala        |   66 -
 .../controller/java/LJavaAlgorithm.scala        |   31 -
 .../controller/java/LJavaDataSource.scala       |   31 -
 .../controller/java/LJavaPreparator.scala       |   29 -
 .../controller/java/LJavaServing.scala          |   26 -
 .../controller/java/P2LJavaAlgorithm.scala      |   33 -
 .../controller/java/PJavaAlgorithm.scala        |   28 -
 .../controller/java/PJavaDataSource.scala       |   28 -
 .../controller/java/PJavaPreparator.scala       |   26 -
 .../java/SerializableComparator.scala           |   20 -
 .../io/prediction/controller/package.scala      |  168 ---
 .../scala/io/prediction/core/AbstractDoer.scala |   66 -
 .../io/prediction/core/BaseAlgorithm.scala      |  123 --
 .../io/prediction/core/BaseDataSource.scala     |   52 -
 .../scala/io/prediction/core/BaseEngine.scala   |  100 --
 .../io/prediction/core/BaseEvaluator.scala      |   72 -
 .../io/prediction/core/BasePreparator.scala     |   42 -
 .../scala/io/prediction/core/BaseServing.scala  |   51 -
 .../main/scala/io/prediction/core/package.scala |   21 -
 core/src/main/scala/io/prediction/package.scala |   19 -
 .../io/prediction/workflow/CoreWorkflow.scala   |  163 ---
 .../io/prediction/workflow/CreateServer.scala   |  737 ----------
 .../io/prediction/workflow/CreateWorkflow.scala |  274 ----
 .../workflow/EngineServerPlugin.scala           |   40 -
 .../workflow/EngineServerPluginContext.scala    |   88 --
 .../workflow/EngineServerPluginsActor.scala     |   46 -
 .../workflow/EvaluationWorkflow.scala           |   42 -
 .../io/prediction/workflow/FakeWorkflow.scala   |  106 --
 .../io/prediction/workflow/JsonExtractor.scala  |  164 ---
 .../workflow/JsonExtractorOption.scala          |   23 -
 .../workflow/PersistentModelManifest.scala      |   18 -
 .../scala/io/prediction/workflow/Workflow.scala |  135 --
 .../prediction/workflow/WorkflowContext.scala   |   45 -
 .../io/prediction/workflow/WorkflowParams.scala |   42 -
 .../io/prediction/workflow/WorkflowUtils.scala  |  419 ------
 .../controller/CustomQuerySerializer.scala      |   37 +
 .../predictionio/controller/Deployment.scala    |   56 +
 .../apache/predictionio/controller/Engine.scala |  829 ++++++++++++
 .../predictionio/controller/EngineFactory.scala |   41 +
 .../predictionio/controller/EngineParams.scala  |  149 ++
 .../controller/EngineParamsGenerator.scala      |   43 +
 .../predictionio/controller/Evaluation.scala    |  122 ++
 .../controller/FastEvalEngine.scala             |  343 +++++
 .../controller/IdentityPreparator.scala         |   92 ++
 .../predictionio/controller/LAlgorithm.scala    |  130 ++
 .../controller/LAverageServing.scala            |   41 +
 .../predictionio/controller/LDataSource.scala   |   67 +
 .../predictionio/controller/LFirstServing.scala |   39 +
 .../predictionio/controller/LPreparator.scala   |   46 +
 .../predictionio/controller/LServing.scala      |   52 +
 .../LocalFileSystemPersistentModel.scala        |   74 +
 .../apache/predictionio/controller/Metric.scala |  266 ++++
 .../controller/MetricEvaluator.scala            |  260 ++++
 .../predictionio/controller/P2LAlgorithm.scala  |  121 ++
 .../predictionio/controller/PAlgorithm.scala    |  126 ++
 .../predictionio/controller/PDataSource.scala   |   57 +
 .../predictionio/controller/PPreparator.scala   |   44 +
 .../apache/predictionio/controller/Params.scala |   31 +
 .../controller/PersistentModel.scala            |  112 ++
 .../predictionio/controller/SanityCheck.scala   |   30 +
 .../apache/predictionio/controller/Utils.scala  |   69 +
 .../java/JavaEngineParamsGenerator.scala        |   39 +
 .../controller/java/JavaEvaluation.scala        |   66 +
 .../controller/java/LJavaAlgorithm.scala        |   31 +
 .../controller/java/LJavaDataSource.scala       |   31 +
 .../controller/java/LJavaPreparator.scala       |   29 +
 .../controller/java/LJavaServing.scala          |   26 +
 .../controller/java/P2LJavaAlgorithm.scala      |   33 +
 .../controller/java/PJavaAlgorithm.scala        |   28 +
 .../controller/java/PJavaDataSource.scala       |   28 +
 .../controller/java/PJavaPreparator.scala       |   26 +
 .../java/SerializableComparator.scala           |   20 +
 .../predictionio/controller/package.scala       |  168 +++
 .../apache/predictionio/core/AbstractDoer.scala |   66 +
 .../predictionio/core/BaseAlgorithm.scala       |  123 ++
 .../predictionio/core/BaseDataSource.scala      |   52 +
 .../apache/predictionio/core/BaseEngine.scala   |  100 ++
 .../predictionio/core/BaseEvaluator.scala       |   72 +
 .../predictionio/core/BasePreparator.scala      |   42 +
 .../apache/predictionio/core/BaseServing.scala  |   51 +
 .../org/apache/predictionio/core/package.scala  |   21 +
 .../scala/org/apache/predictionio/package.scala |   19 +
 .../predictionio/workflow/CoreWorkflow.scala    |  163 +++
 .../predictionio/workflow/CreateServer.scala    |  737 ++++++++++
 .../predictionio/workflow/CreateWorkflow.scala  |  274 ++++
 .../workflow/EngineServerPlugin.scala           |   40 +
 .../workflow/EngineServerPluginContext.scala    |   88 ++
 .../workflow/EngineServerPluginsActor.scala     |   46 +
 .../workflow/EvaluationWorkflow.scala           |   42 +
 .../predictionio/workflow/FakeWorkflow.scala    |  106 ++
 .../predictionio/workflow/JsonExtractor.scala   |  164 +++
 .../workflow/JsonExtractorOption.scala          |   23 +
 .../workflow/PersistentModelManifest.scala      |   18 +
 .../apache/predictionio/workflow/Workflow.scala |  135 ++
 .../predictionio/workflow/WorkflowContext.scala |   45 +
 .../predictionio/workflow/WorkflowParams.scala  |   42 +
 .../predictionio/workflow/WorkflowUtils.scala   |  419 ++++++
 .../controller/metric_evaluator.scala.html      |   95 --
 .../io/prediction/workflow/index.scala.html     |   92 --
 .../controller/metric_evaluator.scala.html      |   95 ++
 .../predictionio/workflow/index.scala.html      |   92 ++
 .../java/io/prediction/workflow/JavaParams.java |   30 -
 .../java/io/prediction/workflow/JavaQuery.java  |   46 -
 .../workflow/JavaQueryTypeAdapterFactory.java   |   60 -
 .../predictionio/workflow/JavaParams.java       |   30 +
 .../apache/predictionio/workflow/JavaQuery.java |   46 +
 .../workflow/JavaQueryTypeAdapterFactory.java   |   60 +
 .../io/prediction/controller/EngineTest.scala   |  615 ---------
 .../prediction/controller/EvaluationTest.scala  |   46 -
 .../prediction/controller/EvaluatorTest.scala   |   93 --
 .../controller/FastEvalEngineTest.scala         |  181 ---
 .../controller/MetricEvaluatorTest.scala        |   52 -
 .../io/prediction/controller/MetricTest.scala   |  143 --
 .../io/prediction/controller/SampleEngine.scala |  472 -------
 .../scala/io/prediction/workflow/BaseTest.scala |   75 -
 .../workflow/EngineWorkflowTest.scala           |    0
 .../workflow/EvaluationWorkflowTest.scala       |   61 -
 .../workflow/JsonExtractorSuite.scala           |  383 ------
 .../predictionio/controller/EngineTest.scala    |  615 +++++++++
 .../controller/EvaluationTest.scala             |   46 +
 .../predictionio/controller/EvaluatorTest.scala |   93 ++
 .../controller/FastEvalEngineTest.scala         |  181 +++
 .../controller/MetricEvaluatorTest.scala        |   52 +
 .../predictionio/controller/MetricTest.scala    |  143 ++
 .../predictionio/controller/SampleEngine.scala  |  472 +++++++
 .../apache/predictionio/workflow/BaseTest.scala |   75 +
 .../workflow/EngineWorkflowTest.scala           |    0
 .../workflow/EvaluationWorkflowTest.scala       |   61 +
 .../workflow/JsonExtractorSuite.scala           |  383 ++++++
 data/README.md                                  |   18 +-
 .../main/scala/io/prediction/data/Utils.scala   |   50 -
 .../scala/io/prediction/data/api/Common.scala   |   80 --
 .../io/prediction/data/api/EventInfo.scala      |   24 -
 .../io/prediction/data/api/EventServer.scala    |  640 ---------
 .../prediction/data/api/EventServerPlugin.scala |   33 -
 .../data/api/EventServerPluginContext.scala     |   49 -
 .../io/prediction/data/api/PluginsActor.scala   |   52 -
 .../scala/io/prediction/data/api/Stats.scala    |   79 --
 .../io/prediction/data/api/StatsActor.scala     |   74 -
 .../scala/io/prediction/data/api/Webhooks.scala |  151 ---
 .../data/api/WebhooksConnectors.scala           |   34 -
 .../main/scala/io/prediction/data/package.scala |   21 -
 .../io/prediction/data/storage/AccessKeys.scala |   71 -
 .../scala/io/prediction/data/storage/Apps.scala |   58 -
 .../io/prediction/data/storage/BiMap.scala      |  164 ---
 .../io/prediction/data/storage/Channels.scala   |   79 --
 .../io/prediction/data/storage/DataMap.scala    |  241 ----
 .../data/storage/DateTimeJson4sSupport.scala    |   47 -
 .../data/storage/EngineInstances.scala          |  177 ---
 .../data/storage/EngineManifests.scala          |  117 --
 .../io/prediction/data/storage/EntityMap.scala  |   98 --
 .../data/storage/EvaluationInstances.scala      |  135 --
 .../io/prediction/data/storage/Event.scala      |  164 ---
 .../data/storage/EventJson4sSupport.scala       |  236 ----
 .../data/storage/LEventAggregator.scala         |  145 --
 .../io/prediction/data/storage/LEvents.scala    |  489 -------
 .../io/prediction/data/storage/Models.scala     |   80 --
 .../data/storage/PEventAggregator.scala         |  209 ---
 .../io/prediction/data/storage/PEvents.scala    |  182 ---
 .../prediction/data/storage/PropertyMap.scala   |   96 --
 .../io/prediction/data/storage/Storage.scala    |  403 ------
 .../io/prediction/data/storage/Utils.scala      |   47 -
 .../storage/elasticsearch/ESAccessKeys.scala    |  116 --
 .../data/storage/elasticsearch/ESApps.scala     |  127 --
 .../data/storage/elasticsearch/ESChannels.scala |  114 --
 .../elasticsearch/ESEngineInstances.scala       |  155 ---
 .../elasticsearch/ESEngineManifests.scala       |   81 --
 .../elasticsearch/ESEvaluationInstances.scala   |  133 --
 .../storage/elasticsearch/ESSequences.scala     |   61 -
 .../data/storage/elasticsearch/ESUtils.scala    |   45 -
 .../storage/elasticsearch/StorageClient.scala   |   47 -
 .../data/storage/elasticsearch/package.scala    |   22 -
 .../data/storage/hbase/HBEventsUtil.scala       |  412 ------
 .../data/storage/hbase/HBLEvents.scala          |  192 ---
 .../data/storage/hbase/HBPEvents.scala          |  112 --
 .../data/storage/hbase/PIOHBaseUtil.scala       |   28 -
 .../data/storage/hbase/StorageClient.scala      |   83 --
 .../prediction/data/storage/hbase/package.scala |   22 -
 .../data/storage/hbase/upgrade/HB_0_8_0.scala   |  190 ---
 .../data/storage/hbase/upgrade/Upgrade.scala    |   72 -
 .../storage/hbase/upgrade/Upgrade_0_8_3.scala   |  221 ---
 .../data/storage/hdfs/HDFSModels.scala          |   60 -
 .../data/storage/hdfs/StorageClient.scala       |   33 -
 .../prediction/data/storage/hdfs/package.scala  |   22 -
 .../data/storage/jdbc/JDBCAccessKeys.scala      |   84 --
 .../prediction/data/storage/jdbc/JDBCApps.scala |   86 --
 .../data/storage/jdbc/JDBCChannels.scala        |   66 -
 .../data/storage/jdbc/JDBCEngineInstances.scala |  194 ---
 .../data/storage/jdbc/JDBCEngineManifests.scala |  111 --
 .../storage/jdbc/JDBCEvaluationInstances.scala  |  162 ---
 .../data/storage/jdbc/JDBCLEvents.scala         |  241 ----
 .../data/storage/jdbc/JDBCModels.scala          |   52 -
 .../data/storage/jdbc/JDBCPEvents.scala         |  160 ---
 .../data/storage/jdbc/JDBCUtils.scala           |  103 --
 .../data/storage/jdbc/StorageClient.scala       |   50 -
 .../prediction/data/storage/jdbc/package.scala  |   23 -
 .../data/storage/localfs/LocalFSModels.scala    |   59 -
 .../data/storage/localfs/StorageClient.scala    |   43 -
 .../data/storage/localfs/package.scala          |   22 -
 .../io/prediction/data/storage/package.scala    |   26 -
 .../scala/io/prediction/data/store/Common.scala |   50 -
 .../io/prediction/data/store/LEventStore.scala  |  142 --
 .../io/prediction/data/store/PEventStore.scala  |  116 --
 .../data/store/java/LJavaEventStore.scala       |  142 --
 .../data/store/java/OptionHelper.scala          |   29 -
 .../data/store/java/PJavaEventStore.scala       |  109 --
 .../io/prediction/data/store/package.scala      |   21 -
 .../io/prediction/data/view/DataView.scala      |  110 --
 .../io/prediction/data/view/LBatchView.scala    |  200 ---
 .../io/prediction/data/view/PBatchView.scala    |  209 ---
 .../io/prediction/data/view/QuickTest.scala     |   94 --
 .../data/webhooks/ConnectorException.scala      |   31 -
 .../data/webhooks/ConnectorUtil.scala           |   46 -
 .../data/webhooks/FormConnector.scala           |   32 -
 .../data/webhooks/JsonConnector.scala           |   31 -
 .../exampleform/ExampleFormConnector.scala      |  123 --
 .../examplejson/ExampleJsonConnector.scala      |  153 ---
 .../webhooks/mailchimp/MailChimpConnector.scala |  305 -----
 .../webhooks/segmentio/SegmentIOConnector.scala |  306 -----
 .../org/apache/predictionio/data/Utils.scala    |   50 +
 .../apache/predictionio/data/api/Common.scala   |   80 ++
 .../predictionio/data/api/EventInfo.scala       |   24 +
 .../predictionio/data/api/EventServer.scala     |  640 +++++++++
 .../data/api/EventServerPlugin.scala            |   33 +
 .../data/api/EventServerPluginContext.scala     |   49 +
 .../predictionio/data/api/PluginsActor.scala    |   52 +
 .../apache/predictionio/data/api/Stats.scala    |   79 ++
 .../predictionio/data/api/StatsActor.scala      |   74 +
 .../apache/predictionio/data/api/Webhooks.scala |  151 +++
 .../data/api/WebhooksConnectors.scala           |   34 +
 .../org/apache/predictionio/data/package.scala  |   21 +
 .../predictionio/data/storage/AccessKeys.scala  |   71 +
 .../apache/predictionio/data/storage/Apps.scala |   58 +
 .../predictionio/data/storage/BiMap.scala       |  164 +++
 .../predictionio/data/storage/Channels.scala    |   79 ++
 .../predictionio/data/storage/DataMap.scala     |  241 ++++
 .../data/storage/DateTimeJson4sSupport.scala    |   47 +
 .../data/storage/EngineInstances.scala          |  177 +++
 .../data/storage/EngineManifests.scala          |  117 ++
 .../predictionio/data/storage/EntityMap.scala   |   98 ++
 .../data/storage/EvaluationInstances.scala      |  135 ++
 .../predictionio/data/storage/Event.scala       |  164 +++
 .../data/storage/EventJson4sSupport.scala       |  236 ++++
 .../data/storage/LEventAggregator.scala         |  145 ++
 .../predictionio/data/storage/LEvents.scala     |  489 +++++++
 .../predictionio/data/storage/Models.scala      |   80 ++
 .../data/storage/PEventAggregator.scala         |  209 +++
 .../predictionio/data/storage/PEvents.scala     |  182 +++
 .../predictionio/data/storage/PropertyMap.scala |   96 ++
 .../predictionio/data/storage/Storage.scala     |  403 ++++++
 .../predictionio/data/storage/Utils.scala       |   47 +
 .../storage/elasticsearch/ESAccessKeys.scala    |  116 ++
 .../data/storage/elasticsearch/ESApps.scala     |  127 ++
 .../data/storage/elasticsearch/ESChannels.scala |  114 ++
 .../elasticsearch/ESEngineInstances.scala       |  155 +++
 .../elasticsearch/ESEngineManifests.scala       |   81 ++
 .../elasticsearch/ESEvaluationInstances.scala   |  133 ++
 .../storage/elasticsearch/ESSequences.scala     |   61 +
 .../data/storage/elasticsearch/ESUtils.scala    |   45 +
 .../storage/elasticsearch/StorageClient.scala   |   47 +
 .../data/storage/elasticsearch/package.scala    |   22 +
 .../data/storage/hbase/HBEventsUtil.scala       |  412 ++++++
 .../data/storage/hbase/HBLEvents.scala          |  192 +++
 .../data/storage/hbase/HBPEvents.scala          |  112 ++
 .../data/storage/hbase/PIOHBaseUtil.scala       |   28 +
 .../data/storage/hbase/StorageClient.scala      |   83 ++
 .../data/storage/hbase/package.scala            |   22 +
 .../data/storage/hbase/upgrade/HB_0_8_0.scala   |  190 +++
 .../data/storage/hbase/upgrade/Upgrade.scala    |   72 +
 .../storage/hbase/upgrade/Upgrade_0_8_3.scala   |  221 +++
 .../data/storage/hdfs/HDFSModels.scala          |   60 +
 .../data/storage/hdfs/StorageClient.scala       |   33 +
 .../data/storage/hdfs/package.scala             |   22 +
 .../data/storage/jdbc/JDBCAccessKeys.scala      |   84 ++
 .../data/storage/jdbc/JDBCApps.scala            |   86 ++
 .../data/storage/jdbc/JDBCChannels.scala        |   66 +
 .../data/storage/jdbc/JDBCEngineInstances.scala |  194 +++
 .../data/storage/jdbc/JDBCEngineManifests.scala |  111 ++
 .../storage/jdbc/JDBCEvaluationInstances.scala  |  162 +++
 .../data/storage/jdbc/JDBCLEvents.scala         |  241 ++++
 .../data/storage/jdbc/JDBCModels.scala          |   52 +
 .../data/storage/jdbc/JDBCPEvents.scala         |  160 +++
 .../data/storage/jdbc/JDBCUtils.scala           |  103 ++
 .../data/storage/jdbc/StorageClient.scala       |   50 +
 .../data/storage/jdbc/package.scala             |   23 +
 .../data/storage/localfs/LocalFSModels.scala    |   59 +
 .../data/storage/localfs/StorageClient.scala    |   43 +
 .../data/storage/localfs/package.scala          |   22 +
 .../predictionio/data/storage/package.scala     |   26 +
 .../apache/predictionio/data/store/Common.scala |   50 +
 .../predictionio/data/store/LEventStore.scala   |  142 ++
 .../predictionio/data/store/PEventStore.scala   |  116 ++
 .../data/store/java/LJavaEventStore.scala       |  142 ++
 .../data/store/java/OptionHelper.scala          |   29 +
 .../data/store/java/PJavaEventStore.scala       |  109 ++
 .../predictionio/data/store/package.scala       |   21 +
 .../predictionio/data/view/DataView.scala       |  110 ++
 .../predictionio/data/view/LBatchView.scala     |  200 +++
 .../predictionio/data/view/PBatchView.scala     |  209 +++
 .../predictionio/data/view/QuickTest.scala      |   94 ++
 .../data/webhooks/ConnectorException.scala      |   31 +
 .../data/webhooks/ConnectorUtil.scala           |   46 +
 .../data/webhooks/FormConnector.scala           |   32 +
 .../data/webhooks/JsonConnector.scala           |   31 +
 .../exampleform/ExampleFormConnector.scala      |  123 ++
 .../examplejson/ExampleJsonConnector.scala      |  153 +++
 .../webhooks/mailchimp/MailChimpConnector.scala |  305 +++++
 .../webhooks/segmentio/SegmentIOConnector.scala |  306 +++++
 data/src/test/resources/application.conf        |    2 +-
 .../prediction/data/api/EventServiceSpec.scala  |   68 -
 .../prediction/data/api/SegmentIOAuthSpec.scala |  175 ---
 .../io/prediction/data/storage/BiMapSpec.scala  |  196 ---
 .../prediction/data/storage/DataMapSpec.scala   |  243 ----
 .../data/storage/LEventAggregatorSpec.scala     |  103 --
 .../prediction/data/storage/LEventsSpec.scala   |  245 ----
 .../data/storage/PEventAggregatorSpec.scala     |   72 -
 .../prediction/data/storage/PEventsSpec.scala   |  210 ---
 .../data/storage/StorageTestUtils.scala         |   42 -
 .../io/prediction/data/storage/TestEvents.scala |  263 ----
 .../data/webhooks/ConnectorTestUtil.scala       |   47 -
 .../exampleform/ExampleFormConnectorSpec.scala  |  164 ---
 .../examplejson/ExampleJsonConnectorSpec.scala  |  179 ---
 .../mailchimp/MailChimpConnectorSpec.scala      |  254 ----
 .../segmentio/SegmentIOConnectorSpec.scala      |  335 -----
 .../data/api/EventServiceSpec.scala             |   68 +
 .../data/api/SegmentIOAuthSpec.scala            |  175 +++
 .../predictionio/data/storage/BiMapSpec.scala   |  196 +++
 .../predictionio/data/storage/DataMapSpec.scala |  243 ++++
 .../data/storage/LEventAggregatorSpec.scala     |  103 ++
 .../predictionio/data/storage/LEventsSpec.scala |  245 ++++
 .../data/storage/PEventAggregatorSpec.scala     |   72 +
 .../predictionio/data/storage/PEventsSpec.scala |  210 +++
 .../data/storage/StorageTestUtils.scala         |   42 +
 .../predictionio/data/storage/TestEvents.scala  |  263 ++++
 .../data/webhooks/ConnectorTestUtil.scala       |   47 +
 .../exampleform/ExampleFormConnectorSpec.scala  |  164 +++
 .../examplejson/ExampleJsonConnectorSpec.scala  |  179 +++
 .../mailchimp/MailChimpConnectorSpec.scala      |  254 ++++
 .../segmentio/SegmentIOConnectorSpec.scala      |  335 +++++
 docs/javadoc/javadoc-overview.html              |    8 +-
 .../manual/obsolete/cookbook/evaluation.html.md |    4 +-
 .../cookbook/filteringunavailable.html.md       |    6 +-
 docs/manual/obsolete/engines/index.html.md      |    4 +-
 .../obsolete/engines/itemrank/index.html.md     |   10 +-
 .../obsolete/engines/itemrec/index.html.md      |    2 +-
 .../obsolete/engines/itemsim/index.html.md      |    2 +-
 .../enginebuilders/local-helloworld.html.md     |    6 +-
 .../stepbystep/combiningalgorithms.html.md      |   10 +-
 .../stepbystep/dataalgorithm.html.md            |   12 +-
 .../stepbystep/evaluation.html.md               |   12 +-
 .../stepbystep/testcomponents.html.md           |    4 +-
 .../tutorials/engines/quickstart.html.md        |   22 +-
 .../tutorials/recommendation/movielens.html.md  |    6 +-
 .../tutorials/recommendation/yelp.html.md       |   10 +-
 .../source/community/contribute-webhook.html.md |   24 +-
 docs/manual/source/customize/dase.html.md.erb   |   14 +-
 .../source/customize/troubleshooting.html.md    |   14 +-
 .../source/datacollection/eventapi.html.md      |    2 +-
 .../datacollection/eventmodel.html.md.erb       |   10 +-
 .../source/demo/textclassification.html.md      |    6 +-
 .../dimensionalityreduction.html.md             |    6 +-
 .../shared/install/_dependent_services.html.erb |    4 +-
 docs/manual/source/resources/faq.html.md        |    4 +-
 .../source/resources/intellij.html.md.erb       |    6 +-
 docs/manual/source/resources/upgrade.html.md    |   20 +-
 docs/manual/source/sdk/java.html.md.erb         |    2 +-
 docs/manual/source/sdk/php.html.md.erb          |    2 +-
 .../source/system/anotherdatastore.html.md      |    4 +-
 .../classification/quickstart.html.md.erb       |    6 +-
 .../quickstart.html.md.erb                      |    6 +-
 .../quickstart.html.md.erb                      |    6 +-
 .../quickstart.html.md.erb                      |    6 +-
 .../leadscoring/quickstart.html.md.erb          |    6 +-
 .../productranking/quickstart.html.md.erb       |    6 +-
 .../recommendation/batch-evaluator.html.md      |   24 +-
 .../recommendation/customize-data-prep.html.md  |    2 +-
 .../recommendation/customize-serving.html.md    |    4 +-
 .../recommendation/quickstart.html.md.erb       |    6 +-
 .../multi-events-multi-algos.html.md.erb        |    2 +-
 .../similarproduct/quickstart.html.md.erb       |    6 +-
 .../templates/vanilla/quickstart.html.md.erb    |    2 +-
 docs/scaladoc/rootdoc.txt                       |    4 +-
 .../prediction/e2/engine/BinaryVectorizer.scala |   61 -
 .../e2/engine/CategoricalNaiveBayes.scala       |  176 ---
 .../io/prediction/e2/engine/MarkovChain.scala   |   89 --
 .../e2/evaluation/CrossValidation.scala         |   64 -
 .../main/scala/io/prediction/e2/package.scala   |   22 -
 e2/src/main/scala/io/prediction/package.scala   |   21 -
 .../e2/engine/BinaryVectorizer.scala            |   61 +
 .../e2/engine/CategoricalNaiveBayes.scala       |  176 +++
 .../predictionio/e2/engine/MarkovChain.scala    |   89 ++
 .../e2/evaluation/CrossValidation.scala         |   64 +
 .../org/apache/predictionio/e2/package.scala    |   22 +
 .../scala/org/apache/predictionio/package.scala |   21 +
 .../e2/engine/BinaryVectorizerTest.scala        |   56 -
 .../e2/engine/CategoricalNaiveBayesTest.scala   |  132 --
 .../prediction/e2/engine/MarkovChainTest.scala  |   49 -
 .../e2/evaluation/CrossValidationTest.scala     |  111 --
 .../e2/fixture/BinaryVectorizerFixture.scala    |   59 -
 .../e2/fixture/MarkovChainFixture.scala         |   39 -
 .../e2/fixture/NaiveBayesFixture.scala          |   48 -
 .../e2/fixture/SharedSparkContext.scala         |   51 -
 .../e2/engine/BinaryVectorizerTest.scala        |   56 +
 .../e2/engine/CategoricalNaiveBayesTest.scala   |  132 ++
 .../e2/engine/MarkovChainTest.scala             |   49 +
 .../e2/evaluation/CrossValidationTest.scala     |  111 ++
 .../e2/fixture/BinaryVectorizerFixture.scala    |   59 +
 .../e2/fixture/MarkovChainFixture.scala         |   39 +
 .../e2/fixture/NaiveBayesFixture.scala          |   48 +
 .../e2/fixture/SharedSparkContext.scala         |   51 +
 .../java-local-helloworld/MyAlgorithm.java      |    2 +-
 .../java-local-helloworld/MyDataSource.java     |    2 +-
 .../java-local-helloworld/MyEngineFactory.java  |    2 +-
 .../java-local-helloworld/build.sbt             |    2 +-
 .../java-local-regression/README.md             |    2 +-
 .../java-local-regression/build.sbt             |    2 +-
 .../java-local-regression/engine.json           |    2 +-
 .../src/main/java/DataSource.java               |    4 +-
 .../src/main/java/DataSourceParams.java         |    4 +-
 .../src/main/java/DefaultAlgorithm.java         |    4 +-
 .../src/main/java/DefaultAlgorithmParams.java   |    4 +-
 .../src/main/java/EngineFactory.java            |    8 +-
 .../src/main/java/MeanSquareEvaluator.java      |    6 +-
 .../src/main/java/OLSAlgorithm.java             |    6 +-
 .../src/main/java/Preparator.java               |    4 +-
 .../src/main/java/PreparatorParams.java         |    4 +-
 .../src/main/java/Run.java                      |   22 +-
 .../src/main/java/Serving.java                  |    6 +-
 .../src/main/java/TrainingData.java             |    2 +-
 .../src/main/java/examples/manifest.json        |    4 +-
 .../experimental/java-local-tutorial/build.sbt  |    6 +-
 .../recommendations/tutorial1/AlgoParams.java   |    4 +-
 .../recommendations/tutorial1/Algorithm.java    |    4 +-
 .../recommendations/tutorial1/DataSource.java   |    4 +-
 .../tutorial1/DataSourceParams.java             |    4 +-
 .../tutorial1/EngineFactory.java                |    8 +-
 .../java/recommendations/tutorial1/Model.java   |    2 +-
 .../java/recommendations/tutorial1/Query.java   |    2 +-
 .../recommendations/tutorial1/TrainingData.java |    2 +-
 .../java/recommendations/tutorial1/engine.json  |    4 +-
 .../java/recommendations/tutorial2/Runner1.java |   26 +-
 .../java/recommendations/tutorial2/Runner2.java |   36 +-
 .../recommendations/tutorial3/DataSource.java   |   10 +-
 .../tutorial3/EngineFactory.java                |   14 +-
 .../recommendations/tutorial3/Evaluator.java    |    8 +-
 .../java/recommendations/tutorial3/Runner3.java |   22 +-
 .../CollaborativeFilteringAlgorithm.java        |    4 +-
 .../CollaborativeFilteringAlgorithmParams.java  |    4 +-
 .../tutorial4/CollaborativeFilteringModel.java  |    2 +-
 .../recommendations/tutorial4/DataSource.java   |    6 +-
 .../tutorial4/DataSourceParams.java             |    4 +-
 .../tutorial4/EngineFactory.java                |   10 +-
 .../tutorial4/FeatureBasedAlgorithm.java        |    4 +-
 .../tutorial4/FeatureBasedAlgorithmParams.java  |    4 +-
 .../tutorial4/FeatureBasedModel.java            |    2 +-
 .../recommendations/tutorial4/Preparator.java   |    6 +-
 .../recommendations/tutorial4/PreparedData.java |    2 +-
 .../java/recommendations/tutorial4/Query.java   |    2 +-
 .../recommendations/tutorial4/Runner4a.java     |   20 +-
 .../recommendations/tutorial4/Runner4b.java     |   20 +-
 .../recommendations/tutorial4/Runner4c.java     |   24 +-
 .../recommendations/tutorial4/Runner4d.java     |   24 +-
 .../java/recommendations/tutorial4/Serving.java |    6 +-
 .../tutorial4/SingleEngineFactory.java          |   12 +-
 .../recommendations/tutorial4/TrainingData.java |    2 +-
 .../tutorial4/multiple-algo-engine.json         |    4 +-
 .../tutorial4/single-algo-engine.json           |    4 +-
 .../tutorial5/EngineFactory.java                |   14 +-
 .../tutorial5/MahoutAlgoModel.java              |    2 +-
 .../tutorial5/MahoutAlgoParams.java             |    4 +-
 .../tutorial5/MahoutAlgorithm.java              |   12 +-
 .../java/recommendations/tutorial5/Runner5.java |   22 +-
 .../recommendations/tutorial5/manifest.json     |    4 +-
 .../java-parallel-helloworld/build.sbt          |    2 +-
 .../java-parallel-helloworld/engine.json        |    2 +-
 .../src/main/java/parallel/Algorithm.java       |    6 +-
 .../src/main/java/parallel/DataSource.java      |    6 +-
 .../src/main/java/parallel/EngineFactory.java   |   10 +-
 .../src/main/java/parallel/Model.java           |    2 +-
 .../src/main/java/parallel/Preparator.java      |    6 +-
 .../src/main/java/parallel/Query.java           |    2 +-
 .../src/main/java/parallel/Runner.java          |   20 +-
 .../src/main/java/parallel/Serving.java         |    6 +-
 .../src/main/java/parallel/build.sbt            |    6 +-
 .../experimental/scala-cleanup-app/build.sbt    |    4 +-
 .../experimental/scala-cleanup-app/engine.json  |    2 +-
 .../src/main/scala/Algorithm.scala              |    6 +-
 .../src/main/scala/DataSource.scala             |   16 +-
 .../src/main/scala/Engine.scala                 |    8 +-
 .../src/main/scala/Preparator.scala             |    6 +-
 .../src/main/scala/Serving.scala                |    6 +-
 .../scala-local-friend-recommendation/build.sbt |    6 +-
 .../keyword_similarity_engine.json              |    4 +-
 .../random_engine.json                          |    4 +-
 .../scala/FriendRecommendationAlgoParams.scala  |    4 +-
 .../scala/FriendRecommendationDataSource.scala  |    4 +-
 .../FriendRecommendationDataSourceParams.scala  |    4 +-
 .../scala/FriendRecommendationPrediction.scala  |    2 +-
 .../main/scala/FriendRecommendationQuery.scala  |    2 +-
 .../FriendRecommendationTrainingData.scala      |    2 +-
 .../main/scala/KeywordSimilarityAlgorithm.scala |    4 +-
 .../scala/KeywordSimilarityEngineFactory.scala  |    4 +-
 .../src/main/scala/KeywordSimilarityModel.scala |    2 +-
 .../src/main/scala/RandomAlgorithm.scala        |    4 +-
 .../src/main/scala/RandomEngineFactory.scala    |    4 +-
 .../src/main/scala/RandomModel.scala            |    2 +-
 .../scala-local-helloworld/HelloWorld.scala     |    2 +-
 .../scala-local-helloworld/build.sbt            |    4 +-
 .../scala-local-movielens-evaluation/build.sbt  |    4 +-
 .../src/main/scala/Evaluation.scala             |   28 +-
 .../src/main/scala/ItemRecEvaluation.scala      |   28 +-
 .../scala-local-movielens-filtering/build.sbt   |    4 +-
 .../src/main/scala/Engine.scala                 |   10 +-
 .../src/main/scala/Filtering.scala              |    8 +-
 .../scala-local-regression/README.md            |    4 +-
 .../scala-local-regression/Run.scala            |   30 +-
 .../scala-local-regression/build.sbt            |    4 +-
 .../scala-local-regression/engine.json          |    2 +-
 .../README.md                                   |    2 +-
 .../build.sbt                                   |    4 +-
 .../engine-forest.json                          |    2 +-
 .../engine.json                                 |    2 +-
 .../src/main/scala/DataSource.scala             |   14 +-
 .../src/main/scala/DeltaSimRankRDD.scala        |    2 +-
 .../src/main/scala/Engine.scala                 |    6 +-
 .../src/main/scala/Preparator.scala             |   10 +-
 .../src/main/scala/Sampling.scala               |    2 +-
 .../src/main/scala/Serving.scala                |    6 +-
 .../src/main/scala/SimRankAlgorithm.scala       |    6 +-
 .../scala-parallel-recommendation-cat/build.sbt |    4 +-
 .../src/main/scala/ALSAlgorithm.scala           |   10 +-
 .../src/main/scala/DataSource.scala             |   12 +-
 .../src/main/scala/Engine.scala                 |    4 +-
 .../src/main/scala/Preparator.scala             |    2 +-
 .../src/main/scala/Serving.scala                |    2 +-
 .../build.sbt                                   |    4 +-
 .../src/main/scala/ALSAlgorithm.scala           |    6 +-
 .../src/main/scala/ALSModel.scala               |    6 +-
 .../src/main/scala/DataSource.scala             |   12 +-
 .../src/main/scala/Engine.scala                 |    4 +-
 .../src/main/scala/Preparator.scala             |    2 +-
 .../src/main/scala/Serving.scala                |    2 +-
 .../build.sbt                                   |    4 +-
 .../src/main/scala/ALSAlgorithm.scala           |    4 +-
 .../src/main/scala/ALSModel.scala               |    6 +-
 .../src/main/scala/DataSource.scala             |   14 +-
 .../src/main/scala/Engine.scala                 |    4 +-
 .../src/main/scala/Preparator.scala             |    4 +-
 .../src/main/scala/Serving.scala                |    2 +-
 .../build.sbt                                   |    4 +-
 .../src/main/scala/ALSAlgorithm.scala           |    6 +-
 .../src/main/scala/ALSModel.scala               |    6 +-
 .../src/main/scala/DataSource.scala             |   12 +-
 .../src/main/scala/Engine.scala                 |    4 +-
 .../src/main/scala/Preparator.scala             |    2 +-
 .../src/main/scala/Serving.scala                |    2 +-
 .../scala-parallel-regression/Run.scala         |   24 +-
 .../scala-parallel-regression/build.sbt         |    2 +-
 .../scala-parallel-regression/engine.json       |    2 +-
 .../build.sbt                                   |    4 +-
 .../src/main/scala/DIMSUMAlgorithm.scala        |   10 +-
 .../src/main/scala/DataSource.scala             |   12 +-
 .../src/main/scala/Engine.scala                 |    4 +-
 .../src/main/scala/Preparator.scala             |    2 +-
 .../src/main/scala/Serving.scala                |    2 +-
 .../build.sbt                                   |    4 +-
 .../src/main/scala/ALSAlgorithm.scala           |   10 +-
 .../src/main/scala/DataSource.scala             |   12 +-
 .../src/main/scala/Engine.scala                 |    4 +-
 .../src/main/scala/Preparator.scala             |    2 +-
 .../src/main/scala/Serving.scala                |    2 +-
 .../scala-parallel-trim-app/build.sbt           |    4 +-
 .../scala-parallel-trim-app/engine.json         |    2 +-
 .../src/main/scala/Algorithm.scala              |    6 +-
 .../src/main/scala/DataSource.scala             |   16 +-
 .../src/main/scala/Engine.scala                 |    8 +-
 .../src/main/scala/Preparator.scala             |    6 +-
 .../src/main/scala/Serving.scala                |    6 +-
 .../scala-recommendations/build.sbt             |    4 +-
 .../src/main/scala/Run.scala                    |   24 +-
 .../experimental/scala-refactor-test/build.sbt  |    6 +-
 .../src/main/scala/Algorithm.scala              |    4 +-
 .../src/main/scala/DataSource.scala             |   14 +-
 .../src/main/scala/Engine.scala                 |    8 +-
 .../src/main/scala/Evaluator.scala              |    6 +-
 .../src/main/scala/Preparator.scala             |    4 +-
 .../src/main/scala/Serving.scala                |    2 +-
 examples/experimental/scala-stock/README.md     |   10 +-
 examples/experimental/scala-stock/build.sbt     |    6 +-
 .../scala-stock/src/main/scala/Algorithm.scala  |    6 +-
 .../src/main/scala/BackTestingMetrics.scala     |   10 +-
 .../scala-stock/src/main/scala/Data.scala       |    2 +-
 .../scala-stock/src/main/scala/DataSource.scala |   10 +-
 .../scala-stock/src/main/scala/Indicators.scala |    2 +-
 .../src/main/scala/RegressionStrategy.scala     |    4 +-
 .../scala-stock/src/main/scala/Run.scala        |   16 +-
 .../src/main/scala/YahooDataSource.scala        |   14 +-
 .../examples/stock/backtesting.scala.html       |    2 +-
 .../add-algorithm/build.sbt                     |    4 +-
 .../add-algorithm/project/pio-build.sbt         |    2 +-
 .../src/main/scala/DataSource.scala             |   12 +-
 .../add-algorithm/src/main/scala/Engine.scala   |    4 +-
 .../src/main/scala/NaiveBayesAlgorithm.scala    |    4 +-
 .../src/main/scala/Preparator.scala             |    2 +-
 .../src/main/scala/RandomForestAlgorithm.scala  |    4 +-
 .../add-algorithm/src/main/scala/Serving.scala  |    2 +-
 .../custom-attributes/build.sbt                 |    6 +-
 .../src/main/scala/DataSource.scala             |   12 +-
 .../src/main/scala/Engine.scala                 |    4 +-
 .../src/main/scala/Preparator.scala             |    2 +-
 .../src/main/scala/RandomForestAlgorithm.scala  |    4 +-
 .../src/main/scala/Serving.scala                |    2 +-
 .../train-with-rate-event/build.sbt             |    4 +-
 .../train-with-rate-event/project/pio-build.sbt |    2 +-
 .../src/main/scala/ALSAlgorithm.scala           |   10 +-
 .../src/main/scala/DataSource.scala             |   12 +-
 .../src/main/scala/Engine.scala                 |    4 +-
 .../src/main/scala/Preparator.scala             |    2 +-
 .../src/main/scala/Serving.scala                |    2 +-
 .../weighted-items/build.sbt                    |    4 +-
 .../weighted-items/project/pio-build.sbt        |    2 +-
 .../src/main/scala/ALSAlgorithm.scala           |   10 +-
 .../src/main/scala/DataSource.scala             |   12 +-
 .../weighted-items/src/main/scala/Engine.scala  |    4 +-
 .../src/main/scala/Preparator.scala             |    2 +-
 .../weighted-items/src/main/scala/Serving.scala |    2 +-
 .../custom-prepartor/build.sbt                  |    4 +-
 .../custom-prepartor/project/pio-build.sbt      |    2 +-
 .../src/main/scala/ALSAlgorithm.scala           |    6 +-
 .../src/main/scala/ALSModel.scala               |    6 +-
 .../src/main/scala/DataSource.scala             |   12 +-
 .../src/main/scala/Engine.scala                 |    4 +-
 .../src/main/scala/Preparator.scala             |    4 +-
 .../src/main/scala/Serving.scala                |    2 +-
 .../custom-query/build.sbt                      |    4 +-
 .../custom-query/data/build.sbt                 |    2 +-
 .../recommendation/ImportDataScript.scala       |    2 +-
 .../src/main/scala/ALSAlgorithm.scala           |    6 +-
 .../custom-query/src/main/scala/ALSModel.scala  |    6 +-
 .../src/main/scala/DataSource.scala             |   10 +-
 .../custom-query/src/main/scala/Engine.scala    |    4 +-
 .../src/main/scala/Preparator.scala             |    2 +-
 .../custom-query/src/main/scala/Serving.scala   |    2 +-
 .../custom-serving/build.sbt                    |    4 +-
 .../custom-serving/project/pio-build.sbt        |    2 +-
 .../src/main/scala/ALSAlgorithm.scala           |    6 +-
 .../src/main/scala/ALSModel.scala               |    6 +-
 .../src/main/scala/DataSource.scala             |   12 +-
 .../custom-serving/src/main/scala/Engine.scala  |    4 +-
 .../src/main/scala/Preparator.scala             |    2 +-
 .../custom-serving/src/main/scala/Serving.scala |    4 +-
 .../filter-by-category/build.sbt                |    6 +-
 .../filter-by-category/project/pio-build.sbt    |    2 +-
 .../src/main/scala/ALSAlgorithm.scala           |    6 +-
 .../src/main/scala/ALSModel.scala               |    6 +-
 .../src/main/scala/DataSource.scala             |   12 +-
 .../src/main/scala/Engine.scala                 |    4 +-
 .../src/main/scala/Preparator.scala             |    2 +-
 .../src/main/scala/Serving.scala                |    2 +-
 .../add-and-return-item-properties/build.sbt    |    6 +-
 .../project/pio-build.sbt                       |    2 +-
 .../src/main/scala/ALSAlgorithm.scala           |    6 +-
 .../src/main/scala/DataSource.scala             |   12 +-
 .../src/main/scala/Engine.scala                 |    4 +-
 .../src/main/scala/Preparator.scala             |    2 +-
 .../src/main/scala/Serving.scala                |    2 +-
 .../add-rateevent/build.sbt                     |    4 +-
 .../add-rateevent/project/pio-build.sbt         |    2 +-
 .../src/main/scala/ALSAlgorithm.scala           |    6 +-
 .../src/main/scala/DataSource.scala             |   12 +-
 .../add-rateevent/src/main/scala/Engine.scala   |    4 +-
 .../src/main/scala/Preparator.scala             |    2 +-
 .../add-rateevent/src/main/scala/Serving.scala  |    2 +-
 .../filterbyyear/build.sbt                      |    6 +-
 .../src/main/scala/ALSAlgorithm.scala           |    6 +-
 .../src/main/scala/DataSource.scala             |   12 +-
 .../filterbyyear/src/main/scala/Engine.scala    |    4 +-
 .../src/main/scala/Preparator.scala             |    2 +-
 .../filterbyyear/src/main/scala/Serving.scala   |    2 +-
 .../multi/build.sbt                             |    4 +-
 .../multi/project/pio-build.sbt                 |    2 +-
 .../multi/src/main/scala/ALSAlgorithm.scala     |   10 +-
 .../multi/src/main/scala/DataSource.scala       |   12 +-
 .../multi/src/main/scala/Engine.scala           |    4 +-
 .../multi/src/main/scala/LikeAlgorithm.scala    |    2 +-
 .../multi/src/main/scala/Preparator.scala       |    2 +-
 .../multi/src/main/scala/Serving.scala          |    2 +-
 .../no-set-user/build.sbt                       |    4 +-
 .../no-set-user/project/pio-build.sbt           |    2 +-
 .../src/main/scala/ALSAlgorithm.scala           |    6 +-
 .../no-set-user/src/main/scala/DataSource.scala |   12 +-
 .../no-set-user/src/main/scala/Engine.scala     |    4 +-
 .../no-set-user/src/main/scala/Preparator.scala |    2 +-
 .../no-set-user/src/main/scala/Serving.scala    |    2 +-
 .../recommended-user/build.sbt                  |    4 +-
 .../recommended-user/project/pio-build.sbt      |    2 +-
 .../src/main/scala/ALSAlgorithm.scala           |    4 +-
 .../src/main/scala/DataSource.scala             |    4 +-
 .../src/main/scala/Engine.scala                 |    4 +-
 .../src/main/scala/Preparator.scala             |    2 +-
 .../src/main/scala/Serving.scala                |    2 +-
 .../io/prediction/tools/RegisterEngine.scala    |   84 --
 .../scala/io/prediction/tools/RunServer.scala   |  178 ---
 .../scala/io/prediction/tools/RunWorkflow.scala |  212 ---
 .../main/scala/io/prediction/tools/Runner.scala |  211 ---
 .../io/prediction/tools/admin/AdminAPI.scala    |  156 ---
 .../prediction/tools/admin/CommandClient.scala  |  160 ---
 .../scala/io/prediction/tools/admin/README.md   |  161 ---
 .../io/prediction/tools/console/AccessKey.scala |   83 --
 .../scala/io/prediction/tools/console/App.scala |  537 --------
 .../io/prediction/tools/console/Console.scala   | 1277 ------------------
 .../io/prediction/tools/console/Export.scala    |   42 -
 .../io/prediction/tools/console/Import.scala    |   39 -
 .../io/prediction/tools/console/Template.scala  |  429 ------
 .../tools/dashboard/CorsSupport.scala           |   75 -
 .../prediction/tools/dashboard/Dashboard.scala  |  156 ---
 .../prediction/tools/export/EventsToFile.scala  |  104 --
 .../prediction/tools/imprt/FileToEvents.scala   |  103 --
 .../predictionio/tools/RegisterEngine.scala     |   84 ++
 .../apache/predictionio/tools/RunServer.scala   |  178 +++
 .../apache/predictionio/tools/RunWorkflow.scala |  212 +++
 .../org/apache/predictionio/tools/Runner.scala  |  211 +++
 .../predictionio/tools/admin/AdminAPI.scala     |  156 +++
 .../tools/admin/CommandClient.scala             |  160 +++
 .../apache/predictionio/tools/admin/README.md   |  161 +++
 .../predictionio/tools/console/AccessKey.scala  |   83 ++
 .../apache/predictionio/tools/console/App.scala |  537 ++++++++
 .../predictionio/tools/console/Console.scala    | 1277 ++++++++++++++++++
 .../predictionio/tools/console/Export.scala     |   42 +
 .../predictionio/tools/console/Import.scala     |   39 +
 .../predictionio/tools/console/Template.scala   |  429 ++++++
 .../tools/dashboard/CorsSupport.scala           |   75 +
 .../tools/dashboard/Dashboard.scala             |  156 +++
 .../tools/export/EventsToFile.scala             |  104 ++
 .../predictionio/tools/imprt/FileToEvents.scala |  103 ++
 .../tools/console/accesskey.scala.txt           |   20 -
 .../tools/console/adminserver.scala.txt         |    6 -
 .../io/prediction/tools/console/app.scala.txt   |   74 -
 .../io/prediction/tools/console/build.scala.txt |   11 -
 .../tools/console/dashboard.scala.txt           |    6 -
 .../prediction/tools/console/deploy.scala.txt   |   29 -
 .../io/prediction/tools/console/eval.scala.txt  |   10 -
 .../tools/console/eventserver.scala.txt         |    8 -
 .../prediction/tools/console/export.scala.txt   |   14 -
 .../io/prediction/tools/console/imprt.scala.txt |   12 -
 .../io/prediction/tools/console/main.scala.txt  |   52 -
 .../io/prediction/tools/console/run.scala.txt   |   17 -
 .../prediction/tools/console/status.scala.txt   |    3 -
 .../prediction/tools/console/template.scala.txt |   25 -
 .../io/prediction/tools/console/train.scala.txt |   28 -
 .../prediction/tools/console/upgrade.scala.txt  |   15 -
 .../prediction/tools/console/version.scala.txt  |    3 -
 .../prediction/tools/dashboard/index.scala.html |   99 --
 .../itemrank/params/algorithmsJson.scala.txt    |   16 -
 .../itemrank/params/datasourceJson.scala.txt    |   26 -
 .../itemrank/params/preparatorJson.scala.txt    |   10 -
 .../itemrank/params/servingJson.scala.txt       |    1 -
 .../itemrec/params/algorithmsJson.scala.txt     |   15 -
 .../itemrec/params/datasourceJson.scala.txt     |   26 -
 .../itemrec/params/preparatorJson.scala.txt     |   10 -
 .../itemrec/params/servingJson.scala.txt        |    1 -
 .../itemsim/params/algorithmsJson.scala.txt     |   13 -
 .../itemsim/params/datasourceJson.scala.txt     |   26 -
 .../itemsim/params/preparatorJson.scala.txt     |   10 -
 .../itemsim/params/servingJson.scala.txt        |    1 -
 .../tools/templates/scala/buildSbt.scala.txt    |   14 -
 .../tools/templates/scala/engineJson.scala.txt  |    9 -
 .../templates/scala/manifestJson.scala.txt      |    9 -
 .../scala/project/assemblySbt.scala.txt         |    1 -
 .../scala/src/main/scala/engine.scala.txt       |   76 --
 .../tools/console/accesskey.scala.txt           |   20 +
 .../tools/console/adminserver.scala.txt         |    6 +
 .../predictionio/tools/console/app.scala.txt    |   74 +
 .../predictionio/tools/console/build.scala.txt  |   11 +
 .../tools/console/dashboard.scala.txt           |    6 +
 .../predictionio/tools/console/deploy.scala.txt |   29 +
 .../predictionio/tools/console/eval.scala.txt   |   10 +
 .../tools/console/eventserver.scala.txt         |    8 +
 .../predictionio/tools/console/export.scala.txt |   14 +
 .../predictionio/tools/console/imprt.scala.txt  |   12 +
 .../predictionio/tools/console/main.scala.txt   |   52 +
 .../predictionio/tools/console/run.scala.txt    |   17 +
 .../predictionio/tools/console/status.scala.txt |    3 +
 .../tools/console/template.scala.txt            |   25 +
 .../predictionio/tools/console/train.scala.txt  |   28 +
 .../tools/console/upgrade.scala.txt             |   15 +
 .../tools/console/version.scala.txt             |    3 +
 .../tools/dashboard/index.scala.html            |   99 ++
 .../itemrank/params/algorithmsJson.scala.txt    |   16 +
 .../itemrank/params/datasourceJson.scala.txt    |   26 +
 .../itemrank/params/preparatorJson.scala.txt    |   10 +
 .../itemrank/params/servingJson.scala.txt       |    1 +
 .../itemrec/params/algorithmsJson.scala.txt     |   15 +
 .../itemrec/params/datasourceJson.scala.txt     |   26 +
 .../itemrec/params/preparatorJson.scala.txt     |   10 +
 .../itemrec/params/servingJson.scala.txt        |    1 +
 .../itemsim/params/algorithmsJson.scala.txt     |   13 +
 .../itemsim/params/datasourceJson.scala.txt     |   26 +
 .../itemsim/params/preparatorJson.scala.txt     |   10 +
 .../itemsim/params/servingJson.scala.txt        |    1 +
 .../tools/templates/scala/buildSbt.scala.txt    |   14 +
 .../tools/templates/scala/engineJson.scala.txt  |    9 +
 .../templates/scala/manifestJson.scala.txt      |    9 +
 .../scala/project/assemblySbt.scala.txt         |    1 +
 .../scala/src/main/scala/engine.scala.txt       |   76 ++
 .../prediction/tools/admin/AdminAPISpec.scala   |   66 -
 .../predictionio/tools/admin/AdminAPISpec.scala |   66 +
 846 files changed, 29349 insertions(+), 29349 deletions(-)
----------------------------------------------------------------------



[19/34] incubator-predictionio git commit: rename all except examples

Posted by do...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/core/src/main/scala/org/apache/predictionio/workflow/WorkflowUtils.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/predictionio/workflow/WorkflowUtils.scala b/core/src/main/scala/org/apache/predictionio/workflow/WorkflowUtils.scala
new file mode 100644
index 0000000..e26b754
--- /dev/null
+++ b/core/src/main/scala/org/apache/predictionio/workflow/WorkflowUtils.scala
@@ -0,0 +1,419 @@
+/** Copyright 2015 TappingStone, Inc.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package org.apache.predictionio.workflow
+
+import java.io.File
+import java.io.FileNotFoundException
+
+import org.apache.predictionio.controller.EmptyParams
+import org.apache.predictionio.controller.EngineFactory
+import org.apache.predictionio.controller.EngineParamsGenerator
+import org.apache.predictionio.controller.Evaluation
+import org.apache.predictionio.controller.Params
+import org.apache.predictionio.controller.PersistentModelLoader
+import org.apache.predictionio.controller.Utils
+import org.apache.predictionio.core.BuildInfo
+
+import com.google.gson.Gson
+import com.google.gson.JsonSyntaxException
+import grizzled.slf4j.Logging
+import org.apache.predictionio.workflow.JsonExtractorOption.JsonExtractorOption
+import org.apache.log4j.Level
+import org.apache.log4j.LogManager
+import org.apache.spark.SparkContext
+import org.apache.spark.api.java.JavaRDDLike
+import org.apache.spark.rdd.RDD
+import org.json4s.JsonAST.JValue
+import org.json4s.MappingException
+import org.json4s._
+import org.json4s.native.JsonMethods._
+
+import scala.io.Source
+import scala.language.existentials
+import scala.reflect.runtime.universe
+
+/** Collection of reusable workflow-related utilities. */
+object WorkflowUtils extends Logging {
+  @transient private lazy val gson = new Gson
+
+  /** Obtains an Engine object in Scala, or instantiates an Engine in Java.
+    *
+    * @param engine Engine factory name.
+    * @param cl A Java ClassLoader to look for engine-related classes.
+    *
+    * @throws ClassNotFoundException
+    *         Thrown when the engine factory class does not exist.
+    * @throws NoSuchMethodException
+    *         Thrown when the engine factory's apply() method is not implemented.
+    */
+  def getEngine(engine: String, cl: ClassLoader): (EngineLanguage.Value, EngineFactory) = {
+    val runtimeMirror = universe.runtimeMirror(cl)
+    val engineModule = runtimeMirror.staticModule(engine)
+    val engineObject = runtimeMirror.reflectModule(engineModule)
+    try {
+      (
+        EngineLanguage.Scala,
+        engineObject.instance.asInstanceOf[EngineFactory]
+      )
+    } catch {
+      case e @ (_: NoSuchFieldException | _: ClassNotFoundException) => try {
+        (
+          EngineLanguage.Java,
+          Class.forName(engine).newInstance.asInstanceOf[EngineFactory]
+        )
+      }
+    }
+  }
+
+  def getEngineParamsGenerator(epg: String, cl: ClassLoader):
+    (EngineLanguage.Value, EngineParamsGenerator) = {
+    val runtimeMirror = universe.runtimeMirror(cl)
+    val epgModule = runtimeMirror.staticModule(epg)
+    val epgObject = runtimeMirror.reflectModule(epgModule)
+    try {
+      (
+        EngineLanguage.Scala,
+        epgObject.instance.asInstanceOf[EngineParamsGenerator]
+      )
+    } catch {
+      case e @ (_: NoSuchFieldException | _: ClassNotFoundException) => try {
+        (
+          EngineLanguage.Java,
+          Class.forName(epg).newInstance.asInstanceOf[EngineParamsGenerator]
+        )
+      }
+    }
+  }
+
+  def getEvaluation(evaluation: String, cl: ClassLoader): (EngineLanguage.Value, Evaluation) = {
+    val runtimeMirror = universe.runtimeMirror(cl)
+    val evaluationModule = runtimeMirror.staticModule(evaluation)
+    val evaluationObject = runtimeMirror.reflectModule(evaluationModule)
+    try {
+      (
+        EngineLanguage.Scala,
+        evaluationObject.instance.asInstanceOf[Evaluation]
+      )
+    } catch {
+      case e @ (_: NoSuchFieldException | _: ClassNotFoundException) => try {
+        (
+          EngineLanguage.Java,
+          Class.forName(evaluation).newInstance.asInstanceOf[Evaluation]
+        )
+      }
+    }
+  }
+
+  /** Converts a JSON document to an instance of Params.
+    *
+    * @param language Engine's programming language.
+    * @param json JSON document.
+    * @param clazz Class of the component that is going to receive the resulting
+    *              Params instance as a constructor argument.
+    * @param jsonExtractor JSON extractor option.
+    * @param formats JSON4S serializers for deserialization.
+    *
+    * @throws MappingException Thrown when JSON4S fails to perform conversion.
+    * @throws JsonSyntaxException Thrown when GSON fails to perform conversion.
+    */
+  def extractParams(
+      language: EngineLanguage.Value = EngineLanguage.Scala,
+      json: String,
+      clazz: Class[_],
+      jsonExtractor: JsonExtractorOption,
+      formats: Formats = Utils.json4sDefaultFormats): Params = {
+    implicit val f = formats
+    val pClass = clazz.getConstructors.head.getParameterTypes
+    if (pClass.size == 0) {
+      if (json != "") {
+        warn(s"Non-empty parameters supplied to ${clazz.getName}, but its " +
+          "constructor does not accept any arguments. Stubbing with empty " +
+          "parameters.")
+      }
+      EmptyParams()
+    } else {
+      val apClass = pClass.head
+      try {
+        JsonExtractor.extract(jsonExtractor, json, apClass, f).asInstanceOf[Params]
+      } catch {
+        case e@(_: MappingException | _: JsonSyntaxException) =>
+          error(
+            s"Unable to extract parameters for ${apClass.getName} from " +
+              s"JSON string: $json. Aborting workflow.",
+            e)
+          throw e
+      }
+    }
+  }
+
+  def getParamsFromJsonByFieldAndClass(
+      variantJson: JValue,
+      field: String,
+      classMap: Map[String, Class[_]],
+      engineLanguage: EngineLanguage.Value,
+      jsonExtractor: JsonExtractorOption): (String, Params) = {
+    variantJson findField {
+      case JField(f, _) => f == field
+      case _ => false
+    } map { jv =>
+      implicit lazy val formats = Utils.json4sDefaultFormats + new NameParamsSerializer
+      val np: NameParams = try {
+        jv._2.extract[NameParams]
+      } catch {
+        case e: Exception =>
+          error(s"Unable to extract $field name and params $jv")
+          throw e
+      }
+      val extractedParams = np.params.map { p =>
+        try {
+          if (!classMap.contains(np.name)) {
+            error(s"Unable to find $field class with name '${np.name}'" +
+              " defined in Engine.")
+            sys.exit(1)
+          }
+          WorkflowUtils.extractParams(
+            engineLanguage,
+            compact(render(p)),
+            classMap(np.name),
+            jsonExtractor,
+            formats)
+        } catch {
+          case e: Exception =>
+            error(s"Unable to extract $field params $p")
+            throw e
+        }
+      }.getOrElse(EmptyParams())
+
+      (np.name, extractedParams)
+    } getOrElse("", EmptyParams())
+  }
+
+  /** Grabs environment variables that start with 'PIO_'. */
+  def pioEnvVars: Map[String, String] =
+    sys.env.filter(kv => kv._1.startsWith("PIO_"))
+
+  /** Converts Java (non-Scala) objects to a JSON4S JValue.
+    *
+    * @param params The Java object to be converted.
+    */
+  def javaObjectToJValue(params: AnyRef): JValue = parse(gson.toJson(params))
+
+  private[predictionio] def checkUpgrade(
+      component: String = "core",
+      engine: String = ""): Unit = {
+    val runner = new Thread(new UpgradeCheckRunner(component, engine))
+    runner.start()
+  }
+
+  // Extracts a debug string by recursively traversing the data.
+  def debugString[D](data: D): String = {
+    val s: String = data match {
+      case rdd: RDD[_] => {
+        debugString(rdd.collect())
+      }
+      case javaRdd: JavaRDDLike[_, _] => {
+        debugString(javaRdd.collect())
+      }
+      case array: Array[_] => {
+        "[" + array.map(debugString).mkString(",") + "]"
+      }
+      case d: AnyRef => {
+        d.toString
+      }
+      case null => "null"
+    }
+    s
+  }
+
+  /** Detects third-party software configuration files to be submitted as
+    * extras to Apache Spark. This ensures all executors receive the same
+    * configuration.
+    */
+  def thirdPartyConfFiles: Seq[String] = {
+    val thirdPartyFiles = Map(
+      "PIO_CONF_DIR" -> "log4j.properties",
+      "ES_CONF_DIR" -> "elasticsearch.yml",
+      "HADOOP_CONF_DIR" -> "core-site.xml",
+      "HBASE_CONF_DIR" -> "hbase-site.xml")
+
+    thirdPartyFiles.keys.toSeq.map { k: String =>
+      sys.env.get(k) map { x =>
+        val p = Seq(x, thirdPartyFiles(k)).mkString(File.separator)
+        if (new File(p).exists) Seq(p) else Seq[String]()
+      } getOrElse Seq[String]()
+    }.flatten
+  }
+
+  def thirdPartyClasspaths: Seq[String] = {
+    val thirdPartyPaths = Seq(
+      "PIO_CONF_DIR",
+      "ES_CONF_DIR",
+      "POSTGRES_JDBC_DRIVER",
+      "MYSQL_JDBC_DRIVER",
+      "HADOOP_CONF_DIR",
+      "HBASE_CONF_DIR")
+    thirdPartyPaths.map(p =>
+      sys.env.get(p).map(Seq(_)).getOrElse(Seq[String]())
+    ).flatten
+  }
+
+  def modifyLogging(verbose: Boolean): Unit = {
+    val rootLoggerLevel = if (verbose) Level.TRACE else Level.INFO
+    val chattyLoggerLevel = if (verbose) Level.INFO else Level.WARN
+
+    LogManager.getRootLogger.setLevel(rootLoggerLevel)
+
+    LogManager.getLogger("org.elasticsearch").setLevel(chattyLoggerLevel)
+    LogManager.getLogger("org.apache.hadoop").setLevel(chattyLoggerLevel)
+    LogManager.getLogger("org.apache.spark").setLevel(chattyLoggerLevel)
+    LogManager.getLogger("org.eclipse.jetty").setLevel(chattyLoggerLevel)
+    LogManager.getLogger("akka").setLevel(chattyLoggerLevel)
+  }
+
+  def extractNameParams(jv: JValue): NameParams = {
+    implicit val formats = Utils.json4sDefaultFormats
+    val nameOpt = (jv \ "name").extract[Option[String]]
+    val paramsOpt = (jv \ "params").extract[Option[JValue]]
+
+    if (nameOpt.isEmpty && paramsOpt.isEmpty) {
+      error("Unable to find 'name' or 'params' fields in" +
+        s" ${compact(render(jv))}.\n" +
+        "Since 0.8.4, the 'params' field is required in engine.json" +
+        " in order to specify parameters for DataSource, Preparator or" +
+        " Serving.\n" +
+        "Please go to https://docs.prediction.io/resources/upgrade/" +
+        " for detailed instruction of how to change engine.json.")
+      sys.exit(1)
+    }
+
+    if (nameOpt.isEmpty) {
+      info("No 'name' found. Default empty String will be used.")
+    }
+
+    if (paramsOpt.isEmpty) {
+      info("No 'params' found. Default EmptyParams will be used.")
+    }
+
+    NameParams(
+      name = nameOpt.getOrElse(""),
+      params = paramsOpt
+    )
+  }
+
+  def extractSparkConf(root: JValue): List[(String, String)] = {
+    def flatten(jv: JValue): List[(List[String], String)] = {
+      jv match {
+        case JObject(fields) =>
+          for ((namePrefix, childJV) <- fields;
+               (name, value) <- flatten(childJV))
+          yield (namePrefix :: name) -> value
+        case JArray(_) => {
+          error("Arrays are not allowed in the sparkConf section of engine.js.")
+          sys.exit(1)
+        }
+        case JNothing => List()
+        case _ => List(List() -> jv.values.toString)
+      }
+    }
+
+    flatten(root \ "sparkConf").map(x =>
+      (x._1.reduce((a, b) => s"$a.$b"), x._2))
+  }
+}
+
+case class NameParams(name: String, params: Option[JValue])
+
+class NameParamsSerializer extends CustomSerializer[NameParams](format => ( {
+  case jv: JValue => WorkflowUtils.extractNameParams(jv)
+}, {
+  case x: NameParams =>
+    JObject(JField("name", JString(x.name)) ::
+      JField("params", x.params.getOrElse(JNothing)) :: Nil)
+}
+  ))
+
+/** Collection of reusable workflow-related utilities that touch on Apache
+  * Spark. They are separated to avoid compilation problems with certain code.
+  */
+object SparkWorkflowUtils extends Logging {
+  def getPersistentModel[AP <: Params, M](
+      pmm: PersistentModelManifest,
+      runId: String,
+      params: AP,
+      sc: Option[SparkContext],
+      cl: ClassLoader): M = {
+    val runtimeMirror = universe.runtimeMirror(cl)
+    val pmmModule = runtimeMirror.staticModule(pmm.className)
+    val pmmObject = runtimeMirror.reflectModule(pmmModule)
+    try {
+      pmmObject.instance.asInstanceOf[PersistentModelLoader[AP, M]](
+        runId,
+        params,
+        sc)
+    } catch {
+      case e @ (_: NoSuchFieldException | _: ClassNotFoundException) => try {
+        val loadMethod = Class.forName(pmm.className).getMethod(
+          "load",
+          classOf[String],
+          classOf[Params],
+          classOf[SparkContext])
+        loadMethod.invoke(null, runId, params, sc.orNull).asInstanceOf[M]
+      } catch {
+        case e: ClassNotFoundException =>
+          error(s"Model class ${pmm.className} cannot be found.")
+          throw e
+        case e: NoSuchMethodException =>
+          error(
+            "The load(String, Params, SparkContext) method cannot be found.")
+          throw e
+      }
+    }
+  }
+}
+
+class UpgradeCheckRunner(
+    val component: String,
+    val engine: String) extends Runnable with Logging {
+  val version = BuildInfo.version
+  val versionsHost = "https://direct.prediction.io/"
+
+  def run(): Unit = {
+    val url = if (engine == "") {
+      s"$versionsHost$version/$component.json"
+    } else {
+      s"$versionsHost$version/$component/$engine.json"
+    }
+    try {
+      val upgradeData = Source.fromURL(url)
+    } catch {
+      case e: FileNotFoundException =>
+        debug(s"Update metainfo not found. $url")
+      case e: java.net.UnknownHostException =>
+        debug(s"${e.getClass.getName}: {e.getMessage}")
+    }
+    // TODO: Implement upgrade logic
+  }
+}
+
+class WorkflowInterruption() extends Exception
+
+case class StopAfterReadInterruption() extends WorkflowInterruption
+
+case class StopAfterPrepareInterruption() extends WorkflowInterruption
+
+object EngineLanguage extends Enumeration {
+  val Scala, Java = Value
+}

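The renamed utilities above are easiest to read with a short usage sketch.
A minimal example, assuming a hypothetical engine factory name
"org.example.MyEngineFactory" (not part of this commit):

  import org.apache.predictionio.workflow.{EngineLanguage, WorkflowUtils}
  import org.json4s.native.JsonMethods.parse

  val cl = getClass.getClassLoader
  // getEngine resolves a Scala object first, then falls back to
  // instantiating a Java class with the same name.
  val (language, factory) =
    WorkflowUtils.getEngine("org.example.MyEngineFactory", cl)

  // extractSparkConf flattens the nested "sparkConf" section of engine.json
  // into dotted Spark properties.
  val sparkConf = WorkflowUtils.extractSparkConf(
    parse("""{"sparkConf": {"spark": {"executor": {"memory": "4g"}}}}"""))
  // sparkConf == List(("spark.executor.memory", "4g"))
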
http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/core/src/main/twirl/io/prediction/controller/metric_evaluator.scala.html
----------------------------------------------------------------------
diff --git a/core/src/main/twirl/io/prediction/controller/metric_evaluator.scala.html b/core/src/main/twirl/io/prediction/controller/metric_evaluator.scala.html
deleted file mode 100644
index 2e679a5..0000000
--- a/core/src/main/twirl/io/prediction/controller/metric_evaluator.scala.html
+++ /dev/null
@@ -1,95 +0,0 @@
-<html>
-  <head>
-    <script type='text/javascript' src='https://www.google.com/jsapi'></script>
-    <script src="http://ajax.googleapis.com/ajax/libs/jquery/2.1.1/jquery.min.js"></script>
-    <script>
-      google.load('visualization', '1', {packages:['table', 'corechart',]});
-    </script>
-  </head>
-  <body>
-    <h1>Metric Evaluator</h1>
-    <div id='debug'></div>
-    <div id='table'>
-      <h3>Engine Params Evaluation Results</h3>
-      <div>Click on table to view the engine params</div>
-    </div>
-    <pre id='engineParams'></div>
-    <script type='text/javascript'>
-      google.setOnLoadCallback(load);
-
-      //var url =  'http://localhost:9000/engine_instances/ky01Q-glQheNE_s885JTSg/local_evaluator_results.json';
-      var url = 'evaluator_results.json';
-      var rawData;
-      var metricHeader;
-      var otherMetricHeaders;
-      var engineParamsScores;
-      var table;
-      var dataTable;
-
-      function load() {
-        rawData = JSON.parse(
-            jQuery.ajax({
-              url: url,
-              dataType: 'json',
-              async: false,
-              }).responseText);
-
-        metricHeader = rawData['metricHeader'];
-        otherMetricHeaders = rawData['otherMetricHeaders'];
-        engineParamsScores = rawData['engineParamsScores'];
-
-        drawTable();
-      }
-
-      function tableSelectHandler() {
-        var selection = table.getSelection();
-        if (selection.length > 0) {
-          var row = selection[0].row;
-          var idx = dataTable.getValue(row, 0);
-          var engineParams = engineParamsScores[idx]._1;
-
-          document.getElementById('engineParams').innerHTML = JSON.stringify(
-            engineParams, null, 2);
-        } else {
-          document.getElementById('engineParams').innerHTML = "";
-        }
-      }
-
-      function drawTable() {
-        var tableDiv = document.createElement('div');
-        document.getElementById('table').appendChild(tableDiv);
-
-
-        var dataArray = [];
-
-        var headers = ['Index', 'Best', metricHeader].concat(otherMetricHeaders);
-        dataArray.push(headers);
-
-        for (epIdx = 0; epIdx < engineParamsScores.length; epIdx++) {
-          var epScore = engineParamsScores[epIdx];
-          var isBest = (epIdx == rawData.bestIdx ? "*" : "");
-          dataArray.push([epIdx, isBest, epScore._2.score].concat(epScore._2.otherScores));
-        }
-
-        dataTable = google.visualization.arrayToDataTable(dataArray, false);
-
-        // formatter
-        var numberFormatter = new google.visualization.NumberFormat({fractionDigits: 4});
-
-        for (colIdx = 1; colIdx < dataTable.getNumberOfColumns(); colIdx++) {
-          if (dataTable.getColumnType(colIdx) == "number") {
-            numberFormatter.format(dataTable, colIdx);
-          }
-        }
-
-        table = new google.visualization.Table(tableDiv);
-
-        // select handler
-        google.visualization.events.addListener(table, 'select', tableSelectHandler);
-
-        table.draw(dataTable);
-      }
-
-    </script>
-  </body>
-</html>

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/core/src/main/twirl/io/prediction/workflow/index.scala.html
----------------------------------------------------------------------
diff --git a/core/src/main/twirl/io/prediction/workflow/index.scala.html b/core/src/main/twirl/io/prediction/workflow/index.scala.html
deleted file mode 100644
index 4e0707b..0000000
--- a/core/src/main/twirl/io/prediction/workflow/index.scala.html
+++ /dev/null
@@ -1,92 +0,0 @@
-@import io.prediction.data.storage.EngineInstance
-@import io.prediction.data.storage.EngineManifest
-@import io.prediction.workflow.ServerConfig
-@import org.joda.time.DateTime
-@import org.joda.time.format.DateTimeFormat
-@(args: ServerConfig,
-  manifest: EngineManifest,
-  engineInstance: EngineInstance,
-  algorithms: Seq[String],
-  algorithmsParams: Seq[String],
-  models: Seq[String],
-  dataSourceParams: String,
-  preparatorParams: String,
-  servingParams: String,
-  serverStartTime: DateTime,
-  feedback: Boolean,
-  eventServerIp: String,
-  eventServerPort: Int,
-  requestCount: Int,
-  avgServingSec: Double,
-  lastServingSec: Double
-  )
-<!DOCTYPE html>
-<html lang="en">
-  <head>
-    <title>@{engineInstance.engineFactory} (@{engineInstance.engineVariant}) - PredictionIO Engine Server at @{args.ip}:@{args.port}</title>
-    <link href="/assets/bootstrap-3.2.0-dist/css/bootstrap.min.css" rel="stylesheet">
-    <style type="text/css">
-      td { font-family: Menlo, Monaco, Consolas, "Courier New", monospace; }
-    </style>
-  </head>
-  <body>
-    <div class="container">
-      <div class="page-header">
-        <h1>PredictionIO Engine Server at @{args.ip}:@{args.port}</h1>
-        <p class="lead">@{engineInstance.engineFactory} (@{engineInstance.engineVariant})</p>
-      </div>
-      <h2>Engine Information</h2>
-      <table class="table table-bordered table-striped">
-        <tr><th>Training Start Time</th><td>@{DateTimeFormat.forStyle("FF").print(engineInstance.startTime)}</td></tr>
-        <tr><th>Training End Time</th><td>@{DateTimeFormat.forStyle("FF").print(engineInstance.endTime)}</td></tr>
-        <tr><th>Variant ID</th><td>@{engineInstance.engineVariant}</td></tr>
-        <tr><th>Instance ID</th><td>@{engineInstance.id}</td></tr>
-      </table>
-      <h2>Server Information</h2>
-      <table class="table table-bordered table-striped">
-        <tr><th>Start Time</th><td>@{DateTimeFormat.forStyle("FF").print(serverStartTime)}</td></tr>
-        <tr><th>Request Count</th><td>@{requestCount}</td></tr>
-        <tr><th>Average Serving Time</th><td>@{f"${avgServingSec}%.4f"} seconds</td></tr>
-        <tr><th>Last Serving Time</th><td>@{f"${lastServingSec}%.4f"} seconds</td></tr>
-        <tr><th>Engine Factory Class (Scala/Java)</th><td>@{engineInstance.engineFactory}</td></tr>
-        <tr>
-          <th rowspan="@(manifest.files.size)">Library Files</th>
-          <td>@{manifest.files.head}</td>
-        </tr>
-        @for(f <- manifest.files.drop(1)) {
-        <tr><td>@f</td></tr>
-        }
-      </table>
-      <h2>Data Source</h2>
-      <table class="table table-bordered table-striped">
-        <tr><th>Parameters</th><td>@{dataSourceParams}</td></tr>
-      </table>
-      <h2>Data Preparator</h2>
-      <table class="table table-bordered table-striped">
-        <tr><th>Parameters</th><td>@{preparatorParams}</td></tr>
-      </table>
-      <h2>Algorithms and Models</h2>
-        <table class="table table-bordered table-striped">
-          <tr><th>#</th><th colspan="2">Information</th></tr>
-          @for(a <- algorithms.zip(algorithmsParams).zip(models).zipWithIndex) {
-          <tr>
-            <th rowspan="3">@{a._2 + 1}</th>
-            <th>Class</th><td>@{a._1._1._1}</td>
-          </tr>
-          <tr><th>Parameters</th><td>@{a._1._1._2}</td></tr>
-          <tr><th>Model</th><td>@{a._1._2}</td></tr>
-          }
-        </table>
-      <h2>Serving</h2>
-      <table class="table table-bordered table-striped">
-        <tr><th>Parameters</th><td>@{servingParams}</td></tr>
-      </table>
-      <h2>Feedback Loop Information</h2>
-      <table class="table table-bordered table-striped">
-        <tr><th>Feedback Loop Enabled?</th><td>@{feedback}</td></tr>
-        <tr><th>Event Server IP</th><td>@{eventServerIp}</td></tr>
-        <tr><th>Event Server Port</th><td>@{eventServerPort}</td></tr>
-      </table>
-    </div>
-  </body>
-</html>

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/core/src/main/twirl/org/apache/predictionio/controller/metric_evaluator.scala.html
----------------------------------------------------------------------
diff --git a/core/src/main/twirl/org/apache/predictionio/controller/metric_evaluator.scala.html b/core/src/main/twirl/org/apache/predictionio/controller/metric_evaluator.scala.html
new file mode 100644
index 0000000..2e679a5
--- /dev/null
+++ b/core/src/main/twirl/org/apache/predictionio/controller/metric_evaluator.scala.html
@@ -0,0 +1,95 @@
+<html>
+  <head>
+    <script type='text/javascript' src='https://www.google.com/jsapi'></script>
+    <script src="http://ajax.googleapis.com/ajax/libs/jquery/2.1.1/jquery.min.js"></script>
+    <script>
+      google.load('visualization', '1', {packages:['table', 'corechart']});
+    </script>
+  </head>
+  <body>
+    <h1>Metric Evaluator</h1>
+    <div id='debug'></div>
+    <div id='table'>
+      <h3>Engine Params Evaluation Results</h3>
+      <div>Click on table to view the engine params</div>
+    </div>
+    <pre id='engineParams'></pre>
+    <script type='text/javascript'>
+      google.setOnLoadCallback(load);
+
+      //var url =  'http://localhost:9000/engine_instances/ky01Q-glQheNE_s885JTSg/local_evaluator_results.json';
+      var url = 'evaluator_results.json';
+      var rawData;
+      var metricHeader;
+      var otherMetricHeaders;
+      var engineParamsScores;
+      var table;
+      var dataTable;
+
+      function load() {
+        rawData = JSON.parse(
+            jQuery.ajax({
+              url: url,
+              dataType: 'json',
+              async: false,
+              }).responseText);
+
+        metricHeader = rawData['metricHeader'];
+        otherMetricHeaders = rawData['otherMetricHeaders'];
+        engineParamsScores = rawData['engineParamsScores'];
+
+        drawTable();
+      }
+
+      function tableSelectHandler() {
+        var selection = table.getSelection();
+        if (selection.length > 0) {
+          var row = selection[0].row;
+          var idx = dataTable.getValue(row, 0);
+          var engineParams = engineParamsScores[idx]._1;
+
+          document.getElementById('engineParams').innerHTML = JSON.stringify(
+            engineParams, null, 2);
+        } else {
+          document.getElementById('engineParams').innerHTML = "";
+        }
+      }
+
+      function drawTable() {
+        var tableDiv = document.createElement('div');
+        document.getElementById('table').appendChild(tableDiv);
+
+
+        var dataArray = [];
+
+        var headers = ['Index', 'Best', metricHeader].concat(otherMetricHeaders);
+        dataArray.push(headers);
+
+        for (epIdx = 0; epIdx < engineParamsScores.length; epIdx++) {
+          var epScore = engineParamsScores[epIdx];
+          var isBest = (epIdx == rawData.bestIdx ? "*" : "");
+          dataArray.push([epIdx, isBest, epScore._2.score].concat(epScore._2.otherScores));
+        }
+
+        dataTable = google.visualization.arrayToDataTable(dataArray, false);
+
+        // formatter
+        var numberFormatter = new google.visualization.NumberFormat({fractionDigits: 4});
+
+        for (colIdx = 1; colIdx < dataTable.getNumberOfColumns(); colIdx++) {
+          if (dataTable.getColumnType(colIdx) == "number") {
+            numberFormatter.format(dataTable, colIdx);
+          }
+        }
+
+        table = new google.visualization.Table(tableDiv);
+
+        // select handler
+        google.visualization.events.addListener(table, 'select', tableSelectHandler);
+
+        table.draw(dataTable);
+      }
+
+    </script>
+  </body>
+</html>

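The page above reads all of its data from a single evaluator_results.json
file placed next to it. From the fields accessed in load() and drawTable(),
the expected shape is roughly the following (only the field names come from
the script; all values here are hypothetical):

  {
    "metricHeader": "Precision@10",
    "otherMetricHeaders": ["Recall@10"],
    "bestIdx": 0,
    "engineParamsScores": [
      { "_1": { "dataSourceParams": {} },
        "_2": { "score": 0.1234, "otherScores": [0.5678] } }
    ]
  }

Each engineParamsScores entry is a serialized (engineParams, score) pair,
which is why the script indexes it with _1 and _2.
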
http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/core/src/main/twirl/org/apache/predictionio/workflow/index.scala.html
----------------------------------------------------------------------
diff --git a/core/src/main/twirl/org/apache/predictionio/workflow/index.scala.html b/core/src/main/twirl/org/apache/predictionio/workflow/index.scala.html
new file mode 100644
index 0000000..4e0707b
--- /dev/null
+++ b/core/src/main/twirl/org/apache/predictionio/workflow/index.scala.html
@@ -0,0 +1,92 @@
+@import io.prediction.data.storage.EngineInstance
+@import io.prediction.data.storage.EngineManifest
+@import io.prediction.workflow.ServerConfig
+@import org.joda.time.DateTime
+@import org.joda.time.format.DateTimeFormat
+@(args: ServerConfig,
+  manifest: EngineManifest,
+  engineInstance: EngineInstance,
+  algorithms: Seq[String],
+  algorithmsParams: Seq[String],
+  models: Seq[String],
+  dataSourceParams: String,
+  preparatorParams: String,
+  servingParams: String,
+  serverStartTime: DateTime,
+  feedback: Boolean,
+  eventServerIp: String,
+  eventServerPort: Int,
+  requestCount: Int,
+  avgServingSec: Double,
+  lastServingSec: Double
+  )
+<!DOCTYPE html>
+<html lang="en">
+  <head>
+    <title>@{engineInstance.engineFactory} (@{engineInstance.engineVariant}) - PredictionIO Engine Server at @{args.ip}:@{args.port}</title>
+    <link href="/assets/bootstrap-3.2.0-dist/css/bootstrap.min.css" rel="stylesheet">
+    <style type="text/css">
+      td { font-family: Menlo, Monaco, Consolas, "Courier New", monospace; }
+    </style>
+  </head>
+  <body>
+    <div class="container">
+      <div class="page-header">
+        <h1>PredictionIO Engine Server at @{args.ip}:@{args.port}</h1>
+        <p class="lead">@{engineInstance.engineFactory} (@{engineInstance.engineVariant})</p>
+      </div>
+      <h2>Engine Information</h2>
+      <table class="table table-bordered table-striped">
+        <tr><th>Training Start Time</th><td>@{DateTimeFormat.forStyle("FF").print(engineInstance.startTime)}</td></tr>
+        <tr><th>Training End Time</th><td>@{DateTimeFormat.forStyle("FF").print(engineInstance.endTime)}</td></tr>
+        <tr><th>Variant ID</th><td>@{engineInstance.engineVariant}</td></tr>
+        <tr><th>Instance ID</th><td>@{engineInstance.id}</td></tr>
+      </table>
+      <h2>Server Information</h2>
+      <table class="table table-bordered table-striped">
+        <tr><th>Start Time</th><td>@{DateTimeFormat.forStyle("FF").print(serverStartTime)}</td></tr>
+        <tr><th>Request Count</th><td>@{requestCount}</td></tr>
+        <tr><th>Average Serving Time</th><td>@{f"${avgServingSec}%.4f"} seconds</td></tr>
+        <tr><th>Last Serving Time</th><td>@{f"${lastServingSec}%.4f"} seconds</td></tr>
+        <tr><th>Engine Factory Class (Scala/Java)</th><td>@{engineInstance.engineFactory}</td></tr>
+        <tr>
+          <th rowspan="@(manifest.files.size)">Library Files</th>
+          <td>@{manifest.files.head}</td>
+        </tr>
+        @for(f <- manifest.files.drop(1)) {
+        <tr><td>@f</td></tr>
+        }
+      </table>
+      <h2>Data Source</h2>
+      <table class="table table-bordered table-striped">
+        <tr><th>Parameters</th><td>@{dataSourceParams}</td></tr>
+      </table>
+      <h2>Data Preparator</h2>
+      <table class="table table-bordered table-striped">
+        <tr><th>Parameters</th><td>@{preparatorParams}</td></tr>
+      </table>
+      <h2>Algorithms and Models</h2>
+        <table class="table table-bordered table-striped">
+          <tr><th>#</th><th colspan="2">Information</th></tr>
+          @for(a <- algorithms.zip(algorithmsParams).zip(models).zipWithIndex) {
+          <tr>
+            <th rowspan="3">@{a._2 + 1}</th>
+            <th>Class</th><td>@{a._1._1._1}</td>
+          </tr>
+          <tr><th>Parameters</th><td>@{a._1._1._2}</td></tr>
+          <tr><th>Model</th><td>@{a._1._2}</td></tr>
+          }
+        </table>
+      <h2>Serving</h2>
+      <table class="table table-bordered table-striped">
+        <tr><th>Parameters</th><td>@{servingParams}</td></tr>
+      </table>
+      <h2>Feedback Loop Information</h2>
+      <table class="table table-bordered table-striped">
+        <tr><th>Feedback Loop Enabled?</th><td>@{feedback}</td></tr>
+        <tr><th>Event Server IP</th><td>@{eventServerIp}</td></tr>
+        <tr><th>Event Server Port</th><td>@{eventServerPort}</td></tr>
+      </table>
+    </div>
+  </body>
+</html>

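The nested tuple accessors in the "Algorithms and Models" table above are
easier to follow when the same zip is written out in plain Scala. A sketch
with illustrative sequence contents (none of these values appear in the
commit):

  val algorithms       = Seq("AlgoA", "AlgoB")
  val algorithmsParams = Seq("""{}""", """{"n": 10}""")
  val models           = Seq("modelA", "modelB")

  for ((((algo, params), model), i) <-
         algorithms.zip(algorithmsParams).zip(models).zipWithIndex) {
    // In the template: a._1._1._1 == algo, a._1._1._2 == params,
    // a._1._2 == model, and a._2 == i (hence the row label i + 1).
    println(s"#${i + 1}: $algo / $params / $model")
  }
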
http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/core/src/test/java/io/prediction/workflow/JavaParams.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/io/prediction/workflow/JavaParams.java b/core/src/test/java/io/prediction/workflow/JavaParams.java
deleted file mode 100644
index 65108b5..0000000
--- a/core/src/test/java/io/prediction/workflow/JavaParams.java
+++ /dev/null
@@ -1,30 +0,0 @@
-/** Copyright 2015 TappingStone, Inc.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package io.prediction.workflow;
-
-import io.prediction.controller.Params;
-
-public class JavaParams implements Params {
-    private final String p;
-
-    public JavaParams(String p) {
-        this.p = p;
-    }
-
-    public String getP() {
-        return p;
-    }
-}

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/core/src/test/java/io/prediction/workflow/JavaQuery.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/io/prediction/workflow/JavaQuery.java b/core/src/test/java/io/prediction/workflow/JavaQuery.java
deleted file mode 100644
index 1630a2d..0000000
--- a/core/src/test/java/io/prediction/workflow/JavaQuery.java
+++ /dev/null
@@ -1,46 +0,0 @@
-/** Copyright 2015 TappingStone, Inc.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package io.prediction.workflow;
-
-import java.io.Serializable;
-
-public class JavaQuery implements Serializable{
-    private final String q;
-
-    public JavaQuery(String q) {
-        this.q = q;
-    }
-
-    public String getQ() {
-        return q;
-    }
-
-    @Override
-    public boolean equals(Object o) {
-        if (this == o) return true;
-        if (o == null || getClass() != o.getClass()) return false;
-
-        JavaQuery javaQuery = (JavaQuery) o;
-
-        return !(q != null ? !q.equals(javaQuery.q) : javaQuery.q != null);
-
-    }
-
-    @Override
-    public int hashCode() {
-        return q != null ? q.hashCode() : 0;
-    }
-}

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/core/src/test/java/io/prediction/workflow/JavaQueryTypeAdapterFactory.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/io/prediction/workflow/JavaQueryTypeAdapterFactory.java b/core/src/test/java/io/prediction/workflow/JavaQueryTypeAdapterFactory.java
deleted file mode 100644
index 409859d..0000000
--- a/core/src/test/java/io/prediction/workflow/JavaQueryTypeAdapterFactory.java
+++ /dev/null
@@ -1,60 +0,0 @@
-/** Copyright 2015 TappingStone, Inc.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package io.prediction.workflow;
-
-import com.google.gson.Gson;
-import com.google.gson.TypeAdapter;
-import com.google.gson.TypeAdapterFactory;
-import com.google.gson.reflect.TypeToken;
-import com.google.gson.stream.JsonReader;
-import com.google.gson.stream.JsonToken;
-import com.google.gson.stream.JsonWriter;
-
-import java.io.IOException;
-
-public class JavaQueryTypeAdapterFactory implements TypeAdapterFactory {
-    @Override
-    public <T> TypeAdapter<T> create(Gson gson, TypeToken<T> type) {
-        if (type.getRawType().equals(JavaQuery.class)) {
-            return (TypeAdapter<T>) new TypeAdapter<JavaQuery>() {
-                public void write(JsonWriter out, JavaQuery value) throws IOException {
-                    if (value == null) {
-                        out.nullValue();
-                    } else {
-                        out.beginObject();
-                        out.name("q").value(value.getQ().toUpperCase());
-                        out.endObject();
-                    }
-                }
-
-                public JavaQuery read(JsonReader reader) throws IOException {
-                    if (reader.peek() == JsonToken.NULL) {
-                        reader.nextNull();
-                        return null;
-                    } else {
-                        reader.beginObject();
-                        reader.nextName();
-                        String q = reader.nextString();
-                        reader.endObject();
-                        return new JavaQuery(q.toUpperCase());
-                    }
-                }
-            };
-        } else {
-            return null;
-        }
-    }
-}

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/core/src/test/java/org/apache/predictionio/workflow/JavaParams.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/org/apache/predictionio/workflow/JavaParams.java b/core/src/test/java/org/apache/predictionio/workflow/JavaParams.java
new file mode 100644
index 0000000..982ecbf
--- /dev/null
+++ b/core/src/test/java/org/apache/predictionio/workflow/JavaParams.java
@@ -0,0 +1,30 @@
+/** Copyright 2015 TappingStone, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.predictionio.workflow;
+
+import org.apache.predictionio.controller.Params;
+
+public class JavaParams implements Params {
+    private final String p;
+
+    public JavaParams(String p) {
+        this.p = p;
+    }
+
+    public String getP() {
+        return p;
+    }
+}

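A hedged sketch of how a Java bean like JavaParams above gets populated from
JSON through WorkflowUtils.extractParams. The JsonExtractorOption member
Gson is an assumption here, since the enumeration's members do not appear in
this diff:

  import org.apache.predictionio.workflow.{EngineLanguage, JavaParams,
    JsonExtractorOption, WorkflowUtils}

  // Assumed: JsonExtractorOption.Gson selects GSON-based extraction,
  // which fills the Java bean directly from the raw JSON string.
  val params = WorkflowUtils.extractParams(
    language = EngineLanguage.Java,
    json = """{"p": "hello"}""",
    clazz = classOf[JavaParams],
    jsonExtractor = JsonExtractorOption.Gson)
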
http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/core/src/test/java/org/apache/predictionio/workflow/JavaQuery.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/org/apache/predictionio/workflow/JavaQuery.java b/core/src/test/java/org/apache/predictionio/workflow/JavaQuery.java
new file mode 100644
index 0000000..f4a6359
--- /dev/null
+++ b/core/src/test/java/org/apache/predictionio/workflow/JavaQuery.java
@@ -0,0 +1,46 @@
+/** Copyright 2015 TappingStone, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.predictionio.workflow;
+
+import java.io.Serializable;
+
+public class JavaQuery implements Serializable {
+    private final String q;
+
+    public JavaQuery(String q) {
+        this.q = q;
+    }
+
+    public String getQ() {
+        return q;
+    }
+
+    @Override
+    public boolean equals(Object o) {
+        if (this == o) return true;
+        if (o == null || getClass() != o.getClass()) return false;
+
+        JavaQuery javaQuery = (JavaQuery) o;
+
+        return !(q != null ? !q.equals(javaQuery.q) : javaQuery.q != null);
+
+    }
+
+    @Override
+    public int hashCode() {
+        return q != null ? q.hashCode() : 0;
+    }
+}

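The value-based equals and hashCode above are what let tests compare a query
against its serialization round-trip. A minimal sketch:

  import org.apache.predictionio.workflow.JavaQuery

  val a = new JavaQuery("popcorn")
  val b = new JavaQuery("popcorn")
  // Equality and hash code are derived from the q field alone.
  assert(a == b && a.hashCode == b.hashCode)
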
http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/core/src/test/java/org/apache/predictionio/workflow/JavaQueryTypeAdapterFactory.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/org/apache/predictionio/workflow/JavaQueryTypeAdapterFactory.java b/core/src/test/java/org/apache/predictionio/workflow/JavaQueryTypeAdapterFactory.java
new file mode 100644
index 0000000..46854d6
--- /dev/null
+++ b/core/src/test/java/org/apache/predictionio/workflow/JavaQueryTypeAdapterFactory.java
@@ -0,0 +1,60 @@
+/** Copyright 2015 TappingStone, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.predictionio.workflow;
+
+import com.google.gson.Gson;
+import com.google.gson.TypeAdapter;
+import com.google.gson.TypeAdapterFactory;
+import com.google.gson.reflect.TypeToken;
+import com.google.gson.stream.JsonReader;
+import com.google.gson.stream.JsonToken;
+import com.google.gson.stream.JsonWriter;
+
+import java.io.IOException;
+
+public class JavaQueryTypeAdapterFactory implements TypeAdapterFactory {
+    @Override
+    public <T> TypeAdapter<T> create(Gson gson, TypeToken<T> type) {
+        if (type.getRawType().equals(JavaQuery.class)) {
+            return (TypeAdapter<T>) new TypeAdapter<JavaQuery>() {
+                public void write(JsonWriter out, JavaQuery value) throws IOException {
+                    if (value == null) {
+                        out.nullValue();
+                    } else {
+                        out.beginObject();
+                        out.name("q").value(value.getQ().toUpperCase());
+                        out.endObject();
+                    }
+                }
+
+                public JavaQuery read(JsonReader reader) throws IOException {
+                    if (reader.peek() == JsonToken.NULL) {
+                        reader.nextNull();
+                        return null;
+                    } else {
+                        reader.beginObject();
+                        reader.nextName();
+                        String q = reader.nextString();
+                        reader.endObject();
+                        return new JavaQuery(q.toUpperCase());
+                    }
+                }
+            };
+        } else {
+            return null;
+        }
+    }
+}

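A minimal sketch of wiring the factory above into Gson; the round-trip
values follow directly from the write/read methods, which both upper-case
the query string:

  import com.google.gson.GsonBuilder
  import org.apache.predictionio.workflow.{JavaQuery, JavaQueryTypeAdapterFactory}

  val gson = new GsonBuilder()
    .registerTypeAdapterFactory(new JavaQueryTypeAdapterFactory())
    .create()

  val json = gson.toJson(new JavaQuery("abc"))       // {"q":"ABC"}
  val back = gson.fromJson(json, classOf[JavaQuery])
  // back == new JavaQuery("ABC"), thanks to JavaQuery's value equality.
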
http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/core/src/test/scala/io/prediction/controller/EngineTest.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/io/prediction/controller/EngineTest.scala b/core/src/test/scala/io/prediction/controller/EngineTest.scala
deleted file mode 100644
index cc84249..0000000
--- a/core/src/test/scala/io/prediction/controller/EngineTest.scala
+++ /dev/null
@@ -1,615 +0,0 @@
-package io.prediction.controller
-
-import io.prediction.workflow.PersistentModelManifest
-import io.prediction.workflow.SharedSparkContext
-import io.prediction.workflow.StopAfterPrepareInterruption
-import io.prediction.workflow.StopAfterReadInterruption
-
-import grizzled.slf4j.Logger
-import io.prediction.workflow.WorkflowParams
-import org.apache.spark.rdd.RDD
-import org.scalatest.Inspectors._
-import org.scalatest.Matchers._
-import org.scalatest.FunSuite
-import org.scalatest.Inside
-
-import scala.util.Random
-
-class EngineSuite
-extends FunSuite with Inside with SharedSparkContext {
-  import io.prediction.controller.Engine0._
-  @transient lazy val logger = Logger[this.type] 
-
-  test("Engine.train") {
-    val engine = new Engine(
-      classOf[PDataSource2],
-      classOf[PPreparator1],
-      Map("" -> classOf[PAlgo2]),
-      classOf[LServing1])
-
-    val engineParams = EngineParams(
-      dataSourceParams = PDataSource2.Params(0),
-      preparatorParams = PPreparator1.Params(1),
-      algorithmParamsList = Seq(("", PAlgo2.Params(2))),
-      servingParams = LServing1.Params(3))
-
-    val models = engine.train(
-      sc, 
-      engineParams, 
-      engineInstanceId = "",
-      params = WorkflowParams())
-    
-    val pd = ProcessedData(1, TrainingData(0))
-
-    // PAlgo2.Model doesn't have IPersistentModel trait implemented. Hence the
-    // model extract after train is Unit.
-    models should contain theSameElementsAs Seq(Unit)
-  }
-
-  test("Engine.train persisting PAlgo.Model") {
-    val engine = new Engine(
-      classOf[PDataSource2],
-      classOf[PPreparator1],
-      Map(
-        "PAlgo2" -> classOf[PAlgo2],
-        "PAlgo3" -> classOf[PAlgo3]
-      ),
-      classOf[LServing1])
-
-    val engineParams = EngineParams(
-      dataSourceParams = PDataSource2.Params(0),
-      preparatorParams = PPreparator1.Params(1),
-      algorithmParamsList = Seq(
-        ("PAlgo2", PAlgo2.Params(2)),
-        ("PAlgo3", PAlgo3.Params(21)),
-        ("PAlgo3", PAlgo3.Params(22))
-      ),
-      servingParams = LServing1.Params(3))
-
-    val pd = ProcessedData(1, TrainingData(0))
-    val model21 = PAlgo3.Model(21, pd)
-    val model22 = PAlgo3.Model(22, pd)
-
-    val models = engine.train(
-      sc, 
-      engineParams, 
-      engineInstanceId = "",
-      params = WorkflowParams())
-
-    val pModel21 = PersistentModelManifest(model21.getClass.getName)
-    val pModel22 = PersistentModelManifest(model22.getClass.getName)
-    
-    models should contain theSameElementsAs Seq(Unit, pModel21, pModel22)
-  }
-
-  test("Engine.train persisting LAlgo.Model") {
-    val engine = Engine(
-      classOf[LDataSource1],
-      classOf[LPreparator1],
-      Map(
-        "LAlgo1" -> classOf[LAlgo1],
-        "LAlgo2" -> classOf[LAlgo2],
-        "LAlgo3" -> classOf[LAlgo3]
-      ),
-      classOf[LServing1])
-
-    val engineParams = EngineParams(
-      dataSourceParams = LDataSource1.Params(0),
-      preparatorParams = LPreparator1.Params(1),
-      algorithmParamsList = Seq(
-        ("LAlgo2", LAlgo2.Params(20)),
-        ("LAlgo2", LAlgo2.Params(21)),
-        ("LAlgo3", LAlgo3.Params(22))),
-      servingParams = LServing1.Params(3))
-
-    val pd = ProcessedData(1, TrainingData(0))
-    val model20 = LAlgo2.Model(20, pd)
-    val model21 = LAlgo2.Model(21, pd)
-    val model22 = LAlgo3.Model(22, pd)
-
-    //val models = engine.train(sc, engineParams, WorkflowParams())
-    val models = engine.train(
-      sc, 
-      engineParams, 
-      engineInstanceId = "",
-      params = WorkflowParams())
-
-    val pModel20 = PersistentModelManifest(model20.getClass.getName)
-    val pModel21 = PersistentModelManifest(model21.getClass.getName)
-    
-    models should contain theSameElementsAs Seq(pModel20, pModel21, model22)
-  }
-  
-  test("Engine.train persisting P&NAlgo.Model") {
-    val engine = new Engine(
-      classOf[PDataSource2],
-      classOf[PPreparator1],
-      Map(
-        "PAlgo2" -> classOf[PAlgo2],
-        "PAlgo3" -> classOf[PAlgo3],
-        "NAlgo2" -> classOf[NAlgo2],
-        "NAlgo3" -> classOf[NAlgo3]
-      ),
-      classOf[LServing1])
-
-    val engineParams = EngineParams(
-      dataSourceParams = PDataSource2.Params(0),
-      preparatorParams = PPreparator1.Params(1),
-      algorithmParamsList = Seq(
-        ("PAlgo2", PAlgo2.Params(20)),
-        ("PAlgo3", PAlgo3.Params(21)),
-        ("PAlgo3", PAlgo3.Params(22)),
-        ("NAlgo2", NAlgo2.Params(23)),
-        ("NAlgo3", NAlgo3.Params(24)),
-        ("NAlgo3", NAlgo3.Params(25))
-      ),
-      servingParams = LServing1.Params(3))
-
-    val pd = ProcessedData(1, TrainingData(0))
-    val model21 = PAlgo3.Model(21, pd)
-    val model22 = PAlgo3.Model(22, pd)
-    val model23 = NAlgo2.Model(23, pd)
-    val model24 = NAlgo3.Model(24, pd)
-    val model25 = NAlgo3.Model(25, pd)
-
-    //val models = engine.train(sc, engineParams, WorkflowParams())
-    val models = engine.train(
-      sc, 
-      engineParams, 
-      engineInstanceId = "",
-      params = WorkflowParams())
-
-    val pModel21 = PersistentModelManifest(model21.getClass.getName)
-    val pModel22 = PersistentModelManifest(model22.getClass.getName)
-    val pModel23 = PersistentModelManifest(model23.getClass.getName)
-    
-    models should contain theSameElementsAs Seq(
-      Unit, pModel21, pModel22, pModel23, model24, model25)
-  }
-
-  test("Engine.eval") {
-    val engine = new Engine(
-      classOf[PDataSource2],
-      classOf[PPreparator1],
-      Map("" -> classOf[PAlgo2]),
-      classOf[LServing1])
-
-    val qn = 10
-    val en = 3
-
-    val engineParams = EngineParams(
-      dataSourceParams = PDataSource2.Params(id = 0, en = en, qn = qn),
-      preparatorParams = PPreparator1.Params(1),
-      algorithmParamsList = Seq(("", PAlgo2.Params(2))),
-      servingParams = LServing1.Params(3))
-
-    val algoCount = engineParams.algorithmParamsList.size
-    val pd = ProcessedData(1, TrainingData(0))
-    val model0 = PAlgo2.Model(2, pd)
-
-    val evalDataSet = engine.eval(sc, engineParams, WorkflowParams())
-
-    evalDataSet should have size en
-
-    forAll(evalDataSet.zipWithIndex) { case (evalData, ex) => {
-      val (evalInfo, qpaRDD) = evalData
-      evalInfo shouldBe EvalInfo(0)
-
-      val qpaSeq: Seq[(Query, Prediction, Actual)] = qpaRDD.collect
-
-      qpaSeq should have size qn
-
-      forAll (qpaSeq) { case (q, p, a) => 
-        val Query(qId, qEx, qQx, _) = q
-        val Actual(aId, aEx, aQx) = a
-        qId shouldBe aId
-        qEx shouldBe ex
-        aEx shouldBe ex
-        qQx shouldBe aQx
-
-        inside (p) { case Prediction(pId, pQ, pModels, pPs) => {
-          pId shouldBe 3
-          pQ shouldBe q
-          pModels shouldBe None
-          pPs should have size algoCount
-          pPs shouldBe Seq(
-            Prediction(id = 2, q = q, models = Some(model0)))
-        }}
-      }
-    }}
-  }
-
-  test("Engine.prepareDeploy PAlgo") {
-    val engine = new Engine(
-      classOf[PDataSource2],
-      classOf[PPreparator1],
-      Map(
-        "PAlgo2" -> classOf[PAlgo2],
-        "PAlgo3" -> classOf[PAlgo3],
-        "NAlgo2" -> classOf[NAlgo2],
-        "NAlgo3" -> classOf[NAlgo3]
-      ),
-      classOf[LServing1])
-
-    val engineParams = EngineParams(
-      dataSourceParams = PDataSource2.Params(0),
-      preparatorParams = PPreparator1.Params(1),
-      algorithmParamsList = Seq(
-        ("PAlgo2", PAlgo2.Params(20)),
-        ("PAlgo3", PAlgo3.Params(21)),
-        ("PAlgo3", PAlgo3.Params(22)),
-        ("NAlgo2", NAlgo2.Params(23)),
-        ("NAlgo3", NAlgo3.Params(24)),
-        ("NAlgo3", NAlgo3.Params(25))
-      ),
-      servingParams = LServing1.Params(3))
-
-    val pd = ProcessedData(1, TrainingData(0))
-    val model20 = PAlgo2.Model(20, pd)
-    val model21 = PAlgo3.Model(21, pd)
-    val model22 = PAlgo3.Model(22, pd)
-    val model23 = NAlgo2.Model(23, pd)
-    val model24 = NAlgo3.Model(24, pd)
-    val model25 = NAlgo3.Model(25, pd)
-
-    val rand = new Random()
-
-    val fakeEngineInstanceId = s"FakeInstanceId-${rand.nextLong()}"
-
-    val persistedModels = engine.train(
-      sc,
-      engineParams,
-      engineInstanceId = fakeEngineInstanceId,
-      params = WorkflowParams()
-    )
-
-    val deployableModels = engine.prepareDeploy(
-      sc,
-      engineParams,
-      fakeEngineInstanceId,
-      persistedModels,
-      params = WorkflowParams()
-    )
-
-    deployableModels should contain theSameElementsAs Seq(
-      model20, model21, model22, model23, model24, model25)
-  }
-}
-
-class EngineTrainSuite extends FunSuite with SharedSparkContext {
-  import io.prediction.controller.Engine0._
-  val defaultWorkflowParams: WorkflowParams = WorkflowParams()
-
-  test("Parallel DS/P/Algos") {
-    val models = Engine.train(
-      sc,
-      new PDataSource0(0),
-      new PPreparator0(1),
-      Seq(
-        new PAlgo0(2),
-        new PAlgo1(3),
-        new PAlgo0(4)),
-      defaultWorkflowParams
-    )
-
-    val pd = ProcessedData(1, TrainingData(0))
-
-    models should contain theSameElementsAs Seq(
-      PAlgo0.Model(2, pd), PAlgo1.Model(3, pd), PAlgo0.Model(4, pd))
-  }
-
-  test("Local DS/P/Algos") {
-    val models = Engine.train(
-      sc,
-      new LDataSource0(0),
-      new LPreparator0(1),
-      Seq(
-        new LAlgo0(2),
-        new LAlgo1(3),
-        new LAlgo0(4)),
-      defaultWorkflowParams
-    )
-    
-    val pd = ProcessedData(1, TrainingData(0))
-
-    val expectedResults = Seq(
-      LAlgo0.Model(2, pd),
-      LAlgo1.Model(3, pd),
-      LAlgo0.Model(4, pd))
-
-    forAll(models.zip(expectedResults)) { case (model, expected) => 
-      model shouldBe a [RDD[_]]
-      val localModel = model.asInstanceOf[RDD[_]].collect
-      localModel should contain theSameElementsAs Seq(expected)
-    }
-  }
-
-  test("P2L DS/P/Algos") {
-    val models = Engine.train(
-      sc,
-      new PDataSource0(0),
-      new PPreparator0(1),
-      Seq(
-        new NAlgo0(2),
-        new NAlgo1(3),
-        new NAlgo0(4)),
-      defaultWorkflowParams
-    )
-
-    val pd = ProcessedData(1, TrainingData(0))
-    
-    models should contain theSameElementsAs Seq(
-      NAlgo0.Model(2, pd), NAlgo1.Model(3, pd), NAlgo0.Model(4, pd))
-  }
-  
-  test("Parallel DS/P/Algos Stop-After-Read") {
-    val workflowParams = defaultWorkflowParams.copy(
-      stopAfterRead = true)
-
-    an [StopAfterReadInterruption] should be thrownBy Engine.train(
-      sc,
-      new PDataSource0(0),
-      new PPreparator0(1),
-      Seq(
-        new PAlgo0(2),
-        new PAlgo1(3),
-        new PAlgo0(4)),
-      workflowParams
-    )
-  }
-  
-  test("Parallel DS/P/Algos Stop-After-Prepare") {
-    val workflowParams = defaultWorkflowParams.copy(
-      stopAfterPrepare = true)
-
-    an [StopAfterPrepareInterruption] should be thrownBy Engine.train(
-      sc,
-      new PDataSource0(0),
-      new PPreparator0(1),
-      Seq(
-        new PAlgo0(2),
-        new PAlgo1(3),
-        new PAlgo0(4)),
-      workflowParams
-    )
-  }
-  
-  test("Parallel DS/P/Algos Dirty TrainingData") {
-    val workflowParams = defaultWorkflowParams.copy(
-      skipSanityCheck = false)
-
-    an [AssertionError] should be thrownBy Engine.train(
-      sc,
-      new PDataSource3(0, error = true),
-      new PPreparator0(1),
-      Seq(
-        new PAlgo0(2),
-        new PAlgo1(3),
-        new PAlgo0(4)),
-      workflowParams
-    )
-  }
-  
-  test("Parallel DS/P/Algos Dirty TrainingData But Skip Check") {
-    val workflowParams = defaultWorkflowParams.copy(
-      skipSanityCheck = true)
-
-    val models = Engine.train(
-      sc,
-      new PDataSource3(0, error = true),
-      new PPreparator0(1),
-      Seq(
-        new PAlgo0(2),
-        new PAlgo1(3),
-        new PAlgo0(4)),
-      workflowParams
-    )
-    
-  val pd = ProcessedData(1, TrainingData(0, error = true))
-
-    models should contain theSameElementsAs Seq(
-      PAlgo0.Model(2, pd), PAlgo1.Model(3, pd), PAlgo0.Model(4, pd))
-  }
-}
-
-
-class EngineEvalSuite
-extends FunSuite with Inside with SharedSparkContext {
-  import io.prediction.controller.Engine0._
-
-  @transient lazy val logger = Logger[this.type] 
-  
-  test("Simple Parallel DS/P/A/S") {
-    val en = 2
-    val qn = 5
-
-    val evalDataSet: Seq[(EvalInfo, RDD[(Query, Prediction, Actual)])] = 
-    Engine.eval(
-      sc,
-      new PDataSource1(id = 1, en = en, qn = qn),
-      new PPreparator0(id = 2),
-      Seq(new PAlgo0(id = 3)),
-      new LServing0(id = 10))
-
-    val pd = ProcessedData(2, TrainingData(1))
-    val model0 = PAlgo0.Model(3, pd)
-
-    forAll(evalDataSet.zipWithIndex) { case (evalData, ex) => {
-      val (evalInfo, qpaRDD) = evalData
-      evalInfo shouldBe EvalInfo(1)
-
-      val qpaSeq: Seq[(Query, Prediction, Actual)] = qpaRDD.collect
-      forAll (qpaSeq) { case (q, p, a) => 
-        val Query(qId, qEx, qQx, _) = q
-        val Actual(aId, aEx, aQx) = a
-        qId shouldBe aId
-        qEx shouldBe ex
-        aEx shouldBe ex
-        qQx shouldBe aQx
-
-        inside (p) { case Prediction(pId, pQ, pModels, pPs) => {
-          pId shouldBe 10
-          pQ shouldBe q
-          pModels shouldBe None
-          pPs should have size 1
-          pPs shouldBe Seq(
-            Prediction(id = 3, q = q, models = Some(model0)))
-        }}
-      }
-
-    }}
-
-  }
-
-  test("Parallel DS/P/A/S") {
-    val en = 2
-    val qn = 5
-
-    val evalDataSet: Seq[(EvalInfo, RDD[(Query, Prediction, Actual)])] = 
-    Engine.eval(
-      sc,
-      new PDataSource1(id = 1, en = en, qn = qn),
-      new PPreparator0(id = 2),
-      Seq(
-        new PAlgo0(id = 3), 
-        new PAlgo1(id = 4),
-        new NAlgo1(id = 5)),
-      new LServing0(id = 10))
-
-    val pd = ProcessedData(2, TrainingData(1))
-    val model0 = PAlgo0.Model(3, pd)
-    val model1 = PAlgo1.Model(4, pd)
-    val model2 = NAlgo1.Model(5, pd)
-
-    forAll(evalDataSet.zipWithIndex) { case (evalData, ex) => {
-      val (evalInfo, qpaRDD) = evalData
-      evalInfo shouldBe EvalInfo(1)
-
-      val qpaSeq: Seq[(Query, Prediction, Actual)] = qpaRDD.collect
-      forAll (qpaSeq) { case (q, p, a) => 
-        val Query(qId, qEx, qQx, _) = q
-        val Actual(aId, aEx, aQx) = a
-        qId shouldBe aId
-        qEx shouldBe ex
-        aEx shouldBe ex
-        qQx shouldBe aQx
-
-        inside (p) { case Prediction(pId, pQ, pModels, pPs) => {
-          pId shouldBe 10
-          pQ shouldBe q
-          pModels shouldBe None
-          pPs should have size 3
-          pPs shouldBe Seq(
-            Prediction(id = 3, q = q, models = Some(model0)),
-            Prediction(id = 4, q = q, models = Some(model1)),
-            Prediction(id = 5, q = q, models = Some(model2))
-          )
-        }}
-      }
-    }}
-  }
-  
-  test("Parallel DS/P/A/S with Supplemented Query") {
-    val en = 2
-    val qn = 5
-
-    val evalDataSet: Seq[(EvalInfo, RDD[(Query, Prediction, Actual)])] = 
-    Engine.eval(
-      sc,
-      new PDataSource1(id = 1, en = en, qn = qn),
-      new PPreparator0(id = 2),
-      Seq(
-        new PAlgo0(id = 3), 
-        new PAlgo1(id = 4),
-        new NAlgo1(id = 5)),
-      new LServing2(id = 10))
-
-    val pd = ProcessedData(2, TrainingData(1))
-    val model0 = PAlgo0.Model(3, pd)
-    val model1 = PAlgo1.Model(4, pd)
-    val model2 = NAlgo1.Model(5, pd)
-
-    forAll(evalDataSet.zipWithIndex) { case (evalData, ex) => {
-      val (evalInfo, qpaRDD) = evalData
-      evalInfo shouldBe EvalInfo(1)
-
-      val qpaSeq: Seq[(Query, Prediction, Actual)] = qpaRDD.collect
-      forAll (qpaSeq) { case (q, p, a) => 
-        val Query(qId, qEx, qQx, qSupp) = q
-        val Actual(aId, aEx, aQx) = a
-        qId shouldBe aId
-        qEx shouldBe ex
-        aEx shouldBe ex
-        qQx shouldBe aQx
-        qSupp shouldBe false
-
-        inside (p) { case Prediction(pId, pQ, pModels, pPs) => {
-          pId shouldBe 10
-          pQ shouldBe q
-          pModels shouldBe None
-          pPs should have size 3
-          // Queries inside the predictions should have supp set to true,
-          // since they represent what the algorithms actually saw.
-          val suppQ = q.copy(supp = true)  // avoid shadowing qSupp above
-          pPs shouldBe Seq(
-            Prediction(id = 3, q = suppQ, models = Some(model0)),
-            Prediction(id = 4, q = suppQ, models = Some(model1)),
-            Prediction(id = 5, q = suppQ, models = Some(model2))
-          )
-        }}
-      }
-    }}
-  }
-  
-  test("Local DS/P/A/S") {
-    val en = 2
-    val qn = 5
-
-    val evalDataSet: Seq[(EvalInfo, RDD[(Query, Prediction, Actual)])] = 
-    Engine.eval(
-      sc,
-      new LDataSource0(id = 1, en = en, qn = qn),
-      new LPreparator0(id = 2),
-      Seq(
-        new LAlgo0(id = 3), 
-        new LAlgo1(id = 4),
-        new LAlgo1(id = 5)),
-      new LServing0(id = 10))
-
-    val pd = ProcessedData(2, TrainingData(1))
-    val model0 = LAlgo0.Model(3, pd)
-    val model1 = LAlgo1.Model(4, pd)
-    val model2 = LAlgo1.Model(5, pd)
-
-    forAll(evalDataSet.zipWithIndex) { case (evalData, ex) => {
-      val (evalInfo, qpaRDD) = evalData
-      evalInfo shouldBe EvalInfo(1)
-
-      val qpaSeq: Seq[(Query, Prediction, Actual)] = qpaRDD.collect
-      forAll (qpaSeq) { case (q, p, a) => 
-        val Query(qId, qEx, qQx, _) = q
-        val Actual(aId, aEx, aQx) = a
-        qId shouldBe aId
-        qEx shouldBe ex
-        aEx shouldBe ex
-        qQx shouldBe aQx
-
-        inside (p) { case Prediction(pId, pQ, pModels, pPs) => {
-          pId shouldBe 10
-          pQ shouldBe q
-          pModels shouldBe None
-          pPs should have size 3
-          pPs shouldBe Seq(
-            Prediction(id = 3, q = q, models = Some(model0)),
-            Prediction(id = 4, q = q, models = Some(model1)),
-            Prediction(id = 5, q = q, models = Some(model2))
-          )
-        }}
-      }
-
-    }}
-
-  }
-}
-
-
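
For context on the supplemented-query test above: the serving layer rewrites
each query before fanning it out to the algorithms, so the per-algorithm
predictions carry supp = true while the caller's original query keeps
supp = false. A minimal standalone sketch of that idea (hypothetical names,
plain Scala in place of the PIO serving API; not part of this commit):

    case class Query(x: Int, supp: Boolean = false)

    // The serving layer's rewrite step; the algorithms only ever see
    // the supplemented copy, never the caller's original query.
    def supplement(q: Query): Query = q.copy(supp = true)

    def serve(q: Query, algos: Seq[Query => Int]): Seq[Int] = {
      val suppQ = supplement(q)
      algos.map(algo => algo(suppQ))
    }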

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/core/src/test/scala/io/prediction/controller/EvaluationTest.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/io/prediction/controller/EvaluationTest.scala b/core/src/test/scala/io/prediction/controller/EvaluationTest.scala
deleted file mode 100644
index 5dc4c86..0000000
--- a/core/src/test/scala/io/prediction/controller/EvaluationTest.scala
+++ /dev/null
@@ -1,46 +0,0 @@
-package io.prediction.controller
-
-import io.prediction.workflow.SharedSparkContext
-
-import org.scalatest.FunSuite
-import org.scalatest.Inside
-import org.scalatest.Matchers._
-
-import org.apache.spark.SparkContext
-import org.apache.spark.rdd.RDD
-
-object EvaluationSuite {
-  import io.prediction.controller.TestEvaluator._
-
-  class Metric0 extends Metric[EvalInfo, Query, Prediction, Actual, Int] {
-    def calculate(
-      sc: SparkContext,
-      evalDataSet: Seq[(EvalInfo, RDD[(Query, Prediction, Actual)])]): Int = 1
-  }
-
-  object Evaluation0 extends Evaluation {
-    engineMetric = (new FakeEngine(1, 1, 1), new Metric0())
-  }
-}
-
-
-class EvaluationSuite
-extends FunSuite with Inside with SharedSparkContext {
-  import io.prediction.controller.EvaluationSuite._
-
-  test("Evaluation makes MetricEvaluator") {
-    // MetricEvaluator is typed [EvalInfo, Query, Prediction, Actual, Int];
-    // however, the type parameters are erased on the JVM. The ScalaTest
-    // docs recommend matching against wildcards instead.
-    Evaluation0.evaluator shouldBe a [MetricEvaluator[_, _, _, _, _]]
-  }
-
-  test("Load from class path") {
-    val r = io.prediction.workflow.WorkflowUtils.getEvaluation(
-      "io.prediction.controller.EvaluationSuite.Evaluation0",
-      getClass.getClassLoader)
-
-    r._2 shouldBe EvaluationSuite.Evaluation0
-  }
-
-}
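
A note on the wildcard matcher above: type parameters are erased at runtime,
so a runtime class check can only see the raw type. A minimal ScalaTest
sketch of the same pattern (a hypothetical suite, not part of this commit):

    import org.scalatest.FunSuite
    import org.scalatest.Matchers._

    class ErasureSketch extends FunSuite {
      test("only the raw type survives erasure") {
        val xs: Any = List(1, 2, 3)
        // List[Int] and List[String] share one runtime class, so only
        // a wildcard check is meaningful here.
        xs shouldBe a [List[_]]
      }
    }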

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/core/src/test/scala/io/prediction/controller/EvaluatorTest.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/io/prediction/controller/EvaluatorTest.scala b/core/src/test/scala/io/prediction/controller/EvaluatorTest.scala
deleted file mode 100644
index c57bd03..0000000
--- a/core/src/test/scala/io/prediction/controller/EvaluatorTest.scala
+++ /dev/null
@@ -1,93 +0,0 @@
-package io.prediction.controller
-
-import io.prediction.core._
-import io.prediction.workflow.WorkflowParams
-
-import org.apache.spark.SparkContext
-import org.apache.spark.rdd.RDD
-
-object TestEvaluator {
-  case class EvalInfo(id: Int, ex: Int)
-  case class Query(id: Int, ex: Int, qx: Int)
-  case class Prediction(id: Int, ex: Int, qx: Int)
-  case class Actual(id: Int, ex: Int, qx: Int)
-
-  class FakeEngine(val id: Int, val en: Int, val qn: Int)
-  extends BaseEngine[EvalInfo, Query, Prediction, Actual] {
-    def train(
-      sc: SparkContext, 
-      engineParams: EngineParams,
-      instanceId: String = "",
-      params: WorkflowParams = WorkflowParams()
-    ): Seq[Any] = {
-      Seq[Any]()
-    }
-
-    def eval(
-      sc: SparkContext, 
-      engineParams: EngineParams, 
-      params: WorkflowParams)
-    : Seq[(EvalInfo, RDD[(Query, Prediction, Actual)])] = {
-      (0 until en).map { ex => {
-        val qpas = (0 until qn).map { qx => {
-          (Query(id, ex, qx), Prediction(id, ex, qx), Actual(id, ex, qx))
-        }}
-  
-        (EvalInfo(id = id, ex = ex), sc.parallelize(qpas))
-      }}
-    }
-  
-  }
-
-  /*
-  class Evaluator0 extends Evaluator[EvalInfo, Query, Prediction, Actual,
-      (Query, Prediction, Actual), 
-      (EvalInfo, Seq[(Query, Prediction, Actual)]),
-      Seq[(EvalInfo, (EvalInfo, Seq[(Query, Prediction, Actual)]))]
-      ] {
-
-    def evaluateUnit(q: Query, p: Prediction, a: Actual)
-    : (Query, Prediction, Actual) = (q, p, a)
-
-    def evaluateSet(
-        evalInfo: EvalInfo, 
-        eus: Seq[(Query, Prediction, Actual)])
-    : (EvalInfo, Seq[(Query, Prediction, Actual)]) = (evalInfo, eus)
-
-    def evaluateAll(
-      input: Seq[(EvalInfo, (EvalInfo, Seq[(Query, Prediction, Actual)]))]) 
-    = input
-  }
-  */
-
-}
-
-/*
-class EvaluatorSuite
-extends FunSuite with Inside with SharedSparkContext {
-  import io.prediction.controller.TestEvaluator._
-  @transient lazy val logger = Logger[this.type] 
-
-  test("Evaluator.evaluate") {
-    val engine = new FakeEngine(1, 3, 10)
-    val evaluator = new Evaluator0()
-  
-    val evalDataSet = engine.eval(sc, null.asInstanceOf[EngineParams])
-    val er: Seq[(EvalInfo, (EvalInfo, Seq[(Query, Prediction, Actual)]))] =
-      evaluator.evaluateBase(sc, evalDataSet)
-
-    evalDataSet.zip(er).map { case (input, output) => {
-      val (inputEvalInfo, inputQpaRDD) = input
-      val (outputEvalInfo, (outputEvalInfo2, outputQpaSeq)) = output
-      
-      inputEvalInfo shouldBe outputEvalInfo
-      inputEvalInfo shouldBe outputEvalInfo2
-      
-      val inputQpaSeq: Array[(Query, Prediction, Actual)] = inputQpaRDD.collect
-
-      inputQpaSeq.size should be (outputQpaSeq.size)
-      // TODO. match inputQpa and outputQpa content.
-    }}
-  }
-}
-*/
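
The FakeEngine above encodes the invariant that the engine and evaluation
suites assert: every (Query, Prediction, Actual) tuple in evaluation set ex
carries that ex, and query and actual share the same qx. The same generation
scheme with plain collections in place of RDDs (a sketch, not part of this
commit):

    val (id, en, qn) = (1, 2, 3)
    val evalDataSet = (0 until en).map { ex =>
      val qpas = (0 until qn).map { qx =>
        ((id, ex, qx), (id, ex, qx), (id, ex, qx))  // (q, p, a)
      }
      ((id, ex), qpas)                              // (evalInfo, tuples)
    }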

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/core/src/test/scala/io/prediction/controller/FastEvalEngineTest.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/io/prediction/controller/FastEvalEngineTest.scala b/core/src/test/scala/io/prediction/controller/FastEvalEngineTest.scala
deleted file mode 100644
index cdf4598..0000000
--- a/core/src/test/scala/io/prediction/controller/FastEvalEngineTest.scala
+++ /dev/null
@@ -1,181 +0,0 @@
-package io.prediction.controller
-
-import io.prediction.workflow.WorkflowParams
-import org.scalatest.FunSuite
-import org.scalatest.Inside
-import org.scalatest.Matchers._
-import org.scalatest.Inspectors._
-
-import io.prediction.workflow.SharedSparkContext
-
-class FastEngineSuite
-extends FunSuite with Inside with SharedSparkContext {
-  import io.prediction.controller.Engine0._
-  
-  test("Single Evaluation") {
-    val engine = new FastEvalEngine(
-      Map("" -> classOf[PDataSource2]),
-      Map("" -> classOf[PPreparator1]),
-      Map(
-        "PAlgo2" -> classOf[PAlgo2],
-        "PAlgo3" -> classOf[PAlgo3]
-      ),
-      Map("" -> classOf[LServing1]))
-
-    val qn = 10
-    val en = 3
-
-    val engineParams = EngineParams(
-      dataSourceParams = PDataSource2.Params(id = 0, en = en, qn = qn),
-      preparatorParams = PPreparator1.Params(1),
-      algorithmParamsList = Seq(
-        ("PAlgo2", PAlgo2.Params(20)),
-        ("PAlgo2", PAlgo2.Params(21)),
-        ("PAlgo3", PAlgo3.Params(22))
-      ),
-      servingParams = LServing1.Params(3))
-
-    val algoCount = engineParams.algorithmParamsList.size
-    val pd = ProcessedData(1, TrainingData(0))
-    val model0 = PAlgo2.Model(20, pd)
-    val model1 = PAlgo2.Model(21, pd)
-    val model2 = PAlgo3.Model(22, pd)
-
-    val evalDataSet = engine.eval(sc, engineParams, WorkflowParams())
-
-    evalDataSet should have size en
-
-    forAll(evalDataSet.zipWithIndex) { case (evalData, ex) => {
-      val (evalInfo, qpaRDD) = evalData
-      evalInfo shouldBe EvalInfo(0)
-
-      val qpaSeq: Seq[(Query, Prediction, Actual)] = qpaRDD.collect
-
-      qpaSeq should have size qn
-
-      forAll (qpaSeq) { case (q, p, a) => 
-        val Query(qId, qEx, qQx, _) = q
-        val Actual(aId, aEx, aQx) = a
-        qId shouldBe aId
-        qEx shouldBe ex
-        aEx shouldBe ex
-        qQx shouldBe aQx
-
-        inside (p) { case Prediction(pId, pQ, pModels, pPs) => {
-          pId shouldBe 3
-          pQ shouldBe q
-          pModels shouldBe None
-          pPs should have size algoCount
-          pPs shouldBe Seq(
-            Prediction(id = 20, q = q, models = Some(model0)),
-            Prediction(id = 21, q = q, models = Some(model1)),
-            Prediction(id = 22, q = q, models = Some(model2))
-          )
-        }}
-      }
-    }}
-  }
-
-  test("Batch Evaluation") {
-    val engine = new FastEvalEngine(
-      Map("" -> classOf[PDataSource2]),
-      Map("" -> classOf[PPreparator1]),
-      Map("" -> classOf[PAlgo2]),
-      Map("" -> classOf[LServing1]))
-
-    val qn = 10
-    val en = 3
-
-    val baseEngineParams = EngineParams(
-      dataSourceParams = PDataSource2.Params(id = 0, en = en, qn = qn),
-      preparatorParams = PPreparator1.Params(1),
-      algorithmParamsList = Seq(("", PAlgo2.Params(2))),
-      servingParams = LServing1.Params(3))
-
-    val ep0 = baseEngineParams
-    val ep1 = baseEngineParams.copy(
-      algorithmParamsList = Seq(("", PAlgo2.Params(2))))
-    val ep2 = baseEngineParams.copy(
-      algorithmParamsList = Seq(("", PAlgo2.Params(20))))
-
-    val engineEvalDataSet = engine.batchEval(
-      sc,
-      Seq(ep0, ep1, ep2),
-      WorkflowParams())
-
-    val evalDataSet0 = engineEvalDataSet(0)._2
-    val evalDataSet1 = engineEvalDataSet(1)._2
-    val evalDataSet2 = engineEvalDataSet(2)._2
-
-    evalDataSet0 shouldBe evalDataSet1
-    evalDataSet0 should not be evalDataSet2
-    evalDataSet1 should not be evalDataSet2
-
-    // evalDataSet0._1 should be theSameInstanceAs evalDataSet1._1
-    // When caching works correctly, evalDataSet0 and evalDataSet1 should
-    // share the same EI instances.
-    evalDataSet0.zip(evalDataSet1).foreach { case (e0, e1) => {
-      e0._1 should be theSameInstanceAs e1._1
-      e0._2 should be theSameInstanceAs e1._2
-    }}
-   
-    // Set1 and Set2 likewise share the same EI; their QPA RDDs should differ.
-    evalDataSet1.zip(evalDataSet2).foreach { case (e1, e2) => {
-      e1._1 should be theSameInstanceAs e2._1
-      val e1Qpa = e1._2
-      val e2Qpa = e2._2
-      e1Qpa should not be theSameInstanceAs (e2Qpa)
-    }}
-  }
-  
-  test("Not cached when isEqual not implemented") {
-    // PDataSource4.Params is a plain class, not a case class, so it needs
-    // to implement isEqual to be hashed (and therefore cached) correctly.
-    val engine = new FastEvalEngine(
-      Map("" -> classOf[PDataSource4]),
-      Map("" -> classOf[PPreparator1]),
-      Map("" -> classOf[PAlgo2]),
-      Map("" -> classOf[LServing1]))
-
-    val qn = 10
-    val en = 3
-
-    val baseEngineParams = EngineParams(
-      dataSourceParams = new PDataSource4.Params(id = 0, en = en, qn = qn),
-      preparatorParams = PPreparator1.Params(1),
-      algorithmParamsList = Seq(("", PAlgo2.Params(2))),
-      servingParams = LServing1.Params(3))
-
-    val ep0 = baseEngineParams
-    val ep1 = baseEngineParams.copy(
-      algorithmParamsList = Seq(("", PAlgo2.Params(3))))
-    // ep2's data source params are a fresh instance, reference-unequal to ep0's.
-    val ep2 = baseEngineParams.copy(
-      dataSourceParams = ("", new PDataSource4.Params(id = 0, en = en, qn = qn)),
-      algorithmParamsList = Seq(("", PAlgo2.Params(3))))
-
-    val engineEvalDataSet = engine.batchEval(
-      sc,
-      Seq(ep0, ep1, ep2),
-      WorkflowParams())
-
-    val evalDataSet0 = engineEvalDataSet(0)._2
-    val evalDataSet1 = engineEvalDataSet(1)._2
-    val evalDataSet2 = engineEvalDataSet(2)._2
-
-    evalDataSet0 should not be evalDataSet1
-    evalDataSet0 should not be evalDataSet2
-    evalDataSet1 should not be evalDataSet2
-
-    // Set0 should share Set1's EI, since their data source params are the same instance.
-    evalDataSet0.zip(evalDataSet1).foreach { case (e0, e1) => {
-      e0._1 should be theSameInstanceAs (e1._1)
-    }}
-  
-    // Set1 should have a different EI from Set2, since Set2's data source
-    // params are a separate instance.
-    evalDataSet1.zip(evalDataSet2).foreach { case (e1, e2) => {
-      e1._1 should not be theSameInstanceAs (e2._1)
-    }}
-  }
-}
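
The caching behavior exercised above keys off parameter equality: case
classes get structural equals/hashCode for free, while a plain class falls
back to reference identity unless it supplies its own (hence the isEqual
requirement). A self-contained sketch of the distinction (hypothetical
classes, not part of this commit):

    case class CaseParams(id: Int)
    class PlainParams(val id: Int)

    object EqualitySketch extends App {
      // Structural equality: equal fields mean equal cache keys, so
      // intermediate results can be reused across engine params.
      println(CaseParams(1) == CaseParams(1))            // true
      // Reference equality: a fresh instance never matches, so every
      // run is recomputed from scratch.
      println(new PlainParams(1) == new PlainParams(1))  // false
    }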

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/core/src/test/scala/io/prediction/controller/MetricEvaluatorTest.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/io/prediction/controller/MetricEvaluatorTest.scala b/core/src/test/scala/io/prediction/controller/MetricEvaluatorTest.scala
deleted file mode 100644
index 71fcb88..0000000
--- a/core/src/test/scala/io/prediction/controller/MetricEvaluatorTest.scala
+++ /dev/null
@@ -1,52 +0,0 @@
-/** Copyright 2015 TappingStone, Inc.
-  *
-  * Licensed under the Apache License, Version 2.0 (the "License");
-  * you may not use this file except in compliance with the License.
-  * You may obtain a copy of the License at
-  *
-  *     http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-package io.prediction.controller
-
-import io.prediction.workflow.SharedSparkContext
-import io.prediction.workflow.WorkflowParams
-import org.scalatest.FunSuite
-
-object MetricEvaluatorSuite {
-  case class Metric0() extends SumMetric[EmptyParams, Int, Int, Int, Int] {
-    def calculate(q: Int, p: Int, a: Int): Int = q
-  }
-
-  object Evaluation0 extends Evaluation {}
-}
-
-class MetricEvaluatorDevSuite extends FunSuite with SharedSparkContext {
-  import io.prediction.controller.MetricEvaluatorSuite._
-
-  test("a") {
-    val metricEvaluator = MetricEvaluator(
-      Metric0(),
-      Seq(Metric0(), Metric0())
-    )
- 
-    val engineEvalDataSet = Seq(
-      (EngineParams(), Seq(
-        (EmptyParams(), sc.parallelize(Seq((1,0,0), (2,0,0)))))),
-      (EngineParams(), Seq(
-        (EmptyParams(), sc.parallelize(Seq((1,0,0), (2,0,0)))))))
-
-    val r = metricEvaluator.evaluateBase(
-      sc,
-      Evaluation0,
-      engineEvalDataSet,
-      WorkflowParams())
-
-  }
-}

http://git-wip-us.apache.org/repos/asf/incubator-predictionio/blob/4f03388e/core/src/test/scala/io/prediction/controller/MetricTest.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/io/prediction/controller/MetricTest.scala b/core/src/test/scala/io/prediction/controller/MetricTest.scala
deleted file mode 100644
index b846548..0000000
--- a/core/src/test/scala/io/prediction/controller/MetricTest.scala
+++ /dev/null
@@ -1,143 +0,0 @@
-/** Copyright 2015 TappingStone, Inc.
-  *
-  * Licensed under the Apache License, Version 2.0 (the "License");
-  * you may not use this file except in compliance with the License.
-  * You may obtain a copy of the License at
-  *
-  *     http://www.apache.org/licenses/LICENSE-2.0
-  *
-  * Unless required by applicable law or agreed to in writing, software
-  * distributed under the License is distributed on an "AS IS" BASIS,
-  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
-
-package io.prediction.controller
-
-import io.prediction.workflow.SharedSparkContext
-
-import grizzled.slf4j.Logger
-import org.scalatest.Matchers._
-import org.scalatest.FunSuite
-import org.scalatest.Inside
-
-object MetricDevSuite {
-  class QIntSumMetric extends SumMetric[EmptyParams, Int, Int, Int, Int] {
-    def calculate(q: Int, p: Int, a: Int): Int = q
-  }
-  
-  class QDoubleSumMetric extends SumMetric[EmptyParams, Int, Int, Int, Double] {
-    def calculate(q: Int, p: Int, a: Int): Double = q.toDouble
-  }
-  
-  class QAverageMetric extends AverageMetric[EmptyParams, Int, Int, Int] {
-    def calculate(q: Int, p: Int, a: Int): Double = q.toDouble
-  }
-  
-  class QOptionAverageMetric extends OptionAverageMetric[EmptyParams, Int, Int, Int] {
-    def calculate(q: Int, p: Int, a: Int): Option[Double] = {
-      if (q < 0) { None } else { Some(q.toDouble) }
-    }
-  }
-  
-  class QStdevMetric extends StdevMetric[EmptyParams, Int, Int, Int] {
-    def calculate(q: Int, p: Int, a: Int): Double = q.toDouble
-  }
-  
-  class QOptionStdevMetric extends OptionStdevMetric[EmptyParams, Int, Int, Int] {
-    def calculate(q: Int, p: Int, a: Int): Option[Double] = {
-      if (q < 0) { None } else { Some(q.toDouble) }
-    }
-  }
-  
-}
-
-class MetricDevSuite
-extends FunSuite with Inside with SharedSparkContext {
-  @transient lazy val logger = Logger[this.type] 
-  
-  test("Average Metric") {
-    val qpaSeq0 = Seq((1, 0, 0), (2, 0, 0), (3, 0, 0))
-    val qpaSeq1 = Seq((4, 0, 0), (5, 0, 0), (6, 0, 0))
-
-    val evalDataSet = Seq(
-      (EmptyParams(), sc.parallelize(qpaSeq0)),
-      (EmptyParams(), sc.parallelize(qpaSeq1)))
-  
-    val m = new MetricDevSuite.QAverageMetric()
-    val result = m.calculate(sc, evalDataSet)
-    
-    result shouldBe (21.0 / 6)
-  }
-  
-  test("Option Average Metric") {
-    val qpaSeq0 = Seq((1, 0, 0), (2, 0, 0), (3, 0, 0))
-    val qpaSeq1 = Seq((-4, 0, 0), (-5, 0, 0), (6, 0, 0))
-
-    val evalDataSet = Seq(
-      (EmptyParams(), sc.parallelize(qpaSeq0)),
-      (EmptyParams(), sc.parallelize(qpaSeq1)))
-  
-    val m = new MetricDevSuite.QOptionAverageMetric()
-    val result = m.calculate(sc, evalDataSet)
-    
-    result shouldBe (12.0 / 4)
-  }
-  
-  test("Stdev Metric") {
-    val qpaSeq0 = Seq((1, 0, 0), (1, 0, 0), (1, 0, 0), (1, 0, 0))
-    val qpaSeq1 = Seq((5, 0, 0), (5, 0, 0), (5, 0, 0), (5, 0, 0))
-
-    val evalDataSet = Seq(
-      (EmptyParams(), sc.parallelize(qpaSeq0)),
-      (EmptyParams(), sc.parallelize(qpaSeq1)))
-  
-    val m = new MetricDevSuite.QStdevMetric()
-    val result = m.calculate(sc, evalDataSet)
-    
-    result shouldBe 2.0
-  }
-  
-  test("Option Stdev Metric") {
-    val qpaSeq0 = Seq((1, 0, 0), (1, 0, 0), (1, 0, 0), (1, 0, 0))
-    val qpaSeq1 = Seq((5, 0, 0), (5, 0, 0), (5, 0, 0), (5, 0, 0), (-5, 0, 0))
-
-    val evalDataSet = Seq(
-      (EmptyParams(), sc.parallelize(qpaSeq0)),
-      (EmptyParams(), sc.parallelize(qpaSeq1)))
-  
-    val m = new MetricDevSuite.QOptionStdevMetric()
-    val result = m.calculate(sc, evalDataSet)
-    
-    result shouldBe 2.0
-  }
-
-  test("Sum Metric [Int]") {
-    val qpaSeq0 = Seq((1, 0, 0), (2, 0, 0), (3, 0, 0))
-    val qpaSeq1 = Seq((4, 0, 0), (5, 0, 0), (6, 0, 0))
-
-    val evalDataSet = Seq(
-      (EmptyParams(), sc.parallelize(qpaSeq0)),
-      (EmptyParams(), sc.parallelize(qpaSeq1)))
-  
-    val m = new MetricDevSuite.QIntSumMetric()
-    val result = m.calculate(sc, evalDataSet)
-    
-    result shouldBe 21
-  }
-
-  test("Sum Metric [Double]") {
-    val qpaSeq0 = Seq((1, 0, 0), (2, 0, 0), (3, 0, 0))
-    val qpaSeq1 = Seq((4, 0, 0), (5, 0, 0), (6, 0, 0))
-
-    val evalDataSet = Seq(
-      (EmptyParams(), sc.parallelize(qpaSeq0)),
-      (EmptyParams(), sc.parallelize(qpaSeq1)))
-  
-    val m = new MetricDevSuite.QDoubleSumMetric()
-    val result = m.calculate(sc, evalDataSet)
-    
-    result shouldBe 21.0
-  }
-}
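
For reference, the expected values in MetricDevSuite follow directly from
the fixtures (the stdev tests imply a population, not sample, standard
deviation, since all eight kept points deviate from the mean 3.0 by exactly
2.0):

    average        = (1+2+3+4+5+6) / 6  = 3.5
    option average = (1+2+3+6) / 4      = 3.0   (negative q dropped)
    stdev          = sqrt((4*(1-3)^2 + 4*(5-3)^2) / 8) = 2.0
    sum            = 1+2+3+4+5+6        = 21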