Posted to commits@madlib.apache.org by ri...@apache.org on 2018/04/17 21:06:41 UTC

[1/6] madlib git commit: MLP: Ensure grouping_col is same as preprocessed

Repository: madlib
Updated Branches:
  refs/heads/master b5c641a3d -> 47007aa29


http://git-wip-us.apache.org/repos/asf/madlib/blob/ebb32679/src/ports/postgres/modules/utilities/validate_args.py_in
----------------------------------------------------------------------
diff --git a/src/ports/postgres/modules/utilities/validate_args.py_in b/src/ports/postgres/modules/utilities/validate_args.py_in
index c3a6503..1573666 100644
--- a/src/ports/postgres/modules/utilities/validate_args.py_in
+++ b/src/ports/postgres/modules/utilities/validate_args.py_in
@@ -637,19 +637,22 @@ def output_tbl_valid(tbl, module):
 # -------------------------------------------------------------------------
 
 
-def cols_in_tbl_valid(tbl, cols, module):
+def cols_in_tbl_valid(tbl, cols, module, invalid_names=None):
     for c in cols:
         if c is None or c.strip() == '':
             plpy.error(
-                "{module} error: NULL/empty column name!".format(**locals()))
+                "{module} error: NULL/empty column name!".format(module=module))
+        if invalid_names and c.strip() in invalid_names:
+            plpy.error("{module} error: Column {c} is an invalid name.".
+                       format(module=module, c=c))
     missing_cols = columns_missing_from_table(tbl, cols)
 
     # FIXME: still printing just 1 column name for backwards compatibility
     # this should be changed to print all missing columns
     if missing_cols:
         c = missing_cols[0]
-        plpy.error(
-            "{module} error: Column '{c}' does not exist in table '{tbl}'!".format(**locals()))
+        plpy.error("{module} error: Column '{c}' does not exist in table '{tbl}'!".
+                   format(**locals()))
 # -------------------------------------------------------------------------
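
The invalid_names parameter added above lets callers reject column names that
collide with other inputs. Below is a minimal standalone sketch of the
resulting behavior; as assumptions for illustration only, plpy.error (which
raises inside PL/Python) is modeled as a plain RuntimeError and the
columns_missing_from_table helper is stubbed with an in-memory set of columns.

    # Sketch of the validation above; plpy and the real table lookup are
    # replaced with plain-Python stand-ins.

    def columns_missing_from_table(tbl_columns, cols):
        # Stand-in for the real helper: columns requested but absent.
        return [c for c in cols if c not in tbl_columns]

    def cols_in_tbl_valid(tbl_columns, cols, module, invalid_names=None):
        for c in cols:
            if c is None or c.strip() == '':
                raise RuntimeError(
                    "{module} error: NULL/empty column name!".format(module=module))
            if invalid_names and c.strip() in invalid_names:
                raise RuntimeError("{module} error: Column {c} is an invalid name.".
                                   format(module=module, c=c))
        missing = columns_missing_from_table(tbl_columns, cols)
        if missing:
            raise RuntimeError("{module} error: Column '{c}' does not exist!".
                               format(module=module, c=missing[0]))

    # A grouping column may not reuse the dependent/independent variable names:
    cols_in_tbl_valid({'grp', 'attributes', 'class'}, ['grp'], 'MLP',
                      invalid_names=['attributes', 'class'])   # passes
    # cols_in_tbl_valid({'grp'}, ['class'], 'MLP',
    #                   invalid_names=['attributes', 'class']) # raises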
 
 


[6/6] madlib git commit: Pagerank: Update docs for PPR

Posted by ri...@apache.org.
Pagerank: Update docs for PPR

Closes #264


Project: http://git-wip-us.apache.org/repos/asf/madlib/repo
Commit: http://git-wip-us.apache.org/repos/asf/madlib/commit/47007aa2
Tree: http://git-wip-us.apache.org/repos/asf/madlib/tree/47007aa2
Diff: http://git-wip-us.apache.org/repos/asf/madlib/diff/47007aa2

Branch: refs/heads/master
Commit: 47007aa29a24bccef4bbc50b99978e7cb20d035f
Parents: 0f9f12f
Author: Frank McQuillan <fm...@pivotal.io>
Authored: Tue Apr 17 14:04:20 2018 -0700
Committer: Rahul Iyer <ri...@apache.org>
Committed: Tue Apr 17 14:06:19 2018 -0700

----------------------------------------------------------------------
 .../postgres/modules/graph/pagerank.sql_in      | 29 ++++++++++++++------
 1 file changed, 21 insertions(+), 8 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/madlib/blob/47007aa2/src/ports/postgres/modules/graph/pagerank.sql_in
----------------------------------------------------------------------
diff --git a/src/ports/postgres/modules/graph/pagerank.sql_in b/src/ports/postgres/modules/graph/pagerank.sql_in
index a898b30..30a0b50 100644
--- a/src/ports/postgres/modules/graph/pagerank.sql_in
+++ b/src/ports/postgres/modules/graph/pagerank.sql_in
@@ -48,6 +48,11 @@ This algorithm was originally used by Google to rank websites where the World Wi
 modeled as a directed graph with the vertices representing the websites.  The PageRank
 algorithm initially proposed by Larry Page and Sergey Brin is implemented here [1].
 
+We also implement personalized PageRank, in which a notion of importance 
+provides personalization to a query.  
+For example, importance scores can be biased according
+to a specified set of vertices in the graph that are of interest or special in some way [2].
+
 @anchor pagerank
 @par PageRank
 <pre class="syntax">
@@ -121,10 +126,10 @@ distribution per group. When this value is NULL, no grouping is used and
 a single model is generated for all data.
 @note Expressions are not currently supported for 'grouping_cols'.</dd>
 
-<dt> personalization_vertices (optional) </dt>
-<dd>ARRAY OF INTEGER, default: NULL. A comma seperated list of vertices or nodes
-for personalized page rank. When this parameter is provided, Personalized Page Rank
-will run and in the absence of this parameter, regular PageRank will run.
+<dt> personalization_vertices (optional)</dt>
+<dd>INTEGER[], default: NULL. A comma separated list of vertices or nodes
+for personalized PageRank. When this parameter is provided, personalized PageRank
+will run.  In the absence of this parameter, regular PageRank will run.
 </dl>
 
 @anchor examples
@@ -278,7 +283,9 @@ SELECT * FROM pagerank_out_summary ORDER BY user_id;
 (2 rows)
 </pre>
 
--# Example of Personalized Page Rank with Nodes {2,4}. personalization_vertices can be passed in ARRAY[2,4] format as well.
+-# Personalized PageRank. Here we specify {2,4}
+as the personalization vertices. This parameter
+could be specified as ARRAY[2,4] as well.
 <pre class="syntax">
 DROP TABLE IF EXISTS pagerank_out, pagerank_out_summary;
 SELECT madlib.pagerank(
@@ -291,7 +298,7 @@ SELECT madlib.pagerank(
                         NULL,                -- Default max iters (100)
                         NULL,                -- Default Threshold
                         NULL,                -- No Grouping
-                       '{2,4}');             -- Personlized Nodes
+                       '{2,4}');             -- Personalization vertices
 SELECT * FROM pagerank_out ORDER BY pagerank DESC;
 </pre>
 <pre class="result">
@@ -305,7 +312,6 @@ SELECT * FROM pagerank_out ORDER BY pagerank DESC;
   6 |  0.148615315574136
   5 | 0.0803403307142321
 (7 rows)
-
 </pre>
 <pre class="syntax">
 SELECT * FROM pagerank_out_summary;
@@ -320,7 +326,14 @@ SELECT * FROM pagerank_out_summary;
 @anchor literature
 @par Literature
 
-[1] PageRank algorithm. https://en.wikipedia.org/wiki/PageRank
+[1] Brin, S. and Page, L. (1998), "The anatomy of a large-scale hypertextual Web search engine", 
+Computer Networks and ISDN Systems. 30: 107–117, 
+http://infolab.stanford.edu/pub/papers/google.pdf
+
+[2] Jeh, Glen and Widom, Jennifer. "Scaling Personalized Web Search",
+Proceedings of the 12th international conference on World Wide Web, Pages 271-279 
+Budapest, Hungary, May 20-24, 2003, 
+http://ilpubs.stanford.edu:8090/530/1/2002-12.pdf
 */
 
 CREATE OR REPLACE FUNCTION MADLIB_SCHEMA.pagerank(
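
As background for the doc change above: regular and personalized PageRank
differ only in where the teleportation mass goes, uniformly across all
vertices versus only to the personalization set. The power-iteration sketch
below illustrates that definition; it is not MADlib's SQL implementation, the
0.85 damping factor is just the conventional choice, and dangling vertices
are ignored for brevity.

    # Illustrative personalized PageRank via power iteration. With
    # personalization=None this reduces to the regular PageRank of [1];
    # with a vertex set it matches the biased teleportation of [2].

    def pagerank(edges, n, damping=0.85, personalization=None,
                 max_iter=100, tol=1e-6):
        out_deg = [0] * n
        for src, _ in edges:
            out_deg[src] += 1
        if personalization:
            pset = set(personalization)
            teleport = [1.0 / len(pset) if v in pset else 0.0 for v in range(n)]
        else:
            teleport = [1.0 / n] * n
        rank = teleport[:]
        for _ in range(max_iter):
            # (1 - d) goes to the teleport distribution, d follows the edges.
            new = [(1.0 - damping) * teleport[v] for v in range(n)]
            for src, dst in edges:
                new[dst] += damping * rank[src] / out_deg[src]
            if sum(abs(new[v] - rank[v]) for v in range(n)) < tol:
                return new
            rank = new
        return rank

    # Toy 4-vertex graph; biasing toward vertex 2 concentrates rank near it.
    print(pagerank([(0, 1), (1, 2), (2, 0), (2, 3), (3, 0)], n=4,
                   personalization=[2]))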


[3/6] madlib git commit: MLP: Ensure grouping_col is same as preprocessed

Posted by ri...@apache.org.
MLP: Ensure grouping_col is same as preprocessed

If the mini-batch preprocessor is run with grouping, the standardization in
the output table is computed per group. This implies that MLP should
also be run with the same grouping; otherwise the dataset used for
training would differ from the original data, making the training
invalid.

This commit ensures that MLP training proceeds only if the grouping
column input is the same as the one used during preprocessing.

Closes #263


Project: http://git-wip-us.apache.org/repos/asf/madlib/repo
Commit: http://git-wip-us.apache.org/repos/asf/madlib/commit/ebb32679
Tree: http://git-wip-us.apache.org/repos/asf/madlib/tree/ebb32679
Diff: http://git-wip-us.apache.org/repos/asf/madlib/diff/ebb32679

Branch: refs/heads/master
Commit: ebb326797663d410927b085ecc340d6a79d1994f
Parents: b5c641a
Author: Rahul Iyer <ri...@apache.org>
Authored: Tue Apr 17 13:58:35 2018 -0700
Committer: Rahul Iyer <ri...@apache.org>
Committed: Tue Apr 17 13:58:35 2018 -0700

----------------------------------------------------------------------
 src/ports/postgres/modules/convex/mlp_igd.py_in |  35 +++---
 .../postgres/modules/convex/test/mlp.sql_in     | 123 ++++++++++++++++---
 .../modules/utilities/validate_args.py_in       |  11 +-
 3 files changed, 136 insertions(+), 33 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/madlib/blob/ebb32679/src/ports/postgres/modules/convex/mlp_igd.py_in
----------------------------------------------------------------------
diff --git a/src/ports/postgres/modules/convex/mlp_igd.py_in b/src/ports/postgres/modules/convex/mlp_igd.py_in
index 5ec5e8d..8e3bccf 100644
--- a/src/ports/postgres/modules/convex/mlp_igd.py_in
+++ b/src/ports/postgres/modules/convex/mlp_igd.py_in
@@ -110,12 +110,6 @@ def mlp(schema_madlib, source_table, output_table, independent_varname,
                                 optimizer_params["learning_rate_policy"])
     activation_index = _get_activation_index(activation)
 
-    reserved_cols = ['coeff', 'loss', 'n_iterations']
-    grouping_col = grouping_col or ""
-    grouping_str, grouping_col = get_grouping_col_str(schema_madlib, 'MLP',
-                                                      reserved_cols,
-                                                      source_table,
-                                                      grouping_col)
     # The original dependent_varname is required later if warm start is
     # used, and while creating the model summary table. Keep a copy of it
     # since dependent_varname is overwritten if one hot encoding is used.
@@ -125,6 +119,14 @@ def mlp(schema_madlib, source_table, output_table, independent_varname,
     if is_minibatch_enabled:
         mlp_preprocessor = MLPMinibatchPreProcessor(source_table)
         pp_summary_dict = mlp_preprocessor.preprocessed_summary_dict
+
+        if (pp_summary_dict[MLPMinibatchPreProcessor.GROUPING_COL]):
+            # if a valid grouping_col is provided then it should be same as the
+            # grouping_col used in preprocessing.
+            _assert(grouping_col == pp_summary_dict[MLPMinibatchPreProcessor.GROUPING_COL],
+                    "MLP: Grouping column input should be same as the one used "
+                    "in the preprocessor.")
+
         batch_size = min(200, pp_summary_dict['buffer_size'])\
                          if batch_size == 1 else batch_size
         tbl_data_scaled = source_table
@@ -147,6 +149,7 @@ def mlp(schema_madlib, source_table, output_table, independent_varname,
                                                  dependent_varname, dim=2)
         dependent_vartype = pp_summary_dict["dependent_vartype"]
     else:
+        grouping_col = grouping_col or ""
         x_mean_table = unique_string(desp='x_mean_table')
         tbl_data_scaled = unique_string(desp="tbl_data_scaled")
         col_ind_var_norm_new = unique_string(desp="ind_var_norm")
@@ -187,6 +190,11 @@ def mlp(schema_madlib, source_table, output_table, independent_varname,
             num_output_nodes = get_col_dimension(tbl_data_scaled,
                                                  dependent_varname, dim=1)
 
+    reserved_cols = ['coeff', 'loss', 'n_iterations']
+    grouping_str, grouping_col = get_grouping_col_str(schema_madlib, 'MLP',
+                                                      reserved_cols,
+                                                      source_table,
+                                                      grouping_col)
     # Need layers sizes before validating for warm_start
     layer_sizes = [num_input_nodes] + hidden_layer_sizes + [num_output_nodes]
     col_grp_key = unique_string(desp='col_grp_key')
@@ -790,7 +798,8 @@ def _validate_args(source_table, output_table, summary_table,
     if grouping_col:
         cols_in_tbl_valid(source_table,
                           _string_to_array_with_quotes(grouping_col),
-                          'MLP')
+                          'MLP',
+                          invalid_names=[independent_varname, dependent_varname])
 
 def _get_learning_rate_policy_name(learning_rate_policy):
     if not learning_rate_policy:
@@ -1759,9 +1768,8 @@ class MLPMinibatchPreProcessor:
     def _validate_and_set_preprocessed_summary(self):
         if not table_exists(self.summary_table) or not table_exists(self.std_table):
             plpy.error("Tables {0} and/or {1} do not exist. These tables are"
-                       " needed for using minibatch during training.".format(
-                                                             self.summary_table,
-                                                             self.std_table))
+                       " needed for using minibatch during training.".
+                       format(self.summary_table, self.std_table))
 
         query = "SELECT * FROM {0}".format(self.summary_table)
         summary_table_columns = plpy.execute(query)
@@ -1771,12 +1779,11 @@ class MLPMinibatchPreProcessor:
             summary_table_columns = summary_table_columns[0]
 
         required_columns = (self.DEPENDENT_VARNAME, self.INDEPENDENT_VARNAME,
-                            self.CLASS_VALUES)
+                            self.CLASS_VALUES, self.GROUPING_COL)
         if set(required_columns) <= set(summary_table_columns):
             self.preprocessed_summary_dict = summary_table_columns
         else:
             plpy.error("One or more expected columns {0} not present in"
                        " summary table {1}. These columns are"
-                       " needed for using minibatch during training.".format(
-                                                    required_columns,
-                                                    self.summary_table))
+                       " needed for using minibatch during training.".
+                       format(required_columns, self.summary_table))
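
Putting the pieces of this diff together: the preprocessor's summary table
must now expose a grouping_cols entry, and training asserts that it matches
the grouping_col passed to MLP. A condensed sketch of that control flow
follows; plpy, _assert, and the MLPMinibatchPreProcessor constants are
replaced here by plain-Python stand-ins, and all concrete values are
illustrative assumptions.

    # Condensed sketch of the guards added above.

    GROUPING_COL = 'grouping_cols'
    REQUIRED_COLUMNS = ('dependent_varname', 'independent_varname',
                        'class_values', GROUPING_COL)

    def validate_summary(summary_row):
        # All required columns, now including the grouping column, must be
        # present in the preprocessor's summary table.
        if not set(REQUIRED_COLUMNS) <= set(summary_row):
            raise RuntimeError("One or more expected columns {0} not present"
                               " in summary table.".format(REQUIRED_COLUMNS))
        return summary_row

    def check_grouping(grouping_col, pp_summary_dict, batch_size=1):
        # Training proceeds only if grouping matches the preprocessing run.
        if pp_summary_dict[GROUPING_COL]:
            assert grouping_col == pp_summary_dict[GROUPING_COL], \
                ("MLP: Grouping column input should be same as the one used "
                 "in the preprocessor.")
        # Mirrors the batch-size default above: cap at 200 or the buffer size.
        return min(200, pp_summary_dict['buffer_size']) \
            if batch_size == 1 else batch_size

    summary = validate_summary({'dependent_varname': 'class::TEXT',
                                'independent_varname': 'attributes',
                                'class_values': [1, 2, 3],
                                'buffer_size': 30,
                                GROUPING_COL: 'grp'})
    check_grouping('grp', summary)   # passes, returns batch size 30
    # check_grouping('', summary)    # would raise AssertionError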


[2/6] madlib git commit: MLP: Ensure grouping_col is same as preprocessed

Posted by ri...@apache.org.
http://git-wip-us.apache.org/repos/asf/madlib/blob/ebb32679/src/ports/postgres/modules/convex/test/mlp.sql_in
----------------------------------------------------------------------
diff --git a/src/ports/postgres/modules/convex/test/mlp.sql_in b/src/ports/postgres/modules/convex/test/mlp.sql_in
index 5f9e338..16d1637 100644
--- a/src/ports/postgres/modules/convex/test/mlp.sql_in
+++ b/src/ports/postgres/modules/convex/test/mlp.sql_in
@@ -188,21 +188,68 @@ INSERT INTO iris_data VALUES
 -- Create preprocessed data that can be used with minibatch MLP:
 DROP TABLE IF EXISTS iris_data_batch, iris_data_batch_summary, iris_data_batch_standardization;
 CREATE TABLE iris_data_batch(
+    __id__ integer,
+    dependent_varname double precision[],
+    independent_varname double precision[]
+);
+COPY iris_data_batch (__id__, dependent_varname, independent_varname) FROM STDIN NULL '?' DELIMITER '|';
+0 | {{0,1,0},{0,1,0},{0,0,1},{1,0,0},{0,1,0},{0,1,0},{0,0,1},{1,0,0},{1,0,0},{0,1,0},{1,0,0},{0,0,1},{0,0,1},{0,0,1},{1,0,0},{0,0,1},{0,0,1},{1,0,0},{1,0,0},{0,0,1},{0,1,0},{0,0,1},{0,0,1},{0,0,1},{0,0,1},{1,0,0},{0,1,0},{0,0,1},{0,0,1},{1,0,0}} | {{0.828881825720994,-0.314980522532101,0.363710790466334,0.159758615207397},{-1.08079689039279,-1.57669227467446,-0.229158821743702,-0.240110581430527},{-1.08079689039279,-1.32434992424599,0.482284712908341,0.692917544057962},{-1.46273263361555,0.442046528753317,-1.35561108494277,-1.30642843913166},{-0.0623015751321059,-0.567322872960574,0.245136868024327,0.159758615207397},{-0.189613489539692,-0.819665223389045,0.304423829245331,0.159758615207397},{0.701569911313408,-1.32434992424599,0.778719519013359,0.959497008483245},{-1.20810880480038,-0.0626381721036282,-1.35561108494277,-1.4397181713443},{-0.698861147170034,0.946731229610261,-1.35561108494277,-1.30642843913166},{-0.82617306157762,-1.32434992424599,-0.407019705406713,-0.1068208492178
 86},{-0.698861147170034,2.71312768260957,-1.29632412372177,-1.4397181713443},{1.33812948335134,0.442046528753317,1.31230217000239,1.49265593733381},{0.319634168090651,-0.0626381721036282,0.660145596571352,0.826207276270604},{0.701569911313408,-1.32434992424599,0.778719519013359,0.959497008483245},{-0.698861147170034,1.19907358003873,-1.29632412372177,-1.30642843913166},{1.46544139775892,0.189704178324845,0.838006480234363,1.49265593733381},{1.21081756894375,-0.0626381721036282,0.897293441455367,1.49265593733381},{-0.444237318354863,1.70375828089568,-1.29632412372177,-1.30642843913166},{-0.82617306157762,1.95610063132415,-1.05917627883775,-1.03984897470638},{0.828881825720994,-0.819665223389045,0.95658040267637,0.959497008483245},{0.956193740128579,-0.567322872960574,0.541571674129345,0.42633807963268},{1.33812948335134,0.442046528753317,1.31230217000239,1.49265593733381},{0.574257996905822,0.946731229610261,1.01586736389737,1.49265593733381},{0.0650103392754793,-0.819665223389045,0.
 838006480234363,0.959497008483245},{0.0650103392754793,-0.819665223389045,0.838006480234363,0.959497008483245},{-1.46273263361555,0.442046528753317,-1.35561108494277,-1.30642843913166},{0.574257996905822,-2.08137697553141,0.482284712908341,0.42633807963268},{1.21081756894375,0.189704178324845,1.13444128633938,1.62594566954645},{1.97468905538926,-0.314980522532101,1.54945001488641,0.826207276270604},{-1.08079689039279,0.189704178324845,-1.29632412372177,-1.4397181713443}}
+1 | {{0,1,0},{1,0,0},{0,1,0},{1,0,0},{1,0,0},{1,0,0},{1,0,0},{0,1,0},{0,0,1},{0,0,1},{1,0,0},{0,0,1},{1,0,0},{0,0,1},{0,1,0},{0,1,0},{0,1,0},{1,0,0},{1,0,0},{0,0,1},{0,1,0},{0,1,0},{0,0,1},{1,0,0},{1,0,0},{0,1,0},{1,0,0},{0,0,1},{0,1,0},{0,1,0}} | {{-0.0623015751321059,-0.0626381721036282,0.304423829245331,0.0264688829947554},{-0.316925403947277,2.96547003303804,-1.35561108494277,-1.30642843913166},{0.319634168090651,-0.819665223389045,0.838006480234363,0.559627811845321},{-0.953484975985206,1.19907358003873,-1.41489804616377,-1.17313870691902},{-0.953484975985206,0.442046528753317,-1.47418500738478,-1.30642843913166},{-1.33542071920796,0.442046528753317,-1.41489804616377,-1.30642843913166},{-1.71735646243072,-0.0626381721036282,-1.41489804616377,-1.30642843913166},{0.446946082498236,-0.0626381721036282,0.541571674129345,0.293048347420038},{1.21081756894375,-1.32434992424599,1.25301520878139,0.826207276270604},{0.701569911313408,0.694388879181789,1.3715891312234,1.75923540175909},{-
 1.84466837683831,-0.0626381721036282,-1.53347196860578,-1.4397181713443},{1.84737714098168,1.45141593046721,1.4308760924444,1.75923540175909},{-0.82617306157762,1.19907358003873,-1.35561108494277,-1.30642843913166},{0.701569911313408,-0.314980522532101,1.13444128633938,0.826207276270604},{1.33812948335134,-0.567322872960574,0.660145596571352,0.293048347420038},{0.192322253683066,-0.0626381721036282,0.304423829245331,0.42633807963268},{-0.189613489539692,-0.819665223389045,0.304423829245331,0.159758615207397},{-1.46273263361555,0.189704178324845,-1.29632412372177,-1.30642843913166},{-1.71735646243072,0.442046528753317,-1.41489804616377,-1.30642843913166},{0.828881825720994,0.189704178324845,1.07515432511838,0.826207276270604},{0.0650103392754793,-1.07200757381752,0.185849906803323,0.0264688829947554},{-0.953484975985206,-2.58606167638835,-0.110584899301695,-0.240110581430527},{0.192322253683066,-0.0626381721036282,0.838006480234363,0.826207276270604},{-0.953484975985206,1.19907358003
 873,-1.23703716250076,-0.773269510281093},{-0.82617306157762,0.946731229610261,-1.29632412372177,-1.30642843913166},{0.319634168090651,0.946731229610261,0.482284712908341,0.559627811845321},{-0.953484975985206,0.694388879181789,-1.35561108494277,-1.30642843913166},{0.192322253683066,-0.0626381721036282,0.838006480234363,0.826207276270604},{0.446946082498236,-0.314980522532101,0.600858635350349,0.293048347420038},{-0.0623015751321059,-0.567322872960574,0.482284712908341,0.159758615207397}}
+2 | {{1,0,0},{1,0,0},{0,0,1},{1,0,0},{0,1,0},{0,1,0},{1,0,0},{1,0,0},{0,0,1},{0,0,1},{0,1,0},{0,1,0},{0,1,0},{0,1,0},{1,0,0},{0,0,1},{0,1,0},{1,0,0},{1,0,0},{0,0,1},{1,0,0},{0,0,1},{1,0,0},{1,0,0},{1,0,0},{1,0,0},{0,1,0},{1,0,0},{0,1,0},{0,0,1}} | {{-0.953484975985206,0.946731229610261,-1.23703716250076,-1.03984897470638},{-0.953484975985206,0.694388879181789,-1.35561108494277,-1.30642843913166},{1.21081756894375,0.694388879181789,1.19372824756038,1.75923540175909},{-1.20810880480038,0.946731229610261,-1.23703716250076,-1.30642843913166},{1.08350565453616,-0.314980522532101,0.541571674129345,0.159758615207397},{-0.189613489539692,-0.314980522532101,-0.0512979380806911,0.159758615207397},{-1.20810880480038,-0.0626381721036282,-1.35561108494277,-1.17313870691902},{-1.08079689039279,0.189704178324845,-1.29632412372177,-1.4397181713443},{0.956193740128579,-0.0626381721036282,0.897293441455367,1.09278674069589},{0.956193740128579,-0.0626381721036282,0.897293441455367,1.09278674069589},{1
 .46544139775892,0.189704178324845,0.719432557792356,0.42633807963268},{0.0650103392754793,-1.07200757381752,0.185849906803323,0.0264688829947554},{1.08350565453616,-0.0626381721036282,0.422997751687338,0.293048347420038},{0.319634168090651,-0.314980522532101,0.482284712908341,0.42633807963268},{-0.82617306157762,1.95610063132415,-1.23703716250076,-1.30642843913166},{0.956193740128579,-0.0626381721036282,1.25301520878139,1.35936620512117},{-0.0623015751321059,-1.07200757381752,-0.110584899301695,-0.240110581430527},{-0.571549232762449,1.70375828089568,-1.29632412372177,-1.30642843913166},{-0.571549232762449,1.70375828089568,-1.29632412372177,-1.30642843913166},{2.35662479861202,-0.0626381721036282,1.72731089854942,1.22607647290853},{-1.71735646243072,0.442046528753317,-1.41489804616377,-1.30642843913166},{1.72006522657409,-0.0626381721036282,1.31230217000239,1.22607647290853},{-0.953484975985206,0.946731229610261,-1.29632412372177,-1.30642843913166},{-1.46273263361555,0.9467312296102
 61,-1.35561108494277,-1.17313870691902},{-1.08079689039279,-0.0626381721036282,-1.35561108494277,-1.30642843913166},{-0.953484975985206,1.45141593046721,-1.35561108494277,-1.30642843913166},{0.701569911313408,-1.82903462510294,0.422997751687338,0.159758615207397},{-0.444237318354863,2.20844298175262,-1.17775020127976,-1.03984897470638},{-0.0623015751321059,-0.314980522532101,0.304423829245331,0.159758615207397},{1.33812948335134,-0.0626381721036282,1.07515432511838,1.22607647290853}}
+3 | {{0,1,0},{0,1,0},{0,1,0},{0,1,0},{1,0,0},{1,0,0},{0,0,1},{0,1,0},{1,0,0},{0,1,0},{1,0,0},{0,1,0},{0,1,0},{0,0,1},{1,0,0},{0,1,0},{0,1,0},{0,0,1},{1,0,0},{1,0,0},{0,1,0},{0,0,1},{1,0,0},{0,1,0},{0,0,1},{0,1,0},{0,0,1},{0,1,0},{0,1,0},{0,1,0}} | {{-0.953484975985206,-1.82903462510294,-0.229158821743702,-0.240110581430527},{0.319634168090651,-2.08137697553141,0.185849906803323,-0.240110581430527},{-0.189613489539692,-0.0626381721036282,0.482284712908341,0.42633807963268},{-0.316925403947277,-1.07200757381752,0.422997751687338,0.0264688829947554},{-0.953484975985206,1.19907358003873,-1.23703716250076,-0.773269510281093},{-0.316925403947277,1.19907358003873,-1.41489804616377,-1.30642843913166},{0.0650103392754793,-0.819665223389045,0.838006480234363,0.959497008483245},{0.446946082498236,-0.0626381721036282,0.541571674129345,0.293048347420038},{-0.444237318354863,0.946731229610261,-1.29632412372177,-1.03984897470638},{1.21081756894375,0.189704178324845,0.600858635350349,0.426338079632
 68},{-0.82617306157762,1.95610063132415,-1.23703716250076,-1.30642843913166},{-0.0623015751321059,-0.567322872960574,0.245136868024327,0.159758615207397},{-0.316925403947277,-1.82903462510294,0.185849906803323,0.159758615207397},{1.21081756894375,-0.0626381721036282,0.897293441455367,1.49265593733381},{-1.59004454802313,-1.82903462510294,-1.41489804616377,-1.17313870691902},{0.701569911313408,0.694388879181789,0.600858635350349,0.559627811845321},{-0.316925403947277,-1.57669227467446,0.00798902314031256,-0.240110581430527},{1.46544139775892,0.189704178324845,1.01586736389737,1.22607647290853},{-1.08079689039279,0.189704178324845,-1.29632412372177,-1.4397181713443},{-1.71735646243072,-0.314980522532101,-1.35561108494277,-1.30642843913166},{-0.444237318354863,-0.0626381721036282,0.482284712908341,0.42633807963268},{1.72006522657409,-0.0626381721036282,1.31230217000239,1.22607647290853},{-0.82617306157762,1.95610063132415,-1.05917627883775,-1.03984897470638},{1.21081756894375,-0.062638
 1721036282,0.778719519013359,0.692917544057962},{2.35662479861202,-0.0626381721036282,1.72731089854942,1.22607647290853},{-0.953484975985206,-1.82903462510294,-0.229158821743702,-0.240110581430527},{0.701569911313408,-0.314980522532101,1.13444128633938,0.826207276270604},{-0.698861147170034,-0.819665223389045,0.12656294558232,0.293048347420038},{-0.0623015751321059,-0.314980522532101,0.304423829245331,0.159758615207397},{0.574257996905822,-0.314980522532101,0.363710790466334,0.159758615207397}}
+4 | {{0,0,1},{0,1,0},{0,0,1},{0,1,0},{1,0,0},{0,1,0},{0,1,0},{0,0,1},{0,0,1},{0,1,0},{0,0,1},{0,0,1},{1,0,0},{0,1,0},{0,1,0},{0,1,0},{0,1,0},{0,1,0},{0,0,1},{1,0,0},{0,1,0}} | {{1.21081756894375,0.694388879181789,1.19372824756038,1.75923540175909},{-0.82617306157762,-1.32434992424599,-0.407019705406713,-0.106820849217886},{0.701569911313408,0.694388879181789,1.3715891312234,1.75923540175909},{0.0650103392754793,-0.819665223389045,0.245136868024327,-0.240110581430527},{-1.20810880480038,0.189704178324845,-1.23703716250076,-1.30642843913166},{0.574257996905822,-0.314980522532101,0.363710790466334,0.159758615207397},{1.21081756894375,0.189704178324845,0.422997751687338,0.293048347420038},{1.97468905538926,-0.314980522532101,1.54945001488641,0.826207276270604},{-1.08079689039279,-1.32434992424599,0.482284712908341,0.692917544057962},{0.0650103392754793,-0.819665223389045,0.12656294558232,0.0264688829947554},{0.574257996905822,0.946731229610261,1.01586736389737,1.49265593733381},{0.95619
 3740128579,0.442046528753317,0.838006480234363,1.09278674069589},{-1.20810880480038,-0.0626381721036282,-1.35561108494277,-1.17313870691902},{0.828881825720994,0.442046528753317,0.482284712908341,0.42633807963268},{-0.0623015751321059,-0.0626381721036282,0.304423829245331,0.0264688829947554},{-0.316925403947277,-1.57669227467446,0.0672759843613159,-0.106820849217886},{-0.189613489539692,-0.0626381721036282,0.245136868024327,0.159758615207397},{1.59275331216651,0.442046528753317,0.600858635350349,0.293048347420038},{0.956193740128579,-0.0626381721036282,1.25301520878139,1.35936620512117},{-1.33542071920796,0.442046528753317,-1.23703716250076,-1.30642843913166},{-0.316925403947277,-1.32434992424599,0.185849906803323,0.159758615207397}}
+\.
+
+-- Create the corresponding summary table for preprocessed data
+CREATE TABLE iris_data_batch_summary(
+    source_table text,
+    output_table text,
+    dependent_varname text,
+    independent_varname text,
+    dependent_vartype text,
+    buffer_size integer,
+    class_values text[],
+    num_rows_processed integer,
+    num_rows_skipped integer,
+    grouping_cols text
+);
+-- The availability of the original source table should not be a condition for
+-- MLP to work correctly. It should work fine even the original source table is
+-- deleted (this basically ensures that all the necessary info is captured in
+-- the summary table). So name the original source table as
+-- 'iris_data_does_not_exist' instead of the original 'iris_data', to mimic the
+-- scenario where the original source table is deleted and MLP is trained with
+-- the preprocessed table.
+INSERT INTO iris_data_batch_summary VALUES
+('iris_data_does_not_exist', 'iris_data_batch', 'class::TEXT', 'attributes',
+    'text', 30, ARRAY[1,2,3], 141, 0, '');
+-- Create the corresponding standardization table for preprocessed data
+CREATE TABLE iris_data_batch_standardization(
+    mean double precision[],
+    std double precision[]
+);
+INSERT INTO iris_data_batch_standardization VALUES
+-- -- TODO get real numbers by running preprocessor
+(ARRAY[5.74893617021,3.02482269504,3.6865248227,1.18014184397],
+    ARRAY[0.785472439601,0.396287027644,1.68671151195,0.750245336531]);
+
+-- Create preprocessed data that can be used with minibatch MLP:
+DROP TABLE IF EXISTS iris_data_batch_grp, iris_data_batch_grp_summary, iris_data_batch_grp_standardization;
+CREATE TABLE iris_data_batch_grp(
     grp text,
     __id__ integer,
     dependent_varname double precision[],
     independent_varname double precision[]
 );
-COPY iris_data_batch (grp, __id__, dependent_varname, independent_varname) FROM STDIN NULL '?' DELIMITER '|';
+COPY iris_data_batch_grp (grp, __id__, dependent_varname, independent_varname) FROM STDIN NULL '?' DELIMITER '|';
 '1'|0 | {{0,1,0},{0,1,0},{0,0,1},{1,0,0},{0,1,0},{0,1,0},{0,0,1},{1,0,0},{1,0,0},{0,1,0},{1,0,0},{0,0,1},{0,0,1},{0,0,1},{1,0,0},{0,0,1},{0,0,1},{1,0,0},{1,0,0},{0,0,1},{0,1,0},{0,0,1},{0,0,1},{0,0,1},{0,0,1},{1,0,0},{0,1,0},{0,0,1},{0,0,1},{1,0,0}} | {{0.828881825720994,-0.314980522532101,0.363710790466334,0.159758615207397},{-1.08079689039279,-1.57669227467446,-0.229158821743702,-0.240110581430527},{-1.08079689039279,-1.32434992424599,0.482284712908341,0.692917544057962},{-1.46273263361555,0.442046528753317,-1.35561108494277,-1.30642843913166},{-0.0623015751321059,-0.567322872960574,0.245136868024327,0.159758615207397},{-0.189613489539692,-0.819665223389045,0.304423829245331,0.159758615207397},{0.701569911313408,-1.32434992424599,0.778719519013359,0.959497008483245},{-1.20810880480038,-0.0626381721036282,-1.35561108494277,-1.4397181713443},{-0.698861147170034,0.946731229610261,-1.35561108494277,-1.30642843913166},{-0.82617306157762,-1.32434992424599,-0.407019705406713,-0.106820849
 217886},{-0.698861147170034,2.71312768260957,-1.29632412372177,-1.4397181713443},{1.33812948335134,0.442046528753317,1.31230217000239,1.49265593733381},{0.319634168090651,-0.0626381721036282,0.660145596571352,0.826207276270604},{0.701569911313408,-1.32434992424599,0.778719519013359,0.959497008483245},{-0.698861147170034,1.19907358003873,-1.29632412372177,-1.30642843913166},{1.46544139775892,0.189704178324845,0.838006480234363,1.49265593733381},{1.21081756894375,-0.0626381721036282,0.897293441455367,1.49265593733381},{-0.444237318354863,1.70375828089568,-1.29632412372177,-1.30642843913166},{-0.82617306157762,1.95610063132415,-1.05917627883775,-1.03984897470638},{0.828881825720994,-0.819665223389045,0.95658040267637,0.959497008483245},{0.956193740128579,-0.567322872960574,0.541571674129345,0.42633807963268},{1.33812948335134,0.442046528753317,1.31230217000239,1.49265593733381},{0.574257996905822,0.946731229610261,1.01586736389737,1.49265593733381},{0.0650103392754793,-0.81966522338904
 5,0.838006480234363,0.959497008483245},{0.0650103392754793,-0.819665223389045,0.838006480234363,0.959497008483245},{-1.46273263361555,0.442046528753317,-1.35561108494277,-1.30642843913166},{0.574257996905822,-2.08137697553141,0.482284712908341,0.42633807963268},{1.21081756894375,0.189704178324845,1.13444128633938,1.62594566954645},{1.97468905538926,-0.314980522532101,1.54945001488641,0.826207276270604},{-1.08079689039279,0.189704178324845,-1.29632412372177,-1.4397181713443}}
 '1'|1 | {{0,1,0},{1,0,0},{0,1,0},{1,0,0},{1,0,0},{1,0,0},{1,0,0},{0,1,0},{0,0,1},{0,0,1},{1,0,0},{0,0,1},{1,0,0},{0,0,1},{0,1,0},{0,1,0},{0,1,0},{1,0,0},{1,0,0},{0,0,1},{0,1,0},{0,1,0},{0,0,1},{1,0,0},{1,0,0},{0,1,0},{1,0,0},{0,0,1},{0,1,0},{0,1,0}} | {{-0.0623015751321059,-0.0626381721036282,0.304423829245331,0.0264688829947554},{-0.316925403947277,2.96547003303804,-1.35561108494277,-1.30642843913166},{0.319634168090651,-0.819665223389045,0.838006480234363,0.559627811845321},{-0.953484975985206,1.19907358003873,-1.41489804616377,-1.17313870691902},{-0.953484975985206,0.442046528753317,-1.47418500738478,-1.30642843913166},{-1.33542071920796,0.442046528753317,-1.41489804616377,-1.30642843913166},{-1.71735646243072,-0.0626381721036282,-1.41489804616377,-1.30642843913166},{0.446946082498236,-0.0626381721036282,0.541571674129345,0.293048347420038},{1.21081756894375,-1.32434992424599,1.25301520878139,0.826207276270604},{0.701569911313408,0.694388879181789,1.3715891312234,1.75923540175909
 },{-1.84466837683831,-0.0626381721036282,-1.53347196860578,-1.4397181713443},{1.84737714098168,1.45141593046721,1.4308760924444,1.75923540175909},{-0.82617306157762,1.19907358003873,-1.35561108494277,-1.30642843913166},{0.701569911313408,-0.314980522532101,1.13444128633938,0.826207276270604},{1.33812948335134,-0.567322872960574,0.660145596571352,0.293048347420038},{0.192322253683066,-0.0626381721036282,0.304423829245331,0.42633807963268},{-0.189613489539692,-0.819665223389045,0.304423829245331,0.159758615207397},{-1.46273263361555,0.189704178324845,-1.29632412372177,-1.30642843913166},{-1.71735646243072,0.442046528753317,-1.41489804616377,-1.30642843913166},{0.828881825720994,0.189704178324845,1.07515432511838,0.826207276270604},{0.0650103392754793,-1.07200757381752,0.185849906803323,0.0264688829947554},{-0.953484975985206,-2.58606167638835,-0.110584899301695,-0.240110581430527},{0.192322253683066,-0.0626381721036282,0.838006480234363,0.826207276270604},{-0.953484975985206,1.1990735
 8003873,-1.23703716250076,-0.773269510281093},{-0.82617306157762,0.946731229610261,-1.29632412372177,-1.30642843913166},{0.319634168090651,0.946731229610261,0.482284712908341,0.559627811845321},{-0.953484975985206,0.694388879181789,-1.35561108494277,-1.30642843913166},{0.192322253683066,-0.0626381721036282,0.838006480234363,0.826207276270604},{0.446946082498236,-0.314980522532101,0.600858635350349,0.293048347420038},{-0.0623015751321059,-0.567322872960574,0.482284712908341,0.159758615207397}}
 '1'|2 | {{1,0,0},{1,0,0},{0,0,1},{1,0,0},{0,1,0},{0,1,0},{1,0,0},{1,0,0},{0,0,1},{0,0,1},{0,1,0},{0,1,0},{0,1,0},{0,1,0},{1,0,0},{0,0,1},{0,1,0},{1,0,0},{1,0,0},{0,0,1},{1,0,0},{0,0,1},{1,0,0},{1,0,0},{1,0,0},{1,0,0},{0,1,0},{1,0,0},{0,1,0},{0,0,1}} | {{-0.953484975985206,0.946731229610261,-1.23703716250076,-1.03984897470638},{-0.953484975985206,0.694388879181789,-1.35561108494277,-1.30642843913166},{1.21081756894375,0.694388879181789,1.19372824756038,1.75923540175909},{-1.20810880480038,0.946731229610261,-1.23703716250076,-1.30642843913166},{1.08350565453616,-0.314980522532101,0.541571674129345,0.159758615207397},{-0.189613489539692,-0.314980522532101,-0.0512979380806911,0.159758615207397},{-1.20810880480038,-0.0626381721036282,-1.35561108494277,-1.17313870691902},{-1.08079689039279,0.189704178324845,-1.29632412372177,-1.4397181713443},{0.956193740128579,-0.0626381721036282,0.897293441455367,1.09278674069589},{0.956193740128579,-0.0626381721036282,0.897293441455367,1.09278674069589
 },{1.46544139775892,0.189704178324845,0.719432557792356,0.42633807963268},{0.0650103392754793,-1.07200757381752,0.185849906803323,0.0264688829947554},{1.08350565453616,-0.0626381721036282,0.422997751687338,0.293048347420038},{0.319634168090651,-0.314980522532101,0.482284712908341,0.42633807963268},{-0.82617306157762,1.95610063132415,-1.23703716250076,-1.30642843913166},{0.956193740128579,-0.0626381721036282,1.25301520878139,1.35936620512117},{-0.0623015751321059,-1.07200757381752,-0.110584899301695,-0.240110581430527},{-0.571549232762449,1.70375828089568,-1.29632412372177,-1.30642843913166},{-0.571549232762449,1.70375828089568,-1.29632412372177,-1.30642843913166},{2.35662479861202,-0.0626381721036282,1.72731089854942,1.22607647290853},{-1.71735646243072,0.442046528753317,-1.41489804616377,-1.30642843913166},{1.72006522657409,-0.0626381721036282,1.31230217000239,1.22607647290853},{-0.953484975985206,0.946731229610261,-1.29632412372177,-1.30642843913166},{-1.46273263361555,0.946731229
 610261,-1.35561108494277,-1.17313870691902},{-1.08079689039279,-0.0626381721036282,-1.35561108494277,-1.30642843913166},{-0.953484975985206,1.45141593046721,-1.35561108494277,-1.30642843913166},{0.701569911313408,-1.82903462510294,0.422997751687338,0.159758615207397},{-0.444237318354863,2.20844298175262,-1.17775020127976,-1.03984897470638},{-0.0623015751321059,-0.314980522532101,0.304423829245331,0.159758615207397},{1.33812948335134,-0.0626381721036282,1.07515432511838,1.22607647290853}}
 '2'|3 | {{0,1,0},{0,1,0},{0,1,0},{0,1,0},{1,0,0},{1,0,0},{0,0,1},{0,1,0},{1,0,0},{0,1,0},{1,0,0},{0,1,0},{0,1,0},{0,0,1},{1,0,0},{0,1,0},{0,1,0},{0,0,1},{1,0,0},{1,0,0},{0,1,0},{0,0,1},{1,0,0},{0,1,0},{0,0,1},{0,1,0},{0,0,1},{0,1,0},{0,1,0},{0,1,0}} | {{-0.953484975985206,-1.82903462510294,-0.229158821743702,-0.240110581430527},{0.319634168090651,-2.08137697553141,0.185849906803323,-0.240110581430527},{-0.189613489539692,-0.0626381721036282,0.482284712908341,0.42633807963268},{-0.316925403947277,-1.07200757381752,0.422997751687338,0.0264688829947554},{-0.953484975985206,1.19907358003873,-1.23703716250076,-0.773269510281093},{-0.316925403947277,1.19907358003873,-1.41489804616377,-1.30642843913166},{0.0650103392754793,-0.819665223389045,0.838006480234363,0.959497008483245},{0.446946082498236,-0.0626381721036282,0.541571674129345,0.293048347420038},{-0.444237318354863,0.946731229610261,-1.29632412372177,-1.03984897470638},{1.21081756894375,0.189704178324845,0.600858635350349,0.42633807
 963268},{-0.82617306157762,1.95610063132415,-1.23703716250076,-1.30642843913166},{-0.0623015751321059,-0.567322872960574,0.245136868024327,0.159758615207397},{-0.316925403947277,-1.82903462510294,0.185849906803323,0.159758615207397},{1.21081756894375,-0.0626381721036282,0.897293441455367,1.49265593733381},{-1.59004454802313,-1.82903462510294,-1.41489804616377,-1.17313870691902},{0.701569911313408,0.694388879181789,0.600858635350349,0.559627811845321},{-0.316925403947277,-1.57669227467446,0.00798902314031256,-0.240110581430527},{1.46544139775892,0.189704178324845,1.01586736389737,1.22607647290853},{-1.08079689039279,0.189704178324845,-1.29632412372177,-1.4397181713443},{-1.71735646243072,-0.314980522532101,-1.35561108494277,-1.30642843913166},{-0.444237318354863,-0.0626381721036282,0.482284712908341,0.42633807963268},{1.72006522657409,-0.0626381721036282,1.31230217000239,1.22607647290853},{-0.82617306157762,1.95610063132415,-1.05917627883775,-1.03984897470638},{1.21081756894375,-0.06
 26381721036282,0.778719519013359,0.692917544057962},{2.35662479861202,-0.0626381721036282,1.72731089854942,1.22607647290853},{-0.953484975985206,-1.82903462510294,-0.229158821743702,-0.240110581430527},{0.701569911313408,-0.314980522532101,1.13444128633938,0.826207276270604},{-0.698861147170034,-0.819665223389045,0.12656294558232,0.293048347420038},{-0.0623015751321059,-0.314980522532101,0.304423829245331,0.159758615207397},{0.574257996905822,-0.314980522532101,0.363710790466334,0.159758615207397}}
 '2'|4 | {{0,0,1},{0,1,0},{0,0,1},{0,1,0},{1,0,0},{0,1,0},{0,1,0},{0,0,1},{0,0,1},{0,1,0},{0,0,1},{0,0,1},{1,0,0},{0,1,0},{0,1,0},{0,1,0},{0,1,0},{0,1,0},{0,0,1},{1,0,0},{0,1,0}} | {{1.21081756894375,0.694388879181789,1.19372824756038,1.75923540175909},{-0.82617306157762,-1.32434992424599,-0.407019705406713,-0.106820849217886},{0.701569911313408,0.694388879181789,1.3715891312234,1.75923540175909},{0.0650103392754793,-0.819665223389045,0.245136868024327,-0.240110581430527},{-1.20810880480038,0.189704178324845,-1.23703716250076,-1.30642843913166},{0.574257996905822,-0.314980522532101,0.363710790466334,0.159758615207397},{1.21081756894375,0.189704178324845,0.422997751687338,0.293048347420038},{1.97468905538926,-0.314980522532101,1.54945001488641,0.826207276270604},{-1.08079689039279,-1.32434992424599,0.482284712908341,0.692917544057962},{0.0650103392754793,-0.819665223389045,0.12656294558232,0.0264688829947554},{0.574257996905822,0.946731229610261,1.01586736389737,1.49265593733381},{0.9
 56193740128579,0.442046528753317,0.838006480234363,1.09278674069589},{-1.20810880480038,-0.0626381721036282,-1.35561108494277,-1.17313870691902},{0.828881825720994,0.442046528753317,0.482284712908341,0.42633807963268},{-0.0623015751321059,-0.0626381721036282,0.304423829245331,0.0264688829947554},{-0.316925403947277,-1.57669227467446,0.0672759843613159,-0.106820849217886},{-0.189613489539692,-0.0626381721036282,0.245136868024327,0.159758615207397},{1.59275331216651,0.442046528753317,0.600858635350349,0.293048347420038},{0.956193740128579,-0.0626381721036282,1.25301520878139,1.35936620512117},{-1.33542071920796,0.442046528753317,-1.23703716250076,-1.30642843913166},{-0.316925403947277,-1.32434992424599,0.185849906803323,0.159758615207397}}
 \.
-
 -- Create the corresponding summary table for preprocessed data
-CREATE TABLE iris_data_batch_summary(
+CREATE TABLE iris_data_batch_grp_summary(
     source_table text,
     output_table text,
     dependent_varname text,
@@ -221,15 +268,16 @@ CREATE TABLE iris_data_batch_summary(
 -- 'iris_data_does_not_exist' instead of the original 'iris_data', to mimic the
 -- scenario where the original source table is deleted and MLP is trained with
 -- the preprocessed table.
-INSERT INTO iris_data_batch_summary VALUES
-('iris_data_does_not_exist','iris_data_batch','class::TEXT','attributes','text',30,ARRAY[1,2,3],141,0,'grp');
+INSERT INTO iris_data_batch_grp_summary VALUES
+('iris_data_does_not_exist', 'iris_data_batch_grp', 'class::TEXT', 'attributes',
+    'text', 30, ARRAY[1,2,3], 141, 0, 'grp');
 -- Create the corresponding standardization table for preprocessed data
-CREATE TABLE iris_data_batch_standardization(
+CREATE TABLE iris_data_batch_grp_standardization(
     grp text,
     mean double precision[],
     std double precision[]
 );
-INSERT INTO iris_data_batch_standardization VALUES
+INSERT INTO iris_data_batch_grp_standardization VALUES
 -- -- TODO get real numbers by running preprocessor
 ('1',ARRAY[5.74893617021,3.02482269504,3.6865248227,1.18014184397],ARRAY[0.785472439601,0.396287027644,1.68671151195,0.750245336531]),
 ('2',ARRAY[5.74893617021,3.02482269504,3.6865248227,1.18014184397],ARRAY[0.785472439601,0.396287027644,1.68671151195,0.750245336531]);
@@ -279,6 +327,7 @@ SELECT mlp_classification(
     False,
     False
 );
+
 DROP TABLE IF EXISTS mlp_prediction_batch_output, mlp_prediction_batch_output_summary, mlp_prediction_batch_output_standardization;
 SELECT mlp_predict(
     'mlp_class_batch',
@@ -290,7 +339,7 @@ SELECT mlp_predict(
 -- minibatch with grouping and without warm_start
 DROP TABLE IF EXISTS mlp_class_batch, mlp_class_batch_summary, mlp_class_batch_standardization;
 SELECT mlp_classification(
-    'iris_data_batch',    -- Source table
+    'iris_data_batch_grp',    -- Source table
    'mlp_class_batch',    -- Destination table
     'independent_varname',   -- Input features
     'dependent_varname',        -- Label
@@ -807,12 +856,56 @@ COPY lin_housing_wi (x, grp, y) FROM STDIN NULL '?' DELIMITER '|';
 -- Create preprocessed data that can be used with minibatch MLP:
 DROP TABLE IF EXISTS lin_housing_wi_batch, lin_housing_wi_batch_summary, lin_housing_wi_batch_standardization;
 CREATE TABLE lin_housing_wi_batch(
+    __id__ integer,
+    dependent_varname double precision[],
+    independent_varname double precision[]
+);
+COPY lin_housing_wi_batch (__id__, dependent_varname, independent_varname) FROM STDIN NULL '?' DELIMITER '|';
+0 | {{15},{15.7},{19.6},{10.9},{21.4},{50},{30.8},{18},{23},{20.5},{15.6},{9.6},{20.6},{29.8},{22.7},{19.3},{16.2},{7.5},{13.6},{20.4},{26.6},{24.7},{27.1},{17.9},{6.3},{28.7},{18.5},{27.9},{20},{17.5},{37.2},{18.3},{10.2},{32.5},{17.1},{23.1},{21.9},{18.7},{19.4},{20.3},{18.3},{19.3},{21.2},{50},{17.3},{21.7},{19.8},{15.2},{27.5},{14.6}} | {{0,-0.445702761621649,0.255069510086467,-0.65770381882244,-0.194870940739048,-0.413777927225806,0.355066565555217,0.815140254739345,1.38432428783503,-0.604583549242574,-0.722104723544712,-1.90653024922074,0.431101787118015,0.949283718252188},{0,-0.428368753077516,-0.384945784500547,1.92326100150999,-0.194870940739048,0.0738444277537763,-0.891680410581072,0.86361611618313,-0.873482235720559,-0.927483143499006,-1.42845214296799,0.103320666207828,0.0982729706837658,1.89946585735285},{0,-0.0388112913321931,-0.384945784500547,0.827294500187726,-0.194870940739048,-0.345339701965513,0.113550135623056,0.680899407664247,-0.213235556286209,1.4404472143815
 ,1.31654026747368,0.670201693636395,0.459246504968759,-0.10833458048247},{0,3.54871710300239,-0.384945784500547,0.827294500187726,-0.194870940739048,0.912212687192357,0.0694897058381349,0.233429917413921,-0.821483074692163,1.4404472143815,1.31654026747368,0.670201693636395,-3.31184520961623,0.121885761590377},{0,-0.459548842433986,-0.384945784500547,0.0622953661521859,-0.194870940739048,-1.15804362693148,0.206566598502334,-1.02321356770575,0.477118489798452,-0.604583549242574,-0.222493134196543,-0.102817889220744,0.338053947675518,-0.656677577055981},{0,0.227968766917991,-0.384945784500547,0.827294500187726,5.13160143945555,0.501583335630603,1.39783007046502,0.934465452139432,-1.14641633731521,1.4404472143815,1.31654026747368,0.670201693636395,0.426394307619848,-1.49105190772},{0,-0.466749828748424,3.45514598302154,-1.37189655875695,-0.194870940739048,-1.23503663034931,0.71081373937421,-1.88832124885638,0.919135956061595,-0.819849945413529,-1.06092177839002,-0.308956444649314,0.4622
 51279116526,-1.30129453485995},{0,-0.43497543662456,-0.384945784500547,1.3774551677389,-0.194870940739048,0.441699888527848,0.44318742512506,0.982941313583218,-0.846277376639478,-0.712216747328051,0.00147068171815396,1.18554808220782,0.474971489675403,0.243274669228788},{0,-0.406741253804489,-0.384945784500547,1.3774551677389,-0.194870940739048,0.441699888527848,0.346907226706157,0.949381101814444,-0.592972097419787,-0.712216747328051,0.00147068171815396,1.18554808220782,0.363394209655014,-0.352507670559733},{0,-0.459776107226544,-0.384945784500547,1.92326100150999,-0.194870940739048,0.0738444277537763,-0.323790426686532,0.762935480876807,-0.711286177148542,-0.927483143499006,-1.42845214296799,0.103320666207828,0.286572150610455,0.59767446854093},{0,-0.389647313703198,-0.384945784500547,-0.618510314801644,-0.194870940739048,-0.294011033020294,-0.384169534169572,0.80768242990184,0.426447594945805,-0.712216747328051,-0.745075371330834,1.08247880449354,0.449230591142872,0.3702446760689
 65},{0,1.06895946435246,-0.384945784500547,0.827294500187726,-0.194870940739048,1.43405415480209,0.492143458219417,0.777851130551818,-0.752757598858038,1.4404472143815,1.31654026747368,0.670201693636395,-3.22500723674578,0.614417766146228},{0,-0.464867692156534,-0.384945784500547,-0.0683496472504681,-0.194870940739048,0.00540620249348394,-0.0643234512864404,0.158851669038866,-0.612600919794745,-1.03511634158448,-0.940325877512877,1.08247880449354,0.474971489675403,-0.6371437298498},{0,0.026111751379456,-0.384945784500547,0.827294500187726,-0.194870940739048,0.356152106952482,1.33908283075179,-0.180479361067632,-0.491876282932224,1.4404472143815,1.31654026747368,0.670201693636395,0.252417884464178,-0.277162831335892},{0,0.0853158302971524,-0.384945784500547,0.827294500187726,5.13160143945555,1.69069749952818,-0.0529003768977577,0.408688801095299,-0.398504090281423,1.4404472143815,1.31654026747368,0.670201693636395,0.460248096351348,-0.302277777743839},{0,-0.454252399099437,-0.3849457
 84500547,-0.34705900917613,-0.194870940739048,-0.217018029602465,-0.699120013743269,-0.269973259117697,-0.380547899387038,-0.496950351157096,-0.0272426280144994,-0.566629638935027,0.420885555015609,-0.00508424524979929},{0,-0.442047319465291,-0.384945784500547,1.3774551677389,-0.194870940739048,0.441699888527848,-0.761130988996121,0.878531765858142,-0.858182577177673,-0.712216747328051,0.00147068171815396,1.18554808220782,0.426995262449401,0.494424133308259},{0,0.686279696751882,-0.384945784500547,0.827294500187726,-0.194870940739048,0.912212687192357,1.01597301232904,0.684628320083,-0.84283372359124,1.4404472143815,1.31654026747368,0.670201693636395,-3.28430144659504,1.69436046168795},{0,-0.458415719383766,-0.384945784500547,2.22664775485615,-0.194870940739048,0.313378216164799,-0.287889335750671,0.982941313583218,-0.818924932427758,-0.712216747328051,1.57496005506756,0.618667054779254,0.406963434797626,0.617208315747111},{0,-0.402504312531021,-0.384945784500547,-0.618510314801644,
 -0.194870940739048,-0.294011033020294,-0.343372839924275,-0.39675628135529,0.577919134024738,-0.712216747328051,-0.745075371330834,1.08247880449354,0.474971489675403,-0.751556263486003},{0,-0.456100392623901,-0.384945784500547,-0.797058499785271,-0.194870940739048,-1.06394106719858,0.996390599091292,-2.59308569600064,1.07646170532253,-0.819849945413529,-1.1700323553741,-0.515095000077885,0.359888639815953,-1.228740245237},{0,-0.45102054437334,-0.384945784500547,-0.934961569488072,-0.194870940739048,-0.627647381164219,-0.315631087837472,-1.57509260568115,0.154743369439813,-0.604583549242574,-0.905869905833693,0.154855305064969,0.440216268699573,-0.490639875803442},{0,-0.4639661017823,-0.384945784500547,0.2205209934954,-0.194870940739048,-1.15804362693148,0.635747821962864,-0.799478822580584,1.19428383461582,-0.712216747328051,-0.848443286368387,-1.49425313836359,0.434407038680558,-0.872945171124413},{0,1.53738101002805,-0.384945784500547,0.827294500187726,-0.194870940739048,0.2107208
 78274361,-2.49907016384579,1.02768826260825,-0.973495759250105,1.4404472143815,1.31654026747368,0.670201693636395,-3.21198654877213,2.89150624046676},{0,0.588369116051868,-0.384945784500547,0.827294500187726,-0.194870940739048,1.03197958139787,-0.501664013596029,0.199869705645146,-0.999815107547354,1.4404472143815,1.31654026747368,0.670201693636395,-0.113363288457242,2.2775853282725},{0,-0.464040789742296,-0.384945784500547,-1.30512244079559,-0.194870940739048,-0.713195162739584,1.50063773996317,-0.348280419911504,-0.0581727790283989,-0.927483143499006,-0.95755386335247,-0.566629638935027,0.466558122061657,-1.10874661239903},{0,-0.439404432652302,-0.384945784500547,-0.728832770563885,-0.194870940739048,-0.678976050109438,-0.736652972448942,0.0693577709888006,0.584609659947029,-0.604583549242574,-0.859928610261448,0.360993860493541,0.417179666900031,-0.26600063293236},{0,-0.465840769578191,3.71115210085635,-1.08157430675105,-0.194870940739048,-1.38046785902743,0.767929111317627,-1.82
 865865015634,0.779225252216032,-0.712216747328051,-1.10112041201574,0.154855305064969,0.474971489675403,-1.24827409244318},{0,-0.449600406162565,-0.384945784500547,-0.797058499785271,-0.194870940739048,-1.06394106719858,-0.609367286403615,-1.45949632069982,0.771206460117992,-0.819849945413529,-1.1700323553741,-0.515095000077885,0.474971489675403,0.0702605939740419},{0,-0.450723926475071,-0.384945784500547,-0.393510569497073,-0.194870940739048,0.108063540383922,-0.963482592452796,0.0395264716387789,-0.557305690848749,-0.496950351157096,-0.262691767822257,0.154855305064969,0.46365350705215,0.202811700015985},{0,-0.463530777672611,-0.384945784500547,-1.44302551049839,-0.194870940739048,-0.721749940897121,1.33908283075179,-0.523539303592882,-0.346209758991749,-0.819849945413529,-1.39973883323533,-0.566629638935027,0.474971489675403,-1.20083474922817},{0,-0.451416390561317,-0.384945784500547,-0.34705900917613,-0.194870940739048,-0.217018029602465,-0.377642063090325,0.587676597195429,-0.5
 26214423327513,-0.496950351157096,-0.0272426280144994,-0.566629638935027,-0.0457558701324997,0.294899836845124},{0,1.43662695199392,-0.384945784500547,0.827294500187726,-0.194870940739048,0.843774461932065,0.103758929004185,1.02768826260825,-1.05604504232073,1.4404472143815,1.31654026747368,0.670201693636395,0.443321201985598,1.13485526671091},{0,-0.459019624888874,-0.384945784500547,-1.44302551049839,-0.194870940739048,-0.721749940897121,0.658593970740229,0.86361611618313,-0.337354651153422,-0.819849945413529,-1.39973883323533,-0.566629638935027,0.474971489675403,-1.11153716199991},{0,-0.464338474611422,1.40709704034309,-0.920445456887777,-0.194870940739048,-1.1503443265897,-0.739916707988565,-1.6422130292187,1.52895771586103,-1.03511634158448,-0.762303357170426,-1.03044138864931,0.446125657856846,-0.169726671701896},{0,-0.467702633723797,3.96715821869115,-1.19770320755341,-0.194870940739048,-1.22648185219178,0.581896185559071,-1.66831541614997,2.46100701088846,-0.712216747328051,-
 0.492398245683485,-0.515095000077885,0.430200354873685,-1.01665847556989},{0,-0.0990023182926892,-0.384945784500547,0.827294500187726,5.13160143945555,1.24584903533628,4.27644481641321,0.390044239001535,-0.800919546489827,1.4404472143815,1.31654026747368,0.670201693636395,0.0507975391490579,-1.16595287921713},{0,-0.453765860388608,0.895084804673483,-1.05544530407052,-0.194870940739048,-1.0211671764109,-0.68280133604515,-0.23268413493017,1.81660113547601,-0.281683954986141,-0.87715659610104,0.412528499350682,0.457043003927064,-0.0692668860701082},{0,-0.465999748235896,1.40709704034309,-0.920445456887777,-0.194870940739048,-1.1503443265897,-0.2095596827997,-1.83238756257509,1.52895771586103,-1.03511634158448,-0.762303357170426,-1.03044138864931,0.127920075608393,-0.811553079904987},{0,-0.460749184648202,-0.384945784500547,0.0622953661521859,-0.194870940739048,-1.15804362693148,-0.465762922660168,-1.33644221088098,0.477118489798452,-0.604583549242574,-0.222493134196543,-0.1028178892207
[The remainder of this diff is wrapped numeric example data: rows consisting of an id, a packed matrix of dependent-variable values, and a matrix of standardized independent variables (apparently mini-batch preprocessor output). The archive cuts it off below.]

<TRUNCATED>

[5/6] madlib git commit: Docs: Update MLP, mini-batch documentation

Posted by ri...@apache.org.
Docs: Update MLP, mini-batch documentation

Closes #257


Project: http://git-wip-us.apache.org/repos/asf/madlib/repo
Commit: http://git-wip-us.apache.org/repos/asf/madlib/commit/0f9f12f3
Tree: http://git-wip-us.apache.org/repos/asf/madlib/tree/0f9f12f3
Diff: http://git-wip-us.apache.org/repos/asf/madlib/diff/0f9f12f3

Branch: refs/heads/master
Commit: 0f9f12f3d9b3a041279209e024c3d87a1abba3a1
Parents: ebb3267
Author: Frank McQuillan <fm...@pivotal.io>
Authored: Tue Apr 17 14:03:24 2018 -0700
Committer: Rahul Iyer <ri...@apache.org>
Committed: Tue Apr 17 14:06:07 2018 -0700

----------------------------------------------------------------------
 doc/mainpage.dox.in                             |   58 +-
 src/ports/postgres/modules/convex/mlp.sql_in    | 1051 ++++++++++--------
 .../utilities/minibatch_preprocessing.sql_in    |  439 +++++++-
 3 files changed, 994 insertions(+), 554 deletions(-)
----------------------------------------------------------------------
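
For orientation, the end-to-end workflow that the updated MLP documentation
below describes can be sketched as follows. This is a minimal sketch only:
'iris_data' is the example table created later in this diff, 'iris_packed'
is an illustrative output name, and the call assumes the 4-argument form of
madlib.minibatch_preprocessor documented in minibatch_preprocessing.sql_in.

    DROP TABLE IF EXISTS iris_packed, iris_packed_summary,
                         iris_packed_standardization;
    SELECT madlib.minibatch_preprocessor(
        'iris_data',    -- source table, one row per observation
        'iris_packed',  -- output table of packed buffers
        'class_text',   -- dependent variable
        'attributes'    -- independent variables
    );
    -- The output table uses the hardcoded column names
    -- 'dependent_varname' and 'independent_varname'; the MLP argument
    -- descriptions in the diff below refer to exactly these names.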


http://git-wip-us.apache.org/repos/asf/madlib/blob/0f9f12f3/doc/mainpage.dox.in
----------------------------------------------------------------------
diff --git a/doc/mainpage.dox.in b/doc/mainpage.dox.in
index d579e81..b93f23c 100644
--- a/doc/mainpage.dox.in
+++ b/doc/mainpage.dox.in
@@ -109,21 +109,18 @@ complete matrix stored as a distributed table.
         @defgroup grp_svec Sparse Vectors
     @}
 
-        @defgroup grp_pca Dimensionality Reduction
+        @defgroup grp_encode_categorical Encoding Categorical Variables
         @ingroup grp_datatrans
-        @brief A collection of methods for dimensionality reduction.
-        @details A collection of methods for dimensionality reduction.
-        @{
-            @defgroup grp_pca_train Principal Component Analysis
-            @defgroup grp_pca_project Principal Component Projection
-        @}
 
-        @defgroup grp_encode_categorical Encoding Categorical Variables
+        @defgroup grp_path Path
         @ingroup grp_datatrans
 
         @defgroup grp_pivot Pivot
         @ingroup grp_datatrans
 
+        @defgroup grp_sessionize Sessionize
+        @ingroup grp_datatrans
+
         @defgroup grp_stemmer Stemming
         @ingroup grp_datatrans
 
@@ -155,6 +152,13 @@ Contains graph algorithms.
     @defgroup grp_train_test_split Train-Test Split
     @ingroup grp_mdl
 
+@defgroup grp_sampling Sampling
+    @ingroup grp_sampling
+    @{A collection of methods for sampling from a population. @}
+        @defgroup grp_balance_sampling Balanced Sampling
+        @ingroup grp_sampling
+        @defgroup grp_strs Stratified Sampling
+        @ingroup grp_sampling
 
 @defgroup grp_stats Statistics
 @{Contains statistics modules @}
@@ -233,11 +237,22 @@ Contains graph algorithms.
     @{A collection of methods used to uncover interesting patterns in transactional datasets. @}
         @defgroup grp_assoc_rules Apriori Algorithm
         @ingroup grp_association_rules
+
     @defgroup grp_clustering Clustering
     @ingroup grp_unsupervised
     @{A collection of methods for clustering data@}
         @defgroup grp_kmeans k-Means Clustering
         @ingroup grp_clustering
+
+    @defgroup grp_pca Dimensionality Reduction
+    @ingroup grp_unsupervised
+    @brief A collection of methods for dimensionality reduction.
+    @details A collection of methods for dimensionality reduction.
+    @{
+        @defgroup grp_pca_train Principal Component Analysis
+        @defgroup grp_pca_project Principal Component Projection
+    @}
+
     @defgroup grp_topic_modelling Topic Modelling
     @ingroup grp_unsupervised
     @{A collection of methods to uncover abstract topics in a document corpus @}
@@ -259,45 +274,24 @@ Contains graph algorithms.
         @defgroup grp_sparse_linear_solver Sparse Linear Systems
         @ingroup grp_linear_solver
 
-    @defgroup grp_path Path
+    @defgroup grp_minibatch_preprocessing Mini-Batch Preprocessor
     @ingroup grp_utility_functions
 
     @defgroup grp_pmml PMML Export
     @ingroup grp_utility_functions
 
-    @defgroup grp_sampling Sampling
+    @defgroup grp_text_utilities Term Frequency
     @ingroup grp_utility_functions
-    @{A collection of methods for sampling from a population. @}
-        @defgroup grp_balance_sampling Balanced Sampling
-        @ingroup grp_sampling
-
-        @defgroup grp_strs Stratified Sampling
-        @ingroup grp_sampling
-
-    @defgroup grp_sessionize Sessionize
-    @ingroup grp_utility_functions
-
-    @defgroup grp_text_analysis Text Analysis
-    @ingroup grp_utility_functions
-    @{A collection of methods to find patterns in textual data. @}
-        @defgroup grp_text_utilities Term Frequency
-        @ingroup grp_text_analysis
 
 @defgroup grp_early_stage Early Stage Development
 @brief A collection of implementations which are in early stage of development.
 There may be some issues that will be addressed in a future version.
 Interface and implementation are subject to change.
 @{
-
     @defgroup grp_cg Conjugate Gradient
+    @defgroup grp_knn k-Nearest Neighbors
     @defgroup grp_bayes Naive Bayes Classification
     @defgroup grp_sample Random Sampling
-
-    @defgroup grp_nene Nearest Neighbors
-    @{A collection of methods to create nearest neigbor based models.
-
-        @defgroup grp_knn k-Nearest Neighbors
-    @}
 @}
 
 @defgroup grp_deprecated Deprecated Modules

http://git-wip-us.apache.org/repos/asf/madlib/blob/0f9f12f3/src/ports/postgres/modules/convex/mlp.sql_in
----------------------------------------------------------------------
diff --git a/src/ports/postgres/modules/convex/mlp.sql_in b/src/ports/postgres/modules/convex/mlp.sql_in
index 7ccec2d..64ed62d 100644
--- a/src/ports/postgres/modules/convex/mlp.sql_in
+++ b/src/ports/postgres/modules/convex/mlp.sql_in
@@ -45,13 +45,28 @@ m4_include(`SQLCommon.m4')
 Multilayer Perceptron (MLP) is a type of neural network that can be
 used for regression and classification.
 
-Also called "vanilla neural networks", MLPs consist of several
+MLPs consist of several
 fully connected hidden layers with non-linear activation
 functions.  In the case of classification, the final layer of the
 neural net has as many nodes as classes, and the output of the
 neural net can be interpreted as the probability that a given input
 feature belongs to a specific class.
 
+MLP can be used with or without mini-batching.
+The advantage of using mini-batching is that it 
+can perform better than stochastic gradient descent
+(the default MADlib optimizer)
+because it uses more than one training example at a time,
+typically resulting in faster and smoother convergence [3].
+
+@note
+In order to use mini-batching, you must first run 
+the <a href="group__grp__minibatch__preprocessing.html">Mini-Batch Preprocessor</a>, 
+which is a utility that prepares input data for 
+use by models that support mini-batch as an optimization option,
+such as MLP.  This is a one-time operation and you would only 
+need to re-run the preprocessor if your input data has changed,
+or if you change the grouping parameter.
 
 @brief Solves classification and regression problems with several
 fully connected layers and non-linear activation functions.
@@ -79,7 +94,9 @@ mlp_classification(
 \b Arguments
 <dl class="arglist">
   <dt>source_table</dt>
-  <dd>TEXT. Name of the table containing the training data.</dd>
+  <dd>TEXT. Name of the table containing the training data.
+  If you are using mini-batching, this is the name of the output
+  table from the mini-batch preprocessor.</dd>
 
   <dt>output_table</dt>
   <dd>TEXT. Name of the output table containing the model. Details of the output
@@ -88,21 +105,28 @@ mlp_classification(
 
   <dt>independent_varname</dt>
   <dd>TEXT. Expression list to evaluate for the independent variables.
+  If you are using mini-batching, set this parameter to 'independent_varname',
+  which is the hardcoded name of the column from the mini-batch preprocessor
+  containing the packed independent variables.
 
   @note
-  Please note that an intercept variable should not be included as part
+  If you are not using mini-batching, 
+  please note that an intercept variable should not be included as part
   of this expression - this is different from other MADlib modules.  Also
-  please note that <b>independent variables should be encoded properly.</b>
+  please note that independent variables should be encoded properly.
   All values are cast to DOUBLE PRECISION, so categorical variables should be
   one-hot or dummy encoded as appropriate.
   See <a href="group__grp__encode__categorical.html">Encoding Categorical Variables</a>
-  for more details on how to do this.
+  for more details.
   </dd>
 
   <dt>dependent_varname</dt>
   <dd> TEXT. Name of the dependent variable column. For classification, supported types are:
   text, varchar, character varying, char, character
-  integer, smallint, bigint, and boolean.  </dd>
+  integer, smallint, bigint, and boolean.  
+  If you are using mini-batching, set this parameter to 'dependent_varname',
+  which is the hardcoded name of the column from the mini-batch preprocessor
+  containing the packed dependent variables.</dd>
 
   <DT>hidden_layer_sizes (optional)</DT>
   <DD>INTEGER[], default: ARRAY[100].
@@ -132,6 +156,8 @@ mlp_classification(
     but will not be used
     for loss calculations. If not specified, weight for each row will default to 1 (equal
     weights).  Column should be a numeric type.
+    @note
+    The 'weights' parameter is not currently supported for mini-batching.
   </DD>
 
   <DT>warm_start (optional)</DT>
@@ -160,7 +186,11 @@ mlp_classification(
     A single column or a list of comma-separated
     columns that divides the input data into discrete groups, resulting in one
     model per group. When this value is NULL, no grouping is used and
-    a single model is generated for all data.
+    a single model is generated for all data.  If you are using mini-batching, 
+    you must have run the mini-batch preprocessor with exactly the same
+    groups that you specify here for MLP training.  If you change
+    or remove the groups, then you must re-run the mini-batch
+    preprocessor.
   </DD>
 </dl>
 
@@ -180,9 +210,14 @@ mlp_classification(
       </tr>
       <tr>
         <th>loss</th>
-        <td>FLOAT8. The cross entropy over the training data.
+        <td>FLOAT8. The cross entropy loss over the training data.
         See Technical Background section below for more details.</td>
       </tr>
+      <tr>
+        <th>grouping columns</th>
+        <td>If grouping_col is specified during training, a column for each grouping column
+        is created.</td>
+      </tr>
     </table>
 
 A summary table named \<output_table\>_summary is also created, which has the following columns:
@@ -252,11 +287,11 @@ A standardization table named \<output_table\>_standardization is also created, the
 following columns:
   <table class="output">
     <tr>
-        <th>x_means</th>
+        <th>mean</th>
         <td>The mean for all input features (used for normalization).</td>
     </tr>
     <tr>
-        <th>x_stds</th>
+        <th>std</th>
         <td>The standard deviation for all input features (used for normalization).</td>
     </tr>
     <tr>
@@ -288,7 +323,7 @@ mlp_regression(
 \b Arguments
 
 Parameters for regression are largely the same as for classification. In the
-model table, the loss refers to mean square error instead of cross entropy. In the
+model table, the loss refers to mean square error instead of cross entropy loss. In the
 summary table, there is no classes column. The following
 arguments have specifications which differ from mlp_classification:
 <DL class="arglist">
@@ -378,21 +413,17 @@ If you want to run the full number of iterations specified in \e n_iterations,
 set tolerance=0.0
 </DD>
 
-<DT>tolerance</dt>
-<DD>Default: 1 for IGD, 20 for Minibatch
-If the source_table is detected to contain data
-that is supported by minibatch, then the solver
-uses mini-batch gradient descent, with the specified
-batch_size.
+<DT>batch_size</dt>
+<DD>Default: min(200, buffer_size) where buffer_size
+is set in the mini-batch preprocessor.  The 'batch_size'
+is the size of the mini-batch used in the optimizer.
+This parameter is only used in the case of mini-batching.
 </DD>
 
-<DT>tolerance</dt>
-<DD>Default: 1 for IGD, 20 for Minibatch
-If the source_table is detected to contain data
-that is supported by minibatch, then the solver
-uses mini-batch gradient descent. During gradient
-descent, n_epochs represents the number of times
-all batches in a buffer are iterated over.
+<DT>n_epochs</dt>
+<DD>Default: 1. Represents the number of times
+each batch is used by the optimizer.  This parameter
+is only used in the case of mini-batching
+(see the sketch after this list).
+</DD>
 </DL>
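+
+As a minimal sketch of passing the two mini-batch parameters above
+(assuming a packed table such as the ones built in the examples below;
+trailing optional arguments are omitted):
+<pre class="example">
+-- Override the batch_size and n_epochs defaults:
+SELECT madlib.mlp_classification('iris_data_packed', 'mlp_model',
+    'independent_varname', 'dependent_varname', ARRAY[5],
+    'learning_rate_init=0.1,
+    n_iterations=100,
+    batch_size=20,
+    n_epochs=5,
+    tolerance=0',
+    'tanh');
+</pre>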
 
@@ -462,6 +493,9 @@ For regression, only type='response' is defined.
 
 @anchor example
 @par Examples
+
+<h4>Classification without Mini-Batching</h4>
+
 -#  Create an input data set.
 <pre class="example">
 DROP TABLE IF EXISTS iris_data;
@@ -550,24 +584,266 @@ SELECT madlib.mlp_classification(
     FALSE             -- Not verbose
 );
 </pre>
--# View the result for the model.
+View the model:
 <pre class="example">
--- Set extended display on for easier reading of output
-\\x ON
--- Results may vary depending on platform
+\\x on
 SELECT * FROM mlp_model;
 </pre>
-Result:
 <pre class="result">
 -[ RECORD 1 ]--+------------------------------------------------------------------------------------
 coeff          | {-0.40378996718,0.0157490328855,-0.298904053444,-0.984152185093,-0.657684089715 ...
-loss           | 0.00470906197238
+loss           | 0.0103518565103
+num_iterations | 500
+</pre>
+View the model summary table:
+<pre class="example">
+SELECT * FROM mlp_model_summary;
+</pre>
+<pre class="result">
+-[ RECORD 1 ]--------+------------------------------
+source_table         | iris_data
+independent_varname  | attributes
+dependent_varname    | class_text
+dependent_vartype    | character varying
+tolerance            | 0
+learning_rate_init   | 0.003
+learning_rate_policy | constant
+n_iterations         | 500
+n_tries              | 1
+layer_sizes          | {4,5,2}
+activation           | tanh
+is_classification    | t
+classes              | {Iris_setosa,Iris_versicolor}
+weights              | 1
+grouping_col         | NULL
+</pre>
+View the model standardization table:
+<pre class="example">
+SELECT * FROM mlp_model_standardization;
+</pre>
+<pre class="result">
+-[ RECORD 1 ]------------------------------------------------------------------
+mean | {5.45961538461539,2.99807692307692,3.025,0.851923076923077}
+std  | {0.598799958694505,0.498262513685689,1.41840579525043,0.550346179381454}
+</pre>
+
+-# Now let's use the model to predict. In the following example we will
+use the training data set for prediction as well, which is not usual but serves to
+show the syntax. The prediction is in the estimated_class_text column with the
+actual value in the class_text column.
+<pre class="example">
+DROP TABLE IF EXISTS mlp_prediction;
+\\x off
+SELECT madlib.mlp_predict(
+         'mlp_model',         -- Model table
+         'iris_data',         -- Test data table
+         'id',                -- Id column in test table
+         'mlp_prediction',    -- Output table for predictions
+         'response'           -- Output classes, not probabilities
+     );
+SELECT * FROM mlp_prediction JOIN iris_data USING (id) ORDER BY id;
+</pre>
+<pre class="result">
+ id | estimated_class_text |    attributes     |   class_text    | class |   state
+----+----------------------+-------------------+-----------------+-------+-----------
+  1 | Iris_setosa          | {5.0,3.2,1.2,0.2} | Iris_setosa     |     1 | Alaska
+  2 | Iris_setosa          | {5.5,3.5,1.3,0.2} | Iris_setosa     |     1 | Alaska
+  3 | Iris_setosa          | {4.9,3.1,1.5,0.1} | Iris_setosa     |     1 | Alaska
+  4 | Iris_setosa          | {4.4,3.0,1.3,0.2} | Iris_setosa     |     1 | Alaska
+  5 | Iris_setosa          | {5.1,3.4,1.5,0.2} | Iris_setosa     |     1 | Alaska
+  6 | Iris_setosa          | {5.0,3.5,1.3,0.3} | Iris_setosa     |     1 | Alaska
+  7 | Iris_setosa          | {4.5,2.3,1.3,0.3} | Iris_setosa     |     1 | Alaska
+  8 | Iris_setosa          | {4.4,3.2,1.3,0.2} | Iris_setosa     |     1 | Alaska
+  9 | Iris_setosa          | {5.0,3.5,1.6,0.6} | Iris_setosa     |     1 | Alaska
+ 10 | Iris_setosa          | {5.1,3.8,1.9,0.4} | Iris_setosa     |     1 | Alaska
+ 11 | Iris_setosa          | {4.8,3.0,1.4,0.3} | Iris_setosa     |     1 | Alaska
+ 12 | Iris_setosa          | {5.1,3.8,1.6,0.2} | Iris_setosa     |     1 | Alaska
+ 13 | Iris_versicolor      | {5.7,2.8,4.5,1.3} | Iris_versicolor |     2 | Alaska
+ 14 | Iris_versicolor      | {6.3,3.3,4.7,1.6} | Iris_versicolor |     2 | Alaska
+ 15 | Iris_versicolor      | {4.9,2.4,3.3,1.0} | Iris_versicolor |     2 | Alaska
+ 16 | Iris_versicolor      | {6.6,2.9,4.6,1.3} | Iris_versicolor |     2 | Alaska
+ 17 | Iris_versicolor      | {5.2,2.7,3.9,1.4} | Iris_versicolor |     2 | Alaska
+ 18 | Iris_versicolor      | {5.0,2.0,3.5,1.0} | Iris_versicolor |     2 | Alaska
+ 19 | Iris_versicolor      | {5.9,3.0,4.2,1.5} | Iris_versicolor |     2 | Alaska
+ 20 | Iris_versicolor      | {6.0,2.2,4.0,1.0} | Iris_versicolor |     2 | Alaska
+ 21 | Iris_versicolor      | {6.1,2.9,4.7,1.4} | Iris_versicolor |     2 | Alaska
+ 22 | Iris_versicolor      | {5.6,2.9,3.6,1.3} | Iris_versicolor |     2 | Alaska
+ 23 | Iris_versicolor      | {6.7,3.1,4.4,1.4} | Iris_versicolor |     2 | Alaska
+ 24 | Iris_versicolor      | {5.6,3.0,4.5,1.5} | Iris_versicolor |     2 | Alaska
+ 25 | Iris_versicolor      | {5.8,2.7,4.1,1.0} | Iris_versicolor |     2 | Alaska
+ 26 | Iris_versicolor      | {6.2,2.2,4.5,1.5} | Iris_versicolor |     2 | Alaska
+ 27 | Iris_versicolor      | {5.6,2.5,3.9,1.1} | Iris_versicolor |     2 | Alaska
+ 28 | Iris_setosa          | {5.0,3.4,1.5,0.2} | Iris_setosa     |     1 | Tennessee
+ 29 | Iris_setosa          | {4.4,2.9,1.4,0.2} | Iris_setosa     |     1 | Tennessee
+ 30 | Iris_setosa          | {4.9,3.1,1.5,0.1} | Iris_setosa     |     1 | Tennessee
+ 31 | Iris_setosa          | {5.4,3.7,1.5,0.2} | Iris_setosa     |     1 | Tennessee
+ 32 | Iris_setosa          | {4.8,3.4,1.6,0.2} | Iris_setosa     |     1 | Tennessee
+ 33 | Iris_setosa          | {4.8,3.0,1.4,0.1} | Iris_setosa     |     1 | Tennessee
+ 34 | Iris_setosa          | {4.3,3.0,1.1,0.1} | Iris_setosa     |     1 | Tennessee
+ 35 | Iris_setosa          | {5.8,4.0,1.2,0.2} | Iris_setosa     |     1 | Tennessee
+ 36 | Iris_setosa          | {5.7,4.4,1.5,0.4} | Iris_setosa     |     1 | Tennessee
+ 37 | Iris_setosa          | {5.4,3.9,1.3,0.4} | Iris_setosa     |     1 | Tennessee
+ 38 | Iris_versicolor      | {6.0,2.9,4.5,1.5} | Iris_versicolor |     2 | Tennessee
+ 39 | Iris_versicolor      | {5.7,2.6,3.5,1.0} | Iris_versicolor |     2 | Tennessee
+ 40 | Iris_versicolor      | {5.5,2.4,3.8,1.1} | Iris_versicolor |     2 | Tennessee
+ 41 | Iris_versicolor      | {5.5,2.4,3.7,1.0} | Iris_versicolor |     2 | Tennessee
+ 42 | Iris_versicolor      | {5.8,2.7,3.9,1.2} | Iris_versicolor |     2 | Tennessee
+ 43 | Iris_versicolor      | {6.0,2.7,5.1,1.6} | Iris_versicolor |     2 | Tennessee
+ 44 | Iris_versicolor      | {5.4,3.0,4.5,1.5} | Iris_versicolor |     2 | Tennessee
+ 45 | Iris_versicolor      | {6.0,3.4,4.5,1.6} | Iris_versicolor |     2 | Tennessee
+ 46 | Iris_versicolor      | {6.7,3.1,4.7,1.5} | Iris_versicolor |     2 | Tennessee
+ 47 | Iris_versicolor      | {6.3,2.3,4.4,1.3} | Iris_versicolor |     2 | Tennessee
+ 48 | Iris_versicolor      | {5.6,3.0,4.1,1.3} | Iris_versicolor |     2 | Tennessee
+ 49 | Iris_versicolor      | {5.5,2.5,4.0,1.3} | Iris_versicolor |     2 | Tennessee
+ 50 | Iris_versicolor      | {5.5,2.6,4.4,1.2} | Iris_versicolor |     2 | Tennessee
+ 51 | Iris_versicolor      | {6.1,3.0,4.6,1.4} | Iris_versicolor |     2 | Tennessee
+ 52 | Iris_versicolor      | {5.8,2.6,4.0,1.2} | Iris_versicolor |     2 | Tennessee
+(52 rows)
+</pre>
+Count the misclassifications:
+<pre class="example">
+SELECT COUNT(*) FROM mlp_prediction JOIN iris_data USING (id)
+WHERE mlp_prediction.estimated_class_text != iris_data.class_text;
+</pre>
+<pre class="result">
+ count
+-------+
+     0
+</pre>
+
+<h4>Classification with Mini-Batching</h4>
+
+-#  Use the same data set as above.  Call the mini-batch preprocessor:
+<pre class="example">
+DROP TABLE IF EXISTS iris_data_packed, iris_data_packed_summary, iris_data_packed_standardization;
+SELECT madlib.minibatch_preprocessor('iris_data',         -- Source table
+                                     'iris_data_packed',  -- Output table
+                                     'class_text',        -- Dependent variable
+                                     'attributes'        -- Independent variables
+                                    );
+</pre>
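+To see the column layout that training expects, you can peek at the packed
+table (a sketch; the actual packed values will vary by run and platform):
+<pre class="example">
+\\x on
+SELECT independent_varname, dependent_varname FROM iris_data_packed LIMIT 1;
+</pre>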
+-# Train the classification model using similar parameters as before:
+<pre class="example">
+DROP TABLE IF EXISTS mlp_model, mlp_model_summary, mlp_model_standardization;
+-- Set seed so results are reproducible
+SELECT setseed(0);
+SELECT madlib.mlp_classification(
+    'iris_data_packed',      -- Output table from mini-batch preprocessor
+    'mlp_model',             -- Destination table
+    'independent_varname',   -- Hardcode to this, from table iris_data_packed
+    'dependent_varname',     -- Hardcode to this, from table iris_data_packed
+    ARRAY[5],                -- Number of units per layer
+    'learning_rate_init=0.1,
+    n_iterations=500,
+    tolerance=0',            -- Optimizer params
+    'tanh',                  -- Activation function
+    NULL,                    -- Default weight (1)
+    FALSE,                   -- No warm start
+    FALSE                    -- Not verbose
+);
+</pre>
+View the model:
+<pre class="example">
+\\x on
+SELECT * FROM mlp_model;
+</pre>
+<pre class="result">
+-[ RECORD 1 ]--+------------------------------------------------------------------------------------
+coeff          | {-0.0780564661828377,-0.0781452670639994,0.3083605989842 ...
+loss           | 0.00563534904146765
 num_iterations | 500
 </pre>
 
+-# Now let's use the model to predict. As before, we will
+use the training data set for prediction as well, which is not usual but serves to
+show the syntax. The prediction is in the estimated_class_text column with the
+actual value in the class_text column.
+<pre class="example">
+DROP TABLE IF EXISTS mlp_prediction;
+\\x off
+SELECT madlib.mlp_predict(
+         'mlp_model',         -- Model table
+         'iris_data',         -- Test data table
+         'id',                -- Id column in test table
+         'mlp_prediction',    -- Output table for predictions
+         'response'           -- Output classes, not probabilities
+     );
+SELECT * FROM mlp_prediction JOIN iris_data USING (id) ORDER BY id;
+</pre>
+<pre class="result">
+ id | estimated_class_text |    attributes     |   class_text    | class |   state   
+----+----------------------+-------------------+-----------------+-------+-----------
+  1 | Iris_setosa          | {5.0,3.2,1.2,0.2} | Iris_setosa     |     1 | Alaska
+  2 | Iris_setosa          | {5.5,3.5,1.3,0.2} | Iris_setosa     |     1 | Alaska
+  3 | Iris_setosa          | {4.9,3.1,1.5,0.1} | Iris_setosa     |     1 | Alaska
+  4 | Iris_setosa          | {4.4,3.0,1.3,0.2} | Iris_setosa     |     1 | Alaska
+  5 | Iris_setosa          | {5.1,3.4,1.5,0.2} | Iris_setosa     |     1 | Alaska
+  6 | Iris_setosa          | {5.0,3.5,1.3,0.3} | Iris_setosa     |     1 | Alaska
+  7 | Iris_setosa          | {4.5,2.3,1.3,0.3} | Iris_setosa     |     1 | Alaska
+  8 | Iris_setosa          | {4.4,3.2,1.3,0.2} | Iris_setosa     |     1 | Alaska
+  9 | Iris_setosa          | {5.0,3.5,1.6,0.6} | Iris_setosa     |     1 | Alaska
+ 10 | Iris_setosa          | {5.1,3.8,1.9,0.4} | Iris_setosa     |     1 | Alaska
+ 11 | Iris_setosa          | {4.8,3.0,1.4,0.3} | Iris_setosa     |     1 | Alaska
+ 12 | Iris_setosa          | {5.1,3.8,1.6,0.2} | Iris_setosa     |     1 | Alaska
+ 13 | Iris_versicolor      | {5.7,2.8,4.5,1.3} | Iris_versicolor |     2 | Alaska
+ 14 | Iris_versicolor      | {6.3,3.3,4.7,1.6} | Iris_versicolor |     2 | Alaska
+ 15 | Iris_versicolor      | {4.9,2.4,3.3,1.0} | Iris_versicolor |     2 | Alaska
+ 16 | Iris_versicolor      | {6.6,2.9,4.6,1.3} | Iris_versicolor |     2 | Alaska
+ 17 | Iris_versicolor      | {5.2,2.7,3.9,1.4} | Iris_versicolor |     2 | Alaska
+ 18 | Iris_versicolor      | {5.0,2.0,3.5,1.0} | Iris_versicolor |     2 | Alaska
+ 19 | Iris_versicolor      | {5.9,3.0,4.2,1.5} | Iris_versicolor |     2 | Alaska
+ 20 | Iris_versicolor      | {6.0,2.2,4.0,1.0} | Iris_versicolor |     2 | Alaska
+ 21 | Iris_versicolor      | {6.1,2.9,4.7,1.4} | Iris_versicolor |     2 | Alaska
+ 22 | Iris_versicolor      | {5.6,2.9,3.6,1.3} | Iris_versicolor |     2 | Alaska
+ 23 | Iris_versicolor      | {6.7,3.1,4.4,1.4} | Iris_versicolor |     2 | Alaska
+ 24 | Iris_versicolor      | {5.6,3.0,4.5,1.5} | Iris_versicolor |     2 | Alaska
+ 25 | Iris_versicolor      | {5.8,2.7,4.1,1.0} | Iris_versicolor |     2 | Alaska
+ 26 | Iris_versicolor      | {6.2,2.2,4.5,1.5} | Iris_versicolor |     2 | Alaska
+ 27 | Iris_versicolor      | {5.6,2.5,3.9,1.1} | Iris_versicolor |     2 | Alaska
+ 28 | Iris_setosa          | {5.0,3.4,1.5,0.2} | Iris_setosa     |     1 | Tennessee
+ 29 | Iris_setosa          | {4.4,2.9,1.4,0.2} | Iris_setosa     |     1 | Tennessee
+ 30 | Iris_setosa          | {4.9,3.1,1.5,0.1} | Iris_setosa     |     1 | Tennessee
+ 31 | Iris_setosa          | {5.4,3.7,1.5,0.2} | Iris_setosa     |     1 | Tennessee
+ 32 | Iris_setosa          | {4.8,3.4,1.6,0.2} | Iris_setosa     |     1 | Tennessee
+ 33 | Iris_setosa          | {4.8,3.0,1.4,0.1} | Iris_setosa     |     1 | Tennessee
+ 34 | Iris_setosa          | {4.3,3.0,1.1,0.1} | Iris_setosa     |     1 | Tennessee
+ 35 | Iris_setosa          | {5.8,4.0,1.2,0.2} | Iris_setosa     |     1 | Tennessee
+ 36 | Iris_setosa          | {5.7,4.4,1.5,0.4} | Iris_setosa     |     1 | Tennessee
+ 37 | Iris_setosa          | {5.4,3.9,1.3,0.4} | Iris_setosa     |     1 | Tennessee
+ 38 | Iris_versicolor      | {6.0,2.9,4.5,1.5} | Iris_versicolor |     2 | Tennessee
+ 39 | Iris_versicolor      | {5.7,2.6,3.5,1.0} | Iris_versicolor |     2 | Tennessee
+ 40 | Iris_versicolor      | {5.5,2.4,3.8,1.1} | Iris_versicolor |     2 | Tennessee
+ 41 | Iris_versicolor      | {5.5,2.4,3.7,1.0} | Iris_versicolor |     2 | Tennessee
+ 42 | Iris_versicolor      | {5.8,2.7,3.9,1.2} | Iris_versicolor |     2 | Tennessee
+ 43 | Iris_versicolor      | {6.0,2.7,5.1,1.6} | Iris_versicolor |     2 | Tennessee
+ 44 | Iris_versicolor      | {5.4,3.0,4.5,1.5} | Iris_versicolor |     2 | Tennessee
+ 45 | Iris_versicolor      | {6.0,3.4,4.5,1.6} | Iris_versicolor |     2 | Tennessee
+ 46 | Iris_versicolor      | {6.7,3.1,4.7,1.5} | Iris_versicolor |     2 | Tennessee
+ 47 | Iris_versicolor      | {6.3,2.3,4.4,1.3} | Iris_versicolor |     2 | Tennessee
+ 48 | Iris_versicolor      | {5.6,3.0,4.1,1.3} | Iris_versicolor |     2 | Tennessee
+ 49 | Iris_versicolor      | {5.5,2.5,4.0,1.3} | Iris_versicolor |     2 | Tennessee
+ 50 | Iris_versicolor      | {5.5,2.6,4.4,1.2} | Iris_versicolor |     2 | Tennessee
+ 51 | Iris_versicolor      | {6.1,3.0,4.6,1.4} | Iris_versicolor |     2 | Tennessee
+ 52 | Iris_versicolor      | {5.8,2.6,4.0,1.2} | Iris_versicolor |     2 | Tennessee
+(52 rows)
+</pre>
+Count the misclassifications:
+<pre class="example">
+SELECT COUNT(*) FROM mlp_prediction JOIN iris_data USING (id)
+WHERE mlp_prediction.estimated_class_text != iris_data.class_text;
+</pre>
+<pre class="result">
+ count
+-------+
+     0
+</pre>
+
+<h4>Classification with Other Parameters</h4>
+
 -# Now, use the n_tries optimizer parameter to learn and choose the best model
 among n_tries number of models learnt by the algorithm. Run only for 50 iterations
-and choose the best model from this short run.
+and choose the best model from this short run. Note that we are not using
+mini-batching here.
 <pre class="example">
 DROP TABLE IF EXISTS mlp_model, mlp_model_summary, mlp_model_standardization;
 -- Set seed so results are reproducible
@@ -588,16 +864,15 @@ SELECT madlib.mlp_classification(
     FALSE             -- Not verbose
 );
 </pre>
--# View the result for the model.
+View the model:
 <pre class="example">
--- Results may vary depending on platform
+\\x on
 SELECT * FROM mlp_model;
 </pre>
-Result:
 <pre class="result">
 -[ RECORD 1 ]--+------------------------------------------------------------------------------------
-coeff          | {-0.166258065719,0.134984689989,-0.445383330023,0.997461636443,0.00624022069817 ...
-loss           | 0.0501651250932
+coeff          | {0.000156316559088915,0.131131017223563,-0.293990512682215 ...
+loss           | 0.142238768280717
 num_iterations | 50
 </pre>
 
@@ -621,16 +896,15 @@ SELECT madlib.mlp_classification(
     FALSE             -- Not verbose
 );
 </pre>
--# View the result for the model.
+View the model:
 <pre class="example">
--- Results may vary depending on platform
+\\x on
 SELECT * FROM mlp_model;
 </pre>
-Result:
 <pre class="result">
 -[ RECORD 1 ]--+------------------------------------------------------------------------------------
-coeff          | {-0.0269464483507,0.1800462291,-0.550842844362,1.14444526434,0.16573219016 ...
-loss           | 0.00379153493882
+coeff          | {0.0883013960215441,0.235944854050211,-0.398126039487036 ...
+loss           | 0.00818899646775659
 num_iterations | 450
 </pre>
 Notice that the loss is lower compared to the previous example, despite
@@ -639,7 +913,10 @@ learnt three different models starting with a different set of initial weights
 for the coefficients, and chose the best model among them as the initial
 weights for the coefficients when run with warm start.
 
+<h4>Classification with Grouping</h4>
+
 -# Next, group the training data by state, and learn a different model for each state.
+Note that we are not using mini-batching in this example.
 <pre class="example">
 DROP TABLE IF EXISTS mlp_model_group, mlp_model_group_summary, mlp_model_group_standardization;
 -- Set seed so results are reproducible
@@ -651,9 +928,8 @@ SELECT madlib.mlp_classification(
     'class_text',       -- Label
     ARRAY[5],           -- Number of units per layer
     'learning_rate_init=0.003,
-    n_iterations=50,
-    tolerance=0,
-    n_tries=3',         -- Optimizer params, with n_tries
+    n_iterations=500,   -- Optimizer params
+    tolerance=0',
     'tanh',             -- Activation function
     NULL,               -- Default weight (1)
     FALSE,              -- No warm start
@@ -661,420 +937,39 @@ SELECT madlib.mlp_classification(
     'state'             -- Grouping column
 );
 </pre>
--# View the result for the model.
+View the model:
 <pre class="example">
--- Results may vary depending on platform
+\\x on
 SELECT * FROM mlp_model_group ORDER BY state;
 </pre>
-Result:
 <pre class="result">
 -[ RECORD 1 ]--+------------------------------------------------------------------------------------
 state          | Alaska
 coeff          | {-0.51246602223,-0.78952457411,0.454192045225,0.223214894458,0.188804700547 ...
-loss           | 0.10264382522
-num_iterations | 50
+loss           | 0.0225081995679
+num_iterations | 500
 -[ RECORD 2 ]--+------------------------------------------------------------------------------------
 state          | Tennessee
 coeff          | {-0.215009937565,0.116581594162,-0.397643279185,0.919193295184,-0.0811341736111 ...
-loss           | 0.110626560621
-num_iterations | 50
+loss           | 0.0182854983946
+num_iterations | 500
 </pre>
 A separate model is learnt for each state, and the result table displays the name of
 the state (grouping column) associated with the model.
 
--# Next use warm_start to improve the models that are already present
-in the mlp_model_group table. Note again that we cannot use n_tries with warm_start,
-and the model table must already be present.
-<pre class="example">
-SELECT madlib.mlp_classification(
-    'iris_data',        -- Source table
-    'mlp_model_group',  -- Destination table
-    'attributes',       -- Input features
-    'class_text',       -- Label
-    ARRAY[5],           -- Number of units per layer
-    'learning_rate_init=0.003,
-    n_iterations=450,
-    tolerance=0',       -- Optimizer params
-    'tanh',             -- Activation function
-    NULL,               -- Default weight (1)
-    TRUE,               -- Warm start
-    FALSE,              -- Not verbose
-    'state'             -- Grouping column
-);
-</pre>
--# View the result for the model.
+-# Prediction based on grouping using the state column:
 <pre class="example">
--- Results may vary depending on platform
-SELECT * FROM mlp_model_group ORDER BY state;
+DROP TABLE IF EXISTS mlp_prediction;
+SELECT madlib.mlp_predict(
+         'mlp_model_group',   -- Model table
+         'iris_data',         -- Test data table
+         'id',                -- Id column in test table
+         'mlp_prediction',    -- Output table for predictions
+         'response'           -- Output classes, not probabilities
+     );
+SELECT * FROM mlp_prediction JOIN iris_data USING (state,id) ORDER BY state, id;
 </pre>
-Result:
-<pre class="result">
--[ RECORD 1 ]--+------------------------------------------------------------------------------------
-state          | Alaska
-coeff          | {-0.49062677864,-0.831107028791,0.512375074037,0.132255301901,0.0979308947674 ...
-loss           | 0.00848875807291
-num_iterations | 450
--[ RECORD 2 ]--+------------------------------------------------------------------------------------
-state          | Tennessee
-coeff          | {-0.0479694454816,0.204731619449,-0.501981763261,1.083913543,0.0907812789677 ...
-loss           | 0.00789463466086
-num_iterations | 450
-</pre>
-
-
-
--# Next train a regression example.  First create some test data.  This dataset
-contains housing prices data, where zipcode can be used as a grouping column.
-<pre class="example">
-DROP TABLE IF EXISTS lin_housing;
-CREATE TABLE lin_housing (id serial, x numeric[], zipcode int, y float8);
-INSERT INTO lin_housing(id, x, zipcode, y) VALUES
-(1,ARRAY[1,0.00632,18.00,2.310,0,0.5380,6.5750,65.20,4.0900,1,296.0,15.30,396.90,4.98],94016,24.00),
-(2,ARRAY[1,0.02731,0.00,7.070,0,0.4690,6.4210,78.90,4.9671,2,242.0,17.80,396.90,9.14],94016,21.60),
-(3,ARRAY[1,0.02729,0.00,7.070,0,0.4690,7.1850,61.10,4.9671,2,242.0,17.80,392.83,4.03],94016,34.70),
-(4,ARRAY[1,0.03237,0.00,2.180,0,0.4580,6.9980,45.80,6.0622,3,222.0,18.70,394.63,2.94],94016,33.40),
-(5,ARRAY[1,0.06905,0.00,2.180,0,0.4580,7.1470,54.20,6.0622,3,222.0,18.70,396.90,5.33],94016,36.20),
-(6,ARRAY[1,0.02985,0.00,2.180,0,0.4580,6.4300,58.70,6.0622,3,222.0,18.70,394.12,5.21],94016,28.70),
-(7,ARRAY[1,0.08829,12.50,7.870,0,0.5240,6.0120,66.60,5.5605,5,311.0,15.20,395.60,12.43],94016,22.90),
-(8,ARRAY[1,0.14455,12.50,7.870,0,0.5240,6.1720,96.10,5.9505,5,311.0,15.20,396.90,19.15],94016,27.10),
-(9,ARRAY[1,0.21124,12.50,7.870,0,0.5240,5.6310,100.00,6.0821,5,311.0,15.20,386.63,29.93],94016,16.50),
-(10,ARRAY[1,0.17004,12.50,7.870,0,0.5240,6.0040,85.90,6.5921,5,311.0,15.20,386.71,17.10],94016,18.90),
-(11,ARRAY[1,0.22489,12.50,7.870,0,0.5240,6.3770,94.30,6.3467,5,311.0,15.20,392.52,20.45],94016,15.00),
-(12,ARRAY[1,0.11747,12.50,7.870,0,0.5240,6.0090,82.90,6.2267,5,311.0,15.20,396.90,13.27],20001,18.90),
-(13,ARRAY[1,0.09378,12.50,7.870,0,0.5240,5.8890,39.00,5.4509,5,311.0,15.20,390.50,15.71],20001,21.70),
-(14,ARRAY[1,0.62976,0.00,8.140,0,0.5380,5.9490,61.80,4.7075,4,307.0,21.00,396.90,8.26],20001,20.40),
-(15,ARRAY[1,0.63796,0.00,8.140,0,0.5380,6.0960,84.50,4.4619,4,307.0,21.00,380.02,10.26],20001,18.20),
-(16,ARRAY[1,0.62739,0.00,8.140,0,0.5380,5.8340,56.50,4.4986,4,307.0,21.00,395.62,8.47],20001,19.90),
-(17,ARRAY[1,1.05393,0.00,8.140,0,0.5380,5.9350,29.30,4.4986,4,307.0,21.00,386.85,6.58],20001, 23.10),
-(18,ARRAY[1,0.78420,0.00,8.140,0,0.5380,5.9900,81.70,4.2579,4,307.0,21.00,386.75,14.67],20001,17.50),
-(19,ARRAY[1,0.80271,0.00,8.140,0,0.5380,5.4560,36.60,3.7965,4,307.0,21.00,288.99,11.69],20001,20.20),
-(20,ARRAY[1,0.72580,0.00,8.140,0,0.5380,5.7270,69.50,3.7965,4,307.0,21.00,390.95,11.28],20001,18.20);
-</pre>
--# Now train a regression model using a multilayer
-perceptron with two hidden layers of twenty five nodes each.
-<pre class="example">
-DROP TABLE IF EXISTS mlp_regress, mlp_regress_summary, mlp_regress_standardization;
-SELECT setseed(0);
-SELECT madlib.mlp_regression(
-    'lin_housing',    -- Source table
-    'mlp_regress',    -- Desination table
-    'x',              -- Input features
-    'y',              -- Dependent variable
-    ARRAY[25,25],     -- Number of units per layer
-    'learning_rate_init=0.001,
-    n_iterations=500,
-    lambda=0.001,
-    tolerance=0',     -- Optimizer params
-    'relu',           -- Activation function
-    NULL,             -- Default weight (1)
-    FALSE,            -- No warm start
-    FALSE             -- Not verbose
-);
-</pre>
--# Check the results of the model
-<pre class="example">
--- Set extended display on for easier reading of output.
-\\x ON
--- Results may vary depending on platform.
-SELECT * FROM mlp_regress;
-</pre>
-Result:
-<pre class="result">
-[ RECORD 1 ]--+-------------------------------------------------------------------------------------
-coeff          | {-0.250057620174,0.0630805938982,-0.290635490112,-0.382966162592,-0.212206338909...
-loss           | 0.056318716374
-num_iterations | 500
- </pre>
-
--# Now, use the n_tries optimizer parameter to learn and choose the best model
-among n_tries number of models learnt by the algorithm.
-<pre class="example">
-DROP TABLE IF EXISTS mlp_regress, mlp_regress_summary, mlp_regress_standardization;
--- Set seed so results are reproducible
-SELECT setseed(0);
-SELECT madlib.mlp_regression(
-    'lin_housing',    -- Source table
-    'mlp_regress',    -- Desination table
-    'x',              -- Input features
-    'y',              -- Dependent variable
-    ARRAY[25,25],     -- Number of units per layer
-    'learning_rate_init=0.001,
-    n_iterations=50,
-    n_tries=3,
-    lambda=0.001,
-    tolerance=0',     -- Optimizer params, with n_tries
-    'relu',           -- Activation function
-    NULL,             -- Default weight (1)
-    FALSE,            -- No warm start
-    FALSE             -- Not verbose
-);
-</pre>
--# View the result for the model.
-<pre class="example">
--- Results may vary depending on platform
-SELECT * FROM mlp_regress;
-</pre>
-Result:
-<pre class="result">
--[ RECORD 1 ]--+------------------------------------------------------------------------------------
-coeff          | {-0.205930835968,0.362647441938,-0.303398893908,-0.304178433784,0.394084054048 ...
-loss           | 2.06914906438
-num_iterations | 50
-</pre>
-
--# Next, use the warm_start parameter to start learning a new model, using
-the coefficients already present in mlp_regress.
-<pre class="example">
-SELECT madlib.mlp_regression(
-    'lin_housing',    -- Source table
-    'mlp_regress',    -- Desination table
-    'x',              -- Input features
-    'y',              -- Dependent variable
-    ARRAY[25,25],     -- Number of units per layer
-    'learning_rate_init=0.001,
-    n_iterations=450,
-    lambda=0.001,
-    tolerance=0',     -- Optimizer params
-    'relu',           -- Activation function
-    NULL,             -- Default weight (1)
-    TRUE,             -- Warm start
-    FALSE             -- Not verbose
-);
-</pre>
--# View the result for the model.
-<pre class="example">
--- Results may vary depending on platform
-SELECT * FROM mlp_regress;
-</pre>
-Result:
-<pre class="result">
--[ RECORD 1 ]--+------------------------------------------------------------------------------------
-coeff          | {-0.0876810606854,0.179699128308,-0.145139175157,-0.147247509843,0.163163651648, ...
-loss           | 0.199137729586
-num_iterations | 450
-</pre>
-
--# Next, group the training data by zipcode, and learn a different model for each zipcode.
-<pre class="example">
-DROP TABLE IF EXISTS mlp_regress_group, mlp_regress_group_summary, mlp_regress_group_standardization;
--- Set seed so results are reproducible
-SELECT setseed(0);
-SELECT madlib.mlp_regression(
-    'lin_housing',    -- Source table
-    'mlp_regress_group',    -- Desination table
-    'x',              -- Input features
-    'y',              -- Dependent variable
-    ARRAY[25,25],     -- Number of units per layer
-    'learning_rate_init=0.001,
-    n_iterations=500,
-    lambda=0.001,
-    tolerance=0',     -- Optimizer params
-    'relu',           -- Activation function
-    NULL,             -- Default weight (1)
-    FALSE,            -- No warm start
-    FALSE,            -- Not verbose
-    'zipcode'         -- Grouping column
-);
-</pre>
--# View the result for the model.
-<pre class="example">
--- Results may vary depending on platform
-SELECT * FROM mlp_regress_group;
-</pre>
-Result:
-<pre class="result">
--[ RECORD 1 ]--+------------------------------------------------------------------------------------
-zipcode        | 200001
-coeff          | {-0.193588485849,0.063428493184,-0.30440608833,-0.355695802004,-0.175942716164 ...
-loss           | 0.029218326712
-num_iterations | 500
--[ RECORD 2 ]--+------------------------------------------------------------------------------------
-zipcode        | 94016
-coeff          | {-0.18965351506,0.0633650963628,-0.302423579808,-0.334367637252,-0.230043593847 ...
-loss           | 0.0324142074233
-num_iterations | 500
-</pre>
-
--# Now, use the n_tries optimizer parameter to learn and choose the best model
-among n_tries number of models learnt by the algorithm for each zipcode.
-<pre class="example">
-DROP TABLE IF EXISTS mlp_regress_group, mlp_regress_group_summary, mlp_regress_group_standardization;
--- Set seed so results are reproducible
-SELECT setseed(0);
-SELECT madlib.mlp_regression(
-    'lin_housing',    -- Source table
-    'mlp_regress_group',    -- Desination table
-    'x',              -- Input features
-    'y',              -- Dependent variable
-    ARRAY[25,25],     -- Number of units per layer
-    'learning_rate_init=0.001,
-    n_iterations=50,
-    n_tries=3,
-    lambda=0.001,
-    tolerance=0',     -- Optimizer params, with n_tries
-    'relu',           -- Activation function
-    NULL,             -- Default weight (1)
-    FALSE,            -- No warm start
-    FALSE,            -- Not verbose
-    'zipcode'         -- Grouping column
-);
-</pre>
--# View the result for the model.
-<pre class="example">
--- Results may vary depending on platform
-SELECT * FROM mlp_regress_group ORDER BY zipcode;
-</pre>
-Result:
-<pre class="result">
--[ RECORD 1 ]--+------------------------------------------------------------------------------------
-zipcode        | 200001
-coeff          | {-0.193777024396,0.0636858996065,-0.30615041286,-0.356474420158,-0.177321601091 ...
-loss           | 0.0603516434838
-num_iterations | 50
--[ RECORD 2 ]--+------------------------------------------------------------------------------------
-zipcode        | 94016
-coeff          | {-0.199320228996,0.362810670091,-0.299294076026,-0.255872794062,0.420597404648 ...
-loss           | 1.02032647855
-num_iterations | 50
-</pre>
-
--# Next use warm_start to improve the models that are already present
-in the mlp_regress_group table.
-<pre class="example">
-SELECT madlib.mlp_regression(
-    'lin_housing',    -- Source table
-    'mlp_regress_group',    -- Desination table
-    'x',              -- Input features
-    'y',              -- Dependent variable
-    ARRAY[25,25],     -- Number of units per layer
-    'learning_rate_init=0.001,
-    n_iterations=450,
-    lambda=0.001,
-    tolerance=0',     -- Optimizer params
-    'relu',           -- Activation function
-    NULL,             -- Default weight (1)
-    TRUE,             -- Warm start
-    FALSE,            -- Not verbose
-    'zipcode'         -- Grouping column
-);
-</pre>
--# View the result for the model.
-<pre class="example">
--- Results may vary depending on platform
-SELECT * FROM mlp_regress_group ORDER BY zipcode;
-</pre>
-Result:
-<pre class="result">
--[ RECORD 1 ]--+------------------------------------------------------------------------------------
-zipcode        | 200001
-coeff          | {-0.193247562723,0.063399956769,-0.304224541276,-0.355615534078,-0.175783800454 ...
-loss           | 0.0289356302498
-num_iterations | 450
--[ RECORD 2 ]--+------------------------------------------------------------------------------------
-zipcode        | 94016
-coeff          | {-0.168703459263,0.360820687875,-0.284997924553,-0.266391951143,0.437996440846 ...
-loss           | 0.0330375613383
-num_iterations | 450
-</pre>
-
--# Now let's look at the prediction functions. In the following examples we will
-use the training data set for prediction as well, which is not usual but serves to
-show the syntax. First we will test the classification example.
-The prediction is in the the estimated_class_text column with the
-actual value in the class_text column.
-<pre class="example">
-DROP TABLE IF EXISTS mlp_prediction;
-SELECT madlib.mlp_predict(
-         'mlp_model',         -- Model table
-         'iris_data',         -- Test data table
-         'id',                -- Id column in test table
-         'mlp_prediction',    -- Output table for predictions
-         'response'           -- Output classes, not probabilities
-     );
-SELECT * FROM mlp_prediction JOIN iris_data USING (id) ORDER BY id;
-</pre>
-Result for the classification model:
-<pre class="result">
- id | estimated_class_text |    attributes     |   class_text    | class |   state
-----+----------------------+-------------------+-----------------+-------+-----------
-  1 | Iris_setosa          | {5.0,3.2,1.2,0.2} | Iris_setosa     |     1 | Alaska
-  2 | Iris_setosa          | {5.5,3.5,1.3,0.2} | Iris_setosa     |     1 | Alaska
-  3 | Iris_setosa          | {4.9,3.1,1.5,0.1} | Iris_setosa     |     1 | Alaska
-  4 | Iris_setosa          | {4.4,3.0,1.3,0.2} | Iris_setosa     |     1 | Alaska
-  5 | Iris_setosa          | {5.1,3.4,1.5,0.2} | Iris_setosa     |     1 | Alaska
-  6 | Iris_setosa          | {5.0,3.5,1.3,0.3} | Iris_setosa     |     1 | Alaska
-  7 | Iris_setosa          | {4.5,2.3,1.3,0.3} | Iris_setosa     |     1 | Alaska
-  8 | Iris_setosa          | {4.4,3.2,1.3,0.2} | Iris_setosa     |     1 | Alaska
-  9 | Iris_setosa          | {5.0,3.5,1.6,0.6} | Iris_setosa     |     1 | Alaska
- 10 | Iris_setosa          | {5.1,3.8,1.9,0.4} | Iris_setosa     |     1 | Alaska
- 11 | Iris_setosa          | {4.8,3.0,1.4,0.3} | Iris_setosa     |     1 | Alaska
- 12 | Iris_setosa          | {5.1,3.8,1.6,0.2} | Iris_setosa     |     1 | Alaska
- 13 | Iris_versicolor      | {5.7,2.8,4.5,1.3} | Iris_versicolor |     2 | Alaska
- 14 | Iris_versicolor      | {6.3,3.3,4.7,1.6} | Iris_versicolor |     2 | Alaska
- 15 | Iris_versicolor      | {4.9,2.4,3.3,1.0} | Iris_versicolor |     2 | Alaska
- 16 | Iris_versicolor      | {6.6,2.9,4.6,1.3} | Iris_versicolor |     2 | Alaska
- 17 | Iris_versicolor      | {5.2,2.7,3.9,1.4} | Iris_versicolor |     2 | Alaska
- 18 | Iris_versicolor      | {5.0,2.0,3.5,1.0} | Iris_versicolor |     2 | Alaska
- 19 | Iris_versicolor      | {5.9,3.0,4.2,1.5} | Iris_versicolor |     2 | Alaska
- 20 | Iris_versicolor      | {6.0,2.2,4.0,1.0} | Iris_versicolor |     2 | Alaska
- 21 | Iris_versicolor      | {6.1,2.9,4.7,1.4} | Iris_versicolor |     2 | Alaska
- 22 | Iris_versicolor      | {5.6,2.9,3.6,1.3} | Iris_versicolor |     2 | Alaska
- 23 | Iris_versicolor      | {6.7,3.1,4.4,1.4} | Iris_versicolor |     2 | Alaska
- 24 | Iris_versicolor      | {5.6,3.0,4.5,1.5} | Iris_versicolor |     2 | Alaska
- 25 | Iris_versicolor      | {5.8,2.7,4.1,1.0} | Iris_versicolor |     2 | Alaska
- 26 | Iris_versicolor      | {6.2,2.2,4.5,1.5} | Iris_versicolor |     2 | Alaska
- 27 | Iris_versicolor      | {5.6,2.5,3.9,1.1} | Iris_versicolor |     2 | Alaska
- 28 | Iris_setosa          | {5.0,3.4,1.5,0.2} | Iris_setosa     |     1 | Tennessee
- 29 | Iris_setosa          | {4.4,2.9,1.4,0.2} | Iris_setosa     |     1 | Tennessee
- 30 | Iris_setosa          | {4.9,3.1,1.5,0.1} | Iris_setosa     |     1 | Tennessee
- 31 | Iris_setosa          | {5.4,3.7,1.5,0.2} | Iris_setosa     |     1 | Tennessee
- 32 | Iris_setosa          | {4.8,3.4,1.6,0.2} | Iris_setosa     |     1 | Tennessee
- 33 | Iris_setosa          | {4.8,3.0,1.4,0.1} | Iris_setosa     |     1 | Tennessee
- 34 | Iris_setosa          | {4.3,3.0,1.1,0.1} | Iris_setosa     |     1 | Tennessee
- 35 | Iris_setosa          | {5.8,4.0,1.2,0.2} | Iris_setosa     |     1 | Tennessee
- 36 | Iris_setosa          | {5.7,4.4,1.5,0.4} | Iris_setosa     |     1 | Tennessee
- 37 | Iris_setosa          | {5.4,3.9,1.3,0.4} | Iris_setosa     |     1 | Tennessee
- 38 | Iris_versicolor      | {6.0,2.9,4.5,1.5} | Iris_versicolor |     2 | Tennessee
- 39 | Iris_versicolor      | {5.7,2.6,3.5,1.0} | Iris_versicolor |     2 | Tennessee
- 40 | Iris_versicolor      | {5.5,2.4,3.8,1.1} | Iris_versicolor |     2 | Tennessee
- 41 | Iris_versicolor      | {5.5,2.4,3.7,1.0} | Iris_versicolor |     2 | Tennessee
- 42 | Iris_versicolor      | {5.8,2.7,3.9,1.2} | Iris_versicolor |     2 | Tennessee
- 43 | Iris_versicolor      | {6.0,2.7,5.1,1.6} | Iris_versicolor |     2 | Tennessee
- 44 | Iris_versicolor      | {5.4,3.0,4.5,1.5} | Iris_versicolor |     2 | Tennessee
- 45 | Iris_versicolor      | {6.0,3.4,4.5,1.6} | Iris_versicolor |     2 | Tennessee
- 46 | Iris_versicolor      | {6.7,3.1,4.7,1.5} | Iris_versicolor |     2 | Tennessee
- 47 | Iris_versicolor      | {6.3,2.3,4.4,1.3} | Iris_versicolor |     2 | Tennessee
- 48 | Iris_versicolor      | {5.6,3.0,4.1,1.3} | Iris_versicolor |     2 | Tennessee
- 49 | Iris_versicolor      | {5.5,2.5,4.0,1.3} | Iris_versicolor |     2 | Tennessee
- 50 | Iris_versicolor      | {5.5,2.6,4.4,1.2} | Iris_versicolor |     2 | Tennessee
- 51 | Iris_versicolor      | {6.1,3.0,4.6,1.4} | Iris_versicolor |     2 | Tennessee
- 52 | Iris_versicolor      | {5.8,2.6,4.0,1.2} | Iris_versicolor |     2 | Tennessee
-(52 rows)
-</pre>
-Count the misclassifications:
-<pre class="example">
-SELECT COUNT(*) FROM mlp_prediction JOIN iris_data USING (id)
-WHERE mlp_prediction.estimated_class_text != iris_data.class_text;
-</pre>
-<pre class="result">
- count
--------+
-     0
-</pre>
-
--# Prediction based on grouping using the state column:
-<pre class="example">
-DROP TABLE IF EXISTS mlp_prediction;
-SELECT madlib.mlp_predict(
-         'mlp_model_group',   -- Model table
-         'iris_data',         -- Test data table
-         'id',                -- Id column in test table
-         'mlp_prediction',    -- Output table for predictions
-         'response'           -- Output classes, not probabilities
-     );
-SELECT * FROM mlp_prediction JOIN iris_data USING (state,id) ORDER BY state, id;
-</pre>
-Result for the classification model:
+Result for the classification model:
 <pre class="result">
    state   | id | estimated_class_text |    attributes     |   class_text    | class
 -----------+----+----------------------+-------------------+-----------------+-------
@@ -1133,6 +1028,68 @@ Result for the classification model:
 (52 rows)
 </pre>
 
+<h4>Regression without Mini-Batching</h4>
+
+-# Create a dataset with housing prices data.
+<pre class="example">
+DROP TABLE IF EXISTS lin_housing;
+CREATE TABLE lin_housing (id serial, x numeric[], zipcode int, y float8);
+INSERT INTO lin_housing(id, x, zipcode, y) VALUES
+(1,ARRAY[1,0.00632,18.00,2.310,0,0.5380,6.5750,65.20,4.0900,1,296.0,15.30,396.90,4.98],94016,24.00),
+(2,ARRAY[1,0.02731,0.00,7.070,0,0.4690,6.4210,78.90,4.9671,2,242.0,17.80,396.90,9.14],94016,21.60),
+(3,ARRAY[1,0.02729,0.00,7.070,0,0.4690,7.1850,61.10,4.9671,2,242.0,17.80,392.83,4.03],94016,34.70),
+(4,ARRAY[1,0.03237,0.00,2.180,0,0.4580,6.9980,45.80,6.0622,3,222.0,18.70,394.63,2.94],94016,33.40),
+(5,ARRAY[1,0.06905,0.00,2.180,0,0.4580,7.1470,54.20,6.0622,3,222.0,18.70,396.90,5.33],94016,36.20),
+(6,ARRAY[1,0.02985,0.00,2.180,0,0.4580,6.4300,58.70,6.0622,3,222.0,18.70,394.12,5.21],94016,28.70),
+(7,ARRAY[1,0.08829,12.50,7.870,0,0.5240,6.0120,66.60,5.5605,5,311.0,15.20,395.60,12.43],94016,22.90),
+(8,ARRAY[1,0.14455,12.50,7.870,0,0.5240,6.1720,96.10,5.9505,5,311.0,15.20,396.90,19.15],94016,27.10),
+(9,ARRAY[1,0.21124,12.50,7.870,0,0.5240,5.6310,100.00,6.0821,5,311.0,15.20,386.63,29.93],94016,16.50),
+(10,ARRAY[1,0.17004,12.50,7.870,0,0.5240,6.0040,85.90,6.5921,5,311.0,15.20,386.71,17.10],94016,18.90),
+(11,ARRAY[1,0.22489,12.50,7.870,0,0.5240,6.3770,94.30,6.3467,5,311.0,15.20,392.52,20.45],94016,15.00),
+(12,ARRAY[1,0.11747,12.50,7.870,0,0.5240,6.0090,82.90,6.2267,5,311.0,15.20,396.90,13.27],20001,18.90),
+(13,ARRAY[1,0.09378,12.50,7.870,0,0.5240,5.8890,39.00,5.4509,5,311.0,15.20,390.50,15.71],20001,21.70),
+(14,ARRAY[1,0.62976,0.00,8.140,0,0.5380,5.9490,61.80,4.7075,4,307.0,21.00,396.90,8.26],20001,20.40),
+(15,ARRAY[1,0.63796,0.00,8.140,0,0.5380,6.0960,84.50,4.4619,4,307.0,21.00,380.02,10.26],20001,18.20),
+(16,ARRAY[1,0.62739,0.00,8.140,0,0.5380,5.8340,56.50,4.4986,4,307.0,21.00,395.62,8.47],20001,19.90),
+(17,ARRAY[1,1.05393,0.00,8.140,0,0.5380,5.9350,29.30,4.4986,4,307.0,21.00,386.85,6.58],20001, 23.10),
+(18,ARRAY[1,0.78420,0.00,8.140,0,0.5380,5.9900,81.70,4.2579,4,307.0,21.00,386.75,14.67],20001,17.50),
+(19,ARRAY[1,0.80271,0.00,8.140,0,0.5380,5.4560,36.60,3.7965,4,307.0,21.00,288.99,11.69],20001,20.20),
+(20,ARRAY[1,0.72580,0.00,8.140,0,0.5380,5.7270,69.50,3.7965,4,307.0,21.00,390.95,11.28],20001,18.20);
+</pre>
+
+-# Now train a regression model using a multilayer
+perceptron with two hidden layers of twenty-five nodes each:
+<pre class="example">
+DROP TABLE IF EXISTS mlp_regress, mlp_regress_summary, mlp_regress_standardization;
+SELECT setseed(0);
+SELECT madlib.mlp_regression(
+    'lin_housing',    -- Source table
+    'mlp_regress',    -- Destination table
+    'x',              -- Input features
+    'y',              -- Dependent variable
+    ARRAY[25,25],     -- Number of units per layer
+    'learning_rate_init=0.001,
+    n_iterations=500,
+    lambda=0.001,
+    tolerance=0',     -- Optimizer params
+    'relu',           -- Activation function
+    NULL,             -- Default weight (1)
+    FALSE,            -- No warm start
+    FALSE             -- Not verbose
+);
+</pre>
+View the model:
+<pre class="example">
+\\x on
+SELECT * FROM mlp_regress;
+</pre>
+<pre class="result">
+-[ RECORD 1 ]--+------------------------------------------------------------------------------------
+coeff          | {-0.250057620174,0.0630805938982,-0.290635490112,-0.382966162592,-0.212206338909...
+loss           | 1.07042781236
+num_iterations | 500
+</pre>
+
 -# Prediction using the regression model:
 <pre class="example">
 DROP TABLE IF EXISTS mlp_regress_prediction;
@@ -1144,11 +1101,10 @@ SELECT madlib.mlp_predict(
          'response'                   -- Output values, not probabilities
      );
 </pre>
-View results
+View results:
 <pre class="example">
 SELECT * FROM lin_housing JOIN mlp_regress_prediction USING (id) ORDER BY id;
 </pre>
-Result for the regression model:
 <pre class="result">
  id |                                         x                                        | zipcode |  y   |   estimated_y
 ----+----------------------------------------------------------------------------------+---------+------+------------------
@@ -1185,6 +1141,154 @@ JOIN mlp_regress_prediction USING (id);
  0.544960829104004
 </pre>
 
+<h4>Regression with Mini-Batching</h4>
+
+-# Call the mini-batch preprocessor using
+the same data set as above:
+<pre class="example">
+DROP TABLE IF EXISTS lin_housing_packed, lin_housing_packed_summary, lin_housing_packed_standardization;
+SELECT madlib.minibatch_preprocessor('lin_housing',         -- Source table
+                                     'lin_housing_packed',  -- Output table
+                                     'y',                   -- Dependent variable
+                                     'x'                   -- Independent variables
+                                     );
+</pre>
+-# Train the regression model with mini-batching:
+<pre class="example">
+DROP TABLE IF EXISTS mlp_regress, mlp_regress_summary, mlp_regress_standardization;
+SELECT setseed(0);
+SELECT madlib.mlp_regression(
+    'lin_housing_packed',    -- Source table
+    'mlp_regress',           -- Destination table
+    'independent_varname',   -- Hardcode to this, from table lin_housing_packed
+    'dependent_varname',     -- Hardcode to this, from table lin_housing_packed
+    ARRAY[25,25],            -- Number of units per layer
+    'learning_rate_init=0.01,
+    n_iterations=500,
+    lambda=0.001,
+    tolerance=0',            -- Optimizer params
+    'tanh',                  -- Activation function
+    NULL,                    -- Default weight (1)
+    FALSE,                   -- No warm start
+    FALSE                    -- Not verbose
+);
+</pre>
+View the model:
+<pre class="example">
+\\x on
+SELECT * FROM mlp_regress;
+</pre>
+<pre class="result">
+-[ RECORD 1 ]--+-------------------------------------------------------------
+coeff          | {0.0395865908810001,-0.164860448878703,-0.132787863194324...
+loss           | 0.0442383714892138
+num_iterations | 500
+</pre>
+-# Prediction for regression:
+<pre class="example">
+DROP TABLE IF EXISTS mlp_regress_prediction;
+SELECT madlib.mlp_predict(
+         'mlp_regress',               -- Model table
+         'lin_housing',               -- Test data table
+         'id',                        -- Id column in test table
+         'mlp_regress_prediction',    -- Output table for predictions
+         'response'                   -- Output values, not probabilities
+     );
+\\x off
+SELECT *, ABS(y-estimated_y) as abs_diff FROM lin_housing JOIN mlp_regress_prediction USING (id) ORDER BY id;
+</pre>
+<pre class="result">
+ id |                                        x                                         | zipcode |  y   | zipcode |   estimated_y    |      abs_diff      
+----+----------------------------------------------------------------------------------+---------+------+---------+------------------+--------------------
+  1 | {1,0.00632,18.00,2.310,0,0.5380,6.5750,65.20,4.0900,1,296.0,15.30,396.90,4.98}   |   94016 |   24 |   94016 | 23.9714991250013 | 0.0285008749987092
+  2 | {1,0.02731,0.00,7.070,0,0.4690,6.4210,78.90,4.9671,2,242.0,17.80,396.90,9.14}    |   94016 | 21.6 |   94016 | 22.3655180133895 |  0.765518013389535
+  3 | {1,0.02729,0.00,7.070,0,0.4690,7.1850,61.10,4.9671,2,242.0,17.80,392.83,4.03}    |   94016 | 34.7 |   94016 | 33.8620767428645 |  0.837923257135465
+  4 | {1,0.03237,0.00,2.180,0,0.4580,6.9980,45.80,6.0622,3,222.0,18.70,394.63,2.94}    |   94016 | 33.4 |   94016 | 35.3094157686524 |   1.90941576865244
+  5 | {1,0.06905,0.00,2.180,0,0.4580,7.1470,54.20,6.0622,3,222.0,18.70,396.90,5.33}    |   94016 | 36.2 |   94016 | 35.0379122731818 |   1.16208772681817
+  6 | {1,0.02985,0.00,2.180,0,0.4580,6.4300,58.70,6.0622,3,222.0,18.70,394.12,5.21}    |   94016 | 28.7 |   94016 | 27.5207943492151 |   1.17920565078487
+  7 | {1,0.08829,12.50,7.870,0,0.5240,6.0120,66.60,5.5605,5,311.0,15.20,395.60,12.43}  |   94016 | 22.9 |   94016 | 24.9841422781166 |    2.0841422781166
+  8 | {1,0.14455,12.50,7.870,0,0.5240,6.1720,96.10,5.9505,5,311.0,15.20,396.90,19.15}  |   94016 | 27.1 |   94016 | 24.5403994064793 |   2.55960059352067
+  9 | {1,0.21124,12.50,7.870,0,0.5240,5.6310,100.00,6.0821,5,311.0,15.20,386.63,29.93} |   94016 | 16.5 |   94016 | 17.2588278443879 |   0.75882784438787
+ 10 | {1,0.17004,12.50,7.870,0,0.5240,6.0040,85.90,6.5921,5,311.0,15.20,386.71,17.10}  |   94016 | 18.9 |   94016 | 17.0600407532569 |    1.8399592467431
+ 11 | {1,0.22489,12.50,7.870,0,0.5240,6.3770,94.30,6.3467,5,311.0,15.20,392.52,20.45}  |   94016 |   15 |   94016 | 15.2284207930287 |  0.228420793028732
+ 12 | {1,0.11747,12.50,7.870,0,0.5240,6.0090,82.90,6.2267,5,311.0,15.20,396.90,13.27}  |   20001 | 18.9 |   20001 | 19.2272848285357 |  0.327284828535671
+ 13 | {1,0.09378,12.50,7.870,0,0.5240,5.8890,39.00,5.4509,5,311.0,15.20,390.50,15.71}  |   20001 | 21.7 |   20001 | 21.3979318641202 |  0.302068135879811
+ 14 | {1,0.62976,0.00,8.140,0,0.5380,5.9490,61.80,4.7075,4,307.0,21.00,396.90,8.26}    |   20001 | 20.4 |   20001 | 19.7743403979155 |  0.625659602084532
+ 15 | {1,0.63796,0.00,8.140,0,0.5380,6.0960,84.50,4.4619,4,307.0,21.00,380.02,10.26}   |   20001 | 18.2 |   20001 | 18.7400800902121 |  0.540080090212125
+ 16 | {1,0.62739,0.00,8.140,0,0.5380,5.8340,56.50,4.4986,4,307.0,21.00,395.62,8.47}    |   20001 | 19.9 |   20001 | 19.6187933144569 |  0.281206685543061
+ 17 | {1,1.05393,0.00,8.140,0,0.5380,5.9350,29.30,4.4986,4,307.0,21.00,386.85,6.58}    |   20001 | 23.1 |   20001 | 23.3492239648177 |  0.249223964817737
+ 18 | {1,0.78420,0.00,8.140,0,0.5380,5.9900,81.70,4.2579,4,307.0,21.00,386.75,14.67}   |   20001 | 17.5 |   20001 | 17.0806608347814 |  0.419339165218577
+ 19 | {1,0.80271,0.00,8.140,0,0.5380,5.4560,36.60,3.7965,4,307.0,21.00,288.99,11.69}   |   20001 | 20.2 |   20001 | 20.1559086626409 |  0.044091337359113
+ 20 | {1,0.72580,0.00,8.140,0,0.5380,5.7270,69.50,3.7965,4,307.0,21.00,390.95,11.28}   |   20001 | 18.2 |   20001 | 18.6980897920022 |  0.498089792002183
+(20 rows)
+</pre>
+Error metric (the square root of the mean absolute difference):
+<pre class="example">
+SELECT SQRT(SUM(ABS(y-estimated_y))/COUNT(y)) as rms_error FROM lin_housing 
+JOIN mlp_regress_prediction USING (id);
+</pre>
+<pre class="result">
+     rms_error     
+-------------------+
+ 0.912158035902468
+(1 row)
+</pre>
+
+<h4>Regression with Grouping and Mini-Batching</h4>
+
+-# To use grouping and mini-batching, we must first
+re-run the preprocessor and specify grouping:
+<pre class="example">
+DROP TABLE IF EXISTS lin_housing_packed, lin_housing_packed_summary, lin_housing_packed_standardization;
+SELECT madlib.minibatch_preprocessor('lin_housing',         -- Source table
+                                     'lin_housing_packed',  -- Output table
+                                     'y',                   -- Dependent variable
+                                     'x',                   -- Independent variables
+                                     'zipcode'              -- Group by zipcode
+                                     );
+</pre>
+
+-# Train the regression model and group the training data by zipcode
+to learn a different model for each zipcode:
+<pre class="example">
+DROP TABLE IF EXISTS mlp_regress_group, mlp_regress_group_summary, mlp_regress_group_standardization;
+-- Set seed so results are reproducible
+SELECT setseed(0);
+SELECT madlib.mlp_regression(
+    'lin_housing_packed',    -- Source table
+    'mlp_regress_group',     -- Destination table
+    'independent_varname',   -- Input features
+    'dependent_varname',     -- Dependent variable
+    ARRAY[25,25],            -- Number of units per layer
+    'learning_rate_init=0.001,
+    n_iterations=500,
+    lambda=0.001,
+    tolerance=0',            -- Optimizer params
+    'relu',                  -- Activation function
+    NULL,                    -- Default weight (1)
+    FALSE,                   -- No warm start
+    FALSE,                   -- Not verbose
+    'zipcode'                -- Grouping column
+);
+</pre>
+View the regression model with grouping:
+<pre class="example">
+\\x on
+SELECT * FROM mlp_regress_group;
+</pre>
+<pre class="result">
+-[ RECORD 1 ]--+------------------------------------------------------------------------------------
+zipcode        | 200001
+coeff          | {-0.193588485849,0.063428493184,-0.30440608833,-0.355695802004,-0.175942716164 ...
+loss           | 0.0904009145541
+num_iterations | 500
+-[ RECORD 2 ]--+------------------------------------------------------------------------------------
+zipcode        | 94016
+coeff          | {-0.18965351506,0.0633650963628,-0.302423579808,-0.334367637252,-0.230043593847 ...
+loss           | 1.04772100552
+num_iterations | 500
+</pre>
+
 -# Prediction using the regression model for each group based on the zipcode:
 <pre class="example">
 DROP TABLE IF EXISTS mlp_regress_prediction;
@@ -1195,38 +1299,35 @@ SELECT madlib.mlp_predict(
          'mlp_regress_prediction',    -- Output table for predictions
          'response'                   -- Output values, not probabilities
      );
-</pre>
-View results
-<pre class="example">
+\\x off
 SELECT * FROM lin_housing JOIN mlp_regress_prediction USING (zipcode, id) ORDER BY zipcode, id;
 </pre>
-Result for the regression model:
 <pre class="result">
- zipcode | id |                                    x                                    |  y   |   estimated_y
----------+----+-------------------------------------------------------------------------+------+------------------
-   20001 | 12 | {1,0.11747,12.5,7.87,0,0.524,6.009,82.9,6.2267,5,311,15.2,396.9,13.27}  | 18.9 | 18.9015848636694
-   20001 | 13 | {1,0.09378,12.5,7.87,0,0.524,5.889,39,5.4509,5,311,15.2,390.5,15.71}    | 21.7 | 21.6953448259341
-   20001 | 14 | {1,0.62976,0,8.14,0,0.538,5.949,61.8,4.7075,4,307,21,396.9,8.26}        | 20.4 | 20.3111342795946
-   20001 | 15 | {1,0.63796,0,8.14,0,0.538,6.096,84.5,4.4619,4,307,21,380.02,10.26}      | 18.2 | 18.2483496870986
-   20001 | 16 | {1,0.62739,0,8.14,0,0.538,5.834,56.5,4.4986,4,307,21,395.62,8.47}       | 19.9 | 20.0052448114423
-   20001 | 17 | {1,1.05393,0,8.14,0,0.538,5.935,29.3,4.4986,4,307,21,386.85,6.58}       | 23.1 |  23.096355805823
-   20001 | 18 | {1,0.7842,0,8.14,0,0.538,5.99,81.7,4.2579,4,307,21,386.75,14.67}        | 17.5 |  17.494006122731
-   20001 | 19 | {1,0.80271,0,8.14,0,0.538,5.456,36.6,3.7965,4,307,21,288.99,11.69}      | 20.2 | 20.1906548782618
-   20001 | 20 | {1,0.7258,0,8.14,0,0.538,5.727,69.5,3.7965,4,307,21,390.95,11.28}       | 18.2 | 18.1765233580419
-   94016 |  1 | {1,0.00632,18,2.31,0,0.538,6.575,65.2,4.09,1,296,15.3,396.9,4.98}       |   24 | 23.9998833863682
-   94016 |  2 | {1,0.02731,0,7.07,0,0.469,6.421,78.9,4.9671,2,242,17.8,396.9,9.14}      | 21.6 | 21.6005641560929
-   94016 |  3 | {1,0.02729,0,7.07,0,0.469,7.185,61.1,4.9671,2,242,17.8,392.83,4.03}     | 34.7 | 34.6986979263639
-   94016 |  4 | {1,0.03237,0,2.18,0,0.458,6.998,45.8,6.0622,3,222,18.7,394.63,2.94}     | 33.4 | 33.4004355375207
-   94016 |  5 | {1,0.06905,0,2.18,0,0.458,7.147,54.2,6.0622,3,222,18.7,396.9,5.33}      | 36.2 | 36.1960129477404
-   94016 |  6 | {1,0.02985,0,2.18,0,0.458,6.43,58.7,6.0622,3,222,18.7,394.12,5.21}      | 28.7 | 28.6982928526275
-   94016 |  7 | {1,0.08829,12.5,7.87,0,0.524,6.012,66.6,5.5605,5,311,15.2,395.6,12.43}  | 22.9 | 22.8994945607741
-   94016 |  8 | {1,0.14455,12.5,7.87,0,0.524,6.172,96.1,5.9505,5,311,15.2,396.9,19.15}  | 27.1 | 27.0986128441621
-   94016 |  9 | {1,0.21124,12.5,7.87,0,0.524,5.631,100,6.0821,5,311,15.2,386.63,29.93}  | 16.5 | 16.5002686243432
-   94016 | 10 | {1,0.17004,12.5,7.87,0,0.524,6.004,85.9,6.5921,5,311,15.2,386.71,17.1}  | 18.9 | 18.8994689601322
-   94016 | 11 | {1,0.22489,12.5,7.87,0,0.524,6.377,94.3,6.3467,5,311,15.2,392.52,20.45} |   15 | 15.0010664443965
+ zipcode | id |                                        x                                         |  y   |   estimated_y    
+---------+----+----------------------------------------------------------------------------------+------+------------------
+   20001 | 12 | {1,0.11747,12.50,7.870,0,0.5240,6.0090,82.90,6.2267,5,311.0,15.20,396.90,13.27}  | 18.9 | 19.2272848285357
+   20001 | 13 | {1,0.09378,12.50,7.870,0,0.5240,5.8890,39.00,5.4509,5,311.0,15.20,390.50,15.71}  | 21.7 | 21.3979318641202
+   20001 | 14 | {1,0.62976,0.00,8.140,0,0.5380,5.9490,61.80,4.7075,4,307.0,21.00,396.90,8.26}    | 20.4 | 19.7743403979155
+   20001 | 15 | {1,0.63796,0.00,8.140,0,0.5380,6.0960,84.50,4.4619,4,307.0,21.00,380.02,10.26}   | 18.2 | 18.7400800902121
+   20001 | 16 | {1,0.62739,0.00,8.140,0,0.5380,5.8340,56.50,4.4986,4,307.0,21.00,395.62,8.47}    | 19.9 | 19.6187933144569
+   20001 | 17 | {1,1.05393,0.00,8.140,0,0.5380,5.9350,29.30,4.4986,4,307.0,21.00,386.85,6.58}    | 23.1 | 23.3492239648177
+   20001 | 18 | {1,0.78420,0.00,8.140,0,0.5380,5.9900,81.70,4.2579,4,307.0,21.00,386.75,14.67}   | 17.5 | 17.0806608347814
+   20001 | 19 | {1,0.80271,0.00,8.140,0,0.5380,5.4560,36.60,3.7965,4,307.0,21.00,288.99,11.69}   | 20.2 | 20.1559086626409
+   20001 | 20 | {1,0.72580,0.00,8.140,0,0.5380,5.7270,69.50,3.7965,4,307.0,21.00,390.95,11.28}   | 18.2 | 18.6980897920022
+   94016 |  1 | {1,0.00632,18.00,2.310,0,0.5380,6.5750,65.20,4.0900,1,296.0,15.30,396.90,4.98}   |   24 | 23.9714991250013
+   94016 |  2 | {1,0.02731,0.00,7.070,0,0.4690,6.4210,78.90,4.9671,2,242.0,17.80,396.90,9.14}    | 21.6 | 22.3655180133895
+   94016 |  3 | {1,0.02729,0.00,7.070,0,0.4690,7.1850,61.10,4.9671,2,242.0,17.80,392.83,4.03}    | 34.7 | 33.8620767428645
+   94016 |  4 | {1,0.03237,0.00,2.180,0,0.4580,6.9980,45.80,6.0622,3,222.0,18.70,394.63,2.94}    | 33.4 | 35.3094157686524
+   94016 |  5 | {1,0.06905,0.00,2.180,0,0.4580,7.1470,54.20,6.0622,3,222.0,18.70,396.90,5.33}    | 36.2 | 35.0379122731818
+   94016 |  6 | {1,0.02985,0.00,2.180,0,0.4580,6.4300,58.70,6.0622,3,222.0,18.70,394.12,5.21}    | 28.7 | 27.5207943492151
+   94016 |  7 | {1,0.08829,12.50,7.870,0,0.5240,6.0120,66.60,5.5605,5,311.0,15.20,395.60,12.43}  | 22.9 | 24.9841422781166
+   94016 |  8 | {1,0.14455,12.50,7.870,0,0.5240,6.1720,96.10,5.9505,5,311.0,15.20,396.90,19.15}  | 27.1 | 24.5403994064793
+   94016 |  9 | {1,0.21124,12.50,7.870,0,0.5240,5.6310,100.00,6.0821,5,311.0,15.20,386.63,29.93} | 16.5 | 17.2588278443879
+   94016 | 10 | {1,0.17004,12.50,7.870,0,0.5240,6.0040,85.90,6.5921,5,311.0,15.20,386.71,17.10}  | 18.9 | 17.0600407532569
+   94016 | 11 | {1,0.22489,12.50,7.870,0,0.5240,6.3770,94.30,6.3467,5,311.0,15.20,392.52,20.45}  |   15 | 15.2284207930287
 (20 rows)
 </pre>
-Note that the results you get for all examples may vary with the platform you are using.
+Note that the results you get for all examples may vary with the database you are using.
 
 @anchor background
 @par Technical Background
@@ -1249,8 +1350,12 @@ For details on backpropagation, see [2].
 [1] https://en.wikipedia.org/wiki/Multilayer_perceptron
 
 [2] Yu Hen Hu. "Lecture 11. MLP (III): Back-Propagation."
-    University of Wisconsin Madison: Computer-Aided Engineering. Web. 12 July 2017,
-    http://homepages.cae.wisc.edu/~ece539/videocourse/notes/pdf/lec%2011%20MLP%20(3)%20BP.pdf
+University of Wisconsin Madison: Computer-Aided Engineering. Web. 12 July 2017,
+http://homepages.cae.wisc.edu/~ece539/videocourse/notes/pdf/lec%2011%20MLP%20(3)%20BP.pdf
+
+[3] "Neural Networks for Machine Learning", Lectures 6a and 6b on mini-batch gradient descent,  
+Geoffrey Hinton with Nitish Srivastava and Kevin Swersky,
+http://www.cs.toronto.edu/~tijmen/csc321/slides/lecture_slides_lec6.pdf
 
 @anchor related
 @par Related Topics


[4/6] madlib git commit: Docs: Update MLP, mini-batch documentation

Posted by ri...@apache.org.
http://git-wip-us.apache.org/repos/asf/madlib/blob/0f9f12f3/src/ports/postgres/modules/utilities/minibatch_preprocessing.sql_in
----------------------------------------------------------------------
diff --git a/src/ports/postgres/modules/utilities/minibatch_preprocessing.sql_in b/src/ports/postgres/modules/utilities/minibatch_preprocessing.sql_in
index 44e3e26..4a08702 100644
--- a/src/ports/postgres/modules/utilities/minibatch_preprocessing.sql_in
+++ b/src/ports/postgres/modules/utilities/minibatch_preprocessing.sql_in
@@ -30,24 +30,41 @@ m4_include(`SQLCommon.m4')
 @addtogroup grp_minibatch_preprocessing
 
 <div class="toc"><b>Contents</b><ul>
-<li class="level1"><a href="#todo">todo</a></li>
+<li class="level1"><a href="#minibatch_preprocessor">Mini-Batch Preprocessor</a></li>
+<li class="level1"><a href="#example">Examples</a></li>
+<li class="level1"><a href="#literature">Literature</a></li>
+<li class="level1"><a href="#related">Related Topics</a></li>
 </ul></div>
 
-MiniBatch Preprocessor is a utility function to pre process the input
-data for use with models that support mini-batching as an optimization
+The mini-batch preprocessor is a utility that prepares input
+data for use by models that support mini-batch as an optimization option.
+(This is currently
+only the case for <a href="group__grp__nn.html">Neural Networks</a>.)
+It is effectively a packing operation that builds
+arrays of dependent and independent variables from the source data table.
+
+The advantage of using mini-batching is that it can perform better than
+stochastic gradient descent (the default MADlib optimizer), because it
+uses more than one training
+example at a time, typically resulting in faster and smoother convergence [1].
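+
+Conceptually, the packing step is similar to the following sketch
+(illustrative only; table 'src' and dependent column 'y' are
+hypothetical, and the actual preprocessor also standardizes the
+independent variables):
+<pre class="example">
+-- Pack rows into buffers of 10 in random order; independent
+-- variable arrays are stacked into a 2-D array in the same way.
+SELECT buffer_id AS __id__,
+       array_agg(y) AS dependent_varname
+FROM (
+    SELECT y,
+           (row_number() OVER (ORDER BY random()) - 1) / 10 AS buffer_id
+    FROM src
+) subq
+GROUP BY buffer_id;
+</pre>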
 
 @brief
+Utility that prepares input data for use by models that support
+mini-batch as an optimization option.
+
 @anchor minibatch_preprocessor
-@par MiniBatch Preprocessor
+@par Mini-Batch Preprocessor
+The mini-batch preprocessor has the following format:
+
 <pre class="syntax">
-minibatch_preprocessor(
-    source_table
-    output_table
-    dependent_varname
-    independent_varname
-    buffer_size,
-    one_hot_encode_int_dep_var
-    )
+minibatch_preprocessor( source_table,
+                        output_table,
+                        dependent_varname,
+                        independent_varname,
+                        grouping_col,
+                        buffer_size,
+                        one_hot_encode_int_dep_var
+                        )
 </pre>
 
 \b Arguments
@@ -57,8 +74,11 @@ minibatch_preprocessor(
   </dd>
 
   <dt>output_table</dt>
-  <dd>TEXT.  Name of the output table from the preprocessor which will be used
-    as input to algorithms that support mini-batching.
+  <dd>TEXT.  Name of the output table from the preprocessor which
+  will be used as input to algorithms that support mini-batching.
+  Note that the arrays packed into the output table are randomized
+  and normalized, so they will not match up in an obvious way with the
+  rows in the source table.
   </dd>
 
   <dt>dependent_varname</dt>
@@ -67,30 +87,62 @@ minibatch_preprocessor(
 
   <dt>independent_varname</dt>
   <dd>TEXT. Column name or expression list to evaluate for the independent
-  variable.  Will be cast to double when packing.
+  variable.  Please note that independent variables
+  are cast to double precision by the preprocessor,
+  so categorical variables should be
+  one-hot or dummy encoded as appropriate.
+  See <a href="group__grp__encode__categorical.html">Encoding Categorical Variables</a>
+  for more details on this.
   @note
-  Supported expressions for independent variable
-  ‘ARRAY[x1,x2,x3]’ , where x1,x2,x3 are columns in source table with scalar values
-  ‘x1’, where x1 is a single column in source table, with value as an array, like ARRAY[1,2,3] or {1,2,3}
-  We might already support expressions that evaluate to array but haven't tested it.
-
-  Not supported
-  ‘x1,x2,x3’, where x1,x2,x3 are columns in source table with scalar values
-  ARRAY[x1,x2] where x1 is scalar and x2 is array
-  ARRAY[x1,x2] where both x1 and x2 are arrays
-  ARRAY[x1] where x1 is array
-  </dd>
-
-  <dt>buffer_size</dt>
-  <dd>INTEGER. default: ???. Number of source input rows to pack into batch
+  Supported expressions for independent variables include:
+  - ‘ARRAY[x1,x2,x3]’, where x1, x2, and x3 are
+  columns in the source table containing scalar values.
+  - Single column in the source table containing
+  an array like ARRAY[1,2,3] or {1,2,3}.
+  @note
+  The following forms are not currently supported:
+  - ‘x1,x2,x3’, where x1,x2,x3 are columns in source table with scalar values
+  - ARRAY[x1,x2] where x1 is scalar and x2 is array
+  - ARRAY[x1,x2] where both x1 and x2 are arrays
+  - ARRAY[x1] where x1 is array
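+
+  For example, the first supported form can be passed directly
+  (an illustrative sketch; table 'src' and scalar columns
+  x1, x2, x3 are hypothetical):
+<pre class="example">
+SELECT madlib.minibatch_preprocessor('src', 'src_packed', 'y',
+                                     'ARRAY[x1,x2,x3]');
+</pre>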
   </dd>
 
   <dt>grouping_col (optional)</dt>
   <dd>TEXT, default: NULL.
    An expression list used to group the input dataset into discrete groups,
-   running one preprocessing step per group. Similar to the SQL GROUP BY clause.
-   When this value is NULL, no grouping is used and a single preprocessing step
-   is performed for the whole data set.
+   which runs the preprocessing separately for each group.
+   When this value is NULL, no grouping is used and a single preprocessor step
+   is run for the whole data set.
+   @note
+   If you plan to use grouping in model training, then you must set
+   up the groups in the preprocessor exactly as you want to use them
+   in training.
+  </dd>
+
+  <dt>buffer_size (optional)</dt>
+  <dd>INTEGER, default: computed.  Buffer size is the
+  number of rows from the
+  source table that are packed into one row of the preprocessor
+  output table.  The default value is computed considering the size of
+  the source table, the number of independent variables, the number of
+  groups, and the number of segments in the database cluster.  For larger data sets,
+  the computed buffer size will typically be a value in the millions.
+  </dd>
+
+  <dt>one_hot_encode_int_dep_var (optional)</dt>
+  <dd> BOOLEAN. default: FALSE.
+  Flag to one-hot encode dependent variables that are
+  scalar integers. This parameter is ignored if the
+  dependent variable is not a scalar integer.
+
+@note The mini-batch preprocessor automatically encodes
+dependent variables that are boolean and character types such as text, char and
+varchar.  However, scalar integers are a special case because they can be used
+in both classification and regression problems, so you must tell the mini-batch
+preprocessor whether you want to encode them or not. If you have
+already encoded the dependent variable yourself, you can ignore this parameter.
+Also, if you want to encode float values for some reason, cast them to text
+first.
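+For example, to encode a float dependent variable, you could
+materialize the cast first (an illustrative sketch; table 'src'
+and FLOAT8 column 'label' are hypothetical):
+<pre class="example">
+CREATE VIEW src_txt AS SELECT *, label::TEXT AS label_txt FROM src;
+SELECT madlib.minibatch_preprocessor('src_txt', 'src_packed',
+                                     'label_txt', 'x');
+</pre>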
   </dd>
 
   <dt>one_hot_encode_int_dep_var (optional)</dt>
@@ -112,16 +164,18 @@ first.
 
 <b>Output tables</b>
 <br>
-    The output table produced by MLP contains the following columns:
+    The output table produced by the mini-batch preprocessor contains the following columns:
     <table class="output">
       <tr>
-        <th>id</th>
+        <th>__id__</th>
         <td>INTEGER. Unique id for packed table.
         </td>
       </tr>
       <tr>
         <th>dependent_varname</th>
-        <td>FLOAT8[]. Packed array of dependent variables.
+        <td>FLOAT8[]. Packed array of dependent variables.  If the
+        dependent variable in the source table is categorical,
+        the preprocessor will one-hot encode it.
         </td>
       </tr>
       <tr>
@@ -131,7 +185,7 @@ first.
       </tr>
       <tr>
         <th>grouping_cols</th>
-        <td>TEXT. Name of grouping columns
+        <td>TEXT. Name of grouping columns.
         </td>
       </tr>
     </table>
@@ -140,15 +194,15 @@ A summary table named \<output_table\>_summary is also created, which has the fo
     <table class="output">
     <tr>
         <th>source_table</th>
-        <td>The source table.</td>
+        <td>Name of the source table.</td>
     </tr>
     <tr>
         <th>output_table</th>
-        <td>Output table name from preprocessor.</td>
+        <td>Name of output table generated by preprocessor.</td>
     </tr>
     <tr>
         <th>dependent_varname</th>
-        <td>Dependent variable from the input table.</td>
+        <td>Dependent variable from the source table.</td>
     </tr>
     <tr>
         <th>independent_varname</th>
@@ -160,43 +214,330 @@ A summary table named \<output_table\>_summary is also created, which has the fo
     </tr>
     <tr>
         <th>class_values</th>
-        <td>Class values of the dependent variable (‘NULL’(as TEXT type) for non categorical vars, i,e., if dependent_vartype=”Categorical”)./td>
+        <td>Class values (i.e., levels) of the dependent
+        variable if categorical.  If the dependent variable is not
+        categorical, this will be NULL.</td>
     </tr>
     <tr>
         <th>num_rows_processed</th>
-        <td>The total number of rows that were used in the computation.</td>
+        <td>The total number of rows that were used in the
+        preprocessing operation.</td>
     </tr>
     <tr>
         <th>num_missing_rows_skipped</th>
-        <td>The total number of rows that were skipped because of NULL values in them.</td>
+        <td>The total number of rows that were skipped because of
+        NULL values in either the dependent or independent variables.</td>
     </tr>
     <tr>
         <th>grouping_col</th>
-        <td>NULL if no grouping_col was specified , and a comma separated
-        list of grouping column names if not.</td>
+        <td>Comma-separated list of grouping column names
+        if grouping is used; NULL if there is no grouping.</td>
     </tr>
    </table>
 
-A standardization table named \<output_table\>_standardization is also created, that has the
-following columns:
+A standardization table named \<output_table\>_standardization
+is also created.  This is needed by the models that will use the
+preprocessed data, so it is likely not of much interest to users.
+It has the following columns:
   <table class="output">
     <tr>
         <th>grouping columns</th>
-        <td>If grouping_col is specified during training, a column for each grouping column
+        <td>If 'grouping_col' is specified,
+        a column for each grouping column
         is created.</td>
     </tr>
     <tr>
         <th>mean</th>
-        <td>Mean of independent vars by group</td>
+        <td>Mean of independent variables.</td>
     </tr>
     <tr>
         <th>std</th>
-        <td>Standard deviation of independent vars by group</td>
+        <td>Population standard deviation of
+        independent variables.</td>
     </tr>
   </table>
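+
+The independent variables are standardized with these values,
+i.e., in the form (x - mean) / std.  For example, the
+standardization can be reproduced by hand as follows
+(a sketch, assuming the 'iris_data' and
+'iris_data_packed_standardization' tables from the examples
+below, whose 'attributes' arrays have 4 elements):
+<pre class="example">
+SELECT i,
+       (attributes[i] - mean[i]) / std[i] AS standardized_value
+FROM iris_data,
+     iris_data_packed_standardization,
+     generate_series(1, 4) i
+WHERE id = 1;
+</pre>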
 
 @anchor example
 @par Examples
+-#  Create an input data set based on the well known iris data set:
+<pre class="example">
+DROP TABLE IF EXISTS iris_data;
+CREATE TABLE iris_data(
+    id serial,
+    attributes numeric[],
+    class_text varchar,
+    class integer,
+    state varchar
+);
+INSERT INTO iris_data(id, attributes, class_text, class, state) VALUES
+(1,ARRAY[5.0,3.2,1.2,0.2],'Iris_setosa',1,'Alaska'),
+(2,ARRAY[5.5,3.5,1.3,0.2],'Iris_setosa',1,'Alaska'),
+(3,ARRAY[4.9,3.1,1.5,0.1],'Iris_setosa',1,'Alaska'),
+(4,ARRAY[4.4,3.0,1.3,0.2],'Iris_setosa',1,'Alaska'),
+(5,ARRAY[5.1,3.4,1.5,0.2],'Iris_setosa',1,'Alaska'),
+(6,ARRAY[5.0,3.5,1.3,0.3],'Iris_setosa',1,'Alaska'),
+(7,ARRAY[4.5,2.3,1.3,0.3],'Iris_setosa',1,'Alaska'),
+(8,ARRAY[4.4,3.2,1.3,0.2],'Iris_setosa',1,'Alaska'),
+(9,ARRAY[5.0,3.5,1.6,0.6],'Iris_setosa',1,'Alaska'),
+(10,ARRAY[5.1,3.8,1.9,0.4],'Iris_setosa',1,'Alaska'),
+(11,ARRAY[4.8,3.0,1.4,0.3],'Iris_setosa',1,'Alaska'),
+(12,ARRAY[5.1,3.8,1.6,0.2],'Iris_setosa',1,'Alaska'),
+(13,ARRAY[5.7,2.8,4.5,1.3],'Iris_versicolor',2,'Alaska'),
+(14,ARRAY[6.3,3.3,4.7,1.6],'Iris_versicolor',2,'Alaska'),
+(15,ARRAY[4.9,2.4,3.3,1.0],'Iris_versicolor',2,'Alaska'),
+(16,ARRAY[6.6,2.9,4.6,1.3],'Iris_versicolor',2,'Alaska'),
+(17,ARRAY[5.2,2.7,3.9,1.4],'Iris_versicolor',2,'Alaska'),
+(18,ARRAY[5.0,2.0,3.5,1.0],'Iris_versicolor',2,'Alaska'),
+(19,ARRAY[5.9,3.0,4.2,1.5],'Iris_versicolor',2,'Alaska'),
+(20,ARRAY[6.0,2.2,4.0,1.0],'Iris_versicolor',2,'Alaska'),
+(21,ARRAY[6.1,2.9,4.7,1.4],'Iris_versicolor',2,'Alaska'),
+(22,ARRAY[5.6,2.9,3.6,1.3],'Iris_versicolor',2,'Alaska'),
+(23,ARRAY[6.7,3.1,4.4,1.4],'Iris_versicolor',2,'Alaska'),
+(24,ARRAY[5.6,3.0,4.5,1.5],'Iris_versicolor',2,'Alaska'),
+(25,ARRAY[5.8,2.7,4.1,1.0],'Iris_versicolor',2,'Alaska'),
+(26,ARRAY[6.2,2.2,4.5,1.5],'Iris_versicolor',2,'Alaska'),
+(27,ARRAY[5.6,2.5,3.9,1.1],'Iris_versicolor',2,'Alaska'),
+(28,ARRAY[5.0,3.4,1.5,0.2],'Iris_setosa',1,'Tennessee'),
+(29,ARRAY[4.4,2.9,1.4,0.2],'Iris_setosa',1,'Tennessee'),
+(30,ARRAY[4.9,3.1,1.5,0.1],'Iris_setosa',1,'Tennessee'),
+(31,ARRAY[5.4,3.7,1.5,0.2],'Iris_setosa',1,'Tennessee'),
+(32,ARRAY[4.8,3.4,1.6,0.2],'Iris_setosa',1,'Tennessee'),
+(33,ARRAY[4.8,3.0,1.4,0.1],'Iris_setosa',1,'Tennessee'),
+(34,ARRAY[4.3,3.0,1.1,0.1],'Iris_setosa',1,'Tennessee'),
+(35,ARRAY[5.8,4.0,1.2,0.2],'Iris_setosa',1,'Tennessee'),
+(36,ARRAY[5.7,4.4,1.5,0.4],'Iris_setosa',1,'Tennessee'),
+(37,ARRAY[5.4,3.9,1.3,0.4],'Iris_setosa',1,'Tennessee'),
+(38,ARRAY[6.0,2.9,4.5,1.5],'Iris_versicolor',2,'Tennessee'),
+(39,ARRAY[5.7,2.6,3.5,1.0],'Iris_versicolor',2,'Tennessee'),
+(40,ARRAY[5.5,2.4,3.8,1.1],'Iris_versicolor',2,'Tennessee'),
+(41,ARRAY[5.5,2.4,3.7,1.0],'Iris_versicolor',2,'Tennessee'),
+(42,ARRAY[5.8,2.7,3.9,1.2],'Iris_versicolor',2,'Tennessee'),
+(43,ARRAY[6.0,2.7,5.1,1.6],'Iris_versicolor',2,'Tennessee'),
+(44,ARRAY[5.4,3.0,4.5,1.5],'Iris_versicolor',2,'Tennessee'),
+(45,ARRAY[6.0,3.4,4.5,1.6],'Iris_versicolor',2,'Tennessee'),
+(46,ARRAY[6.7,3.1,4.7,1.5],'Iris_versicolor',2,'Tennessee'),
+(47,ARRAY[6.3,2.3,4.4,1.3],'Iris_versicolor',2,'Tennessee'),
+(48,ARRAY[5.6,3.0,4.1,1.3],'Iris_versicolor',2,'Tennessee'),
+(49,ARRAY[5.5,2.5,4.0,1.3],'Iris_versicolor',2,'Tennessee'),
+(50,ARRAY[5.5,2.6,4.4,1.2],'Iris_versicolor',2,'Tennessee'),
+(51,ARRAY[6.1,3.0,4.6,1.4],'Iris_versicolor',2,'Tennessee'),
+(52,ARRAY[5.8,2.6,4.0,1.2],'Iris_versicolor',2,'Tennessee');
+</pre>
+
+-#  Run the preprocessor:
+<pre class="example">
+DROP TABLE IF EXISTS iris_data_packed, iris_data_packed_summary, iris_data_packed_standardization;
+SELECT madlib.minibatch_preprocessor('iris_data',         -- Source table
+                                     'iris_data_packed',  -- Output table
+                                     'class_text',        -- Dependent variable
+                                     'attributes'         -- Independent variables
+                                     );
+</pre>
+For small data sets like the one in this example, the buffer size
+is determined mainly by the number of segments in the database.
+This example is run on a Greenplum database with 2 segments,
+so there are 2 rows with a buffer size of 26.
+For PostgreSQL, there would be only one row with a buffer
+size of 52, since it is a single-node database.
+For larger data sets, other factors besides the number of
+segments go into computing the buffer size.
+Also, note that the dependent variable has
+been one-hot encoded since it is categorical.
+Here is a sample of the packed output table:
+<pre class="example">
+\\x on
+SELECT * FROM iris_data_packed;
+</pre>
+<pre class="result">
+-[ RECORD 1 ]-------+-------------------------------------
+__id__              | 0
+dependent_varname   | {{1,0},{0,1},{1,0},{0,1},{1,0},{0,1},{0,1},{1,0},{1,0},{1,0},{1,0},{0,1},{0,1},{0,1},{1,0},{0,1},{0,1},{0,1},{1,0},{0,1},{1,0},{0,1},{1,0},{1,0},{1,0},{0,1}}
+independent_varname | {{-0.767560815504508,0.806649237861967,-1.07515071152907,-1.18456909732025},{-0.0995580974152422,0.00385956572525086,1.03989986852812,1.17758048907675},...
+...
+-[ RECORD 2 ]-------+-------------------------------------
+__id__              | 1
+dependent_varname   | {{1,0},{1,0},{1,0},{0,1},{0,1},{1,0},{0,1},{0,1},{0,1},{0,1},{0,1},{0,1},{0,1},{1,0},{0,1},{0,1},{0,1},{0,1},{0,1},{1,0},{0,1},{1,0},{0,1},{1,0},{1,0},{0,1}}
+independent_varname | {{0.568444620674023,2.01083374606704,-1.28665576953479,-1.18456909732025},{-1.76956489263841,0.405254401793609,-1.21615408353289,-1.18456909732025},...
+...
+</pre>
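+You can confirm the number of buffers with a simple count,
+which on this 2-segment cluster returns 2:
+<pre class="example">
+SELECT COUNT(*) FROM iris_data_packed;
+</pre>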
+Review the output summary table:
+<pre class="example">
+SELECT * FROM iris_data_packed_summary;
+</pre>
+<pre class="result">
+-[ RECORD 1 ]------------+------------------------------
+source_table             | iris_data
+output_table             | iris_data_packed
+dependent_varname        | class_text
+independent_varname      | attributes
+buffer_size              | 26
+class_values             | {Iris_setosa,Iris_versicolor}
+num_rows_processed       | 52
+num_missing_rows_skipped | 0
+grouping_cols            |
+</pre>
+Review the output standardization table:
+<pre class="example">
+SELECT * FROM iris_data_packed_standardization;
+</pre>
+<pre class="result">
+-[ RECORD 1 ]------------------------------------------------------
+mean | {5.45961538462,2.99807692308,3.025,0.851923076923}
+std  | {0.598799958695,0.498262513686,1.41840579525,0.550346179381}
+</pre>
+
+-# Generally the default buffer size will work well,
+but if you have occasion to change it:
+<pre class="example">
+DROP TABLE IF EXISTS iris_data_packed, iris_data_packed_summary, iris_data_packed_standardization;
+SELECT madlib.minibatch_preprocessor('iris_data',         -- Source table
+                                     'iris_data_packed',  -- Output table
+                                     'class_text',        -- Dependent variable
+                                     'attributes',        -- Independent variables
+                                     NULL,                -- Grouping
+                                     10                   -- Buffer size
+                                     );
+</pre>
+Review the output summary table:
+<pre class="example">
+SELECT * FROM iris_data_packed_summary;
+</pre>
+<pre class="result">
+-[ RECORD 1 ]------------+------------------------------
+source_table             | iris_data
+output_table             | iris_data_packed
+dependent_varname        | class_text
+independent_varname      | attributes
+buffer_size              | 10
+class_values             | {Iris_setosa,Iris_versicolor}
+num_rows_processed       | 52
+num_missing_rows_skipped | 0
+grouping_cols            |
+</pre>
+
+-# Run the preprocessor with grouping by state:
+<pre class="example">
+DROP TABLE IF EXISTS iris_data_packed, iris_data_packed_summary, iris_data_packed_standardization;
+SELECT madlib.minibatch_preprocessor('iris_data',         -- Source table
+                                     'iris_data_packed',  -- Output table
+                                     'class_text',        -- Dependent variable
+                                     'attributes',        -- Independent variables
+                                     'state'              -- Grouping
+                                     );
+</pre>
+Review the output table:
+<pre class="example">
+SELECT * FROM iris_data_packed ORDER BY state, __id__;
+</pre>
+<pre class="result">
+-[ RECORD 1 ]-------+-------------------------------------
+__id__              | 0
+state               | Alaska
+dependent_varname   | {{0,1},{0,1},{1,0},{0,1},{0,1},{0,1},{1,0},{0,1},{0,1},{1,0},{1,0},{0,1},{0,1}}
+independent_varname | {{0.306242850830503,-0.977074857057813,0.680489757142278 ...
+...
+-[ RECORD 2 ]-------+-------------------------------------
+__id__              | 1
+state               | Alaska
+dependent_varname   | {{0,1},{1,0},{0,1},{0,1},{1,0},{1,0},{1,0},{0,1},{1,0},{0,1},{0,1},{1,0},{1,0}}
+independent_varname | {{1.10129640587123,-0.126074175104234,1.2524188915498 ...
+...
+-[ RECORD 3 ]-------+-------------------------------------
+__id__              | 2
+state               | Alaska
+dependent_varname   | {{1,0}}
+independent_varname | {{-0.647821415218373,1.15042684782613,-1.17827992968215 ...
+...
+-[ RECORD 4 ]-------+-------------------------------------
+__id__              | 0
+state               | Tennessee
+dependent_varname   | {{1,0},{0,1},{1,0},{1,0},{1,0},{0,1},{1,0},{0,1},{0,1},{0,1},{1,0},{1,0},{0,1}}
+independent_varname | {{0.32912603663053,2.59625206429212,-1.12079945083087 ...
+...
+-[ RECORD 5 ]-------+-------------------------------------
+__id__              | 1
+state               | Tennessee
+dependent_varname   | {{0,1},{0,1},{0,1},{1,0},{1,0},{0,1},{0,1},{1,0},{0,1},{0,1},{0,1},{0,1}}
+independent_varname | {{0.865744574615085,-0.267261241912424,0.970244300719264 ...
+...
+</pre>
+Review the output summary table:
+<pre class="example">
+SELECT * FROM iris_data_packed_summary;
+</pre>
+<pre class="result">
+-[ RECORD 1 ]------------+------------------------------
+source_table             | iris_data
+output_table             | iris_data_packed
+dependent_varname        | class_text
+independent_varname      | attributes
+buffer_size              | 13
+class_values             | {Iris_setosa,Iris_versicolor}
+num_rows_processed       | 52
+num_missing_rows_skipped | 0
+grouping_cols            | state
+</pre>
+Review the output standardization table:
+<pre class="example">
+SELECT * FROM iris_data_packed_standardization;
+</pre>
+<pre class="result">
+-[ RECORD 1 ]-------------------------------------------------------------------
+state | Alaska
+mean  | {5.40740740740741,2.95925925925926,2.94814814814815,0.833333333333333}
+std   | {0.628888452645665,0.470034875978888,1.39877469405147,0.536103914747325}
+-[ RECORD 2 ]-------------------------------------------------------------------
+state | Tennessee
+mean  | {5.516,3.04,3.108,0.872}
+std   | {0.55905634778617,0.523832034148353,1.43469021046357,0.564637937088893}
+</pre>
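+
+To use the grouped, packed table in training, pass the same
+grouping to the model.  For example, a sketch for MLP
+classification (parameter values are illustrative; see
+<a href="group__grp__nn.html">Neural Networks</a> for details):
+<pre class="example">
+DROP TABLE IF EXISTS mlp_model, mlp_model_summary, mlp_model_standardization;
+SELECT madlib.mlp_classification(
+    'iris_data_packed',      -- Packed table from the preprocessor
+    'mlp_model',             -- Output model table
+    'independent_varname',   -- Packed independent variables
+    'dependent_varname',     -- Packed (one-hot encoded) dependent variable
+    ARRAY[5],                -- One hidden layer of 5 units
+    'learning_rate_init=0.003, n_iterations=500',  -- Optimizer params
+    'tanh',                  -- Activation function
+    NULL,                    -- Default weights
+    FALSE,                   -- No warm start
+    FALSE,                   -- Not verbose
+    'state'                  -- Same grouping as used in the preprocessor
+);
+</pre>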
+
+-# If the dependent variable is a scalar integer,
+and you have not already encoded it, you can ask
+the preprocessor to encode it for you:
+<pre class="example">
+DROP TABLE IF EXISTS iris_data_packed, iris_data_packed_summary, iris_data_packed_standardization;
+SELECT madlib.minibatch_preprocessor('iris_data',         -- Source table
+                                     'iris_data_packed',  -- Output table
+                                     'class',             -- Integer dependent variable
+                                     'attributes',        -- Independent variables
+                                     NULL,                -- Grouping
+                                     NULL,                -- Buffer size
+                                     TRUE                 -- Encode scalar int dependent variable
+                                     );
+</pre>
+Review the output summary table:
+<pre class="example">
+SELECT * FROM iris_data_packed_summary;
+</pre>
+<pre class="result">
+-[ RECORD 1 ]------------+-----------------
+source_table             | iris_data
+output_table             | iris_data_packed
+dependent_varname        | class
+independent_varname      | attributes
+dependent_vartype        | integer
+buffer_size              | 26
+class_values             | {1,2}
+num_rows_processed       | 52
+num_missing_rows_skipped | 0
+grouping_cols            |
+</pre>
+
+@anchor literature
+@literature
+
+[1] "Neural Networks for Machine Learning", Lectures 6a and 6b on mini-batch gradient descent,
+Geoffrey Hinton with Nitish Srivastava and Kevin Swersky,
+http://www.cs.toronto.edu/~tijmen/csc321/slides/lecture_slides_lec6.pdf
+
+@anchor related
+@par Related Topics
+
+File minibatch_preprocessing.sql_in documenting the minibatch_preprocessor() function.
+
+<a href="group__grp__nn.html"><b>Neural Networks</b></a>
+
  */
 
 CREATE OR REPLACE FUNCTION MADLIB_SCHEMA.minibatch_preprocessor(