You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@lens.apache.org by am...@apache.org on 2015/04/15 21:49:35 UTC

[01/50] [abbrv] incubator-lens git commit: LENS-153 : Add examples execution to ML (Sharad Agarwal via jdhok)

Repository: incubator-lens
Updated Branches:
  refs/heads/current-release-line f90a94dfc -> b10d15be5


LENS-153 : Add examples execution to ML (Sharad Agarwal via jdhok)


Project: http://git-wip-us.apache.org/repos/asf/incubator-lens/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-lens/commit/8d185913
Tree: http://git-wip-us.apache.org/repos/asf/incubator-lens/tree/8d185913
Diff: http://git-wip-us.apache.org/repos/asf/incubator-lens/diff/8d185913

Branch: refs/heads/current-release-line
Commit: 8d185913e76d632027028b3bbba15ceb8e158a9f
Parents: fc26d91
Author: Sharad Agarwal <sh...@apache.org>
Authored: Tue Mar 24 14:27:04 2015 +0530
Committer: jdhok <ja...@inmobi.com>
Committed: Tue Mar 24 14:27:26 2015 +0530

----------------------------------------------------------------------
 lens-docker/lens-test/Dockerfile                |   8 +-
 lens-docker/lens-test/lens-bootstrap.sh         |   5 +
 lens-ml-dist/LICENSE.txt                        | 201 +++++++++++++++++++
 lens-ml-dist/pom.xml                            |  77 +++++++
 lens-ml-dist/src/main/assembly/ml-dist.xml      |  65 ++++++
 lens-ml-dist/src/site/apt/index.apt             |  20 ++
 lens-ml-lib/data/naive_bayes/ml.properties      |   7 +
 .../data/naive_bayes/naive_bayes_train.data     |   6 -
 lens-ml-lib/data/naive_bayes/test.data          |   6 +
 lens-ml-lib/data/naive_bayes/train.data         |   6 +
 .../org/apache/lens/client/LensMLClient.java    |  33 +--
 .../main/java/org/apache/lens/ml/MLRunner.java  | 173 ++++++++++++++++
 .../java/org/apache/lens/ml/task/MLTask.java    |  73 +++----
 .../java/org/apache/lens/ml/TestMLResource.java | 130 ++++++------
 .../java/org/apache/lens/ml/TestMLRunner.java   | 138 +++++++++++++
 lens-ml-lib/tools/scripts/lens-ml-classpath.sh  |  52 +++++
 pom.xml                                         |   2 +
 .../client/lens-client-site.xml                 |   5 +
 tools/conf-pseudo-distr/server/lens-site.xml    |  35 ++++
 tools/scripts/lens-config.sh                    |   4 +
 tools/scripts/lens-run-class.sh                 | 170 ++++++++++++++++
 21 files changed, 1079 insertions(+), 137 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/8d185913/lens-docker/lens-test/Dockerfile
----------------------------------------------------------------------
diff --git a/lens-docker/lens-test/Dockerfile b/lens-docker/lens-test/Dockerfile
index d26facd..c4886c2 100644
--- a/lens-docker/lens-test/Dockerfile
+++ b/lens-docker/lens-test/Dockerfile
@@ -17,14 +17,16 @@
 
 FROM inmobi/docker-hive
 
-ENV LENS_VERSION apache-lens-2.1.0-beta-incubating-SNAPSHOT
+ENV LENS_VERSION 2.1.0-beta-incubating-SNAPSHOT
 ENV BASEDIR /opt/lens
-ENV LENS_HOME $BASEDIR/lens-dist/target/${LENS_VERSION}-bin/${LENS_VERSION}-bin/server
-ENV LENS_CLIENT $BASEDIR/lens-dist/target/${LENS_VERSION}-bin/${LENS_VERSION}-bin/client
+ENV LENS_HOME $BASEDIR/lens-dist/target/apache-lens-${LENS_VERSION}-bin/apache-lens-${LENS_VERSION}-bin/server
+ENV LENS_CLIENT $BASEDIR/lens-dist/target/apache-lens-${LENS_VERSION}-bin/apache-lens-${LENS_VERSION}-bin/client
 
 ENV LENS_SERVER_CONF $LENS_HOME/conf-pseudo-distr/
 ENV LENS_CLIENT_CONF $LENS_CLIENT/conf-pseudo-distr/
 
+ENV LENS_ML $BASEDIR/lens-ml-dist/target/apache-lens-${LENS_VERSION}-ml/
+
 
 # set permissions for lens bootstrap file
 ADD lens-bootstrap.sh /etc/lens-bootstrap.sh

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/8d185913/lens-docker/lens-test/lens-bootstrap.sh
----------------------------------------------------------------------
diff --git a/lens-docker/lens-test/lens-bootstrap.sh b/lens-docker/lens-test/lens-bootstrap.sh
index 1c61603..19685bb 100644
--- a/lens-docker/lens-test/lens-bootstrap.sh
+++ b/lens-docker/lens-test/lens-bootstrap.sh
@@ -21,6 +21,11 @@ echo "LENS_HOME " $LENS_HOME
 echo "LENS_SERVER_CONF " $LENS_SERVER_CONF
 echo "LENS_CLIENT " $LENS_CLIENT
 echo "LENS_CLIENT_CONF " $LENS_CLIENT_CONF
+echo "LENS_ML " $LENS_ML
+ 
+#set ml classpath into LENS_EXT_CLASSPATH
+LENS_EXT_CLASSPATH=$LENS_EXT_CLASSPATH:`$LENS_ML/bin/lens-ml-classpath.sh`
+export LENS_EXT_CLASSPATH
 
 #start hive bootstrap script
 /etc/hive-bootstrap.sh

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/8d185913/lens-ml-dist/LICENSE.txt
----------------------------------------------------------------------
diff --git a/lens-ml-dist/LICENSE.txt b/lens-ml-dist/LICENSE.txt
new file mode 100644
index 0000000..261eeb9
--- /dev/null
+++ b/lens-ml-dist/LICENSE.txt
@@ -0,0 +1,201 @@
+                                 Apache License
+                           Version 2.0, January 2004
+                        http://www.apache.org/licenses/
+
+   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+   1. Definitions.
+
+      "License" shall mean the terms and conditions for use, reproduction,
+      and distribution as defined by Sections 1 through 9 of this document.
+
+      "Licensor" shall mean the copyright owner or entity authorized by
+      the copyright owner that is granting the License.
+
+      "Legal Entity" shall mean the union of the acting entity and all
+      other entities that control, are controlled by, or are under common
+      control with that entity. For the purposes of this definition,
+      "control" means (i) the power, direct or indirect, to cause the
+      direction or management of such entity, whether by contract or
+      otherwise, or (ii) ownership of fifty percent (50%) or more of the
+      outstanding shares, or (iii) beneficial ownership of such entity.
+
+      "You" (or "Your") shall mean an individual or Legal Entity
+      exercising permissions granted by this License.
+
+      "Source" form shall mean the preferred form for making modifications,
+      including but not limited to software source code, documentation
+      source, and configuration files.
+
+      "Object" form shall mean any form resulting from mechanical
+      transformation or translation of a Source form, including but
+      not limited to compiled object code, generated documentation,
+      and conversions to other media types.
+
+      "Work" shall mean the work of authorship, whether in Source or
+      Object form, made available under the License, as indicated by a
+      copyright notice that is included in or attached to the work
+      (an example is provided in the Appendix below).
+
+      "Derivative Works" shall mean any work, whether in Source or Object
+      form, that is based on (or derived from) the Work and for which the
+      editorial revisions, annotations, elaborations, or other modifications
+      represent, as a whole, an original work of authorship. For the purposes
+      of this License, Derivative Works shall not include works that remain
+      separable from, or merely link (or bind by name) to the interfaces of,
+      the Work and Derivative Works thereof.
+
+      "Contribution" shall mean any work of authorship, including
+      the original version of the Work and any modifications or additions
+      to that Work or Derivative Works thereof, that is intentionally
+      submitted to Licensor for inclusion in the Work by the copyright owner
+      or by an individual or Legal Entity authorized to submit on behalf of
+      the copyright owner. For the purposes of this definition, "submitted"
+      means any form of electronic, verbal, or written communication sent
+      to the Licensor or its representatives, including but not limited to
+      communication on electronic mailing lists, source code control systems,
+      and issue tracking systems that are managed by, or on behalf of, the
+      Licensor for the purpose of discussing and improving the Work, but
+      excluding communication that is conspicuously marked or otherwise
+      designated in writing by the copyright owner as "Not a Contribution."
+
+      "Contributor" shall mean Licensor and any individual or Legal Entity
+      on behalf of whom a Contribution has been received by Licensor and
+      subsequently incorporated within the Work.
+
+   2. Grant of Copyright License. Subject to the terms and conditions of
+      this License, each Contributor hereby grants to You a perpetual,
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+      copyright license to reproduce, prepare Derivative Works of,
+      publicly display, publicly perform, sublicense, and distribute the
+      Work and such Derivative Works in Source or Object form.
+
+   3. Grant of Patent License. Subject to the terms and conditions of
+      this License, each Contributor hereby grants to You a perpetual,
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+      (except as stated in this section) patent license to make, have made,
+      use, offer to sell, sell, import, and otherwise transfer the Work,
+      where such license applies only to those patent claims licensable
+      by such Contributor that are necessarily infringed by their
+      Contribution(s) alone or by combination of their Contribution(s)
+      with the Work to which such Contribution(s) was submitted. If You
+      institute patent litigation against any entity (including a
+      cross-claim or counterclaim in a lawsuit) alleging that the Work
+      or a Contribution incorporated within the Work constitutes direct
+      or contributory patent infringement, then any patent licenses
+      granted to You under this License for that Work shall terminate
+      as of the date such litigation is filed.
+
+   4. Redistribution. You may reproduce and distribute copies of the
+      Work or Derivative Works thereof in any medium, with or without
+      modifications, and in Source or Object form, provided that You
+      meet the following conditions:
+
+      (a) You must give any other recipients of the Work or
+          Derivative Works a copy of this License; and
+
+      (b) You must cause any modified files to carry prominent notices
+          stating that You changed the files; and
+
+      (c) You must retain, in the Source form of any Derivative Works
+          that You distribute, all copyright, patent, trademark, and
+          attribution notices from the Source form of the Work,
+          excluding those notices that do not pertain to any part of
+          the Derivative Works; and
+
+      (d) If the Work includes a "NOTICE" text file as part of its
+          distribution, then any Derivative Works that You distribute must
+          include a readable copy of the attribution notices contained
+          within such NOTICE file, excluding those notices that do not
+          pertain to any part of the Derivative Works, in at least one
+          of the following places: within a NOTICE text file distributed
+          as part of the Derivative Works; within the Source form or
+          documentation, if provided along with the Derivative Works; or,
+          within a display generated by the Derivative Works, if and
+          wherever such third-party notices normally appear. The contents
+          of the NOTICE file are for informational purposes only and
+          do not modify the License. You may add Your own attribution
+          notices within Derivative Works that You distribute, alongside
+          or as an addendum to the NOTICE text from the Work, provided
+          that such additional attribution notices cannot be construed
+          as modifying the License.
+
+      You may add Your own copyright statement to Your modifications and
+      may provide additional or different license terms and conditions
+      for use, reproduction, or distribution of Your modifications, or
+      for any such Derivative Works as a whole, provided Your use,
+      reproduction, and distribution of the Work otherwise complies with
+      the conditions stated in this License.
+
+   5. Submission of Contributions. Unless You explicitly state otherwise,
+      any Contribution intentionally submitted for inclusion in the Work
+      by You to the Licensor shall be under the terms and conditions of
+      this License, without any additional terms or conditions.
+      Notwithstanding the above, nothing herein shall supersede or modify
+      the terms of any separate license agreement you may have executed
+      with Licensor regarding such Contributions.
+
+   6. Trademarks. This License does not grant permission to use the trade
+      names, trademarks, service marks, or product names of the Licensor,
+      except as required for reasonable and customary use in describing the
+      origin of the Work and reproducing the content of the NOTICE file.
+
+   7. Disclaimer of Warranty. Unless required by applicable law or
+      agreed to in writing, Licensor provides the Work (and each
+      Contributor provides its Contributions) on an "AS IS" BASIS,
+      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+      implied, including, without limitation, any warranties or conditions
+      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+      PARTICULAR PURPOSE. You are solely responsible for determining the
+      appropriateness of using or redistributing the Work and assume any
+      risks associated with Your exercise of permissions under this License.
+
+   8. Limitation of Liability. In no event and under no legal theory,
+      whether in tort (including negligence), contract, or otherwise,
+      unless required by applicable law (such as deliberate and grossly
+      negligent acts) or agreed to in writing, shall any Contributor be
+      liable to You for damages, including any direct, indirect, special,
+      incidental, or consequential damages of any character arising as a
+      result of this License or out of the use or inability to use the
+      Work (including but not limited to damages for loss of goodwill,
+      work stoppage, computer failure or malfunction, or any and all
+      other commercial damages or losses), even if such Contributor
+      has been advised of the possibility of such damages.
+
+   9. Accepting Warranty or Additional Liability. While redistributing
+      the Work or Derivative Works thereof, You may choose to offer,
+      and charge a fee for, acceptance of support, warranty, indemnity,
+      or other liability obligations and/or rights consistent with this
+      License. However, in accepting such obligations, You may act only
+      on Your own behalf and on Your sole responsibility, not on behalf
+      of any other Contributor, and only if You agree to indemnify,
+      defend, and hold each Contributor harmless for any liability
+      incurred by, or claims asserted against, such Contributor by reason
+      of your accepting any such warranty or additional liability.
+
+   END OF TERMS AND CONDITIONS
+
+   APPENDIX: How to apply the Apache License to your work.
+
+      To apply the Apache License to your work, attach the following
+      boilerplate notice, with the fields enclosed by brackets "[]"
+      replaced with your own identifying information. (Don't include
+      the brackets!)  The text should be enclosed in the appropriate
+      comment syntax for the file format. We also recommend that a
+      file or class name and description of purpose be included on the
+      same "printed page" as the copyright notice for easier
+      identification within third-party archives.
+
+   Copyright [yyyy] [name of copyright owner]
+
+   Licensed under the Apache License, Version 2.0 (the "License");
+   you may not use this file except in compliance with the License.
+   You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/8d185913/lens-ml-dist/pom.xml
----------------------------------------------------------------------
diff --git a/lens-ml-dist/pom.xml b/lens-ml-dist/pom.xml
new file mode 100644
index 0000000..9e4fd1d
--- /dev/null
+++ b/lens-ml-dist/pom.xml
@@ -0,0 +1,77 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+
+    Licensed to the Apache Software Foundation (ASF) under one
+    or more contributor license agreements.  See the NOTICE file
+    distributed with this work for additional information
+    regarding copyright ownership.  The ASF licenses this file
+    to you under the Apache License, Version 2.0 (the
+    "License"); you may not use this file except in compliance
+    with the License.  You may obtain a copy of the License at
+
+      http://www.apache.org/licenses/LICENSE-2.0
+
+    Unless required by applicable law or agreed to in writing,
+    software distributed under the License is distributed on an
+    "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+    KIND, either express or implied.  See the License for the
+    specific language governing permissions and limitations
+    under the License.
+
+-->
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+  <modelVersion>4.0.0</modelVersion>
+	<name>Lens ML Ext Distribution</name>
+   
+  <parent>
+    <artifactId>apache-lens</artifactId>
+    <groupId>org.apache.lens</groupId>
+    <version>2.1.0-beta-incubating-SNAPSHOT</version>
+  </parent>
+    
+  <artifactId>lens-ml-dist</artifactId>
+  <description> Packaging and distribution for lens ml </description>
+
+  <dependencies>
+    <dependency>
+      <groupId>org.apache.lens</groupId>
+      <artifactId>lens-ml-lib</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+  </dependencies>
+
+  <build>
+    <plugins>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-assembly-plugin</artifactId>
+        <configuration>
+          <finalName>apache-lens-${project.version}</finalName>
+          <ignoreDirFormatExtensions>true</ignoreDirFormatExtensions>
+          <descriptors>
+            <descriptor>src/main/assembly/ml-dist.xml</descriptor>
+          </descriptors>
+        </configuration>
+        <executions>
+          <execution>
+            <id>assemble</id>
+            <phase>package</phase>
+            <goals>
+              <goal>single</goal>
+            </goals>
+          </execution>
+        </executions>
+      </plugin>
+      <!--Plugin to build deb-->
+      
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-deploy-plugin</artifactId>
+        <version>2.7</version>
+        <configuration>
+          <skip>true</skip>
+        </configuration>
+      </plugin>
+    </plugins>
+  </build>
+</project>    

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/8d185913/lens-ml-dist/src/main/assembly/ml-dist.xml
----------------------------------------------------------------------
diff --git a/lens-ml-dist/src/main/assembly/ml-dist.xml b/lens-ml-dist/src/main/assembly/ml-dist.xml
new file mode 100644
index 0000000..30b6f23
--- /dev/null
+++ b/lens-ml-dist/src/main/assembly/ml-dist.xml
@@ -0,0 +1,65 @@
+<!--
+
+    Licensed to the Apache Software Foundation (ASF) under one
+    or more contributor license agreements.  See the NOTICE file
+    distributed with this work for additional information
+    regarding copyright ownership.  The ASF licenses this file
+    to you under the Apache License, Version 2.0 (the
+    "License"); you may not use this file except in compliance
+    with the License.  You may obtain a copy of the License at
+
+      http://www.apache.org/licenses/LICENSE-2.0
+
+    Unless required by applicable law or agreed to in writing,
+    software distributed under the License is distributed on an
+    "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+    KIND, either express or implied.  See the License for the
+    specific language governing permissions and limitations
+    under the License.
+
+-->
+<assembly xmlns="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.2"
+          xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+          xsi:schemaLocation="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.2 http://maven.apache.org/xsd/assembly-1.1.2.xsd">
+  <id>ml</id>
+
+  <formats>
+    <format>dir</format>
+  </formats>
+
+  <baseDirectory>apache-lens-${project.version}-ml</baseDirectory>
+  <includeBaseDirectory>false</includeBaseDirectory>
+
+  <dependencySets>
+    <dependencySet>
+      <unpack>false</unpack>
+      <scope>runtime</scope>
+      <outputDirectory>lib</outputDirectory>
+      <includes>
+        <include>org.apache.lens:lens-ml</include>
+      </includes>
+      <useTransitiveFiltering>true</useTransitiveFiltering>
+      <useTransitiveDependencies>true</useTransitiveDependencies>
+    </dependencySet>
+  </dependencySets>
+
+  <fileSets>
+    <fileSet>
+      <fileMode>755</fileMode>  
+      <directory>../lens-ml-lib/tools/scripts</directory>
+      <outputDirectory>/bin/</outputDirectory>
+      <includes>
+        <include>**</include>
+      </includes>
+    </fileSet>
+
+    <fileSet>
+      <directory>../lens-ml-lib/data</directory>
+      <outputDirectory>/data/</outputDirectory>
+      <includes>
+        <include>**</include>
+      </includes>
+    </fileSet>
+    
+  </fileSets>
+</assembly>

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/8d185913/lens-ml-dist/src/site/apt/index.apt
----------------------------------------------------------------------
diff --git a/lens-ml-dist/src/site/apt/index.apt b/lens-ml-dist/src/site/apt/index.apt
new file mode 100644
index 0000000..b45f4f8
--- /dev/null
+++ b/lens-ml-dist/src/site/apt/index.apt
@@ -0,0 +1,20 @@
+~~
+~~ Licensed to the Apache Software Foundation (ASF) under one
+~~ or more contributor license agreements.  See the NOTICE file
+~~ distributed with this work for additional information
+~~ regarding copyright ownership.  The ASF licenses this file
+~~ to you under the Apache License, Version 2.0 (the
+~~ "License"); you may not use this file except in compliance
+~~ with the License.  You may obtain a copy of the License at
+~~
+~~   http://www.apache.org/licenses/LICENSE-2.0
+~~
+~~ Unless required by applicable law or agreed to in writing,
+~~ software distributed under the License is distributed on an
+~~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+~~ KIND, either express or implied.  See the License for the
+~~ specific language governing permissions and limitations
+~~ under the License.
+~~
+
+Lens Distribution Documentation

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/8d185913/lens-ml-lib/data/naive_bayes/ml.properties
----------------------------------------------------------------------
diff --git a/lens-ml-lib/data/naive_bayes/ml.properties b/lens-ml-lib/data/naive_bayes/ml.properties
new file mode 100644
index 0000000..61b156e
--- /dev/null
+++ b/lens-ml-lib/data/naive_bayes/ml.properties
@@ -0,0 +1,7 @@
+algo=spark_naive_bayes
+database=default
+traintable=naive_bayes_train
+testtable=naive_bayes_test
+outputtable=naive_bayes_output
+features=feature1,feature2,feature3
+labelcolumn=label
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/8d185913/lens-ml-lib/data/naive_bayes/naive_bayes_train.data
----------------------------------------------------------------------
diff --git a/lens-ml-lib/data/naive_bayes/naive_bayes_train.data b/lens-ml-lib/data/naive_bayes/naive_bayes_train.data
deleted file mode 100644
index dfb9ccd..0000000
--- a/lens-ml-lib/data/naive_bayes/naive_bayes_train.data
+++ /dev/null
@@ -1,6 +0,0 @@
-0 1 0 0
-0 2 0 0
-1 0 1 0
-1 0 2 0
-2 0 0 1
-2 0 0 2

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/8d185913/lens-ml-lib/data/naive_bayes/test.data
----------------------------------------------------------------------
diff --git a/lens-ml-lib/data/naive_bayes/test.data b/lens-ml-lib/data/naive_bayes/test.data
new file mode 100644
index 0000000..0677fe0
--- /dev/null
+++ b/lens-ml-lib/data/naive_bayes/test.data
@@ -0,0 +1,6 @@
+1 0 0
+2 0 0
+0 1 0
+0 2 0
+0 0 1
+0 0 2

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/8d185913/lens-ml-lib/data/naive_bayes/train.data
----------------------------------------------------------------------
diff --git a/lens-ml-lib/data/naive_bayes/train.data b/lens-ml-lib/data/naive_bayes/train.data
new file mode 100644
index 0000000..dfb9ccd
--- /dev/null
+++ b/lens-ml-lib/data/naive_bayes/train.data
@@ -0,0 +1,6 @@
+0 1 0 0
+0 2 0 0
+1 0 1 0
+1 0 2 0
+2 0 0 1
+2 0 0 2

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/8d185913/lens-ml-lib/src/main/java/org/apache/lens/client/LensMLClient.java
----------------------------------------------------------------------
diff --git a/lens-ml-lib/src/main/java/org/apache/lens/client/LensMLClient.java b/lens-ml-lib/src/main/java/org/apache/lens/client/LensMLClient.java
index 9f7fa26..d9ec314 100644
--- a/lens-ml-lib/src/main/java/org/apache/lens/client/LensMLClient.java
+++ b/lens-ml-lib/src/main/java/org/apache/lens/client/LensMLClient.java
@@ -54,24 +54,25 @@ public class LensMLClient implements LensML, Closeable {
   /** The client. */
   private LensMLJerseyClient client;
 
-  /**
-   * Instantiates a new lens ml client.
-   *
-   * @param clientConf the client conf
-   */
-  public LensMLClient(LensConnectionParams clientConf, String password) {
-    client = new LensMLJerseyClient(new LensConnection(clientConf), password);
-    LOG.info("Client created with new session");
+  public LensMLClient(String password) {
+    this(new LensClientConfig(), password);
   }
 
-  /**
-   * Instantiates a new lens ml client.
-   *
-   * @param clientConf the client conf
-   */
-  public LensMLClient(LensConnectionParams clientConf, LensSessionHandle sessionHandle) {
-    client = new LensMLJerseyClient(new LensConnection(clientConf, sessionHandle), sessionHandle);
-    LOG.info("Client created with existing session");
+  public LensMLClient(LensClientConfig conf, String password) {
+    this(conf, conf.getUser(), password);
+  }
+
+  public LensMLClient(String username, String password) {
+    this(new LensClientConfig(), username, password);
+  }
+
+  public LensMLClient(LensClientConfig conf, String username, String password) {
+    this(new LensClient(conf, username, password));
+  }
+
+  public LensMLClient(LensClient lensClient) {
+    client = new LensMLJerseyClient(lensClient.getConnection(), lensClient
+        .getConnection().getSessionHandle());
   }
 
   /**

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/8d185913/lens-ml-lib/src/main/java/org/apache/lens/ml/MLRunner.java
----------------------------------------------------------------------
diff --git a/lens-ml-lib/src/main/java/org/apache/lens/ml/MLRunner.java b/lens-ml-lib/src/main/java/org/apache/lens/ml/MLRunner.java
new file mode 100644
index 0000000..bd50cba
--- /dev/null
+++ b/lens-ml-lib/src/main/java/org/apache/lens/ml/MLRunner.java
@@ -0,0 +1,173 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.lens.ml;
+
+import java.io.File;
+import java.io.FileInputStream;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Properties;
+
+import org.apache.lens.client.LensClient;
+import org.apache.lens.client.LensClientConfig;
+import org.apache.lens.client.LensMLClient;
+import org.apache.lens.ml.task.MLTask;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.metastore.TableType;
+import org.apache.hadoop.hive.metastore.api.FieldSchema;
+import org.apache.hadoop.hive.ql.metadata.Hive;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.metadata.Table;
+import org.apache.hadoop.hive.ql.plan.AddPartitionDesc;
+import org.apache.hadoop.hive.serde.serdeConstants;
+import org.apache.hadoop.mapred.TextInputFormat;
+
+public class MLRunner {
+
+  private static final Log LOG = LogFactory.getLog(MLRunner.class);
+
+  private LensMLClient mlClient;
+  private String algoName;
+  private String database;
+  private String trainTable;
+  private String trainFile;
+  private String testTable;
+  private String testFile;
+  private String outputTable;
+  private String[] features;
+  private String labelColumn;
+  private HiveConf conf;
+
+  public void init(LensMLClient mlClient, String confDir) throws Exception {
+    File dir = new File(confDir);
+    File propFile = new File(dir, "ml.properties");
+    Properties props = new Properties();
+    props.load(new FileInputStream(propFile));
+    String feat = props.getProperty("features");
+    String trainFile = confDir + File.separator + "train.data";
+    String testFile = confDir + File.separator + "test.data";
+    init(mlClient, props.getProperty("algo"), props.getProperty("database"),
+        props.getProperty("traintable"), trainFile,
+        props.getProperty("testtable"), testFile,
+        props.getProperty("outputtable"), feat.split(","),
+        props.getProperty("labelcolumn"));
+  }
+
+  public void init(LensMLClient mlClient, String algoName,
+      String database, String trainTable, String trainFile,
+      String testTable, String testFile, String outputTable, String[] features,
+      String labelColumn) {
+    this.mlClient = mlClient;
+    this.algoName = algoName;
+    this.database = database;
+    this.trainTable = trainTable;
+    this.trainFile = trainFile;
+    this.testTable = testTable;
+    this.testFile = testFile;
+    this.outputTable = outputTable;
+    this.features = features;
+    this.labelColumn = labelColumn;
+    //hive metastore settings are loaded via lens-site.xml, so loading LensClientConfig
+    //is required
+    this.conf = new HiveConf(new LensClientConfig(), MLRunner.class);
+  }
+
+  public MLTask train() throws Exception {
+    LOG.info("Starting train & eval");
+
+    createTable(trainTable, trainFile);
+    createTable(testTable, testFile);
+    MLTask.Builder taskBuilder = new MLTask.Builder();
+    taskBuilder.algorithm(algoName).hiveConf(conf).labelColumn(labelColumn).outputTable(outputTable)
+        .client(mlClient).trainingTable(trainTable).testTable(testTable);
+
+    // Add features
+    for (String feature : features) {
+      taskBuilder.addFeatureColumn(feature);
+    }
+    MLTask task = taskBuilder.build();
+    LOG.info("Created task " + task.toString());
+    task.run();
+    return task;
+  }
+
+  public void createTable(String tableName, String dataFile) throws HiveException {
+
+    File filedataFile = new File(dataFile);
+    Path dataFilePath = new Path(filedataFile.toURI());
+    Path partDir = dataFilePath.getParent();
+
+    // Create table
+    List<FieldSchema> columns = new ArrayList<FieldSchema>();
+
+    // Label is optional. Not used for unsupervised models.
+    // If present, label will be the first column, followed by features
+    if (labelColumn != null) {
+      columns.add(new FieldSchema(labelColumn, "double", "Labelled Column"));
+    }
+
+    for (String feature : features) {
+      columns.add(new FieldSchema(feature, "double", "Feature " + feature));
+    }
+
+    Table tbl = Hive.get(conf).newTable(database + "." + tableName);
+    tbl.setTableType(TableType.MANAGED_TABLE);
+    tbl.getTTable().getSd().setCols(columns);
+    // tbl.getTTable().getParameters().putAll(new HashMap<String, String>());
+    tbl.setInputFormatClass(TextInputFormat.class);
+    tbl.setSerdeParam(serdeConstants.LINE_DELIM, "\n");
+    tbl.setSerdeParam(serdeConstants.FIELD_DELIM, " ");
+
+    List<FieldSchema> partCols = new ArrayList<FieldSchema>(1);
+    partCols.add(new FieldSchema("dummy_partition_col", "string", ""));
+    tbl.setPartCols(partCols);
+
+    Hive.get(conf).dropTable(database, tableName, false, true);
+    Hive.get(conf).createTable(tbl, true);
+    LOG.info("Created table " + tableName);
+
+    // Add partition for the data file
+    AddPartitionDesc partitionDesc = new AddPartitionDesc(database, tableName,
+        false);
+    Map<String, String> partSpec = new HashMap<String, String>();
+    partSpec.put("dummy_partition_col", "dummy_val");
+    partitionDesc.addPartition(partSpec, partDir.toUri().toString());
+    Hive.get(conf).createPartitions(partitionDesc);
+    LOG.info(tableName + ": Added partition " + partDir.toUri().toString());
+  }
+
+  public static void main(String[] args) throws Exception {
+    if (args.length < 1) {
+      System.out.println("Usage: org.apache.lens.ml.MLRunner <ml-conf-dir>");
+      System.exit(-1);
+    }
+    String confDir = args[0];
+    LensMLClient client = new LensMLClient(new LensClient());
+    MLRunner runner = new MLRunner();
+    runner.init(client, confDir);
+    runner.train();
+    System.out.println("Created the Model successfully. Output Table: " + runner.outputTable);
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/8d185913/lens-ml-lib/src/main/java/org/apache/lens/ml/task/MLTask.java
----------------------------------------------------------------------
diff --git a/lens-ml-lib/src/main/java/org/apache/lens/ml/task/MLTask.java b/lens-ml-lib/src/main/java/org/apache/lens/ml/task/MLTask.java
index aa59100..e4bb329 100644
--- a/lens-ml-lib/src/main/java/org/apache/lens/ml/task/MLTask.java
+++ b/lens-ml-lib/src/main/java/org/apache/lens/ml/task/MLTask.java
@@ -20,8 +20,6 @@ package org.apache.lens.ml.task;
 
 import java.util.*;
 
-import org.apache.lens.api.LensSessionHandle;
-import org.apache.lens.client.LensConnectionParams;
 import org.apache.lens.client.LensMLClient;
 import org.apache.lens.ml.LensML;
 import org.apache.lens.ml.MLTestReport;
@@ -61,6 +59,13 @@ public class MLTask implements Runnable {
   private String trainingTable;
 
   /**
+   * Name of the table containing test data. Optional, if not provided trainingTable itself is
+   * used for testing
+   */
+  @Getter
+  private String testTable;
+
+  /**
    * Training table partition spec
    */
   @Getter
@@ -84,26 +89,20 @@ public class MLTask implements Runnable {
   @Getter
   private HiveConf configuration;
 
-  /**
-   * Lens Server base URL, when running example as a client.
-   */
-  @Getter
-  private String serverLocation;
-
   private LensML ml;
   private String taskID;
 
   /**
-   * Output table name
+   * ml client
    */
   @Getter
-  private String outputTable;
+  private LensMLClient mlClient;
 
   /**
-   * Session handle
+   * Output table name
    */
   @Getter
-  private LensSessionHandle sessionHandle;
+  private String outputTable;
 
   /**
    * Extra params passed to the training algorithm
@@ -111,18 +110,6 @@ public class MLTask implements Runnable {
   @Getter
   private Map<String, String> extraParams;
 
-  /**
-   * User name to connect to Lens server
-   */
-  @Getter
-  private String userName;
-
-  /**
-   * Password to connect to Lens server
-   */
-  @Getter
-  private String password;
-
   @Getter
   private String modelID;
 
@@ -153,6 +140,11 @@ public class MLTask implements Runnable {
       return this;
     }
 
+    public Builder testTable(String testTable) {
+      task.testTable = testTable;
+      return this;
+    }
+
     public Builder algorithm(String algorithm) {
       task.algorithm = algorithm;
       return this;
@@ -163,6 +155,11 @@ public class MLTask implements Runnable {
       return this;
     }
 
+    public Builder client(LensMLClient client) {
+      task.mlClient = client;
+      return this;
+    }
+
     public Builder addFeatureColumn(String featureColumn) {
       if (task.featureColumns == null) {
         task.featureColumns = new ArrayList<String>();
@@ -176,15 +173,7 @@ public class MLTask implements Runnable {
       return this;
     }
 
-    public Builder serverLocation(String serverLocation) {
-      task.serverLocation = serverLocation;
-      return this;
-    }
 
-    public Builder sessionHandle(LensSessionHandle sessionHandle) {
-      task.sessionHandle = sessionHandle;
-      return this;
-    }
 
     public Builder extraParam(String param, String value) {
       task.extraParams.put(param, value);
@@ -207,15 +196,6 @@ public class MLTask implements Runnable {
       return builtTask;
     }
 
-    public Builder userName(String userName) {
-      task.userName = userName;
-      return this;
-    }
-
-    public Builder password(String password) {
-      task.password = password;
-      return this;
-    }
   }
 
   @Override
@@ -239,14 +219,10 @@ public class MLTask implements Runnable {
    * @throws Exception
    */
   private void runTask() throws Exception {
-    if (serverLocation != null) {
+    if (mlClient != null) {
       // Connect to a remote Lens server
-      LensConnectionParams connectionParams = new LensConnectionParams();
-      connectionParams.setBaseUrl(serverLocation);
-      connectionParams.getConf().setUser(userName);
-      LensMLClient mlClient = new LensMLClient(connectionParams, sessionHandle);
       ml = mlClient;
-      LOG.info("Working in client mode. Lens session handle " + sessionHandle.getPublicId());
+      LOG.info("Working in client mode. Lens session handle " + mlClient.getSessionHandle().getPublicId());
     } else {
       // In server mode session handle has to be passed by the user as a request parameter
       ml = MLUtils.getMLService();
@@ -260,7 +236,8 @@ public class MLTask implements Runnable {
     printModelMetadata(taskID, modelID);
 
     LOG.info("Starting test " + taskID);
-    MLTestReport testReport = ml.testModel(sessionHandle, trainingTable, algorithm, modelID, outputTable);
+    testTable = (testTable != null) ? testTable : trainingTable;
+    MLTestReport testReport = ml.testModel(mlClient.getSessionHandle(), testTable, algorithm, modelID, outputTable);
     reportID = testReport.getReportID();
     printTestReport(taskID, testReport);
     saveTask();

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/8d185913/lens-ml-lib/src/test/java/org/apache/lens/ml/TestMLResource.java
----------------------------------------------------------------------
diff --git a/lens-ml-lib/src/test/java/org/apache/lens/ml/TestMLResource.java b/lens-ml-lib/src/test/java/org/apache/lens/ml/TestMLResource.java
index 1d40b76..f712481 100644
--- a/lens-ml-lib/src/test/java/org/apache/lens/ml/TestMLResource.java
+++ b/lens-ml-lib/src/test/java/org/apache/lens/ml/TestMLResource.java
@@ -20,13 +20,18 @@ package org.apache.lens.ml;
 
 import java.io.File;
 import java.net.URI;
-import java.util.*;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
 
 import javax.ws.rs.client.WebTarget;
 import javax.ws.rs.core.Application;
+import javax.ws.rs.core.UriBuilder;
 
-import org.apache.lens.api.LensSessionHandle;
-import org.apache.lens.client.LensConnectionParams;
+import org.apache.lens.client.LensClient;
+import org.apache.lens.client.LensClientConfig;
 import org.apache.lens.client.LensMLClient;
 import org.apache.lens.ml.spark.algos.DecisionTreeAlgo;
 import org.apache.lens.ml.spark.algos.LogisticRegressionAlgo;
@@ -34,29 +39,21 @@ import org.apache.lens.ml.spark.algos.NaiveBayesAlgo;
 import org.apache.lens.ml.spark.algos.SVMAlgo;
 import org.apache.lens.ml.task.MLTask;
 import org.apache.lens.server.LensJerseyTest;
-import org.apache.lens.server.LensServerConf;
 import org.apache.lens.server.api.LensConfConstants;
-import org.apache.lens.server.api.ServiceProvider;
-import org.apache.lens.server.api.ServiceProviderFactory;
-import org.apache.lens.server.api.session.SessionService;
 import org.apache.lens.server.ml.MLApp;
-import org.apache.lens.server.ml.MLService;
-import org.apache.lens.server.ml.MLServiceImpl;
 import org.apache.lens.server.ml.MLServiceResource;
 import org.apache.lens.server.query.QueryServiceResource;
-import org.apache.lens.server.session.HiveSessionService;
 import org.apache.lens.server.session.SessionResource;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.metastore.api.Database;
 import org.apache.hadoop.hive.ql.metadata.Hive;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.metadata.Partition;
 import org.apache.hadoop.hive.ql.metadata.Table;
 
-import org.apache.hive.service.Service;
-
 import org.glassfish.jersey.client.ClientConfig;
 import org.glassfish.jersey.media.multipart.MultiPartFeature;
 import org.testng.Assert;
@@ -65,33 +62,19 @@ import org.testng.annotations.BeforeMethod;
 import org.testng.annotations.BeforeTest;
 import org.testng.annotations.Test;
 
+
+
 @Test
 public class TestMLResource extends LensJerseyTest {
   private static final Log LOG = LogFactory.getLog(TestMLResource.class);
-  private static final String TEST_CONN_URL = "http://localhost:8089/lens-server";
-  private static final LensConnectionParams LENS_CONNECTION_PARAMS = new LensConnectionParams();
-
-  static {
-    LENS_CONNECTION_PARAMS.setBaseUrl(TEST_CONN_URL);
-    LENS_CONNECTION_PARAMS.getConf().setUser("foo@localhost");
-  }
+  private static final String TEST_DB = "default";
 
   private WebTarget mlTarget;
   private LensMLClient mlClient;
-  private ServiceProvider serviceProvider;
-  private LensSessionHandle sessionHandle;
-
-  public void setServiceProvider() throws Exception {
-    HiveConf conf = LensServerConf.get();
-    Class<? extends ServiceProviderFactory> spfClass = conf.getClass(LensConfConstants.SERVICE_PROVIDER_FACTORY, null,
-      ServiceProviderFactory.class);
-    ServiceProviderFactory spf = spfClass.newInstance();
-    this.serviceProvider = spf.getServiceProvider();
-  }
 
   @Override
   protected int getTestPort() {
-    return 8089;
+    return 10002;
   }
 
   @Override
@@ -104,18 +87,37 @@ public class TestMLResource extends LensJerseyTest {
     config.register(MultiPartFeature.class);
   }
 
+  @Override
+  protected URI getBaseUri() {
+    return UriBuilder.fromUri("http://localhost/").port(getTestPort()).path("/lensapi").build();
+  }
+
   @BeforeTest
   public void setUp() throws Exception {
     super.setUp();
-    setServiceProvider();
-    HiveSessionService sessionService = serviceProvider.getService(SessionService.NAME);
-    this.sessionHandle = sessionService.openSession("foo@localhost", "bar", new HashMap<String, String>());
-    mlClient = new LensMLClient(LENS_CONNECTION_PARAMS, sessionHandle);
+    Hive hive = Hive.get(new HiveConf());
+    Database db = new Database();
+    db.setName(TEST_DB);
+    hive.createDatabase(db, true);
+    LensClientConfig lensClientConfig = new LensClientConfig();
+    lensClientConfig.setLensDatabase(TEST_DB);
+    lensClientConfig.set(LensConfConstants.SERVER_BASE_URL,
+        "http://localhost:" + getTestPort() + "/lensapi");
+    LensClient client = new LensClient(lensClientConfig);
+    mlClient = new LensMLClient(client);
   }
 
   @AfterTest
   public void tearDown() throws Exception {
     super.tearDown();
+    Hive hive = Hive.get(new HiveConf());
+
+    try {
+      hive.dropDatabase(TEST_DB);
+    } catch (Exception exc) {
+      // Ignore drop db exception
+      exc.printStackTrace();
+    }
     mlClient.close();
   }
 
@@ -125,13 +127,6 @@ public class TestMLResource extends LensJerseyTest {
   }
 
   @Test
-  public void testStartMLServiceStarted() throws Exception {
-    LOG.info("## testStartMLServiceStarted");
-    MLServiceImpl svcImpl = serviceProvider.getService(MLService.NAME);
-    Assert.assertEquals(svcImpl.getServiceState(), Service.STATE.STARTED);
-  }
-
-  @Test
   public void testMLResourceUp() throws Exception {
     String mlUpMsg = mlTarget.request().get(String.class);
     Assert.assertEquals(mlUpMsg, MLServiceResource.ML_UP_MESSAGE);
@@ -142,22 +137,26 @@ public class TestMLResource extends LensJerseyTest {
     List<String> algoNames = mlClient.getAlgorithms();
     Assert.assertNotNull(algoNames);
 
-    Assert.assertTrue(algoNames.contains(MLUtils.getAlgoName(NaiveBayesAlgo.class)),
-      MLUtils.getAlgoName(NaiveBayesAlgo.class));
+    Assert.assertTrue(
+        algoNames.contains(MLUtils.getAlgoName(NaiveBayesAlgo.class)),
+        MLUtils.getAlgoName(NaiveBayesAlgo.class));
 
     Assert.assertTrue(algoNames.contains(MLUtils.getAlgoName(SVMAlgo.class)),
-      MLUtils.getAlgoName(SVMAlgo.class));
+        MLUtils.getAlgoName(SVMAlgo.class));
 
-    Assert.assertTrue(algoNames.contains(MLUtils.getAlgoName(LogisticRegressionAlgo.class)),
-      MLUtils.getAlgoName(LogisticRegressionAlgo.class));
+    Assert.assertTrue(
+        algoNames.contains(MLUtils.getAlgoName(LogisticRegressionAlgo.class)),
+        MLUtils.getAlgoName(LogisticRegressionAlgo.class));
 
-    Assert.assertTrue(algoNames.contains(MLUtils.getAlgoName(DecisionTreeAlgo.class)),
-      MLUtils.getAlgoName(DecisionTreeAlgo.class));
+    Assert.assertTrue(
+        algoNames.contains(MLUtils.getAlgoName(DecisionTreeAlgo.class)),
+        MLUtils.getAlgoName(DecisionTreeAlgo.class));
   }
 
   @Test
   public void testGetAlgoParams() throws Exception {
-    Map<String, String> params = mlClient.getAlgoParamDescription(MLUtils.getAlgoName(DecisionTreeAlgo.class));
+    Map<String, String> params = mlClient.getAlgoParamDescription(MLUtils
+        .getAlgoName(DecisionTreeAlgo.class));
     Assert.assertNotNull(params);
     Assert.assertFalse(params.isEmpty());
 
@@ -171,7 +170,6 @@ public class TestMLResource extends LensJerseyTest {
     LOG.info("Starting train & eval");
     final String algoName = MLUtils.getAlgoName(NaiveBayesAlgo.class);
     HiveConf conf = new HiveConf();
-    String database = "default";
     String tableName = "naivebayes_training_table";
     String sampleDataFilePath = "data/naive_bayes/naive_bayes_train.data";
 
@@ -179,26 +177,27 @@ public class TestMLResource extends LensJerseyTest {
     URI sampleDataFileURI = sampleDataFile.toURI();
 
     String labelColumn = "label";
-    String[] features = {"feature_1", "feature_2", "feature_3"};
+    String[] features = { "feature_1", "feature_2", "feature_3" };
     String outputTable = "naivebayes_eval_table";
 
-    LOG.info("Creating training table from file " + sampleDataFileURI.toString());
+    LOG.info("Creating training table from file "
+        + sampleDataFileURI.toString());
 
     Map<String, String> tableParams = new HashMap<String, String>();
     try {
-      ExampleUtils.createTable(conf, database, tableName, sampleDataFileURI.toString(), labelColumn, tableParams,
-        features);
+      ExampleUtils.createTable(conf, TEST_DB, tableName,
+          sampleDataFileURI.toString(), labelColumn, tableParams, features);
     } catch (HiveException exc) {
       exc.printStackTrace();
     }
     MLTask.Builder taskBuilder = new MLTask.Builder();
 
-    taskBuilder.algorithm(algoName).hiveConf(conf).labelColumn(labelColumn).outputTable(outputTable)
-      .serverLocation(getBaseUri().toString()).sessionHandle(mlClient.getSessionHandle()).trainingTable(tableName)
-      .userName("foo@localhost").password("bar");
+    taskBuilder.algorithm(algoName).hiveConf(conf).labelColumn(labelColumn)
+        .outputTable(outputTable).client(mlClient).trainingTable(tableName);
 
     // Add features
-    taskBuilder.addFeatureColumn("feature_1").addFeatureColumn("feature_2").addFeatureColumn("feature_3");
+    taskBuilder.addFeatureColumn("feature_1").addFeatureColumn("feature_2")
+        .addFeatureColumn("feature_3");
 
     MLTask task = taskBuilder.build();
 
@@ -212,10 +211,11 @@ public class TestMLResource extends LensJerseyTest {
     Assert.assertNotNull(firstModelID);
 
     taskBuilder = new MLTask.Builder();
-    taskBuilder.algorithm(algoName).hiveConf(conf).labelColumn(labelColumn).outputTable(outputTable)
-      .serverLocation(getBaseUri().toString()).sessionHandle(mlClient.getSessionHandle()).trainingTable(tableName)
-      .userName("foo@localhost").password("bar");
-    taskBuilder.addFeatureColumn("feature_1").addFeatureColumn("feature_2").addFeatureColumn("feature_3");
+    taskBuilder.algorithm(algoName).hiveConf(conf).labelColumn(labelColumn)
+        .outputTable(outputTable).client(mlClient).trainingTable(tableName);
+
+    taskBuilder.addFeatureColumn("feature_1").addFeatureColumn("feature_2")
+        .addFeatureColumn("feature_3");
 
     MLTask anotherTask = taskBuilder.build();
 
@@ -241,8 +241,10 @@ public class TestMLResource extends LensJerseyTest {
     }
 
     // Verify partitions created for each run
-    Assert.assertTrue(partReports.contains(firstReportID), firstReportID + "  first partition not there");
-    Assert.assertTrue(partReports.contains(secondReportID), secondReportID + " second partition not there");
+    Assert.assertTrue(partReports.contains(firstReportID), firstReportID
+        + "  first partition not there");
+    Assert.assertTrue(partReports.contains(secondReportID), secondReportID
+        + " second partition not there");
 
     LOG.info("Completed task run");
 

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/8d185913/lens-ml-lib/src/test/java/org/apache/lens/ml/TestMLRunner.java
----------------------------------------------------------------------
diff --git a/lens-ml-lib/src/test/java/org/apache/lens/ml/TestMLRunner.java b/lens-ml-lib/src/test/java/org/apache/lens/ml/TestMLRunner.java
new file mode 100644
index 0000000..d928ff1
--- /dev/null
+++ b/lens-ml-lib/src/test/java/org/apache/lens/ml/TestMLRunner.java
@@ -0,0 +1,138 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.lens.ml;
+
+import java.net.URI;
+
+import javax.ws.rs.core.Application;
+import javax.ws.rs.core.UriBuilder;
+
+import org.apache.lens.client.LensClient;
+import org.apache.lens.client.LensClientConfig;
+import org.apache.lens.client.LensMLClient;
+import org.apache.lens.ml.task.MLTask;
+import org.apache.lens.server.LensJerseyTest;
+import org.apache.lens.server.api.LensConfConstants;
+import org.apache.lens.server.metastore.MetastoreResource;
+import org.apache.lens.server.ml.MLApp;
+import org.apache.lens.server.query.QueryServiceResource;
+import org.apache.lens.server.session.SessionResource;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.metastore.api.Database;
+import org.apache.hadoop.hive.ql.metadata.Hive;
+
+import org.glassfish.jersey.client.ClientConfig;
+import org.glassfish.jersey.media.multipart.MultiPartFeature;
+import org.testng.Assert;
+import org.testng.annotations.AfterTest;
+import org.testng.annotations.BeforeTest;
+import org.testng.annotations.Test;
+
+
+@Test
+public class TestMLRunner extends LensJerseyTest {
+  private static final Log LOG = LogFactory.getLog(TestMLRunner.class);
+  private static final String TEST_DB = TestMLRunner.class.getSimpleName();
+
+  private LensMLClient mlClient;
+
+  @Override
+  protected int getTestPort() {
+    return 10000;
+  }
+
+  @Override
+  protected Application configure() {
+    return new MLApp(SessionResource.class, QueryServiceResource.class, MetastoreResource.class);
+  }
+
+  @Override
+  protected URI getBaseUri() {
+    return UriBuilder.fromUri("http://localhost/").port(getTestPort()).path("/lensapi").build();
+  }
+
+  @Override
+  protected void configureClient(ClientConfig config) {
+    config.register(MultiPartFeature.class);
+  }
+
+  @BeforeTest
+  public void setUp() throws Exception {
+    super.setUp();
+    Hive hive = Hive.get(new HiveConf());
+    Database db = new Database();
+    db.setName(TEST_DB);
+    hive.createDatabase(db, true);
+    LensClientConfig lensClientConfig = new LensClientConfig();
+    lensClientConfig.setLensDatabase(TEST_DB);
+    lensClientConfig.set(LensConfConstants.SERVER_BASE_URL,
+        "http://localhost:" + getTestPort() + "/lensapi");
+    LensClient client = new LensClient(lensClientConfig);
+    mlClient = new LensMLClient(client);
+  }
+
+  @AfterTest
+  public void tearDown() throws Exception {
+    super.tearDown();
+    Hive hive = Hive.get(new HiveConf());
+    hive.dropDatabase(TEST_DB);
+    mlClient.close();
+  }
+
+  @Test
+  public void trainAndEval() throws Exception {
+    LOG.info("Starting train & eval");
+    String algoName = "spark_naive_bayes";
+    String database = "default";
+    String trainTable = "naivebayes_training_table";
+    String trainFile = "data/naive_bayes/train.data";
+    String testTable = "naivebayes_test_table";
+    String testFile = "data/naive_bayes/test.data";
+    String outputTable = "naivebayes_eval_table";
+    String[] features = { "feature_1", "feature_2", "feature_3" };
+    String labelColumn = "label";
+
+    MLRunner runner = new MLRunner();
+    runner.init(mlClient, algoName, database, trainTable, trainFile,
+        testTable, testFile, outputTable, features, labelColumn);
+    MLTask task = runner.train();
+    Assert.assertEquals(task.getTaskState(), MLTask.State.SUCCESSFUL);
+    String modelID = task.getModelID();
+    String reportID = task.getReportID();
+    Assert.assertNotNull(modelID);
+    Assert.assertNotNull(reportID);
+  }
+
+  @Test
+  public void trainAndEvalFromDir() throws Exception {
+    LOG.info("Starting train & eval from Dir");
+    MLRunner runner = new MLRunner();
+    runner.init(mlClient, "data/naive_bayes");
+    MLTask task = runner.train();
+    Assert.assertEquals(task.getTaskState(), MLTask.State.SUCCESSFUL);
+    String modelID = task.getModelID();
+    String reportID = task.getReportID();
+    Assert.assertNotNull(modelID);
+    Assert.assertNotNull(reportID);
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/8d185913/lens-ml-lib/tools/scripts/lens-ml-classpath.sh
----------------------------------------------------------------------
diff --git a/lens-ml-lib/tools/scripts/lens-ml-classpath.sh b/lens-ml-lib/tools/scripts/lens-ml-classpath.sh
new file mode 100644
index 0000000..7254b78
--- /dev/null
+++ b/lens-ml-lib/tools/scripts/lens-ml-classpath.sh
@@ -0,0 +1,52 @@
+#!/bin/bash
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License. See accompanying LICENSE file.
+#
+
+# resolve links - $0 may be a softlink
+PRG="${0}"
+
+while [ -h "${PRG}" ]; do
+  ls=`ls -ld "${PRG}"`
+  link=`expr "$ls" : '.*-> \(.*\)$'`
+  if expr "$link" : '/.*' > /dev/null; then
+    PRG="$link"
+  else
+    PRG=`dirname "${PRG}"`/"$link"
+  fi
+done
+
+BASEDIR=`dirname ${PRG}`
+BASEDIR=`cd ${BASEDIR}/..;pwd`
+
+LENS_ML_CLASSPATH=`ls ${BASEDIR}/lib/* 2>/dev/null | tr "\n" ':' 2>/dev/null`
+echo $LENS_ML_CLASSPATH

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/8d185913/pom.xml
----------------------------------------------------------------------
diff --git a/pom.xml b/pom.xml
index 16f5aad..a5dd80f 100644
--- a/pom.xml
+++ b/pom.xml
@@ -496,6 +496,7 @@
             <!-- Used on ui -->
             <exclude>**/codemirror.min.*</exclude>
             <exclude>**/*.js</exclude>
+            <exclude>**/*.properties</exclude>
           </excludes>
         </configuration>
         <executions>
@@ -1134,6 +1135,7 @@
     <module>lens-examples</module>
     <module>lens-dist</module>
     <module>lens-ml-lib</module>
+    <module>lens-ml-dist</module>
     <module>lens-regression</module>
   </modules>
 

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/8d185913/tools/conf-pseudo-distr/client/lens-client-site.xml
----------------------------------------------------------------------
diff --git a/tools/conf-pseudo-distr/client/lens-client-site.xml b/tools/conf-pseudo-distr/client/lens-client-site.xml
index 706e356..120bf67 100644
--- a/tools/conf-pseudo-distr/client/lens-client-site.xml
+++ b/tools/conf-pseudo-distr/client/lens-client-site.xml
@@ -23,5 +23,10 @@
 
 <configuration>
 
+  <property>
+    <name>hive.metastore.uris</name>
+    <value>thrift://localhost:9083</value>
+    <description>Thrift URI for the remote metastore. Used by metastore client to connect to remote metastore.</description>
+  </property>
 
 </configuration>

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/8d185913/tools/conf-pseudo-distr/server/lens-site.xml
----------------------------------------------------------------------
diff --git a/tools/conf-pseudo-distr/server/lens-site.xml b/tools/conf-pseudo-distr/server/lens-site.xml
index 6dc7eff..f11c6d6 100644
--- a/tools/conf-pseudo-distr/server/lens-site.xml
+++ b/tools/conf-pseudo-distr/server/lens-site.xml
@@ -43,4 +43,39 @@
   <name>lens.query.enable.persistent.resultset.indriver</name>
   <value>false</value>
 </property>
+
+<property>
+  <name>lens.server.drivers</name>
+  <value>org.apache.lens.driver.hive.HiveDriver,org.apache.lens.driver.jdbc.JDBCDriver</value>
+  <name>lens.server.servicenames</name>
+  <value>session,query,metastore,scheduler,quota,ml</value>
+</property>
+
+<property>
+  <name>lens.server.ws.resourcenames</name>
+  <value>session,metastore,query,quota,scheduler,ml</value>
+  <description>These JAXRS resources would be started in the specified order when lensserver starts up</description>
+</property>
+
+<property>
+  <name>lens.server.ml.ws.resource.impl</name>
+  <value>org.apache.lens.server.ml.MLServiceResource</value>
+  <description>Implementation class for ML Service Resource</description>
+</property>
+
+<property>
+  <name>lens.server.ml.service.impl</name>
+  <value>org.apache.lens.server.ml.MLServiceImpl</value>
+  <description>Implementation class for ML service</description>
+</property>
+
+<property>
+  <name>lens.ml.drivers</name>
+  <value>org.apache.lens.ml.spark.SparkMLDriver</value>
+</property>
+
+<property>
+  <name>lens.ml.sparkdriver.spark.master</name>
+  <value>local</value>
+</property>
 </configuration>

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/8d185913/tools/scripts/lens-config.sh
----------------------------------------------------------------------
diff --git a/tools/scripts/lens-config.sh b/tools/scripts/lens-config.sh
index 8bd9013..0e810cb 100644
--- a/tools/scripts/lens-config.sh
+++ b/tools/scripts/lens-config.sh
@@ -125,5 +125,9 @@ case $type in
     exit 1
   ;;
 esac
+
+# add LENS_EXT_CLASSPATH
+LENSCPPATH=$LENS_EXT_CLASSPATH:$LENSCPPATH
+
 export LENSCPPATH
 export LENS_OPTS

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/8d185913/tools/scripts/lens-run-class.sh
----------------------------------------------------------------------
diff --git a/tools/scripts/lens-run-class.sh b/tools/scripts/lens-run-class.sh
new file mode 100644
index 0000000..ef64b03
--- /dev/null
+++ b/tools/scripts/lens-run-class.sh
@@ -0,0 +1,170 @@
+#!/bin/bash
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+
+# resolve links - $0 may be a softlink
+PRG="${0}"
+
+while [ -h "${PRG}" ]; do
+  ls=`ls -ld "${PRG}"`
+  link=`expr "$ls" : '.*-> \(.*\)$'`
+  if expr "$link" : '/.*' > /dev/null; then
+    PRG="$link"
+  else
+    PRG=`dirname "${PRG}"`/"$link"
+  fi
+done
+
+BASEDIR=`dirname ${PRG}`
+BASEDIR=`cd ${BASEDIR}/..;pwd`
+. ${BASEDIR}/bin/lens-config.sh 'client'
+
+JAVA_PROPERTIES="$LENS_OPTS $LENS_PROPERTIES -Dlens.log.dir=$LENS_LOG_DIR -Dlens.home=${LENS_HOME_DIR} -Dconfig.location=$LENS_CONF"
+
+
+################################
+# functions
+################################
+
+info() {
+  local msg=$1
+
+  echo "Info: $msg" >&2
+}
+
+warn() {
+  local msg=$1
+
+  echo "Warning: $msg" >&2
+}
+
+error() {
+  local msg=$1
+  local exit_code=$2
+
+  echo "Error: $msg" >&2
+
+  if [ -n "$exit_code" ] ; then
+    exit $exit_code
+  fi
+}
+
+setenv() {
+
+# HADOOP_HOME env variable overrides hadoop in the path
+  HADOOP_HOME=${HADOOP_HOME:-${HADOOP_PREFIX}}
+  if [ "$HADOOP_HOME" == "" ]; then
+    echo "Cannot find hadoop installation: \$HADOOP_HOME or \$HADOOP_PREFIX must be set or hadoop must be in the path";
+    exit 4;
+  else
+    echo "Adding hadoop libs in classpath from $HADOOP_HOME"
+
+    #ASSUMPTION: hadoop jars would be present in HADOOP_HOME if installed through deb or would be present
+    # in HADOOP_HOME/share/hadoop if installed through tarball. They can not coexist.
+
+    CORE_JARS=`ls $HADOOP_HOME/hadoop-core-*.jar 2>/dev/null | tr "\n" ':' 2>/dev/null`
+
+    LIB_JARS=`ls $HADOOP_HOME/lib/guava-*.jar 2>/dev/null | tr "\n" ':' 2>/dev/null`
+    LIB_JARS=$LIB_JARS:`ls ${HADOOP_HOME}/lib/commons-configuration-*.jar 2>/dev/null | tr "\n" ':' 2>/dev/null`
+    LIB_JARS=$LIB_JARS:`ls ${HADOOP_HOME}/lib/protobuf-java-*.jar 2>/dev/null | tr "\n" ':' 2>/dev/null`
+    LIB_JARS=$LIB_JARS:`ls ${HADOOP_HOME}/share/hadoop/common/lib/commons-configuration-*.jar 2>/dev/null | tr "\n" ':' 2>/dev/null`
+    LIB_JARS=$LIB_JARS:`ls ${HADOOP_HOME}/share/hadoop/hdfs/lib/protobuf-java-*.jar 2>/dev/null | tr "\n" ':' 2>/dev/null`
+
+
+    COMMON_JARS=`ls ${HADOOP_HOME}/hadoop-*.jar 2>/dev/null | tr "\n" ':' 2>/dev/null`
+    COMMON_JARS=$COMMON_JARS:`ls ${HADOOP_HOME}/share/hadoop/common/hadoop-common-*.jar 2>/dev/null | tr "\n" ':' 2>/dev/null`
+    COMMON_JARS=$COMMON_JARS:`ls ${HADOOP_HOME}/share/hadoop/common/lib/hadoop-*.jar 2>/dev/null | tr "\n" ':' 2>/dev/null`
+
+    HDFS_JARS=`ls ${HADOOP_HOME}/../hadoop-hdfs/hadoop-hdfs-*.jar 2>/dev/null | tr "\n" ':' 2>/dev/null`
+    HDFS_JARS=$HDFS_JARS:`ls ${HADOOP_HOME}/share/hadoop/hdfs/hadoop-hdfs-*.jar 2>/dev/null | tr "\n" ':' 2>/dev/null`
+
+    MAPRED_JARS=`ls ${HADOOP_HOME}/../hadoop-mapreduce/hadoop-*.jar 2>/dev/null | tr "\n" ':' 2>/dev/null`
+    MAPRED_JARS=$MAPRED_JARS:`ls ${HADOOP_HOME}/share/hadoop/mapreduce/hadoop-*.jar 2>/dev/null | tr "\n" ':' 2>/dev/null`
+
+    HADOOP_JARPATH=$CORE_JARS:$LIB_JARS:$COMMON_JARS:$HDFS_JARS:$MAPRED_JARS
+    LENSCPPATH=${LENSCPPATH}:$HADOOP_JARPATH
+  fi
+
+  if [ "$HIVE_HOME" != "" ]; then    
+    echo "HIVE_HOME is set, adding ${HIVE_HOME}/lib/* into lens classpath"
+    LENSCPPATH=${LENSCPPATH}:`ls ${HIVE_HOME}/lib/* 2>/dev/null | tr "\n" ':' 2>/dev/null`
+  else
+    echo "HIVE_HOME is not set. Set HIVE_HOME and try again"
+    exit 1
+  fi
+  
+  # Add HIVE_HOME to HADOOP_CLASS_PATH
+  HADOOP_CLASSPATH="$HADOOP_CLASSPATH:${HIVE_HOME}/lib/*"
+  export HADOOP_CLASSPATH
+ 
+}
+################################
+# main
+################################
+
+opt_conf=""
+opt_classname=""
+while [ -n "$*" ] ; do
+  arg=$1
+  shift
+  case "$arg" in
+    --conf|-c)
+      [ -n "$1" ] || error "Option --conf requires an argument" 1
+      opt_conf=$1
+      shift
+      ;;
+    --classpath|-C)
+      [ -n "$1" ] || error "Option --classpath requires an argument" 1
+      LENSCPPATH="$LENSCPPATH:$1"
+      shift
+      ;;
+    -D*)
+      JAVA_PROPERTIES="${JAVA_PROPERTIES} $arg"
+      ;;
+    *)
+      if [ "$opt_classname" == "" ]; then 
+        opt_classname=$arg
+        echo "opt_classname is " $opt_classname
+      else
+        args="$args $arg"
+      fi
+      ;;
+  esac
+done
+echo "args are  " $args
+
+
+# prepend conf dir to classpath
+if [ -n "$opt_conf" ]; then
+  LENSCPPATH="$opt_conf:$LENSCPPATH"
+fi
+
+# finally, invoke the appropriate command
+if [ "$opt_classname" == "" ]; then 
+  echo "Usage : $0 <classname>"
+  exit 1
+fi
+echo "Executing class " $opt_classname
+
+setenv
+
+exec ${JAVA_BIN} ${JAVA_PROPERTIES} -cp ${LENSCPPATH} \
+      "$opt_classname" $args
+
+exit 0


[29/50] [abbrv] incubator-lens git commit: Lens-465 : Refactor ml packages. (sharad)

Posted by am...@apache.org.
Lens-465 : Refactor ml packages. (sharad)


Project: http://git-wip-us.apache.org/repos/asf/incubator-lens/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-lens/commit/0f5ea4c7
Tree: http://git-wip-us.apache.org/repos/asf/incubator-lens/tree/0f5ea4c7
Diff: http://git-wip-us.apache.org/repos/asf/incubator-lens/diff/0f5ea4c7

Branch: refs/heads/current-release-line
Commit: 0f5ea4c7827fc4740c1c2ba0fb9527903a2b910c
Parents: 278e0e8
Author: Sharad Agarwal <sh...@flipkarts-MacBook-Pro.local>
Authored: Sun Apr 5 10:48:32 2015 +0530
Committer: Sharad Agarwal <sh...@flipkarts-MacBook-Pro.local>
Committed: Sun Apr 5 10:48:32 2015 +0530

----------------------------------------------------------------------
 .../org/apache/lens/api/ml/ModelMetadata.java   | 118 ---
 .../java/org/apache/lens/api/ml/TestReport.java | 125 ----
 .../org/apache/lens/client/LensMLClient.java    |  12 +-
 .../apache/lens/client/LensMLJerseyClient.java  |   4 +-
 .../java/org/apache/lens/ml/AlgoArgParser.java  | 114 ---
 .../main/java/org/apache/lens/ml/AlgoParam.java |  53 --
 .../main/java/org/apache/lens/ml/Algorithm.java |  46 --
 .../java/org/apache/lens/ml/Algorithms.java     |  87 ---
 .../org/apache/lens/ml/ClassifierBaseModel.java |  46 --
 .../java/org/apache/lens/ml/ExampleUtils.java   | 101 ---
 .../org/apache/lens/ml/ForecastingModel.java    |  93 ---
 .../main/java/org/apache/lens/ml/HiveMLUDF.java | 136 ----
 .../org/apache/lens/ml/LabelledPrediction.java  |  32 -
 .../main/java/org/apache/lens/ml/LensML.java    | 159 ----
 .../java/org/apache/lens/ml/LensMLImpl.java     | 734 ------------------
 .../main/java/org/apache/lens/ml/MLAlgo.java    |  53 --
 .../main/java/org/apache/lens/ml/MLDriver.java  |  71 --
 .../main/java/org/apache/lens/ml/MLModel.java   |  79 --
 .../main/java/org/apache/lens/ml/MLRunner.java  | 173 -----
 .../java/org/apache/lens/ml/MLTestMetric.java   |  28 -
 .../java/org/apache/lens/ml/MLTestReport.java   |  95 ---
 .../main/java/org/apache/lens/ml/MLUtils.java   |  60 --
 .../java/org/apache/lens/ml/ModelLoader.java    | 239 ------
 .../org/apache/lens/ml/MultiPrediction.java     |  28 -
 .../java/org/apache/lens/ml/QueryRunner.java    |  56 --
 .../org/apache/lens/ml/TableTestingSpec.java    | 325 --------
 .../org/apache/lens/ml/algo/api/AlgoParam.java  |  53 ++
 .../org/apache/lens/ml/algo/api/Algorithm.java  |  46 ++
 .../org/apache/lens/ml/algo/api/MLAlgo.java     |  53 ++
 .../org/apache/lens/ml/algo/api/MLDriver.java   |  71 ++
 .../org/apache/lens/ml/algo/api/MLModel.java    |  79 ++
 .../apache/lens/ml/algo/lib/AlgoArgParser.java  | 117 +++
 .../org/apache/lens/ml/algo/lib/Algorithms.java |  89 +++
 .../lens/ml/algo/lib/ClassifierBaseModel.java   |  48 ++
 .../lens/ml/algo/lib/ForecastingModel.java      |  95 +++
 .../lens/ml/algo/lib/LabelledPrediction.java    |  32 +
 .../lens/ml/algo/lib/MultiPrediction.java       |  28 +
 .../lens/ml/algo/spark/BaseSparkAlgo.java       | 287 +++++++
 .../spark/BaseSparkClassificationModel.java     |  65 ++
 .../ml/algo/spark/ColumnFeatureFunction.java    | 102 +++
 .../lens/ml/algo/spark/DoubleValueMapper.java   |  39 +
 .../lens/ml/algo/spark/FeatureFunction.java     |  40 +
 .../lens/ml/algo/spark/FeatureValueMapper.java  |  36 +
 .../apache/lens/ml/algo/spark/HiveTableRDD.java |  63 ++
 .../lens/ml/algo/spark/SparkMLDriver.java       | 278 +++++++
 .../lens/ml/algo/spark/TableTrainingSpec.java   | 433 +++++++++++
 .../lens/ml/algo/spark/dt/DecisionTreeAlgo.java | 108 +++
 .../dt/DecisionTreeClassificationModel.java     |  37 +
 .../algo/spark/dt/SparkDecisionTreeModel.java   |  75 ++
 .../lens/ml/algo/spark/kmeans/KMeansAlgo.java   | 163 ++++
 .../spark/kmeans/KMeansClusteringModel.java     |  67 ++
 .../algo/spark/lr/LogisticRegressionAlgo.java   |  86 +++
 .../lr/LogitRegressionClassificationModel.java  |  39 +
 .../lens/ml/algo/spark/nb/NaiveBayesAlgo.java   |  73 ++
 .../spark/nb/NaiveBayesClassificationModel.java |  39 +
 .../apache/lens/ml/algo/spark/svm/SVMAlgo.java  |  90 +++
 .../algo/spark/svm/SVMClassificationModel.java  |  39 +
 .../java/org/apache/lens/ml/api/LensML.java     | 161 ++++
 .../org/apache/lens/ml/api/MLTestReport.java    |  95 +++
 .../org/apache/lens/ml/api/ModelMetadata.java   | 118 +++
 .../java/org/apache/lens/ml/api/TestReport.java | 125 ++++
 .../java/org/apache/lens/ml/dao/MLDBUtils.java  |   6 +-
 .../java/org/apache/lens/ml/impl/HiveMLUDF.java | 138 ++++
 .../org/apache/lens/ml/impl/LensMLImpl.java     | 744 +++++++++++++++++++
 .../java/org/apache/lens/ml/impl/MLRunner.java  | 172 +++++
 .../java/org/apache/lens/ml/impl/MLTask.java    | 285 +++++++
 .../java/org/apache/lens/ml/impl/MLUtils.java   |  62 ++
 .../org/apache/lens/ml/impl/ModelLoader.java    | 242 ++++++
 .../org/apache/lens/ml/impl/QueryRunner.java    |  56 ++
 .../apache/lens/ml/impl/TableTestingSpec.java   | 325 ++++++++
 .../java/org/apache/lens/ml/server/MLApp.java   |  60 ++
 .../org/apache/lens/ml/server/MLService.java    |  27 +
 .../apache/lens/ml/server/MLServiceImpl.java    | 329 ++++++++
 .../lens/ml/server/MLServiceResource.java       | 427 +++++++++++
 .../lens/ml/spark/ColumnFeatureFunction.java    | 102 ---
 .../apache/lens/ml/spark/DoubleValueMapper.java |  39 -
 .../apache/lens/ml/spark/FeatureFunction.java   |  40 -
 .../lens/ml/spark/FeatureValueMapper.java       |  36 -
 .../org/apache/lens/ml/spark/HiveTableRDD.java  |  63 --
 .../org/apache/lens/ml/spark/SparkMLDriver.java | 275 -------
 .../apache/lens/ml/spark/TableTrainingSpec.java | 433 -----------
 .../lens/ml/spark/algos/BaseSparkAlgo.java      | 290 --------
 .../lens/ml/spark/algos/DecisionTreeAlgo.java   | 109 ---
 .../apache/lens/ml/spark/algos/KMeansAlgo.java  | 163 ----
 .../ml/spark/algos/LogisticRegressionAlgo.java  |  86 ---
 .../lens/ml/spark/algos/NaiveBayesAlgo.java     |  73 --
 .../org/apache/lens/ml/spark/algos/SVMAlgo.java |  90 ---
 .../models/BaseSparkClassificationModel.java    |  65 --
 .../models/DecisionTreeClassificationModel.java |  35 -
 .../ml/spark/models/KMeansClusteringModel.java  |  67 --
 .../LogitRegressionClassificationModel.java     |  37 -
 .../models/NaiveBayesClassificationModel.java   |  37 -
 .../ml/spark/models/SVMClassificationModel.java |  37 -
 .../ml/spark/models/SparkDecisionTreeModel.java |  75 --
 .../java/org/apache/lens/ml/task/MLTask.java    | 286 -------
 .../java/org/apache/lens/rdd/LensRDDClient.java |   2 +-
 .../java/org/apache/lens/server/ml/MLApp.java   |  60 --
 .../org/apache/lens/server/ml/MLService.java    |  27 -
 .../apache/lens/server/ml/MLServiceImpl.java    | 324 --------
 .../lens/server/ml/MLServiceResource.java       | 415 -----------
 .../java/org/apache/lens/ml/ExampleUtils.java   | 101 +++
 .../java/org/apache/lens/ml/TestMLResource.java |  15 +-
 .../java/org/apache/lens/ml/TestMLRunner.java   |   7 +-
 lens-ml-lib/src/test/resources/lens-site.xml    |   6 +-
 tools/conf-pseudo-distr/server/lens-site.xml    |   6 +-
 105 files changed, 6367 insertions(+), 6343 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/0f5ea4c7/lens-ml-lib/src/main/java/org/apache/lens/api/ml/ModelMetadata.java
----------------------------------------------------------------------
diff --git a/lens-ml-lib/src/main/java/org/apache/lens/api/ml/ModelMetadata.java b/lens-ml-lib/src/main/java/org/apache/lens/api/ml/ModelMetadata.java
deleted file mode 100644
index 0f072bf..0000000
--- a/lens-ml-lib/src/main/java/org/apache/lens/api/ml/ModelMetadata.java
+++ /dev/null
@@ -1,118 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.lens.api.ml;
-
-import javax.xml.bind.annotation.XmlElement;
-import javax.xml.bind.annotation.XmlRootElement;
-
-import lombok.AllArgsConstructor;
-import lombok.Getter;
-import lombok.NoArgsConstructor;
-
-/**
- * The Class ModelMetadata.
- */
-@XmlRootElement
-/**
- * Instantiates a new model metadata.
- *
- * @param modelID
- *          the model id
- * @param table
- *          the table
- * @param algorithm
- *          the algorithm
- * @param params
- *          the params
- * @param createdAt
- *          the created at
- * @param modelPath
- *          the model path
- * @param labelColumn
- *          the label column
- * @param features
- *          the features
- */
-@AllArgsConstructor
-/**
- * Instantiates a new model metadata.
- */
-@NoArgsConstructor
-public class ModelMetadata {
-
-  /** The model id. */
-  @XmlElement
-  @Getter
-  private String modelID;
-
-  /** The table. */
-  @XmlElement
-  @Getter
-  private String table;
-
-  /** The algorithm. */
-  @XmlElement
-  @Getter
-  private String algorithm;
-
-  /** The params. */
-  @XmlElement
-  @Getter
-  private String params;
-
-  /** The created at. */
-  @XmlElement
-  @Getter
-  private String createdAt;
-
-  /** The model path. */
-  @XmlElement
-  @Getter
-  private String modelPath;
-
-  /** The label column. */
-  @XmlElement
-  @Getter
-  private String labelColumn;
-
-  /** The features. */
-  @XmlElement
-  @Getter
-  private String features;
-
-  /*
-   * (non-Javadoc)
-   *
-   * @see java.lang.Object#toString()
-   */
-  @Override
-  public String toString() {
-    StringBuilder builder = new StringBuilder();
-
-    builder.append("Algorithm: ").append(algorithm).append('\n');
-    builder.append("Model ID: ").append(modelID).append('\n');
-    builder.append("Training table: ").append(table).append('\n');
-    builder.append("Features: ").append(features).append('\n');
-    builder.append("Labelled Column: ").append(labelColumn).append('\n');
-    builder.append("Training params: ").append(params).append('\n');
-    builder.append("Created on: ").append(createdAt).append('\n');
-    builder.append("Model saved at: ").append(modelPath).append('\n');
-    return builder.toString();
-  }
-}

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/0f5ea4c7/lens-ml-lib/src/main/java/org/apache/lens/api/ml/TestReport.java
----------------------------------------------------------------------
diff --git a/lens-ml-lib/src/main/java/org/apache/lens/api/ml/TestReport.java b/lens-ml-lib/src/main/java/org/apache/lens/api/ml/TestReport.java
deleted file mode 100644
index 2ae384b..0000000
--- a/lens-ml-lib/src/main/java/org/apache/lens/api/ml/TestReport.java
+++ /dev/null
@@ -1,125 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.lens.api.ml;
-
-import javax.xml.bind.annotation.XmlElement;
-import javax.xml.bind.annotation.XmlRootElement;
-
-import lombok.AllArgsConstructor;
-import lombok.Getter;
-import lombok.NoArgsConstructor;
-
-/**
- * The Class TestReport.
- */
-@XmlRootElement
-/**
- * Instantiates a new test report.
- *
- * @param testTable
- *          the test table
- * @param outputTable
- *          the output table
- * @param outputColumn
- *          the output column
- * @param labelColumn
- *          the label column
- * @param featureColumns
- *          the feature columns
- * @param algorithm
- *          the algorithm
- * @param modelID
- *          the model id
- * @param reportID
- *          the report id
- * @param queryID
- *          the query id
- */
-@AllArgsConstructor
-/**
- * Instantiates a new test report.
- */
-@NoArgsConstructor
-public class TestReport {
-
-  /** The test table. */
-  @XmlElement
-  @Getter
-  private String testTable;
-
-  /** The output table. */
-  @XmlElement
-  @Getter
-  private String outputTable;
-
-  /** The output column. */
-  @XmlElement
-  @Getter
-  private String outputColumn;
-
-  /** The label column. */
-  @XmlElement
-  @Getter
-  private String labelColumn;
-
-  /** The feature columns. */
-  @XmlElement
-  @Getter
-  private String featureColumns;
-
-  /** The algorithm. */
-  @XmlElement
-  @Getter
-  private String algorithm;
-
-  /** The model id. */
-  @XmlElement
-  @Getter
-  private String modelID;
-
-  /** The report id. */
-  @XmlElement
-  @Getter
-  private String reportID;
-
-  /** The query id. */
-  @XmlElement
-  @Getter
-  private String queryID;
-
-  /*
-   * (non-Javadoc)
-   *
-   * @see java.lang.Object#toString()
-   */
-  @Override
-  public String toString() {
-    StringBuilder builder = new StringBuilder();
-    builder.append("Input test table: ").append(testTable).append('\n');
-    builder.append("Algorithm: ").append(algorithm).append('\n');
-    builder.append("Report id: ").append(reportID).append('\n');
-    builder.append("Model id: ").append(modelID).append('\n');
-    builder.append("Lens Query id: ").append(queryID).append('\n');
-    builder.append("Feature columns: ").append(featureColumns).append('\n');
-    builder.append("Labelled column: ").append(labelColumn).append('\n');
-    builder.append("Predicted column: ").append(outputColumn).append('\n');
-    builder.append("Test output table: ").append(outputTable).append('\n');
-    return builder.toString();
-  }
-}

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/0f5ea4c7/lens-ml-lib/src/main/java/org/apache/lens/client/LensMLClient.java
----------------------------------------------------------------------
diff --git a/lens-ml-lib/src/main/java/org/apache/lens/client/LensMLClient.java b/lens-ml-lib/src/main/java/org/apache/lens/client/LensMLClient.java
index d9ec314..4d4521e 100644
--- a/lens-ml-lib/src/main/java/org/apache/lens/client/LensMLClient.java
+++ b/lens-ml-lib/src/main/java/org/apache/lens/client/LensMLClient.java
@@ -32,12 +32,12 @@ import javax.ws.rs.core.Form;
 
 import org.apache.lens.api.LensException;
 import org.apache.lens.api.LensSessionHandle;
-import org.apache.lens.api.ml.ModelMetadata;
-import org.apache.lens.api.ml.TestReport;
-import org.apache.lens.ml.LensML;
-import org.apache.lens.ml.MLAlgo;
-import org.apache.lens.ml.MLModel;
-import org.apache.lens.ml.MLTestReport;
+import org.apache.lens.ml.algo.api.MLAlgo;
+import org.apache.lens.ml.algo.api.MLModel;
+import org.apache.lens.ml.api.LensML;
+import org.apache.lens.ml.api.MLTestReport;
+import org.apache.lens.ml.api.ModelMetadata;
+import org.apache.lens.ml.api.TestReport;
 
 import org.apache.commons.lang.StringUtils;
 import org.apache.commons.logging.Log;

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/0f5ea4c7/lens-ml-lib/src/main/java/org/apache/lens/client/LensMLJerseyClient.java
----------------------------------------------------------------------
diff --git a/lens-ml-lib/src/main/java/org/apache/lens/client/LensMLJerseyClient.java b/lens-ml-lib/src/main/java/org/apache/lens/client/LensMLJerseyClient.java
index af47a41..c68dd12 100644
--- a/lens-ml-lib/src/main/java/org/apache/lens/client/LensMLJerseyClient.java
+++ b/lens-ml-lib/src/main/java/org/apache/lens/client/LensMLJerseyClient.java
@@ -31,8 +31,8 @@ import javax.ws.rs.core.MediaType;
 
 import org.apache.lens.api.LensSessionHandle;
 import org.apache.lens.api.StringList;
-import org.apache.lens.api.ml.ModelMetadata;
-import org.apache.lens.api.ml.TestReport;
+import org.apache.lens.ml.api.ModelMetadata;
+import org.apache.lens.ml.api.TestReport;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/0f5ea4c7/lens-ml-lib/src/main/java/org/apache/lens/ml/AlgoArgParser.java
----------------------------------------------------------------------
diff --git a/lens-ml-lib/src/main/java/org/apache/lens/ml/AlgoArgParser.java b/lens-ml-lib/src/main/java/org/apache/lens/ml/AlgoArgParser.java
deleted file mode 100644
index 20da083..0000000
--- a/lens-ml-lib/src/main/java/org/apache/lens/ml/AlgoArgParser.java
+++ /dev/null
@@ -1,114 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.lens.ml;
-
-import java.lang.reflect.Field;
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-
-/**
- * The Class AlgoArgParser.
- */
-public final class AlgoArgParser {
-  private AlgoArgParser() {
-  }
-
-  /**
-   * The Class CustomArgParser.
-   *
-   * @param <E> the element type
-   */
-  public abstract static class CustomArgParser<E> {
-
-    /**
-     * Parses the.
-     *
-     * @param value the value
-     * @return the e
-     */
-    public abstract E parse(String value);
-  }
-
-  /** The Constant LOG. */
-  public static final Log LOG = LogFactory.getLog(AlgoArgParser.class);
-
-  /**
-   * Extracts feature names. If the algo has any parameters associated with @AlgoParam annotation, those are set
-   * as well.
-   *
-   * @param algo the algo
-   * @param args    the args
-   * @return List of feature column names.
-   */
-  public static List<String> parseArgs(MLAlgo algo, String[] args) {
-    List<String> featureColumns = new ArrayList<String>();
-    Class<? extends MLAlgo> algoClass = algo.getClass();
-    // Get param fields
-    Map<String, Field> fieldMap = new HashMap<String, Field>();
-
-    for (Field fld : algoClass.getDeclaredFields()) {
-      fld.setAccessible(true);
-      AlgoParam paramAnnotation = fld.getAnnotation(AlgoParam.class);
-      if (paramAnnotation != null) {
-        fieldMap.put(paramAnnotation.name(), fld);
-      }
-    }
-
-    for (int i = 0; i < args.length; i += 2) {
-      String key = args[i].trim();
-      String value = args[i + 1].trim();
-
-      try {
-        if ("feature".equalsIgnoreCase(key)) {
-          featureColumns.add(value);
-        } else if (fieldMap.containsKey(key)) {
-          Field f = fieldMap.get(key);
-          if (String.class.equals(f.getType())) {
-            f.set(algo, value);
-          } else if (Integer.TYPE.equals(f.getType())) {
-            f.setInt(algo, Integer.parseInt(value));
-          } else if (Double.TYPE.equals(f.getType())) {
-            f.setDouble(algo, Double.parseDouble(value));
-          } else if (Long.TYPE.equals(f.getType())) {
-            f.setLong(algo, Long.parseLong(value));
-          } else {
-            // check if the algo provides a deserializer for this param
-            String customParserClass = algo.getConf().getProperties().get("lens.ml.args." + key);
-            if (customParserClass != null) {
-              Class<? extends CustomArgParser<?>> clz = (Class<? extends CustomArgParser<?>>) Class
-                .forName(customParserClass);
-              CustomArgParser<?> parser = clz.newInstance();
-              f.set(algo, parser.parse(value));
-            } else {
-              LOG.warn("Ignored param " + key + "=" + value + " as no parser found");
-            }
-          }
-        }
-      } catch (Exception exc) {
-        LOG.error("Error while setting param " + key + " to " + value + " for algo " + algo);
-      }
-    }
-    return featureColumns;
-  }
-}

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/0f5ea4c7/lens-ml-lib/src/main/java/org/apache/lens/ml/AlgoParam.java
----------------------------------------------------------------------
diff --git a/lens-ml-lib/src/main/java/org/apache/lens/ml/AlgoParam.java b/lens-ml-lib/src/main/java/org/apache/lens/ml/AlgoParam.java
deleted file mode 100644
index 5836f51..0000000
--- a/lens-ml-lib/src/main/java/org/apache/lens/ml/AlgoParam.java
+++ /dev/null
@@ -1,53 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.lens.ml;
-
-import java.lang.annotation.ElementType;
-import java.lang.annotation.Retention;
-import java.lang.annotation.RetentionPolicy;
-import java.lang.annotation.Target;
-
-/**
- * The Interface AlgoParam.
- */
-@Retention(RetentionPolicy.RUNTIME)
-@Target(ElementType.FIELD)
-public @interface AlgoParam {
-
-  /**
-   * Name.
-   *
-   * @return the string
-   */
-  String name();
-
-  /**
-   * Help.
-   *
-   * @return the string
-   */
-  String help();
-
-  /**
-   * Default value.
-   *
-   * @return the string
-   */
-  String defaultValue() default "None";
-}

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/0f5ea4c7/lens-ml-lib/src/main/java/org/apache/lens/ml/Algorithm.java
----------------------------------------------------------------------
diff --git a/lens-ml-lib/src/main/java/org/apache/lens/ml/Algorithm.java b/lens-ml-lib/src/main/java/org/apache/lens/ml/Algorithm.java
deleted file mode 100644
index 7025d7b..0000000
--- a/lens-ml-lib/src/main/java/org/apache/lens/ml/Algorithm.java
+++ /dev/null
@@ -1,46 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.lens.ml;
-
-import java.lang.annotation.ElementType;
-import java.lang.annotation.Retention;
-import java.lang.annotation.RetentionPolicy;
-import java.lang.annotation.Target;
-
-/**
- * The Interface Algorithm.
- */
-@Retention(RetentionPolicy.RUNTIME)
-@Target(ElementType.TYPE)
-public @interface Algorithm {
-
-  /**
-   * Name.
-   *
-   * @return the string
-   */
-  String name();
-
-  /**
-   * Description.
-   *
-   * @return the string
-   */
-  String description();
-}

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/0f5ea4c7/lens-ml-lib/src/main/java/org/apache/lens/ml/Algorithms.java
----------------------------------------------------------------------
diff --git a/lens-ml-lib/src/main/java/org/apache/lens/ml/Algorithms.java b/lens-ml-lib/src/main/java/org/apache/lens/ml/Algorithms.java
deleted file mode 100644
index c1b7212..0000000
--- a/lens-ml-lib/src/main/java/org/apache/lens/ml/Algorithms.java
+++ /dev/null
@@ -1,87 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.lens.ml;
-
-import java.lang.reflect.Constructor;
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-
-import org.apache.lens.api.LensException;
-
-/**
- * The Class Algorithms.
- */
-public class Algorithms {
-
-  /** The algorithm classes. */
-  private final Map<String, Class<? extends MLAlgo>> algorithmClasses
-    = new HashMap<String, Class<? extends MLAlgo>>();
-
-  /**
-   * Register.
-   *
-   * @param algoClass the algo class
-   */
-  public void register(Class<? extends MLAlgo> algoClass) {
-    if (algoClass != null && algoClass.getAnnotation(Algorithm.class) != null) {
-      algorithmClasses.put(algoClass.getAnnotation(Algorithm.class).name(), algoClass);
-    } else {
-      throw new IllegalArgumentException("Not a valid algorithm class: " + algoClass);
-    }
-  }
-
-  /**
-   * Gets the algo for name.
-   *
-   * @param name the name
-   * @return the algo for name
-   * @throws LensException the lens exception
-   */
-  public MLAlgo getAlgoForName(String name) throws LensException {
-    Class<? extends MLAlgo> algoClass = algorithmClasses.get(name);
-    if (algoClass == null) {
-      return null;
-    }
-    Algorithm algoAnnotation = algoClass.getAnnotation(Algorithm.class);
-    String description = algoAnnotation.description();
-    try {
-      Constructor<? extends MLAlgo> algoConstructor = algoClass.getConstructor(String.class, String.class);
-      return algoConstructor.newInstance(name, description);
-    } catch (Exception exc) {
-      throw new LensException("Unable to get algo: " + name, exc);
-    }
-  }
-
-  /**
-   * Checks if is algo supported.
-   *
-   * @param name the name
-   * @return true, if is algo supported
-   */
-  public boolean isAlgoSupported(String name) {
-    return algorithmClasses.containsKey(name);
-  }
-
-  public List<String> getAlgorithmNames() {
-    return new ArrayList<String>(algorithmClasses.keySet());
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/0f5ea4c7/lens-ml-lib/src/main/java/org/apache/lens/ml/ClassifierBaseModel.java
----------------------------------------------------------------------
diff --git a/lens-ml-lib/src/main/java/org/apache/lens/ml/ClassifierBaseModel.java b/lens-ml-lib/src/main/java/org/apache/lens/ml/ClassifierBaseModel.java
deleted file mode 100644
index 68008fe..0000000
--- a/lens-ml-lib/src/main/java/org/apache/lens/ml/ClassifierBaseModel.java
+++ /dev/null
@@ -1,46 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.lens.ml;
-
-/**
- * Return a single double value as a prediction. This is useful in classifiers where the classifier returns a single
- * class label as a prediction.
- */
-public abstract class ClassifierBaseModel extends MLModel<Double> {
-
-  /**
-   * Gets the feature vector.
-   *
-   * @param args the args
-   * @return the feature vector
-   */
-  public final double[] getFeatureVector(Object[] args) {
-    double[] features = new double[args.length];
-    for (int i = 0; i < args.length; i++) {
-      if (args[i] instanceof Double) {
-        features[i] = (Double) args[i];
-      } else if (args[i] instanceof String) {
-        features[i] = Double.parseDouble((String) args[i]);
-      } else {
-        features[i] = 0.0;
-      }
-    }
-    return features;
-  }
-}

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/0f5ea4c7/lens-ml-lib/src/main/java/org/apache/lens/ml/ExampleUtils.java
----------------------------------------------------------------------
diff --git a/lens-ml-lib/src/main/java/org/apache/lens/ml/ExampleUtils.java b/lens-ml-lib/src/main/java/org/apache/lens/ml/ExampleUtils.java
deleted file mode 100644
index 9fe1ea0..0000000
--- a/lens-ml-lib/src/main/java/org/apache/lens/ml/ExampleUtils.java
+++ /dev/null
@@ -1,101 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.lens.ml;
-
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.metastore.TableType;
-import org.apache.hadoop.hive.metastore.api.FieldSchema;
-import org.apache.hadoop.hive.ql.metadata.Hive;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
-import org.apache.hadoop.hive.ql.metadata.Table;
-import org.apache.hadoop.hive.ql.plan.AddPartitionDesc;
-import org.apache.hadoop.hive.serde.serdeConstants;
-import org.apache.hadoop.mapred.TextInputFormat;
-
-/**
- * The Class ExampleUtils.
- */
-public final class ExampleUtils {
-  private ExampleUtils() {
-  }
-
-  private static final Log LOG = LogFactory.getLog(ExampleUtils.class);
-
-  /**
-   * Creates the example table.
-   *
-   * @param conf           the conf
-   * @param database       the database
-   * @param tableName      the table name
-   * @param sampleDataFile the sample data file
-   * @param labelColumn    the label column
-   * @param features       the features
-   * @throws HiveException the hive exception
-   */
-  public static void createTable(HiveConf conf, String database, String tableName, String sampleDataFile,
-    String labelColumn, Map<String, String> tableParams, String... features) throws HiveException {
-
-    Path dataFilePath = new Path(sampleDataFile);
-    Path partDir = dataFilePath.getParent();
-
-    // Create table
-    List<FieldSchema> columns = new ArrayList<FieldSchema>();
-
-    // Label is optional. Not used for unsupervised models.
-    // If present, label will be the first column, followed by features
-    if (labelColumn != null) {
-      columns.add(new FieldSchema(labelColumn, "double", "Labelled Column"));
-    }
-
-    for (String feature : features) {
-      columns.add(new FieldSchema(feature, "double", "Feature " + feature));
-    }
-
-    Table tbl = Hive.get(conf).newTable(database + "." + tableName);
-    tbl.setTableType(TableType.MANAGED_TABLE);
-    tbl.getTTable().getSd().setCols(columns);
-    tbl.getTTable().getParameters().putAll(tableParams);
-    tbl.setInputFormatClass(TextInputFormat.class);
-    tbl.setSerdeParam(serdeConstants.LINE_DELIM, "\n");
-    tbl.setSerdeParam(serdeConstants.FIELD_DELIM, " ");
-
-    List<FieldSchema> partCols = new ArrayList<FieldSchema>(1);
-    partCols.add(new FieldSchema("dummy_partition_col", "string", ""));
-    tbl.setPartCols(partCols);
-
-    Hive.get(conf).createTable(tbl, false);
-    LOG.info("Created table " + tableName);
-
-    // Add partition for the data file
-    AddPartitionDesc partitionDesc = new AddPartitionDesc(database, tableName, false);
-    Map<String, String> partSpec = new HashMap<String, String>();
-    partSpec.put("dummy_partition_col", "dummy_val");
-    partitionDesc.addPartition(partSpec, partDir.toUri().toString());
-    Hive.get(conf).createPartitions(partitionDesc);
-    LOG.info(tableName + ": Added partition " + partDir.toUri().toString());
-  }
-}

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/0f5ea4c7/lens-ml-lib/src/main/java/org/apache/lens/ml/ForecastingModel.java
----------------------------------------------------------------------
diff --git a/lens-ml-lib/src/main/java/org/apache/lens/ml/ForecastingModel.java b/lens-ml-lib/src/main/java/org/apache/lens/ml/ForecastingModel.java
deleted file mode 100644
index 5163db5..0000000
--- a/lens-ml-lib/src/main/java/org/apache/lens/ml/ForecastingModel.java
+++ /dev/null
@@ -1,93 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.lens.ml;
-
-import java.util.List;
-
-/**
- * The Class ForecastingModel.
- */
-public class ForecastingModel extends MLModel<MultiPrediction> {
-
-  /*
-   * (non-Javadoc)
-   *
-   * @see org.apache.lens.ml.MLModel#predict(java.lang.Object[])
-   */
-  @Override
-  public MultiPrediction predict(Object... args) {
-    return new ForecastingPredictions(null);
-  }
-
-  /**
-   * The Class ForecastingPredictions.
-   */
-  public static class ForecastingPredictions implements MultiPrediction {
-
-    /** The values. */
-    private final List<LabelledPrediction> values;
-
-    /**
-     * Instantiates a new forecasting predictions.
-     *
-     * @param values the values
-     */
-    public ForecastingPredictions(List<LabelledPrediction> values) {
-      this.values = values;
-    }
-
-    @Override
-    public List<LabelledPrediction> getPredictions() {
-      return values;
-    }
-  }
-
-  /**
-   * The Class ForecastingLabel.
-   */
-  public static class ForecastingLabel implements LabelledPrediction<Long, Double> {
-
-    /** The timestamp. */
-    private final Long timestamp;
-
-    /** The value. */
-    private final double value;
-
-    /**
-     * Instantiates a new forecasting label.
-     *
-     * @param timestamp the timestamp
-     * @param value     the value
-     */
-    public ForecastingLabel(long timestamp, double value) {
-      this.timestamp = timestamp;
-      this.value = value;
-    }
-
-    @Override
-    public Long getLabel() {
-      return timestamp;
-    }
-
-    @Override
-    public Double getPrediction() {
-      return value;
-    }
-  }
-}

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/0f5ea4c7/lens-ml-lib/src/main/java/org/apache/lens/ml/HiveMLUDF.java
----------------------------------------------------------------------
diff --git a/lens-ml-lib/src/main/java/org/apache/lens/ml/HiveMLUDF.java b/lens-ml-lib/src/main/java/org/apache/lens/ml/HiveMLUDF.java
deleted file mode 100644
index 687ca54..0000000
--- a/lens-ml-lib/src/main/java/org/apache/lens/ml/HiveMLUDF.java
+++ /dev/null
@@ -1,136 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.lens.ml;
-
-import java.io.IOException;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.hive.ql.exec.Description;
-import org.apache.hadoop.hive.ql.exec.MapredContext;
-import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
-import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
-import org.apache.hadoop.hive.ql.udf.generic.GenericUDF;
-import org.apache.hadoop.hive.serde2.lazy.LazyDouble;
-import org.apache.hadoop.hive.serde2.lazy.objectinspector.primitive.LazyDoubleObjectInspector;
-import org.apache.hadoop.hive.serde2.lazy.objectinspector.primitive.LazyPrimitiveObjectInspectorFactory;
-import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
-import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
-import org.apache.hadoop.hive.serde2.objectinspector.primitive.StringObjectInspector;
-import org.apache.hadoop.mapred.JobConf;
-
-/**
- * Generic UDF to laod ML Models saved in HDFS and apply the model on list of columns passed as argument.
- */
-@Description(name = "predict",
-  value = "_FUNC_(algorithm, modelID, features...) - Run prediction algorithm with given "
-    + "algorithm name, model ID and input feature columns")
-public final class HiveMLUDF extends GenericUDF {
-  private HiveMLUDF() {
-  }
-
-  /** The Constant UDF_NAME. */
-  public static final String UDF_NAME = "predict";
-
-  /** The Constant LOG. */
-  public static final Log LOG = LogFactory.getLog(HiveMLUDF.class);
-
-  /** The conf. */
-  private JobConf conf;
-
-  /** The soi. */
-  private StringObjectInspector soi;
-
-  /** The doi. */
-  private LazyDoubleObjectInspector doi;
-
-  /** The model. */
-  private MLModel model;
-
-  /**
-   * Currently we only support double as the return value.
-   *
-   * @param objectInspectors the object inspectors
-   * @return the object inspector
-   * @throws UDFArgumentException the UDF argument exception
-   */
-  @Override
-  public ObjectInspector initialize(ObjectInspector[] objectInspectors) throws UDFArgumentException {
-    // We require algo name, model id and at least one feature
-    if (objectInspectors.length < 3) {
-      throw new UDFArgumentLengthException("Algo name, model ID and at least one feature should be passed to "
-        + UDF_NAME);
-    }
-    LOG.info(UDF_NAME + " initialized");
-    return PrimitiveObjectInspectorFactory.javaDoubleObjectInspector;
-  }
-
-  /*
-   * (non-Javadoc)
-   *
-   * @see org.apache.hadoop.hive.ql.udf.generic.GenericUDF#evaluate(org.apache.hadoop.hive.ql.udf.generic.GenericUDF.
-   * DeferredObject[])
-   */
-  @Override
-  public Object evaluate(DeferredObject[] deferredObjects) throws HiveException {
-    String algorithm = soi.getPrimitiveJavaObject(deferredObjects[0].get());
-    String modelId = soi.getPrimitiveJavaObject(deferredObjects[1].get());
-
-    Double[] features = new Double[deferredObjects.length - 2];
-    for (int i = 2; i < deferredObjects.length; i++) {
-      LazyDouble lazyDouble = (LazyDouble) deferredObjects[i].get();
-      features[i - 2] = (lazyDouble == null) ? 0d : doi.get(lazyDouble);
-    }
-
-    try {
-      if (model == null) {
-        model = ModelLoader.loadModel(conf, algorithm, modelId);
-      }
-    } catch (IOException e) {
-      throw new HiveException(e);
-    }
-
-    return model.predict(features);
-  }
-
-  /*
-   * (non-Javadoc)
-   *
-   * @see org.apache.hadoop.hive.ql.udf.generic.GenericUDF#getDisplayString(java.lang.String[])
-   */
-  @Override
-  public String getDisplayString(String[] strings) {
-    return UDF_NAME;
-  }
-
-  /*
-   * (non-Javadoc)
-   *
-   * @see org.apache.hadoop.hive.ql.udf.generic.GenericUDF#configure(org.apache.hadoop.hive.ql.exec.MapredContext)
-   */
-  @Override
-  public void configure(MapredContext context) {
-    super.configure(context);
-    conf = context.getJobConf();
-    soi = PrimitiveObjectInspectorFactory.javaStringObjectInspector;
-    doi = LazyPrimitiveObjectInspectorFactory.LAZY_DOUBLE_OBJECT_INSPECTOR;
-    LOG.info(UDF_NAME + " configured. Model base dir path: " + conf.get(ModelLoader.MODEL_PATH_BASE_DIR));
-  }
-}

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/0f5ea4c7/lens-ml-lib/src/main/java/org/apache/lens/ml/LabelledPrediction.java
----------------------------------------------------------------------
diff --git a/lens-ml-lib/src/main/java/org/apache/lens/ml/LabelledPrediction.java b/lens-ml-lib/src/main/java/org/apache/lens/ml/LabelledPrediction.java
deleted file mode 100644
index 6e7f677..0000000
--- a/lens-ml-lib/src/main/java/org/apache/lens/ml/LabelledPrediction.java
+++ /dev/null
@@ -1,32 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.lens.ml;
-
-/**
- * Prediction type used when the model prediction is of complex types. For example, in forecasting the predictions are a
- * series of timestamp, and value pairs.
- *
- * @param <LABELTYPE>      the generic type
- * @param <PREDICTIONTYPE> the generic type
- */
-public interface LabelledPrediction<LABELTYPE, PREDICTIONTYPE> {
-  LABELTYPE getLabel();
-
-  PREDICTIONTYPE getPrediction();
-}

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/0f5ea4c7/lens-ml-lib/src/main/java/org/apache/lens/ml/LensML.java
----------------------------------------------------------------------
diff --git a/lens-ml-lib/src/main/java/org/apache/lens/ml/LensML.java b/lens-ml-lib/src/main/java/org/apache/lens/ml/LensML.java
deleted file mode 100644
index cdf28dd..0000000
--- a/lens-ml-lib/src/main/java/org/apache/lens/ml/LensML.java
+++ /dev/null
@@ -1,159 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.lens.ml;
-
-import java.util.List;
-import java.util.Map;
-
-import org.apache.lens.api.LensException;
-import org.apache.lens.api.LensSessionHandle;
-
-/**
- * Lens's machine learning interface used by client code as well as Lens ML service.
- */
-public interface LensML {
-
-  /** Name of ML service */
-  String NAME = "ml";
-
-  /**
-   * Get list of available machine learning algorithms
-   *
-   * @return
-   */
-  List<String> getAlgorithms();
-
-  /**
-   * Get user friendly information about parameters accepted by the algorithm.
-   *
-   * @param algorithm the algorithm
-   * @return map of param key to its help message
-   */
-  Map<String, String> getAlgoParamDescription(String algorithm);
-
-  /**
-   * Get a algo object instance which could be used to generate a model of the given algorithm.
-   *
-   * @param algorithm the algorithm
-   * @return the algo for name
-   * @throws LensException the lens exception
-   */
-  MLAlgo getAlgoForName(String algorithm) throws LensException;
-
-  /**
-   * Create a model using the given HCatalog table as input. The arguments should contain information needeed to
-   * generate the model.
-   *
-   * @param table     the table
-   * @param algorithm the algorithm
-   * @param args      the args
-   * @return Unique ID of the model created after training is complete
-   * @throws LensException the lens exception
-   */
-  String train(String table, String algorithm, String[] args) throws LensException;
-
-  /**
-   * Get model IDs for the given algorithm.
-   *
-   * @param algorithm the algorithm
-   * @return the models
-   * @throws LensException the lens exception
-   */
-  List<String> getModels(String algorithm) throws LensException;
-
-  /**
-   * Get a model instance given the algorithm name and model ID.
-   *
-   * @param algorithm the algorithm
-   * @param modelId   the model id
-   * @return the model
-   * @throws LensException the lens exception
-   */
-  MLModel getModel(String algorithm, String modelId) throws LensException;
-
-  /**
-   * Get the FS location where model instance is saved.
-   *
-   * @param algorithm the algorithm
-   * @param modelID   the model id
-   * @return the model path
-   */
-  String getModelPath(String algorithm, String modelID);
-
-  /**
-   * Evaluate model by running it against test data contained in the given table.
-   *
-   * @param session   the session
-   * @param table     the table
-   * @param algorithm the algorithm
-   * @param modelID   the model id
-   * @return Test report object containing test output table, and various evaluation metrics
-   * @throws LensException the lens exception
-   */
-  MLTestReport testModel(LensSessionHandle session, String table, String algorithm, String modelID,
-    String outputTable) throws LensException;
-
-  /**
-   * Get test reports for an algorithm.
-   *
-   * @param algorithm the algorithm
-   * @return the test reports
-   * @throws LensException the lens exception
-   */
-  List<String> getTestReports(String algorithm) throws LensException;
-
-  /**
-   * Get a test report by ID.
-   *
-   * @param algorithm the algorithm
-   * @param reportID  the report id
-   * @return the test report
-   * @throws LensException the lens exception
-   */
-  MLTestReport getTestReport(String algorithm, String reportID) throws LensException;
-
-  /**
-   * Online predict call given a model ID, algorithm name and sample feature values.
-   *
-   * @param algorithm the algorithm
-   * @param modelID   the model id
-   * @param features  the features
-   * @return prediction result
-   * @throws LensException the lens exception
-   */
-  Object predict(String algorithm, String modelID, Object[] features) throws LensException;
-
-  /**
-   * Permanently delete a model instance.
-   *
-   * @param algorithm the algorithm
-   * @param modelID   the model id
-   * @throws LensException the lens exception
-   */
-  void deleteModel(String algorithm, String modelID) throws LensException;
-
-  /**
-   * Permanently delete a test report instance.
-   *
-   * @param algorithm the algorithm
-   * @param reportID  the report id
-   * @throws LensException the lens exception
-   */
-  void deleteTestReport(String algorithm, String reportID) throws LensException;
-}

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/0f5ea4c7/lens-ml-lib/src/main/java/org/apache/lens/ml/LensMLImpl.java
----------------------------------------------------------------------
diff --git a/lens-ml-lib/src/main/java/org/apache/lens/ml/LensMLImpl.java b/lens-ml-lib/src/main/java/org/apache/lens/ml/LensMLImpl.java
deleted file mode 100644
index b45f7f2..0000000
--- a/lens-ml-lib/src/main/java/org/apache/lens/ml/LensMLImpl.java
+++ /dev/null
@@ -1,734 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.lens.ml;
-
-import java.io.IOException;
-import java.io.ObjectOutputStream;
-import java.util.*;
-import java.util.concurrent.ConcurrentHashMap;
-import java.util.concurrent.Executors;
-import java.util.concurrent.ScheduledExecutorService;
-import java.util.concurrent.TimeUnit;
-
-import javax.ws.rs.client.Client;
-import javax.ws.rs.client.ClientBuilder;
-import javax.ws.rs.client.Entity;
-import javax.ws.rs.client.WebTarget;
-import javax.ws.rs.core.MediaType;
-
-import org.apache.lens.api.LensConf;
-import org.apache.lens.api.LensException;
-import org.apache.lens.api.LensSessionHandle;
-import org.apache.lens.api.query.LensQuery;
-import org.apache.lens.api.query.QueryHandle;
-import org.apache.lens.api.query.QueryStatus;
-import org.apache.lens.ml.spark.SparkMLDriver;
-import org.apache.lens.ml.spark.algos.BaseSparkAlgo;
-import org.apache.lens.server.api.LensConfConstants;
-import org.apache.lens.server.api.session.SessionService;
-
-import org.apache.commons.io.IOUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.fs.FileStatus;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.ql.session.SessionState;
-import org.apache.spark.api.java.JavaSparkContext;
-
-import org.glassfish.jersey.media.multipart.FormDataBodyPart;
-import org.glassfish.jersey.media.multipart.FormDataContentDisposition;
-import org.glassfish.jersey.media.multipart.FormDataMultiPart;
-import org.glassfish.jersey.media.multipart.MultiPartFeature;
-
-/**
- * The Class LensMLImpl.
- */
-public class LensMLImpl implements LensML {
-
-  /** The Constant LOG. */
-  public static final Log LOG = LogFactory.getLog(LensMLImpl.class);
-
-  /** The drivers. */
-  protected List<MLDriver> drivers;
-
-  /** The conf. */
-  private HiveConf conf;
-
-  /** The spark context. */
-  private JavaSparkContext sparkContext;
-
-  /** Check if the predict UDF has been registered for a user */
-  private final Map<LensSessionHandle, Boolean> predictUdfStatus;
-  /** Background thread to periodically check if we need to clear expire status for a session */
-  private ScheduledExecutorService udfStatusExpirySvc;
-
-  /**
-   * Instantiates a new lens ml impl.
-   *
-   * @param conf the conf
-   */
-  public LensMLImpl(HiveConf conf) {
-    this.conf = conf;
-    this.predictUdfStatus = new ConcurrentHashMap<LensSessionHandle, Boolean>();
-  }
-
-  public HiveConf getConf() {
-    return conf;
-  }
-
-  /**
-   * Use an existing Spark context. Useful in case of
-   *
-   * @param jsc JavaSparkContext instance
-   */
-  public void setSparkContext(JavaSparkContext jsc) {
-    this.sparkContext = jsc;
-  }
-
-  public List<String> getAlgorithms() {
-    List<String> algos = new ArrayList<String>();
-    for (MLDriver driver : drivers) {
-      algos.addAll(driver.getAlgoNames());
-    }
-    return algos;
-  }
-
-  /*
-   * (non-Javadoc)
-   *
-   * @see org.apache.lens.ml.LensML#getAlgoForName(java.lang.String)
-   */
-  public MLAlgo getAlgoForName(String algorithm) throws LensException {
-    for (MLDriver driver : drivers) {
-      if (driver.isAlgoSupported(algorithm)) {
-        return driver.getAlgoInstance(algorithm);
-      }
-    }
-    throw new LensException("Algo not supported " + algorithm);
-  }
-
-  /*
-   * (non-Javadoc)
-   *
-   * @see org.apache.lens.ml.LensML#train(java.lang.String, java.lang.String, java.lang.String[])
-   */
-  public String train(String table, String algorithm, String[] args) throws LensException {
-    MLAlgo algo = getAlgoForName(algorithm);
-
-    String modelId = UUID.randomUUID().toString();
-
-    LOG.info("Begin training model " + modelId + ", algo=" + algorithm + ", table=" + table + ", params="
-      + Arrays.toString(args));
-
-    String database = null;
-    if (SessionState.get() != null) {
-      database = SessionState.get().getCurrentDatabase();
-    } else {
-      database = "default";
-    }
-
-    MLModel model = algo.train(toLensConf(conf), database, table, modelId, args);
-
-    LOG.info("Done training model: " + modelId);
-
-    model.setCreatedAt(new Date());
-    model.setAlgoName(algorithm);
-
-    Path modelLocation = null;
-    try {
-      modelLocation = persistModel(model);
-      LOG.info("Model saved: " + modelId + ", algo: " + algorithm + ", path: " + modelLocation);
-      return model.getId();
-    } catch (IOException e) {
-      throw new LensException("Error saving model " + modelId + " for algo " + algorithm, e);
-    }
-  }
-
-  /**
-   * Gets the algo dir.
-   *
-   * @param algoName the algo name
-   * @return the algo dir
-   * @throws IOException Signals that an I/O exception has occurred.
-   */
-  private Path getAlgoDir(String algoName) throws IOException {
-    String modelSaveBaseDir = conf.get(ModelLoader.MODEL_PATH_BASE_DIR, ModelLoader.MODEL_PATH_BASE_DIR_DEFAULT);
-    return new Path(new Path(modelSaveBaseDir), algoName);
-  }
-
-  /**
-   * Persist model.
-   *
-   * @param model the model
-   * @return the path
-   * @throws IOException Signals that an I/O exception has occurred.
-   */
-  private Path persistModel(MLModel model) throws IOException {
-    // Get model save path
-    Path algoDir = getAlgoDir(model.getAlgoName());
-    FileSystem fs = algoDir.getFileSystem(conf);
-
-    if (!fs.exists(algoDir)) {
-      fs.mkdirs(algoDir);
-    }
-
-    Path modelSavePath = new Path(algoDir, model.getId());
-    ObjectOutputStream outputStream = null;
-
-    try {
-      outputStream = new ObjectOutputStream(fs.create(modelSavePath, false));
-      outputStream.writeObject(model);
-      outputStream.flush();
-    } catch (IOException io) {
-      LOG.error("Error saving model " + model.getId() + " reason: " + io.getMessage());
-      throw io;
-    } finally {
-      IOUtils.closeQuietly(outputStream);
-    }
-    return modelSavePath;
-  }
-
-  /*
-   * (non-Javadoc)
-   *
-   * @see org.apache.lens.ml.LensML#getModels(java.lang.String)
-   */
-  public List<String> getModels(String algorithm) throws LensException {
-    try {
-      Path algoDir = getAlgoDir(algorithm);
-      FileSystem fs = algoDir.getFileSystem(conf);
-      if (!fs.exists(algoDir)) {
-        return null;
-      }
-
-      List<String> models = new ArrayList<String>();
-
-      for (FileStatus stat : fs.listStatus(algoDir)) {
-        models.add(stat.getPath().getName());
-      }
-
-      if (models.isEmpty()) {
-        return null;
-      }
-
-      return models;
-    } catch (IOException ioex) {
-      throw new LensException(ioex);
-    }
-  }
-
-  /*
-   * (non-Javadoc)
-   *
-   * @see org.apache.lens.ml.LensML#getModel(java.lang.String, java.lang.String)
-   */
-  public MLModel getModel(String algorithm, String modelId) throws LensException {
-    try {
-      return ModelLoader.loadModel(conf, algorithm, modelId);
-    } catch (IOException e) {
-      throw new LensException(e);
-    }
-  }
-
-  /**
-   * Inits the.
-   *
-   * @param hiveConf the hive conf
-   */
-  public synchronized void init(HiveConf hiveConf) {
-    this.conf = hiveConf;
-
-    // Get all the drivers
-    String[] driverClasses = hiveConf.getStrings("lens.ml.drivers");
-
-    if (driverClasses == null || driverClasses.length == 0) {
-      throw new RuntimeException("No ML Drivers specified in conf");
-    }
-
-    LOG.info("Loading drivers " + Arrays.toString(driverClasses));
-    drivers = new ArrayList<MLDriver>(driverClasses.length);
-
-    for (String driverClass : driverClasses) {
-      Class<?> cls;
-      try {
-        cls = Class.forName(driverClass);
-      } catch (ClassNotFoundException e) {
-        LOG.error("Driver class not found " + driverClass);
-        continue;
-      }
-
-      if (!MLDriver.class.isAssignableFrom(cls)) {
-        LOG.warn("Not a driver class " + driverClass);
-        continue;
-      }
-
-      try {
-        Class<? extends MLDriver> mlDriverClass = (Class<? extends MLDriver>) cls;
-        MLDriver driver = mlDriverClass.newInstance();
-        driver.init(toLensConf(conf));
-        drivers.add(driver);
-        LOG.info("Added driver " + driverClass);
-      } catch (Exception e) {
-        LOG.error("Failed to create driver " + driverClass + " reason: " + e.getMessage(), e);
-      }
-    }
-    if (drivers.isEmpty()) {
-      throw new RuntimeException("No ML drivers loaded");
-    }
-
-    LOG.info("Inited ML service");
-  }
-
-  /**
-   * Start.
-   */
-  public synchronized void start() {
-    for (MLDriver driver : drivers) {
-      try {
-        if (driver instanceof SparkMLDriver && sparkContext != null) {
-          ((SparkMLDriver) driver).useSparkContext(sparkContext);
-        }
-        driver.start();
-      } catch (LensException e) {
-        LOG.error("Failed to start driver " + driver, e);
-      }
-    }
-
-    udfStatusExpirySvc = Executors.newSingleThreadScheduledExecutor();
-    udfStatusExpirySvc.scheduleAtFixedRate(new UDFStatusExpiryRunnable(), 60, 60, TimeUnit.SECONDS);
-
-    LOG.info("Started ML service");
-  }
-
-  /**
-   * Stop.
-   */
-  public synchronized void stop() {
-    for (MLDriver driver : drivers) {
-      try {
-        driver.stop();
-      } catch (LensException e) {
-        LOG.error("Failed to stop driver " + driver, e);
-      }
-    }
-    drivers.clear();
-    udfStatusExpirySvc.shutdownNow();
-    LOG.info("Stopped ML service");
-  }
-
-  public synchronized HiveConf getHiveConf() {
-    return conf;
-  }
-
-  /**
-   * Clear models.
-   */
-  public void clearModels() {
-    ModelLoader.clearCache();
-  }
-
-  /*
-   * (non-Javadoc)
-   *
-   * @see org.apache.lens.ml.LensML#getModelPath(java.lang.String, java.lang.String)
-   */
-  public String getModelPath(String algorithm, String modelID) {
-    return ModelLoader.getModelLocation(conf, algorithm, modelID).toString();
-  }
-
-  /*
-   * (non-Javadoc)
-   *
-   * @see org.apache.lens.ml.LensML#testModel(org.apache.lens.api.LensSessionHandle, java.lang.String, java.lang.String,
-   * java.lang.String)
-   */
-  @Override
-  public MLTestReport testModel(LensSessionHandle session, String table, String algorithm, String modelID,
-    String outputTable) throws LensException {
-    return null;
-  }
-
-  /**
-   * Test a model in embedded mode.
-   *
-   * @param sessionHandle the session handle
-   * @param table         the table
-   * @param algorithm     the algorithm
-   * @param modelID       the model id
-   * @param queryApiUrl   the query api url
-   * @return the ML test report
-   * @throws LensException the lens exception
-   */
-  public MLTestReport testModelRemote(LensSessionHandle sessionHandle, String table, String algorithm, String modelID,
-    String queryApiUrl, String outputTable) throws LensException {
-    return testModel(sessionHandle, table, algorithm, modelID, new RemoteQueryRunner(sessionHandle, queryApiUrl),
-      outputTable);
-  }
-
-  /**
-   * Evaluate a model. Evaluation is done on data selected table from an input table. The model is run as a UDF and its
-   * output is inserted into a table with a partition. Each evaluation is given a unique ID. The partition label is
-   * associated with this unique ID.
-   * <p/>
-   * <p>
-   * This call also required a query runner. Query runner is responsible for executing the evaluation query against Lens
-   * server.
-   * </p>
-   *
-   * @param sessionHandle the session handle
-   * @param table         the table
-   * @param algorithm     the algorithm
-   * @param modelID       the model id
-   * @param queryRunner   the query runner
-   * @param outputTable   table where test output will be written
-   * @return the ML test report
-   * @throws LensException the lens exception
-   */
-  public MLTestReport testModel(final LensSessionHandle sessionHandle, String table, String algorithm, String modelID,
-    QueryRunner queryRunner, String outputTable) throws LensException {
-    if (sessionHandle == null) {
-      throw new NullPointerException("Null session not allowed");
-    }
-    // check if algorithm exists
-    if (!getAlgorithms().contains(algorithm)) {
-      throw new LensException("No such algorithm " + algorithm);
-    }
-
-    MLModel<?> model;
-    try {
-      model = ModelLoader.loadModel(conf, algorithm, modelID);
-    } catch (IOException e) {
-      throw new LensException(e);
-    }
-
-    if (model == null) {
-      throw new LensException("Model not found: " + modelID + " algorithm=" + algorithm);
-    }
-
-    String database = null;
-
-    if (SessionState.get() != null) {
-      database = SessionState.get().getCurrentDatabase();
-    }
-
-    String testID = UUID.randomUUID().toString().replace("-", "_");
-    final String testTable = outputTable;
-    final String testResultColumn = "prediction_result";
-
-    // TODO support error metric UDAFs
-    TableTestingSpec spec = TableTestingSpec.newBuilder().hiveConf(conf)
-      .database(database == null ? "default" : database).inputTable(table).featureColumns(model.getFeatureColumns())
-      .outputColumn(testResultColumn).lableColumn(model.getLabelColumn()).algorithm(algorithm).modelID(modelID)
-      .outputTable(testTable).testID(testID).build();
-
-    String testQuery = spec.getTestQuery();
-    if (testQuery == null) {
-      throw new LensException("Invalid test spec. " + "table=" + table + " algorithm=" + algorithm + " modelID="
-        + modelID);
-    }
-
-    if (!spec.isOutputTableExists()) {
-      LOG.info("Output table '" + testTable + "' does not exist for test algorithm = " + algorithm + " modelid="
-        + modelID + ", Creating table using query: " + spec.getCreateOutputTableQuery());
-      // create the output table
-      String createOutputTableQuery = spec.getCreateOutputTableQuery();
-      queryRunner.runQuery(createOutputTableQuery);
-      LOG.info("Table created " + testTable);
-    }
-
-    // Check if ML UDF is registered in this session
-    registerPredictUdf(sessionHandle, queryRunner);
-
-    LOG.info("Running evaluation query " + testQuery);
-    queryRunner.setQueryName("model_test_" + modelID);
-    QueryHandle testQueryHandle = queryRunner.runQuery(testQuery);
-
-    MLTestReport testReport = new MLTestReport();
-    testReport.setReportID(testID);
-    testReport.setAlgorithm(algorithm);
-    testReport.setFeatureColumns(model.getFeatureColumns());
-    testReport.setLabelColumn(model.getLabelColumn());
-    testReport.setModelID(model.getId());
-    testReport.setOutputColumn(testResultColumn);
-    testReport.setOutputTable(testTable);
-    testReport.setTestTable(table);
-    testReport.setQueryID(testQueryHandle.toString());
-
-    // Save test report
-    persistTestReport(testReport);
-    LOG.info("Saved test report " + testReport.getReportID());
-    return testReport;
-  }
-
-  /**
-   * Persist test report.
-   *
-   * @param testReport the test report
-   * @throws LensException the lens exception
-   */
-  private void persistTestReport(MLTestReport testReport) throws LensException {
-    LOG.info("saving test report " + testReport.getReportID());
-    try {
-      ModelLoader.saveTestReport(conf, testReport);
-      LOG.info("Saved report " + testReport.getReportID());
-    } catch (IOException e) {
-      LOG.error("Error saving report " + testReport.getReportID() + " reason: " + e.getMessage());
-    }
-  }
-
-  /*
-   * (non-Javadoc)
-   *
-   * @see org.apache.lens.ml.LensML#getTestReports(java.lang.String)
-   */
-  public List<String> getTestReports(String algorithm) throws LensException {
-    Path reportBaseDir = new Path(conf.get(ModelLoader.TEST_REPORT_BASE_DIR, ModelLoader.TEST_REPORT_BASE_DIR_DEFAULT));
-    FileSystem fs = null;
-
-    try {
-      fs = reportBaseDir.getFileSystem(conf);
-      if (!fs.exists(reportBaseDir)) {
-        return null;
-      }
-
-      Path algoDir = new Path(reportBaseDir, algorithm);
-      if (!fs.exists(algoDir)) {
-        return null;
-      }
-
-      List<String> reports = new ArrayList<String>();
-      for (FileStatus stat : fs.listStatus(algoDir)) {
-        reports.add(stat.getPath().getName());
-      }
-      return reports;
-    } catch (IOException e) {
-      LOG.error("Error reading report list for " + algorithm, e);
-      return null;
-    }
-  }
-
-  /*
-   * (non-Javadoc)
-   *
-   * @see org.apache.lens.ml.LensML#getTestReport(java.lang.String, java.lang.String)
-   */
-  public MLTestReport getTestReport(String algorithm, String reportID) throws LensException {
-    try {
-      return ModelLoader.loadReport(conf, algorithm, reportID);
-    } catch (IOException e) {
-      throw new LensException(e);
-    }
-  }
-
-  /*
-   * (non-Javadoc)
-   *
-   * @see org.apache.lens.ml.LensML#predict(java.lang.String, java.lang.String, java.lang.Object[])
-   */
-  public Object predict(String algorithm, String modelID, Object[] features) throws LensException {
-    // Load the model instance
-    MLModel<?> model = getModel(algorithm, modelID);
-    return model.predict(features);
-  }
-
-  /*
-   * (non-Javadoc)
-   *
-   * @see org.apache.lens.ml.LensML#deleteModel(java.lang.String, java.lang.String)
-   */
-  public void deleteModel(String algorithm, String modelID) throws LensException {
-    try {
-      ModelLoader.deleteModel(conf, algorithm, modelID);
-      LOG.info("DELETED model " + modelID + " algorithm=" + algorithm);
-    } catch (IOException e) {
-      LOG.error(
-        "Error deleting model file. algorithm=" + algorithm + " model=" + modelID + " reason: " + e.getMessage(), e);
-      throw new LensException("Unable to delete model " + modelID + " for algorithm " + algorithm, e);
-    }
-  }
-
-  /*
-   * (non-Javadoc)
-   *
-   * @see org.apache.lens.ml.LensML#deleteTestReport(java.lang.String, java.lang.String)
-   */
-  public void deleteTestReport(String algorithm, String reportID) throws LensException {
-    try {
-      ModelLoader.deleteTestReport(conf, algorithm, reportID);
-      LOG.info("DELETED report=" + reportID + " algorithm=" + algorithm);
-    } catch (IOException e) {
-      LOG.error("Error deleting report " + reportID + " algorithm=" + algorithm + " reason: " + e.getMessage(), e);
-      throw new LensException("Unable to delete report " + reportID + " for algorithm " + algorithm, e);
-    }
-  }
-
-  /*
-   * (non-Javadoc)
-   *
-   * @see org.apache.lens.ml.LensML#getAlgoParamDescription(java.lang.String)
-   */
-  public Map<String, String> getAlgoParamDescription(String algorithm) {
-    MLAlgo algo = null;
-    try {
-      algo = getAlgoForName(algorithm);
-    } catch (LensException e) {
-      LOG.error("Error getting algo description : " + algorithm, e);
-      return null;
-    }
-    if (algo instanceof BaseSparkAlgo) {
-      return ((BaseSparkAlgo) algo).getArgUsage();
-    }
-    return null;
-  }
-
-  /**
-   * Submit model test query to a remote Lens server.
-   */
-  class RemoteQueryRunner extends QueryRunner {
-
-    /** The query api url. */
-    final String queryApiUrl;
-
-    /**
-     * Instantiates a new remote query runner.
-     *
-     * @param sessionHandle the session handle
-     * @param queryApiUrl   the query api url
-     */
-    public RemoteQueryRunner(LensSessionHandle sessionHandle, String queryApiUrl) {
-      super(sessionHandle);
-      this.queryApiUrl = queryApiUrl;
-    }
-
-    /*
-     * (non-Javadoc)
-     *
-     * @see org.apache.lens.ml.TestQueryRunner#runQuery(java.lang.String)
-     */
-    @Override
-    public QueryHandle runQuery(String query) throws LensException {
-      // Create jersey client for query endpoint
-      Client client = ClientBuilder.newBuilder().register(MultiPartFeature.class).build();
-      WebTarget target = client.target(queryApiUrl);
-      final FormDataMultiPart mp = new FormDataMultiPart();
-      mp.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("sessionid").build(), sessionHandle,
-        MediaType.APPLICATION_XML_TYPE));
-      mp.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("query").build(), query));
-      mp.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("operation").build(), "execute"));
-
-      LensConf lensConf = new LensConf();
-      lensConf.addProperty(LensConfConstants.QUERY_PERSISTENT_RESULT_SET, false + "");
-      lensConf.addProperty(LensConfConstants.QUERY_PERSISTENT_RESULT_INDRIVER, false + "");
-      mp.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("conf").fileName("conf").build(), lensConf,
-        MediaType.APPLICATION_XML_TYPE));
-
-      final QueryHandle handle = target.request().post(Entity.entity(mp, MediaType.MULTIPART_FORM_DATA_TYPE),
-        QueryHandle.class);
-
-      LensQuery ctx = target.path(handle.toString()).queryParam("sessionid", sessionHandle).request()
-        .get(LensQuery.class);
-
-      QueryStatus stat = ctx.getStatus();
-      while (!stat.isFinished()) {
-        ctx = target.path(handle.toString()).queryParam("sessionid", sessionHandle).request().get(LensQuery.class);
-        stat = ctx.getStatus();
-        try {
-          Thread.sleep(500);
-        } catch (InterruptedException e) {
-          throw new LensException(e);
-        }
-      }
-
-      if (stat.getStatus() != QueryStatus.Status.SUCCESSFUL) {
-        throw new LensException("Query failed " + ctx.getQueryHandle().getHandleId() + " reason:"
-          + stat.getErrorMessage());
-      }
-
-      return ctx.getQueryHandle();
-    }
-  }
-
-  /**
-   * To lens conf.
-   *
-   * @param conf the conf
-   * @return the lens conf
-   */
-  private LensConf toLensConf(HiveConf conf) {
-    LensConf lensConf = new LensConf();
-    lensConf.getProperties().putAll(conf.getValByRegex(".*"));
-    return lensConf;
-  }
-
-  protected void registerPredictUdf(LensSessionHandle sessionHandle, QueryRunner queryRunner) throws LensException {
-    if (isUdfRegisterd(sessionHandle)) {
-      // Already registered, nothing to do
-      return;
-    }
-
-    LOG.info("Registering UDF for session " + sessionHandle.getPublicId().toString());
-    // We have to add UDF jars to the session
-    try {
-      SessionService sessionService = (SessionService) MLUtils.getServiceProvider().getService(SessionService.NAME);
-      String[] udfJars = conf.getStrings("lens.server.ml.predict.udf.jars");
-      if (udfJars != null) {
-        for (String jar : udfJars) {
-          sessionService.addResource(sessionHandle, "jar", jar);
-          LOG.info(jar + " added UDF session " + sessionHandle.getPublicId().toString());
-        }
-      }
-    } catch (Exception e) {
-      throw new LensException(e);
-    }
-
-    String regUdfQuery = "CREATE TEMPORARY FUNCTION " + HiveMLUDF.UDF_NAME + " AS '" + HiveMLUDF.class
-      .getCanonicalName() + "'";
-    queryRunner.setQueryName("register_predict_udf_" + sessionHandle.getPublicId().toString());
-    QueryHandle udfQuery = queryRunner.runQuery(regUdfQuery);
-    predictUdfStatus.put(sessionHandle, true);
-    LOG.info("Predict UDF registered for session " + sessionHandle.getPublicId().toString());
-  }
-
-  protected boolean isUdfRegisterd(LensSessionHandle sessionHandle) {
-    return predictUdfStatus.containsKey(sessionHandle);
-  }
-
-  /**
-   * Periodically check if sessions have been closed, and clear UDF registered status.
-   */
-  private class UDFStatusExpiryRunnable implements Runnable {
-    public void run() {
-      try {
-        SessionService sessionService = (SessionService) MLUtils.getServiceProvider().getService(SessionService.NAME);
-        // Clear status of sessions which are closed.
-        List<LensSessionHandle> sessions = new ArrayList<LensSessionHandle>(predictUdfStatus.keySet());
-        for (LensSessionHandle sessionHandle : sessions) {
-          if (!sessionService.isOpen(sessionHandle)) {
-            LOG.info("Session closed, removing UDF status: " + sessionHandle);
-            predictUdfStatus.remove(sessionHandle);
-          }
-        }
-      } catch (Exception exc) {
-        LOG.warn("Error clearing UDF statuses", exc);
-      }
-    }
-  }
-}

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/0f5ea4c7/lens-ml-lib/src/main/java/org/apache/lens/ml/MLAlgo.java
----------------------------------------------------------------------
diff --git a/lens-ml-lib/src/main/java/org/apache/lens/ml/MLAlgo.java b/lens-ml-lib/src/main/java/org/apache/lens/ml/MLAlgo.java
deleted file mode 100644
index 7dccf2c..0000000
--- a/lens-ml-lib/src/main/java/org/apache/lens/ml/MLAlgo.java
+++ /dev/null
@@ -1,53 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.lens.ml;
-
-import org.apache.lens.api.LensConf;
-import org.apache.lens.api.LensException;
-
-/**
- * The Interface MLAlgo.
- */
-public interface MLAlgo {
-  String getName();
-
-  String getDescription();
-
-  /**
-   * Configure.
-   *
-   * @param configuration the configuration
-   */
-  void configure(LensConf configuration);
-
-  LensConf getConf();
-
-  /**
-   * Train.
-   *
-   * @param conf    the conf
-   * @param db      the db
-   * @param table   the table
-   * @param modelId the model id
-   * @param params  the params
-   * @return the ML model
-   * @throws LensException the lens exception
-   */
-  MLModel train(LensConf conf, String db, String table, String modelId, String... params) throws LensException;
-}

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/0f5ea4c7/lens-ml-lib/src/main/java/org/apache/lens/ml/MLDriver.java
----------------------------------------------------------------------
diff --git a/lens-ml-lib/src/main/java/org/apache/lens/ml/MLDriver.java b/lens-ml-lib/src/main/java/org/apache/lens/ml/MLDriver.java
deleted file mode 100644
index 567e717..0000000
--- a/lens-ml-lib/src/main/java/org/apache/lens/ml/MLDriver.java
+++ /dev/null
@@ -1,71 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.lens.ml;
-
-import java.util.List;
-
-import org.apache.lens.api.LensConf;
-import org.apache.lens.api.LensException;
-
-/**
- * The Interface MLDriver.
- */
-public interface MLDriver {
-
-  /**
-   * Checks if is algo supported.
-   *
-   * @param algo the algo
-   * @return true, if is algo supported
-   */
-  boolean isAlgoSupported(String algo);
-
-  /**
-   * Gets the algo instance.
-   *
-   * @param algo the algo
-   * @return the algo instance
-   * @throws LensException the lens exception
-   */
-  MLAlgo getAlgoInstance(String algo) throws LensException;
-
-  /**
-   * Inits the.
-   *
-   * @param conf the conf
-   * @throws LensException the lens exception
-   */
-  void init(LensConf conf) throws LensException;
-
-  /**
-   * Start.
-   *
-   * @throws LensException the lens exception
-   */
-  void start() throws LensException;
-
-  /**
-   * Stop.
-   *
-   * @throws LensException the lens exception
-   */
-  void stop() throws LensException;
-
-  List<String> getAlgoNames();
-}

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/0f5ea4c7/lens-ml-lib/src/main/java/org/apache/lens/ml/MLModel.java
----------------------------------------------------------------------
diff --git a/lens-ml-lib/src/main/java/org/apache/lens/ml/MLModel.java b/lens-ml-lib/src/main/java/org/apache/lens/ml/MLModel.java
deleted file mode 100644
index c177757..0000000
--- a/lens-ml-lib/src/main/java/org/apache/lens/ml/MLModel.java
+++ /dev/null
@@ -1,79 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.lens.ml;
-
-import java.io.Serializable;
-import java.util.Date;
-import java.util.List;
-
-import lombok.Getter;
-import lombok.NoArgsConstructor;
-import lombok.Setter;
-import lombok.ToString;
-
-/**
- * Instantiates a new ML model.
- */
-@NoArgsConstructor
-@ToString
-public abstract class MLModel<PREDICTION> implements Serializable {
-
-  /** The id. */
-  @Getter
-  @Setter
-  private String id;
-
-  /** The created at. */
-  @Getter
-  @Setter
-  private Date createdAt;
-
-  /** The algo name. */
-  @Getter
-  @Setter
-  private String algoName;
-
-  /** The table. */
-  @Getter
-  @Setter
-  private String table;
-
-  /** The params. */
-  @Getter
-  @Setter
-  private List<String> params;
-
-  /** The label column. */
-  @Getter
-  @Setter
-  private String labelColumn;
-
-  /** The feature columns. */
-  @Getter
-  @Setter
-  private List<String> featureColumns;
-
-  /**
-   * Predict.
-   *
-   * @param args the args
-   * @return the prediction
-   */
-  public abstract PREDICTION predict(Object... args);
-}


[45/50] [abbrv] incubator-lens git commit: LENS-501 : Add cli command to print query details (amareshwari)

Posted by am...@apache.org.
LENS-501 : Add cli command to print query details (amareshwari)


Project: http://git-wip-us.apache.org/repos/asf/incubator-lens/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-lens/commit/cddfc844
Tree: http://git-wip-us.apache.org/repos/asf/incubator-lens/tree/cddfc844
Diff: http://git-wip-us.apache.org/repos/asf/incubator-lens/diff/cddfc844

Branch: refs/heads/current-release-line
Commit: cddfc844e8b729484f9551631e45a8f26304ff1f
Parents: 7586a83
Author: Amareshwari Sriramadasu <am...@apache.org>
Authored: Tue Apr 14 03:52:24 2015 -0500
Committer: Amareshwari Sriramadasu <am...@apache.org>
Committed: Tue Apr 14 03:52:24 2015 -0500

----------------------------------------------------------------------
 .../org/apache/lens/api/query/QueryStatus.java  |  2 +-
 .../lens/cli/commands/LensQueryCommands.java    | 23 +++++++++++++++++++
 .../apache/lens/cli/TestLensQueryCommands.java  | 10 +++++---
 .../java/org/apache/lens/client/LensClient.java |  8 +++++++
 .../org/apache/lens/client/LensStatement.java   |  6 ++---
 .../org/apache/lens/ml/impl/LensMLImpl.java     |  2 +-
 .../apache/lens/ml/server/MLServiceImpl.java    |  2 +-
 .../java/org/apache/lens/rdd/LensRDDClient.java |  4 ++--
 .../server/query/QueryExecutionServiceImpl.java |  8 +++----
 .../org/apache/lens/server/LensTestUtil.java    |  6 ++---
 .../org/apache/lens/server/TestServerMode.java  |  2 +-
 .../apache/lens/server/TestServerRestart.java   |  8 +++----
 .../apache/lens/server/query/TestLensDAO.java   |  2 +-
 .../server/query/TestQueryEndEmailNotifier.java |  2 +-
 .../lens/server/query/TestQueryService.java     | 24 ++++++++++----------
 .../lens/server/query/TestResultFormatting.java |  2 +-
 16 files changed, 73 insertions(+), 38 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/cddfc844/lens-api/src/main/java/org/apache/lens/api/query/QueryStatus.java
----------------------------------------------------------------------
diff --git a/lens-api/src/main/java/org/apache/lens/api/query/QueryStatus.java b/lens-api/src/main/java/org/apache/lens/api/query/QueryStatus.java
index 1aa5822..f927375 100644
--- a/lens-api/src/main/java/org/apache/lens/api/query/QueryStatus.java
+++ b/lens-api/src/main/java/org/apache/lens/api/query/QueryStatus.java
@@ -177,7 +177,7 @@ public class QueryStatus implements Serializable {
     return str.toString();
   }
 
-  public boolean isFinished() {
+  public boolean finished() {
     return status.equals(Status.SUCCESSFUL) || status.equals(Status.FAILED) || status.equals(Status.CANCELED);
   }
 

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/cddfc844/lens-cli/src/main/java/org/apache/lens/cli/commands/LensQueryCommands.java
----------------------------------------------------------------------
diff --git a/lens-cli/src/main/java/org/apache/lens/cli/commands/LensQueryCommands.java b/lens-cli/src/main/java/org/apache/lens/cli/commands/LensQueryCommands.java
index 016e35e..920ba9c 100644
--- a/lens-cli/src/main/java/org/apache/lens/cli/commands/LensQueryCommands.java
+++ b/lens-cli/src/main/java/org/apache/lens/cli/commands/LensQueryCommands.java
@@ -18,6 +18,7 @@
  */
 package org.apache.lens.cli.commands;
 
+import java.io.IOException;
 import java.io.UnsupportedEncodingException;
 import java.util.List;
 import java.util.UUID;
@@ -140,6 +141,28 @@ public class LensQueryCommands extends BaseLensCommand implements CommandMarker
   }
 
   /**
+   * Gets the query details.
+   *
+   * @param qh the qh
+   * @return the query
+   */
+  @CliCommand(value = "query details", help = "Get query details")
+  public String getDetails(
+    @CliOption(key = {"", "query"}, mandatory = true, help
+      = "<query-handle> for which details have to be fetched") String qh) {
+    LensQuery query = getClient().getQueryDetails(qh);
+    if (query == null) {
+      return "Unable to find query for " + qh;
+    }
+
+    try {
+      return formatJson(mapper.writer(pp).writeValueAsString(query));
+    } catch (IOException e) {
+      throw new IllegalArgumentException(e);
+    }
+  }
+
+  /**
    * Explain query.
    *
    * @param sql      the sql

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/cddfc844/lens-cli/src/test/java/org/apache/lens/cli/TestLensQueryCommands.java
----------------------------------------------------------------------
diff --git a/lens-cli/src/test/java/org/apache/lens/cli/TestLensQueryCommands.java b/lens-cli/src/test/java/org/apache/lens/cli/TestLensQueryCommands.java
index a69c53f..a48734a 100644
--- a/lens-cli/src/test/java/org/apache/lens/cli/TestLensQueryCommands.java
+++ b/lens-cli/src/test/java/org/apache/lens/cli/TestLensQueryCommands.java
@@ -130,7 +130,7 @@ public class TestLensQueryCommands extends LensCliApplicationTest {
 
     String handle = qCom.executePreparedQuery(qh, true, "testPrepQuery2");
     LOG.debug("Perpared query handle is   " + handle);
-    while (!client.getQueryStatus(handle).isFinished()) {
+    while (!client.getQueryStatus(handle).finished()) {
       Thread.sleep(5000);
     }
     String status = qCom.getStatus(handle);
@@ -227,16 +227,20 @@ public class TestLensQueryCommands extends LensCliApplicationTest {
     String[] resultSplits = result.split("\n");
     // assert on the number of queries
     Assert.assertEquals(String.valueOf(resultSplits.length - 1), resultSplits[resultSplits.length - 1].split(": ")[1]);
+    String details = qCom.getDetails(qh);
+    Assert.assertTrue(details.contains("driverQuery"), details);
 
     // Check that query name searching is 'ilike'
     String result2 = qCom.getAllQueries("", "query", "all", -1, Long.MAX_VALUE);
     Assert.assertTrue(result2.contains(qh), result2);
 
-    while (!client.getQueryStatus(qh).isFinished()) {
+    while (!client.getQueryStatus(qh).finished()) {
       Thread.sleep(5000);
     }
 
     Assert.assertTrue(qCom.getStatus(qh).contains("Status : SUCCESSFUL"));
+    details = qCom.getDetails(qh);
+    Assert.assertTrue(details.contains("driverQuery"));
 
     result = qCom.getQueryResults(qh);
     Assert.assertTrue(result.contains("1\tfirst"));
@@ -357,7 +361,7 @@ public class TestLensQueryCommands extends LensCliApplicationTest {
     String query = "cube select id,name from test_dim";
     try {
       String qh = qCom.executeQuery(query, true, "testQuery");
-      while (!client.getQueryStatus(qh).isFinished()) {
+      while (!client.getQueryStatus(qh).finished()) {
         Thread.sleep(5000);
       }
       Assert.assertTrue(qCom.getStatus(qh).contains("Status : SUCCESSFUL"));

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/cddfc844/lens-client/src/main/java/org/apache/lens/client/LensClient.java
----------------------------------------------------------------------
diff --git a/lens-client/src/main/java/org/apache/lens/client/LensClient.java b/lens-client/src/main/java/org/apache/lens/client/LensClient.java
index b5b4a90..449c8ec 100644
--- a/lens-client/src/main/java/org/apache/lens/client/LensClient.java
+++ b/lens-client/src/main/java/org/apache/lens/client/LensClient.java
@@ -153,10 +153,18 @@ public class LensClient {
     return new LensStatement(conn).getQuery(query).getStatus();
   }
 
+  public LensQuery getQueryDetails(QueryHandle handle) {
+    return new LensStatement(conn).getQuery(handle);
+  }
+
   public QueryStatus getQueryStatus(String q) {
     return getQueryStatus(QueryHandle.fromString(q));
   }
 
+  public LensQuery getQueryDetails(String handle) {
+    return getQueryDetails(QueryHandle.fromString(handle));
+  }
+
   public QueryPlan getQueryPlan(String q) {
     return new LensStatement(conn).explainQuery(q);
   }

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/cddfc844/lens-client/src/main/java/org/apache/lens/client/LensStatement.java
----------------------------------------------------------------------
diff --git a/lens-client/src/main/java/org/apache/lens/client/LensStatement.java b/lens-client/src/main/java/org/apache/lens/client/LensStatement.java
index 1a050da..b19ccd1 100644
--- a/lens-client/src/main/java/org/apache/lens/client/LensStatement.java
+++ b/lens-client/src/main/java/org/apache/lens/client/LensStatement.java
@@ -185,7 +185,7 @@ public class LensStatement {
    */
   private void waitForQueryToComplete(QueryHandle handle) {
     query = getQuery(handle);
-    while (!query.getStatus().isFinished()) {
+    while (!query.getStatus().finished()) {
       query = getQuery(handle);
       try {
         Thread.sleep(connection.getLensConnectionParams().getQueryPollInterval());
@@ -437,7 +437,7 @@ public class LensStatement {
    */
   public boolean kill(LensQuery query) {
 
-    if (query.getStatus().isFinished()) {
+    if (query.getStatus().finished()) {
       return false;
     }
 
@@ -485,7 +485,7 @@ public class LensStatement {
   }
 
   public boolean isIdle() {
-    return query == null || query.getStatus().isFinished();
+    return query == null || query.getStatus().finished();
   }
 
   /**

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/cddfc844/lens-ml-lib/src/main/java/org/apache/lens/ml/impl/LensMLImpl.java
----------------------------------------------------------------------
diff --git a/lens-ml-lib/src/main/java/org/apache/lens/ml/impl/LensMLImpl.java b/lens-ml-lib/src/main/java/org/apache/lens/ml/impl/LensMLImpl.java
index f0c6e04..39617df 100644
--- a/lens-ml-lib/src/main/java/org/apache/lens/ml/impl/LensMLImpl.java
+++ b/lens-ml-lib/src/main/java/org/apache/lens/ml/impl/LensMLImpl.java
@@ -657,7 +657,7 @@ public class LensMLImpl implements LensML {
         .get(LensQuery.class);
 
       QueryStatus stat = ctx.getStatus();
-      while (!stat.isFinished()) {
+      while (!stat.finished()) {
         ctx = target.path(handle.toString()).queryParam("sessionid", sessionHandle).request().get(LensQuery.class);
         stat = ctx.getStatus();
         try {

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/cddfc844/lens-ml-lib/src/main/java/org/apache/lens/ml/server/MLServiceImpl.java
----------------------------------------------------------------------
diff --git a/lens-ml-lib/src/main/java/org/apache/lens/ml/server/MLServiceImpl.java b/lens-ml-lib/src/main/java/org/apache/lens/ml/server/MLServiceImpl.java
index f3e8ec1..dc29aef 100644
--- a/lens-ml-lib/src/main/java/org/apache/lens/ml/server/MLServiceImpl.java
+++ b/lens-ml-lib/src/main/java/org/apache/lens/ml/server/MLServiceImpl.java
@@ -298,7 +298,7 @@ public class MLServiceImpl extends CompositeService implements MLService {
       // Wait for test query to complete
       LensQuery query = queryService.getQuery(sessionHandle, testQueryHandle);
       LOG.info("Submitted query " + testQueryHandle.getHandleId());
-      while (!query.getStatus().isFinished()) {
+      while (!query.getStatus().finished()) {
         try {
           Thread.sleep(500);
         } catch (InterruptedException e) {

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/cddfc844/lens-ml-lib/src/main/java/org/apache/lens/rdd/LensRDDClient.java
----------------------------------------------------------------------
diff --git a/lens-ml-lib/src/main/java/org/apache/lens/rdd/LensRDDClient.java b/lens-ml-lib/src/main/java/org/apache/lens/rdd/LensRDDClient.java
index cdcdec0..dcd3751 100644
--- a/lens-ml-lib/src/main/java/org/apache/lens/rdd/LensRDDClient.java
+++ b/lens-ml-lib/src/main/java/org/apache/lens/rdd/LensRDDClient.java
@@ -182,7 +182,7 @@ public class LensRDDClient {
    */
   public boolean isReadyForRDD(QueryHandle queryHandle) throws LensException {
     QueryStatus status = getClient().getQueryStatus(queryHandle);
-    return status.isFinished();
+    return status.finished();
   }
 
   /**
@@ -204,7 +204,7 @@ public class LensRDDClient {
    */
   public LensRDDResult getRDD(QueryHandle queryHandle) throws LensException {
     QueryStatus status = getClient().getQueryStatus(queryHandle);
-    if (!status.isFinished() && !status.isResultSetAvailable()) {
+    if (!status.finished() && !status.isResultSetAvailable()) {
       throw new LensException(queryHandle.getHandleId() + " query not finished or result unavailable");
     }
 

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/cddfc844/lens-server/src/main/java/org/apache/lens/server/query/QueryExecutionServiceImpl.java
----------------------------------------------------------------------
diff --git a/lens-server/src/main/java/org/apache/lens/server/query/QueryExecutionServiceImpl.java b/lens-server/src/main/java/org/apache/lens/server/query/QueryExecutionServiceImpl.java
index b44ce7e..cd1fbd8 100644
--- a/lens-server/src/main/java/org/apache/lens/server/query/QueryExecutionServiceImpl.java
+++ b/lens-server/src/main/java/org/apache/lens/server/query/QueryExecutionServiceImpl.java
@@ -664,7 +664,7 @@ public class QueryExecutionServiceImpl extends LensService implements QueryExecu
       synchronized (ctx) {
         QueryStatus before = ctx.getStatus();
         if (!ctx.getStatus().getStatus().equals(QueryStatus.Status.QUEUED) && !ctx.getDriverStatus().isFinished()
-          && !ctx.getStatus().isFinished()) {
+          && !ctx.getStatus().finished()) {
           LOG.info("Updating status for " + ctx.getQueryHandle());
           try {
             ctx.getSelectedDriver().updateStatus(ctx);
@@ -681,7 +681,7 @@ public class QueryExecutionServiceImpl extends LensService implements QueryExecu
             || !ctx.isResultAvailableInDriver())) {
             setSuccessState(ctx);
           } else {
-            if (ctx.getStatus().isFinished()) {
+            if (ctx.getStatus().finished()) {
               updateFinishedQuery(ctx, before);
             }
             fireStatusChangeEvent(ctx, ctx.getStatus(), before);
@@ -1695,7 +1695,7 @@ public class QueryExecutionServiceImpl extends LensService implements QueryExecu
     } catch (InterruptedException e) {
       LOG.info("Waiting thread interrupted");
     }
-    if (getQueryContext(sessionHandle, handle).getStatus().isFinished()) {
+    if (getQueryContext(sessionHandle, handle).getStatus().finished()) {
       result.setResult(getResultset(handle).toQueryResult());
     }
     return result;
@@ -1830,7 +1830,7 @@ public class QueryExecutionServiceImpl extends LensService implements QueryExecu
       LOG.info("CancelQuery: " + sessionHandle.toString() + " query:" + queryHandle);
       acquire(sessionHandle);
       QueryContext ctx = getQueryContext(sessionHandle, queryHandle);
-      if (ctx.getStatus().isFinished()) {
+      if (ctx.getStatus().finished()) {
         return false;
       }
       synchronized (ctx) {

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/cddfc844/lens-server/src/test/java/org/apache/lens/server/LensTestUtil.java
----------------------------------------------------------------------
diff --git a/lens-server/src/test/java/org/apache/lens/server/LensTestUtil.java b/lens-server/src/test/java/org/apache/lens/server/LensTestUtil.java
index 30f1cb0..5658d47 100644
--- a/lens-server/src/test/java/org/apache/lens/server/LensTestUtil.java
+++ b/lens-server/src/test/java/org/apache/lens/server/LensTestUtil.java
@@ -92,7 +92,7 @@ public final class LensTestUtil {
     LensQuery ctx = target.path(handle.toString()).queryParam("sessionid", lensSessionId).request()
       .get(LensQuery.class);
     QueryStatus stat = ctx.getStatus();
-    while (!stat.isFinished()) {
+    while (!stat.finished()) {
       ctx = target.path(handle.toString()).queryParam("sessionid", lensSessionId).request().get(LensQuery.class);
       stat = ctx.getStatus();
       Thread.sleep(1000);
@@ -142,7 +142,7 @@ public final class LensTestUtil {
     LensQuery ctx = target.path(handle.toString()).queryParam("sessionid", lensSessionId).request()
       .get(LensQuery.class);
     QueryStatus stat = ctx.getStatus();
-    while (!stat.isFinished()) {
+    while (!stat.finished()) {
       ctx = target.path(handle.toString()).queryParam("sessionid", lensSessionId).request().get(LensQuery.class);
       stat = ctx.getStatus();
       Thread.sleep(1000);
@@ -197,7 +197,7 @@ public final class LensTestUtil {
     LensQuery ctx = target.path(handle.toString()).queryParam("sessionid", lensSessionId).request()
       .get(LensQuery.class);
     QueryStatus stat = ctx.getStatus();
-    while (!stat.isFinished()) {
+    while (!stat.finished()) {
       ctx = target.path(handle.toString()).queryParam("sessionid", lensSessionId).request().get(LensQuery.class);
       stat = ctx.getStatus();
       Thread.sleep(1000);

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/cddfc844/lens-server/src/test/java/org/apache/lens/server/TestServerMode.java
----------------------------------------------------------------------
diff --git a/lens-server/src/test/java/org/apache/lens/server/TestServerMode.java b/lens-server/src/test/java/org/apache/lens/server/TestServerMode.java
index 4ca0b35..86bec3a 100644
--- a/lens-server/src/test/java/org/apache/lens/server/TestServerMode.java
+++ b/lens-server/src/test/java/org/apache/lens/server/TestServerMode.java
@@ -204,7 +204,7 @@ public class TestServerMode extends LensAllApplicationJerseyTest {
 
       // wait till the query finishes
       QueryStatus stat = ctx.getStatus();
-      while (!stat.isFinished()) {
+      while (!stat.finished()) {
         ctx = queryTarget.path(qhandle.toString()).queryParam("sessionid", lensSessionId).request()
           .get(LensQuery.class);
         stat = ctx.getStatus();

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/cddfc844/lens-server/src/test/java/org/apache/lens/server/TestServerRestart.java
----------------------------------------------------------------------
diff --git a/lens-server/src/test/java/org/apache/lens/server/TestServerRestart.java b/lens-server/src/test/java/org/apache/lens/server/TestServerRestart.java
index a6927d2..b634701 100644
--- a/lens-server/src/test/java/org/apache/lens/server/TestServerRestart.java
+++ b/lens-server/src/test/java/org/apache/lens/server/TestServerRestart.java
@@ -188,7 +188,7 @@ public class TestServerRestart extends LensAllApplicationJerseyTest {
         LensQuery ctx = target.path(handle.toString()).queryParam("sessionid", lensSessionId).request()
           .get(LensQuery.class);
         QueryStatus stat = ctx.getStatus();
-        while (!stat.isFinished()) {
+        while (!stat.finished()) {
           LOG.info("Polling query " + handle + " Status:" + stat);
           ctx = target.path(handle.toString()).queryParam("sessionid", lensSessionId).request().get(LensQuery.class);
           stat = ctx.getStatus();
@@ -282,14 +282,14 @@ public class TestServerRestart extends LensAllApplicationJerseyTest {
     LensQuery ctx = target.path(handle.toString()).queryParam("sessionid", lensSessionId).request()
       .get(LensQuery.class);
     QueryStatus stat = ctx.getStatus();
-    while (!stat.isFinished()) {
+    while (!stat.finished()) {
       LOG.info("Polling query " + handle + " Status:" + stat);
       ctx = target.path(handle.toString()).queryParam("sessionid", lensSessionId).request().get(LensQuery.class);
       stat = ctx.getStatus();
       Thread.sleep(1000);
     }
 
-    Assert.assertTrue(stat.isFinished());
+    Assert.assertTrue(stat.finished());
     LOG.info("Previous query status: " + stat.getStatusMessage());
 
     for (int i = 0; i < 5; i++) {
@@ -308,7 +308,7 @@ public class TestServerRestart extends LensAllApplicationJerseyTest {
       // Poll for second query, this should finish successfully
       ctx = target.path(handle.toString()).queryParam("sessionid", lensSessionId).request().get(LensQuery.class);
       stat = ctx.getStatus();
-      while (!stat.isFinished()) {
+      while (!stat.finished()) {
         LOG.info("Post restart polling query " + handle + " Status:" + stat);
         ctx = target.path(handle.toString()).queryParam("sessionid", lensSessionId).request().get(LensQuery.class);
         stat = ctx.getStatus();

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/cddfc844/lens-server/src/test/java/org/apache/lens/server/query/TestLensDAO.java
----------------------------------------------------------------------
diff --git a/lens-server/src/test/java/org/apache/lens/server/query/TestLensDAO.java b/lens-server/src/test/java/org/apache/lens/server/query/TestLensDAO.java
index f0358cb..36f9d77 100644
--- a/lens-server/src/test/java/org/apache/lens/server/query/TestLensDAO.java
+++ b/lens-server/src/test/java/org/apache/lens/server/query/TestLensDAO.java
@@ -128,7 +128,7 @@ public class TestLensDAO extends LensJerseyTest {
       for (QueryHandle handle : persistedHandles) {
         LensQuery query = service.getQuery(session, handle);
         if (!handle.getHandleId().toString().equals(finishedHandle)) {
-          Assert.assertTrue(query.getStatus().isFinished(), query.getQueryHandle() + " STATUS="
+          Assert.assertTrue(query.getStatus().finished(), query.getQueryHandle() + " STATUS="
             + query.getStatus().getStatus());
         }
       }

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/cddfc844/lens-server/src/test/java/org/apache/lens/server/query/TestQueryEndEmailNotifier.java
----------------------------------------------------------------------
diff --git a/lens-server/src/test/java/org/apache/lens/server/query/TestQueryEndEmailNotifier.java b/lens-server/src/test/java/org/apache/lens/server/query/TestQueryEndEmailNotifier.java
index 5726602..ca1b10b 100644
--- a/lens-server/src/test/java/org/apache/lens/server/query/TestQueryEndEmailNotifier.java
+++ b/lens-server/src/test/java/org/apache/lens/server/query/TestQueryEndEmailNotifier.java
@@ -176,7 +176,7 @@ public class TestQueryEndEmailNotifier extends LensJerseyTest {
     LensQuery ctx = target.path(handle.toString()).queryParam("sessionid", lensSessionId).request()
       .get(LensQuery.class);
     QueryStatus stat = ctx.getStatus();
-    while (!stat.isFinished()) {
+    while (!stat.finished()) {
       ctx = target.path(handle.toString()).queryParam("sessionid", lensSessionId).request().get(LensQuery.class);
       stat = ctx.getStatus();
       Thread.sleep(1000);

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/cddfc844/lens-server/src/test/java/org/apache/lens/server/query/TestQueryService.java
----------------------------------------------------------------------
diff --git a/lens-server/src/test/java/org/apache/lens/server/query/TestQueryService.java b/lens-server/src/test/java/org/apache/lens/server/query/TestQueryService.java
index 0ab68c7..ed4749f 100644
--- a/lens-server/src/test/java/org/apache/lens/server/query/TestQueryService.java
+++ b/lens-server/src/test/java/org/apache/lens/server/query/TestQueryService.java
@@ -224,7 +224,7 @@ public class TestQueryService extends LensJerseyTest {
     LensQuery ctx = target.path(handle.toString()).queryParam("sessionid", lensSessionId).request()
       .get(LensQuery.class);
     QueryStatus stat = ctx.getStatus();
-    while (!stat.isFinished()) {
+    while (!stat.finished()) {
       ctx = target.path(handle.toString()).queryParam("sessionid", lensSessionId).request().get(LensQuery.class);
       stat = ctx.getStatus();
       System.out.println("%% query " + ctx.getQueryHandle() + " status:" + stat);
@@ -308,7 +308,7 @@ public class TestQueryService extends LensJerseyTest {
 
     // wait till the query finishes
     QueryStatus stat = ctx.getStatus();
-    while (!stat.isFinished()) {
+    while (!stat.finished()) {
       ctx = target.path(handle.toString()).queryParam("sessionid", lensSessionId).request().get(LensQuery.class);
       stat = ctx.getStatus();
       switch (stat.getStatus()) {
@@ -534,7 +534,7 @@ public class TestQueryService extends LensJerseyTest {
     Assert.assertEquals(ctx1.getQueryName().toLowerCase(), "testquery1");
     // wait till the query finishes
     QueryStatus stat = ctx1.getStatus();
-    while (!stat.isFinished()) {
+    while (!stat.finished()) {
       ctx1 = target().path("queryapi/queries").path(handle1.toString()).queryParam("sessionid", lensSessionId)
         .request().get(LensQuery.class);
       stat = ctx1.getStatus();
@@ -548,7 +548,7 @@ public class TestQueryService extends LensJerseyTest {
     Assert.assertEquals(ctx2.getQueryName().toLowerCase(), "testqueryname2");
     // wait till the query finishes
     stat = ctx2.getStatus();
-    while (!stat.isFinished()) {
+    while (!stat.finished()) {
       ctx2 = target().path("queryapi/queries").path(handle1.toString()).queryParam("sessionid", lensSessionId)
         .request().get(LensQuery.class);
       stat = ctx2.getStatus();
@@ -626,7 +626,7 @@ public class TestQueryService extends LensJerseyTest {
       .request().get(LensQuery.class);
     // wait till the query finishes
     QueryStatus stat = ctx1.getStatus();
-    while (!stat.isFinished()) {
+    while (!stat.finished()) {
       ctx1 = target().path("queryapi/queries").path(handle1.toString()).queryParam("sessionid", lensSessionId)
         .request().get(LensQuery.class);
       stat = ctx1.getStatus();
@@ -638,7 +638,7 @@ public class TestQueryService extends LensJerseyTest {
       .request().get(LensQuery.class);
     // wait till the query finishes
     stat = ctx2.getStatus();
-    while (!stat.isFinished()) {
+    while (!stat.finished()) {
       ctx2 = target().path("queryapi/queries").path(handle1.toString()).queryParam("sessionid", lensSessionId)
         .request().get(LensQuery.class);
       stat = ctx2.getStatus();
@@ -697,7 +697,7 @@ public class TestQueryService extends LensJerseyTest {
 
     // wait till the query finishes
     QueryStatus stat = ctx.getStatus();
-    while (!stat.isFinished()) {
+    while (!stat.finished()) {
       ctx = target.path(handle.toString()).queryParam("sessionid", lensSessionId).request().get(LensQuery.class);
       stat = ctx.getStatus();
       switch (stat.getStatus()) {
@@ -760,7 +760,7 @@ public class TestQueryService extends LensJerseyTest {
     ctx = target.path(handle3.toString()).queryParam("sessionid", lensSessionId).request().get(LensQuery.class);
     // wait till the query finishes
     stat = ctx.getStatus();
-    while (!stat.isFinished()) {
+    while (!stat.finished()) {
       ctx = target.path(handle3.toString()).queryParam("sessionid", lensSessionId).request().get(LensQuery.class);
       stat = ctx.getStatus();
       Thread.sleep(1000);
@@ -981,7 +981,7 @@ public class TestQueryService extends LensJerseyTest {
 
     // wait till the query finishes
     QueryStatus stat = ctx.getStatus();
-    while (!stat.isFinished()) {
+    while (!stat.finished()) {
       ctx = target.path(handle.toString()).queryParam("sessionid", lensSessionId).request().get(LensQuery.class);
       stat = ctx.getStatus();
       Thread.sleep(1000);
@@ -1036,7 +1036,7 @@ public class TestQueryService extends LensJerseyTest {
 
     // wait till the query finishes
     QueryStatus stat = ctx.getStatus();
-    while (!stat.isFinished()) {
+    while (!stat.finished()) {
       ctx = target.path(dropHandle.toString()).queryParam("sessionid", lensSessionId).request().get(LensQuery.class);
       stat = ctx.getStatus();
       Thread.sleep(1000);
@@ -1066,7 +1066,7 @@ public class TestQueryService extends LensJerseyTest {
 
     // wait till the query finishes
     stat = ctx.getStatus();
-    while (!stat.isFinished()) {
+    while (!stat.finished()) {
       ctx = target.path(handle.toString()).queryParam("sessionid", lensSessionId).request().get(LensQuery.class);
       stat = ctx.getStatus();
       Thread.sleep(1000);
@@ -1091,7 +1091,7 @@ public class TestQueryService extends LensJerseyTest {
 
     // wait till the query finishes
     stat = ctx.getStatus();
-    while (!stat.isFinished()) {
+    while (!stat.finished()) {
       ctx = target.path(handle2.toString()).queryParam("sessionid", lensSessionId).request().get(LensQuery.class);
       stat = ctx.getStatus();
       Thread.sleep(1000);

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/cddfc844/lens-server/src/test/java/org/apache/lens/server/query/TestResultFormatting.java
----------------------------------------------------------------------
diff --git a/lens-server/src/test/java/org/apache/lens/server/query/TestResultFormatting.java b/lens-server/src/test/java/org/apache/lens/server/query/TestResultFormatting.java
index 0a4bb74..e18a90f 100644
--- a/lens-server/src/test/java/org/apache/lens/server/query/TestResultFormatting.java
+++ b/lens-server/src/test/java/org/apache/lens/server/query/TestResultFormatting.java
@@ -219,7 +219,7 @@ public class TestResultFormatting extends LensJerseyTest {
       .get(LensQuery.class);
     // wait till the query finishes
     QueryStatus stat = ctx.getStatus();
-    while (!stat.isFinished()) {
+    while (!stat.finished()) {
       ctx = target.path(handle.toString()).queryParam("sessionid", lensSessionId).request().get(LensQuery.class);
       stat = ctx.getStatus();
       Thread.sleep(1000);


[41/50] [abbrv] incubator-lens git commit: LENS-257: Altering fact table should alter its storage table descriptions also

Posted by am...@apache.org.
LENS-257: Altering fact table should alter its storage table descriptions also


Project: http://git-wip-us.apache.org/repos/asf/incubator-lens/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-lens/commit/20949e03
Tree: http://git-wip-us.apache.org/repos/asf/incubator-lens/tree/20949e03
Diff: http://git-wip-us.apache.org/repos/asf/incubator-lens/diff/20949e03

Branch: refs/heads/current-release-line
Commit: 20949e0312907de3dc8a7e3b725b322e44b5ab42
Parents: 8f716a7
Author: Amareshwari Sriramadasu <am...@apache.org>
Authored: Fri Apr 10 19:56:42 2015 +0530
Committer: Rajat Khandelwal <ra...@gmail.com>
Committed: Fri Apr 10 19:56:42 2015 +0530

----------------------------------------------------------------------
 .../lens/cube/metadata/CubeMetastoreClient.java |  59 +++++----
 .../org/apache/lens/cube/metadata/Storage.java  |   3 +-
 .../cube/metadata/TestCubeMetastoreClient.java  | 130 +++++++++++++++----
 .../metastore/CubeMetastoreServiceImpl.java     |   8 +-
 .../server/metastore/TestMetastoreService.java  | 122 ++++++++++++++++-
 5 files changed, 265 insertions(+), 57 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/20949e03/lens-cube/src/main/java/org/apache/lens/cube/metadata/CubeMetastoreClient.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/metadata/CubeMetastoreClient.java b/lens-cube/src/main/java/org/apache/lens/cube/metadata/CubeMetastoreClient.java
index 11ef7ec..fa56213 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/metadata/CubeMetastoreClient.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/metadata/CubeMetastoreClient.java
@@ -395,12 +395,18 @@ public class CubeMetastoreClient {
     Hive.closeCurrent();
   }
 
-  private void createStorageHiveTable(Table parent, String storage, StorageTableDesc crtTblDesc) throws HiveException {
+  private void createOrAlterStorageHiveTable(Table parent, String storage, StorageTableDesc crtTblDesc)
+    throws HiveException {
     try {
       Table tbl = getStorage(storage).getStorageTable(getClient(), parent, crtTblDesc);
-      getClient().createTable(tbl);
-      // do get to update cache
-      getTable(tbl.getTableName());
+      if (tableExists(tbl.getTableName())) {
+        // alter table
+        alterHiveTable(tbl.getTableName(), tbl);
+      } else {
+        getClient().createTable(tbl);
+        // do get to update cache
+        getTable(tbl.getTableName());
+      }
     } catch (Exception e) {
       throw new HiveException("Exception creating table", e);
     }
@@ -624,7 +630,7 @@ public class CubeMetastoreClient {
     if (storageTableDescs != null) {
       // create tables for each storage
       for (Map.Entry<String, StorageTableDesc> entry : storageTableDescs.entrySet()) {
-        createStorageHiveTable(cTable, entry.getKey(), entry.getValue());
+        createOrAlterStorageHiveTable(cTable, entry.getKey(), entry.getValue());
       }
     }
   }
@@ -641,7 +647,7 @@ public class CubeMetastoreClient {
   public void addStorage(CubeFactTable fact, String storage, Set<UpdatePeriod> updatePeriods,
     StorageTableDesc storageTableDesc) throws HiveException {
     fact.addStorage(storage, updatePeriods);
-    createStorageHiveTable(getTable(fact.getName()), storage, storageTableDesc);
+    createOrAlterStorageHiveTable(getTable(fact.getName()), storage, storageTableDesc);
     alterCubeTable(fact.getName(), getTable(fact.getName()), fact);
     updateFactCache(fact.getName());
   }
@@ -658,7 +664,7 @@ public class CubeMetastoreClient {
   public void addStorage(CubeDimensionTable dim, String storage, UpdatePeriod dumpPeriod,
     StorageTableDesc storageTableDesc) throws HiveException {
     dim.alterSnapshotDumpPeriod(storage, dumpPeriod);
-    createStorageHiveTable(getTable(dim.getName()), storage, storageTableDesc);
+    createOrAlterStorageHiveTable(getTable(dim.getName()), storage, storageTableDesc);
     alterCubeTable(dim.getName(), getTable(dim.getName()), dim);
     updateDimCache(dim.getName());
   }
@@ -971,9 +977,9 @@ public class CubeMetastoreClient {
     return partSpec;
   }
 
-  public boolean tableExists(String cubeName) throws HiveException {
+  public boolean tableExists(String tblName) throws HiveException {
     try {
-      return (getClient().getTable(cubeName.toLowerCase(), false) != null);
+      return (getClient().getTable(tblName.toLowerCase(), false) != null);
     } catch (HiveException e) {
       throw new HiveException("Could not check whether table exists", e);
     }
@@ -1754,7 +1760,6 @@ public class CubeMetastoreClient {
     }
     if (enableCaching) {
       // refresh the table in cache
-      //TODO: don't need to fetch again. can put table -> hiveTable
       refreshTable(table);
     }
   }
@@ -1967,22 +1972,24 @@ public class CubeMetastoreClient {
   }
 
   /**
-   * Alter a cubefact with new definition
+   * Alter a cubefact with new definition and alter underlying storage tables as well.
    *
    * @param factTableName
    * @param cubeFactTable
+   * @param storageTableDescs
+   *
    * @throws HiveException
    * @throws InvalidOperationException
    */
-  public void alterCubeFactTable(String factTableName, CubeFactTable cubeFactTable) throws HiveException {
+  public void alterCubeFactTable(String factTableName, CubeFactTable cubeFactTable,
+    Map<String, StorageTableDesc> storageTableDescs) throws HiveException {
     Table factTbl = getTable(factTableName);
     if (isFactTable(factTbl)) {
-      boolean colsChanged = alterCubeTable(factTableName, factTbl, cubeFactTable);
-      if (colsChanged) {
-        // Change schema of all the storage tables
-        for (String storage : cubeFactTable.getStorages()) {
-          String storageTableName = MetastoreUtil.getFactStorageTableName(factTableName, storage);
-          alterHiveTable(storageTableName, getTable(storageTableName), cubeFactTable.getColumns());
+      alterCubeTable(factTableName, factTbl, cubeFactTable);
+      if (storageTableDescs != null) {
+        // create/alter tables for each storage
+        for (Map.Entry<String, StorageTableDesc> entry : storageTableDescs.entrySet()) {
+          createOrAlterStorageHiveTable(getTable(factTableName), entry.getKey(), entry.getValue());
         }
       }
       updateFactCache(factTableName);
@@ -2004,22 +2011,22 @@ public class CubeMetastoreClient {
   }
 
   /**
-   * Alter dimension table with new dimension definition
+   * Alter dimension table with new dimension definition and underlying storage tables as well
    *
    * @param dimTableName
    * @param cubeDimensionTable
    * @throws HiveException
    * @throws InvalidOperationException
    */
-  public void alterCubeDimensionTable(String dimTableName, CubeDimensionTable cubeDimensionTable) throws HiveException {
+  public void alterCubeDimensionTable(String dimTableName, CubeDimensionTable cubeDimensionTable,
+    Map<String, StorageTableDesc> storageTableDescs) throws HiveException {
     Table dimTbl = getTable(dimTableName);
     if (isDimensionTable(dimTbl)) {
-      boolean colsChanged = alterCubeTable(dimTableName, dimTbl, cubeDimensionTable);
-      if (colsChanged) {
-        // Change schema of all the storage tables
-        for (String storage : cubeDimensionTable.getStorages()) {
-          String storageTableName = MetastoreUtil.getDimStorageTableName(dimTableName, storage);
-          alterHiveTable(storageTableName, getTable(storageTableName), cubeDimensionTable.getColumns());
+      alterCubeTable(dimTableName, dimTbl, cubeDimensionTable);
+      if (storageTableDescs != null) {
+        // create/alter tables for each storage
+        for (Map.Entry<String, StorageTableDesc> entry : storageTableDescs.entrySet()) {
+          createOrAlterStorageHiveTable(getTable(dimTableName), entry.getKey(), entry.getValue());
         }
       }
       updateDimCache(dimTableName);

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/20949e03/lens-cube/src/main/java/org/apache/lens/cube/metadata/Storage.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/metadata/Storage.java b/lens-cube/src/main/java/org/apache/lens/cube/metadata/Storage.java
index da70535..50ec4ce 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/metadata/Storage.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/metadata/Storage.java
@@ -24,6 +24,7 @@ import java.util.*;
 import java.util.Map.Entry;
 
 import org.apache.commons.lang.NotImplementedException;
+import org.apache.commons.lang3.StringUtils;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.metastore.TableType;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
@@ -142,7 +143,7 @@ public abstract class Storage extends AbstractCubeTable implements PartitionMeta
       tbl.setNumBuckets(crtTbl.getNumBuckets());
     }
 
-    if (crtTbl.getStorageHandler() != null) {
+    if (!StringUtils.isBlank(crtTbl.getStorageHandler())) {
       tbl.setProperty(org.apache.hadoop.hive.metastore.api.hive_metastoreConstants.META_TABLE_STORAGE,
         crtTbl.getStorageHandler());
     }

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/20949e03/lens-cube/src/test/java/org/apache/lens/cube/metadata/TestCubeMetastoreClient.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/cube/metadata/TestCubeMetastoreClient.java b/lens-cube/src/test/java/org/apache/lens/cube/metadata/TestCubeMetastoreClient.java
index baa659c..7337ee9 100644
--- a/lens-cube/src/test/java/org/apache/lens/cube/metadata/TestCubeMetastoreClient.java
+++ b/lens-cube/src/test/java/org/apache/lens/cube/metadata/TestCubeMetastoreClient.java
@@ -78,6 +78,7 @@ public class TestCubeMetastoreClient {
   private static String c1 = "C1";
   private static String c2 = "C2";
   private static String c3 = "C3";
+  private static String c4 = "C4";
   private static Dimension zipDim, cityDim, stateDim, countryDim;
   private static Set<CubeDimAttribute> zipAttrs = new HashSet<CubeDimAttribute>();
   private static Set<CubeDimAttribute> cityAttrs = new HashSet<CubeDimAttribute>();
@@ -327,9 +328,14 @@ public class TestCubeMetastoreClient {
     client.createStorage(hdfsStorage3);
     Assert.assertEquals(3, client.getAllStorages().size());
 
+    Storage hdfsStorage4 = new HDFSStorage(c4);
+    client.createStorage(hdfsStorage4);
+    Assert.assertEquals(4, client.getAllStorages().size());
+
     Assert.assertEquals(hdfsStorage, client.getStorage(c1));
     Assert.assertEquals(hdfsStorage2, client.getStorage(c2));
     Assert.assertEquals(hdfsStorage3, client.getStorage(c3));
+    Assert.assertEquals(hdfsStorage4, client.getStorage(c4));
   }
 
   @Test(priority = 1)
@@ -782,7 +788,7 @@ public class TestCubeMetastoreClient {
     // Partition with different schema
     FieldSchema newcol = new FieldSchema("newcol", "int", "new col for part");
     cubeFact.alterColumn(newcol);
-    client.alterCubeFactTable(cubeFact.getName(), cubeFact);
+    client.alterCubeFactTable(cubeFact.getName(), cubeFact, storageTables);
     String storageTableName = MetastoreUtil.getFactStorageTableName(factName, c1);
     Assert.assertEquals(client.getAllParts(storageTableName).size(), 1);
     List<Partition> parts = client.getPartitionsByFilter(storageTableName, "dt='latest'");
@@ -879,7 +885,7 @@ public class TestCubeMetastoreClient {
     alterupdates.add(UpdatePeriod.MONTHLY);
     factTable.alterStorage(c2, alterupdates);
 
-    client.alterCubeFactTable(factName, factTable);
+    client.alterCubeFactTable(factName, factTable, storageTables);
 
     Table factHiveTable = Hive.get(conf).getTable(factName);
     CubeFactTable altered = new CubeFactTable(factHiveTable);
@@ -901,9 +907,47 @@ public class TestCubeMetastoreClient {
     }
     Assert.assertTrue(contains);
 
-    client.addStorage(altered, c3, updates, s1);
-    Assert.assertTrue(altered.getStorages().contains("C3"));
-    Assert.assertTrue(altered.getUpdatePeriods().get("C3").equals(updates));
+    // alter storage table desc
+    String c1TableName = MetastoreUtil.getFactStorageTableName(factName, c1);
+    Table c1Table = client.getTable(c1TableName);
+    Assert.assertEquals(c1Table.getInputFormatClass().getCanonicalName(),
+      TextInputFormat.class.getCanonicalName());
+    s1 = new StorageTableDesc();
+    s1.setInputFormat(SequenceFileInputFormat.class.getCanonicalName());
+    s1.setOutputFormat(HiveIgnoreKeyTextOutputFormat.class.getCanonicalName());
+    s1.setPartCols(partCols);
+    s1.setTimePartCols(timePartCols);
+    s1.setFieldDelim(":");
+    storageTables.put(c1, s1);
+    storageTables.put(c4, s1);
+    factTable.addStorage(c4, updates);
+    client.alterCubeFactTable(factName, factTable, storageTables);
+    CubeFactTable altered2 = client.getCubeFact(factName);
+    Assert.assertTrue(client.tableExists(c1TableName));
+    Table alteredC1Table = client.getTable(c1TableName);
+    Assert.assertEquals(alteredC1Table.getInputFormatClass().getCanonicalName(),
+      SequenceFileInputFormat.class.getCanonicalName());
+    Assert.assertEquals(alteredC1Table.getSerdeParam(serdeConstants.FIELD_DELIM), ":");
+
+    boolean storageTableColsAltered = false;
+    for (FieldSchema column : alteredC1Table.getAllCols()) {
+      if (column.getName().equals("testfactcoladd") && column.getType().equals("int")) {
+        storageTableColsAltered = true;
+        break;
+      }
+    }
+    Assert.assertTrue(storageTableColsAltered);
+
+    Assert.assertTrue(altered2.getStorages().contains("C4"));
+    Assert.assertTrue(altered2.getUpdatePeriods().get("C4").equals(updates));
+    String c4TableName = MetastoreUtil.getFactStorageTableName(factName, c4);
+    Assert.assertTrue(client.tableExists(c4TableName));
+
+      // add storage
+    client.addStorage(altered2, c3, updates, s1);
+    CubeFactTable altered3 = client.getCubeFact(factName);
+    Assert.assertTrue(altered3.getStorages().contains("C3"));
+    Assert.assertTrue(altered3.getUpdatePeriods().get("C3").equals(updates));
     String storageTableName = MetastoreUtil.getFactStorageTableName(factName, c3);
     Assert.assertTrue(client.tableExists(storageTableName));
     client.dropStorageFromFact(factName, c2);
@@ -1888,7 +1932,7 @@ public class TestCubeMetastoreClient {
 
     // Partition with different schema
     cubeDim.alterColumn(newcol);
-    client.alterCubeDimensionTable(cubeDim.getName(), cubeDim);
+    client.alterCubeDimensionTable(cubeDim.getName(), cubeDim, storageTables);
 
     Map<String, Date> timeParts2 = new HashMap<String, Date>();
     timeParts2.put(TestCubeMetastoreClient.getDatePartitionKey(), nowPlus1);
@@ -2100,7 +2144,7 @@ public class TestCubeMetastoreClient {
 
   @Test(priority = 2)
   public void testAlterDim() throws Exception {
-    String dimName = "test_alter_dim";
+    String dimTblName = "test_alter_dim";
 
     List<FieldSchema> dimColumns = new ArrayList<FieldSchema>();
     dimColumns.add(new FieldSchema("zipcode", "int", "code"));
@@ -2120,24 +2164,24 @@ public class TestCubeMetastoreClient {
     Map<String, StorageTableDesc> storageTables = new HashMap<String, StorageTableDesc>();
     storageTables.put(c1, s1);
 
-    client.createCubeDimensionTable(zipDim.getName(), dimName, dimColumns, 100L, dumpPeriods, null, storageTables);
+    client.createCubeDimensionTable(zipDim.getName(), dimTblName, dimColumns, 100L, dumpPeriods, null, storageTables);
 
-    CubeDimensionTable dimTable = client.getDimensionTable(dimName);
+    CubeDimensionTable dimTable = client.getDimensionTable(dimTblName);
     dimTable.alterColumn(new FieldSchema("testAddDim", "string", "test add column"));
 
     List<CubeDimensionTable> tbls = client.getAllDimensionTables(zipDim);
     boolean found = false;
     for (CubeDimensionTable dim : tbls) {
-      if (dim.getName().equalsIgnoreCase(dimName)) {
+      if (dim.getName().equalsIgnoreCase(dimTblName)) {
         found = true;
         break;
       }
     }
     Assert.assertTrue(found);
 
-    client.alterCubeDimensionTable(dimName, dimTable);
+    client.alterCubeDimensionTable(dimTblName, dimTable, storageTables);
 
-    Table alteredHiveTable = Hive.get(conf).getTable(dimName);
+    Table alteredHiveTable = Hive.get(conf).getTable(dimTblName);
     CubeDimensionTable altered = new CubeDimensionTable(alteredHiveTable);
     List<FieldSchema> columns = altered.getColumns();
     boolean contains = false;
@@ -2151,9 +2195,9 @@ public class TestCubeMetastoreClient {
 
     // Test alter column
     dimTable.alterColumn(new FieldSchema("testAddDim", "int", "change type"));
-    client.alterCubeDimensionTable(dimName, dimTable);
+    client.alterCubeDimensionTable(dimTblName, dimTable, storageTables);
 
-    altered = new CubeDimensionTable(Hive.get(conf).getTable(dimName));
+    altered = new CubeDimensionTable(Hive.get(conf).getTable(dimTblName));
     boolean typeChanged = false;
     for (FieldSchema column : altered.getColumns()) {
       if (column.getName().equals("testadddim") && column.getType().equals("int")) {
@@ -2162,21 +2206,59 @@ public class TestCubeMetastoreClient {
       }
     }
     Assert.assertTrue(typeChanged);
+
+    // alter storage table desc
+    String c1TableName = MetastoreUtil.getDimStorageTableName(dimTblName, c1);
+    Table c1Table = client.getTable(c1TableName);
+    Assert.assertEquals(c1Table.getInputFormatClass().getCanonicalName(),
+      TextInputFormat.class.getCanonicalName());
+    s1 = new StorageTableDesc();
+    s1.setInputFormat(SequenceFileInputFormat.class.getCanonicalName());
+    s1.setOutputFormat(HiveIgnoreKeyTextOutputFormat.class.getCanonicalName());
+    s1.setPartCols(partCols);
+    s1.setTimePartCols(timePartCols);
+    storageTables.put(c1, s1);
+    storageTables.put(c4, s1);
+    dimTable.alterSnapshotDumpPeriod(c4, null);
+    client.alterCubeDimensionTable(dimTblName, dimTable, storageTables);
+    CubeDimensionTable altered2 = client.getDimensionTable(dimTblName);
+    Assert.assertTrue(client.tableExists(c1TableName));
+    Table alteredC1Table = client.getTable(c1TableName);
+    Assert.assertEquals(alteredC1Table.getInputFormatClass().getCanonicalName(),
+      SequenceFileInputFormat.class.getCanonicalName());
+    boolean storageTblColAltered = false;
+    for (FieldSchema column : alteredC1Table.getAllCols()) {
+      if (column.getName().equals("testadddim") && column.getType().equals("int")) {
+        storageTblColAltered = true;
+        break;
+      }
+    }
+    Assert.assertTrue(storageTblColAltered);
+    String c4TableName = MetastoreUtil.getDimStorageTableName(dimTblName, c4);
+    Assert.assertTrue(client.tableExists(c4TableName));
+    Table c4Table = client.getTable(c4TableName);
+    Assert.assertEquals(c4Table.getInputFormatClass().getCanonicalName(),
+      SequenceFileInputFormat.class.getCanonicalName());
+    Assert.assertTrue(altered2.getStorages().contains("C4"));
+    Assert.assertFalse(altered2.hasStorageSnapshots("C4"));
+
     StorageTableDesc s2 = new StorageTableDesc();
     s2.setInputFormat(TextInputFormat.class.getCanonicalName());
     s2.setOutputFormat(HiveIgnoreKeyTextOutputFormat.class.getCanonicalName());
     client.addStorage(dimTable, c2, null, s2);
     client.addStorage(dimTable, c3, UpdatePeriod.DAILY, s1);
-    Assert.assertTrue(client.tableExists(MetastoreUtil.getDimStorageTableName(dimName, c2)));
-    Assert.assertTrue(client.tableExists(MetastoreUtil.getDimStorageTableName(dimName, c3)));
-    Assert.assertFalse(dimTable.hasStorageSnapshots("C2"));
-    Assert.assertTrue(dimTable.hasStorageSnapshots("C3"));
-    client.dropStorageFromDim(dimName, "C1");
-    Assert.assertFalse(client.tableExists(MetastoreUtil.getDimStorageTableName(dimName, c1)));
-    client.dropDimensionTable(dimName, true);
-    Assert.assertFalse(client.tableExists(MetastoreUtil.getDimStorageTableName(dimName, c2)));
-    Assert.assertFalse(client.tableExists(MetastoreUtil.getDimStorageTableName(dimName, c3)));
-    Assert.assertFalse(client.tableExists(dimName));
+    Assert.assertTrue(client.tableExists(MetastoreUtil.getDimStorageTableName(dimTblName, c2)));
+    Assert.assertTrue(client.tableExists(MetastoreUtil.getDimStorageTableName(dimTblName, c3)));
+    CubeDimensionTable altered3 = client.getDimensionTable(dimTblName);
+    Assert.assertFalse(altered3.hasStorageSnapshots("C2"));
+    Assert.assertTrue(altered3.hasStorageSnapshots("C3"));
+    client.dropStorageFromDim(dimTblName, "C1");
+    Assert.assertFalse(client.tableExists(MetastoreUtil.getDimStorageTableName(dimTblName, c1)));
+    client.dropDimensionTable(dimTblName, true);
+    Assert.assertFalse(client.tableExists(MetastoreUtil.getDimStorageTableName(dimTblName, c2)));
+    Assert.assertFalse(client.tableExists(MetastoreUtil.getDimStorageTableName(dimTblName, c3)));
+    Assert.assertFalse(client.tableExists(dimTblName));
+    // alter storage tables
   }
 
   @Test(priority = 2)

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/20949e03/lens-server/src/main/java/org/apache/lens/server/metastore/CubeMetastoreServiceImpl.java
----------------------------------------------------------------------
diff --git a/lens-server/src/main/java/org/apache/lens/server/metastore/CubeMetastoreServiceImpl.java b/lens-server/src/main/java/org/apache/lens/server/metastore/CubeMetastoreServiceImpl.java
index 185972e..0e6d057 100644
--- a/lens-server/src/main/java/org/apache/lens/server/metastore/CubeMetastoreServiceImpl.java
+++ b/lens-server/src/main/java/org/apache/lens/server/metastore/CubeMetastoreServiceImpl.java
@@ -347,8 +347,8 @@ public class CubeMetastoreServiceImpl extends LensService implements CubeMetasto
     try {
       acquire(sessionid);
       getClient(sessionid).alterCubeDimensionTable(dimensionTable.getTableName(),
-        JAXBUtils.cubeDimTableFromDimTable(dimensionTable));
-      // TODO alter storage tables
+        JAXBUtils.cubeDimTableFromDimTable(dimensionTable),
+        JAXBUtils.storageTableMapFromXStorageTables(dimensionTable.getStorageTables()));
       LOG.info("Updated dimension table " + dimensionTable.getTableName());
     } catch (HiveException exc) {
       throw new LensException(exc);
@@ -526,8 +526,8 @@ public class CubeMetastoreServiceImpl extends LensService implements CubeMetasto
   public void updateFactTable(LensSessionHandle sessionid, XFactTable fact) throws LensException {
     try {
       acquire(sessionid);
-      getClient(sessionid).alterCubeFactTable(fact.getName(), JAXBUtils.cubeFactFromFactTable(fact));
-      // TODO alter storage tables
+      getClient(sessionid).alterCubeFactTable(fact.getName(), JAXBUtils.cubeFactFromFactTable(fact),
+        JAXBUtils.storageTableMapFromXStorageTables(fact.getStorageTables()));
       LOG.info("Updated fact table " + fact.getName());
     } catch (HiveException e) {
       throw new LensException(e);

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/20949e03/lens-server/src/test/java/org/apache/lens/server/metastore/TestMetastoreService.java
----------------------------------------------------------------------
diff --git a/lens-server/src/test/java/org/apache/lens/server/metastore/TestMetastoreService.java b/lens-server/src/test/java/org/apache/lens/server/metastore/TestMetastoreService.java
index 933714c..7795c73 100644
--- a/lens-server/src/test/java/org/apache/lens/server/metastore/TestMetastoreService.java
+++ b/lens-server/src/test/java/org/apache/lens/server/metastore/TestMetastoreService.java
@@ -51,6 +51,7 @@ import org.apache.commons.lang.time.DateUtils;
 import org.apache.hadoop.hive.metastore.TableType;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.apache.hadoop.hive.ql.session.SessionState;
+import org.apache.hadoop.mapred.SequenceFileInputFormat;
 import org.apache.log4j.BasicConfigurator;
 import org.apache.log4j.LogManager;
 import org.apache.log4j.Logger;
@@ -887,7 +888,6 @@ public class TestMetastoreService extends LensJerseyTest {
     XStorageTableDesc xs1 = cubeObjectFactory.createXStorageTableDesc();
     xs1.setCollectionDelimiter(",");
     xs1.setEscapeChar("\\");
-    xs1.setFieldDelimiter("");
     xs1.setFieldDelimiter("\t");
     xs1.setLineDelimiter("\n");
     xs1.setMapKeyDelimiter("\r");
@@ -1274,6 +1274,46 @@ public class TestMetastoreService extends LensJerseyTest {
       Map<String, String> updProps = JAXBUtils.mapFromXProperties(dt3.getProperties());
       assertEquals(updProps.get("foodim"), "bardim1");
 
+      // Update storage tables
+      dt3.getStorageTables().getStorageTable().get(0).getTableDesc().setFieldDelimiter(":");
+      dt3.getStorageTables().getStorageTable().get(0).getTableDesc().setInputFormat(
+        SequenceFileInputFormat.class.getCanonicalName());
+      // add one more storage table
+      createStorage("testAlterDimStorage");
+      XStorageTableElement newStorage = createStorageTblElement("testAlterDimStorage", dt3.getTableName(),
+        (String[])null);
+      newStorage.getTableDesc().setFieldDelimiter(":");
+      dt3.getStorageTables().getStorageTable().add(newStorage);
+      // Update the table
+      result = target().path("metastore/dimtables")
+        .path(table)
+        .queryParam("sessionid", lensSessionId).request(mediaType)
+        .put(Entity.xml(cubeObjectFactory.createXDimensionTable(dt3)), APIResult.class);
+      assertEquals(result.getStatus(), Status.SUCCEEDED);
+
+      // Get the updated table
+      JAXBElement<XDimensionTable> dtElement4 = target().path("metastore/dimtables").path(table)
+        .queryParam("sessionid", lensSessionId).request(mediaType)
+        .get(new GenericType<JAXBElement<XDimensionTable>>() {});
+      XDimensionTable dt4 = dtElement4.getValue();
+      assertEquals(dt4.getStorageTables().getStorageTable().size(), 2);
+
+      WebTarget nativeTarget = target().path("metastore").path("nativetables");
+      // get all native tables
+      StringList nativetables = nativeTarget.queryParam("sessionid", lensSessionId).request(mediaType).get(
+        StringList.class);
+      assertTrue(nativetables.getElements().contains("test_" + table));
+      assertTrue(nativetables.getElements().contains("testalterdimstorage_" + table));
+
+      // get native table and validate altered property
+      XNativeTable newdNativeTable = nativeTarget.path("testalterdimstorage_" + table)
+        .queryParam("sessionid", lensSessionId)
+        .request(mediaType).get(XNativeTable.class);
+      assertEquals(newdNativeTable.getStorageDescriptor().getFieldDelimiter(), ":");
+      XNativeTable alteredNativeTable = nativeTarget.path("test_" + table).queryParam("sessionid", lensSessionId)
+        .request(mediaType).get(XNativeTable.class);
+      assertEquals(alteredNativeTable.getStorageDescriptor().getInputFormat(),
+        SequenceFileInputFormat.class.getCanonicalName());
       // Drop table
       result =
         target().path("metastore/dimtables").path(table)
@@ -1526,13 +1566,77 @@ public class TestMetastoreService extends LensJerseyTest {
       assertTrue(cf.getUpdatePeriods().get("S1").contains(UpdatePeriod.HOURLY));
       assertTrue(cf.getUpdatePeriods().get("S2").contains(UpdatePeriod.DAILY));
 
+      // Finally, drop the fact table
+      result = target().path("metastore").path("facts").path(table)
+        .queryParam("cascade", "true")
+        .queryParam("sessionid", lensSessionId).request(mediaType)
+        .delete(APIResult.class);
+
+      assertEquals(result.getStatus(), Status.SUCCEEDED);
+
+      // Drop again, this time it should give a 404
+      try {
+        result = target().path("metastore").path("facts").path(table)
+          .queryParam("cascade", "true")
+          .queryParam("sessionid", lensSessionId).request(mediaType)
+          .delete(APIResult.class);
+        fail("Expected 404");
+      } catch (NotFoundException nfe) {
+        // PASS
+      }
+    } finally {
+      setCurrentDatabase(prevDb);
+      dropDatabase(DB);
+    }
+  }
+
+  @Test
+  public void testUpdateFactTable() throws Exception {
+    final String table = "testUpdateFactTable";
+    final String DB = dbPFX + "testUpdateFactTable_DB";
+    String prevDb = getCurrentDatabase();
+    createDatabase(DB);
+    setCurrentDatabase(DB);
+    createStorage("S1");
+    createStorage("S2");
+    createStorage("S3");
+    try {
+
+      XFactTable f = createFactTable(table);
+      f.getStorageTables().getStorageTable().add(createStorageTblElement("S1", table, "HOURLY"));
+      f.getStorageTables().getStorageTable().add(createStorageTblElement("S2", table, "DAILY"));
+      final FormDataMultiPart mp = new FormDataMultiPart();
+      mp.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("sessionid").build(),
+        lensSessionId, medType));
+      mp.bodyPart(new FormDataBodyPart(
+        FormDataContentDisposition.name("fact").fileName("fact").build(),
+        cubeObjectFactory.createXFactTable(f), medType));
+      APIResult result = target()
+        .path("metastore")
+        .path("facts")
+        .request(mediaType)
+        .post(Entity.entity(mp, MediaType.MULTIPART_FORM_DATA_TYPE), APIResult.class);
+      assertEquals(result.getStatus(), APIResult.Status.SUCCEEDED);
+
+      // Get the created table
+      JAXBElement<XFactTable> gotFactElement = target().path("metastore/facts").path(table)
+        .queryParam("sessionid", lensSessionId).request(mediaType)
+        .get(new GenericType<JAXBElement<XFactTable>>() {});
+      XFactTable gotFact = gotFactElement.getValue();
+      assertTrue(gotFact.getName().equalsIgnoreCase(table));
+      assertEquals(gotFact.getWeight(), 10.0);
+      CubeFactTable cf = JAXBUtils.cubeFactFromFactTable(gotFact);
+
       // Do some changes to test update
       cf.alterWeight(20.0);
       cf.alterColumn(new FieldSchema("c2", "int", "changed to int"));
 
       XFactTable update = JAXBUtils.factTableFromCubeFactTable(cf);
-      update.getStorageTables().getStorageTable().add(createStorageTblElement("S1", table, "HOURLY"));
+      XStorageTableElement s1Tbl = createStorageTblElement("S1", table, "HOURLY");
+      s1Tbl.getTableDesc().setFieldDelimiter("#");
+      update.getStorageTables().getStorageTable().add(s1Tbl);
       update.getStorageTables().getStorageTable().add(createStorageTblElement("S2", table, "MONTHLY"));
+      update.getStorageTables().getStorageTable().add(createStorageTblElement("S3", table, "DAILY"));
 
       // Update
       result = target().path("metastore").path("facts").path(table)
@@ -1549,6 +1653,7 @@ public class TestMetastoreService extends LensJerseyTest {
 
       assertEquals(ucf.weight(), 20.0);
       assertTrue(ucf.getUpdatePeriods().get("S2").contains(UpdatePeriod.MONTHLY));
+      assertTrue(ucf.getUpdatePeriods().get("S3").contains(UpdatePeriod.DAILY));
 
       boolean foundC2 = false;
       for (FieldSchema fs : cf.getColumns()) {
@@ -1559,6 +1664,19 @@ public class TestMetastoreService extends LensJerseyTest {
       }
       assertTrue(foundC2);
 
+      WebTarget nativeTarget = target().path("metastore").path("nativetables");
+      // get all native tables
+      StringList nativetables = nativeTarget.queryParam("sessionid", lensSessionId).request(mediaType).get(
+        StringList.class);
+      assertTrue(nativetables.getElements().contains("s1_" + table.toLowerCase()));
+      assertTrue(nativetables.getElements().contains("s2_" + table.toLowerCase()));
+      assertTrue(nativetables.getElements().contains("s3_" + table.toLowerCase()));
+
+      // get native table and validate altered property
+      XNativeTable alteredNativeTable = nativeTarget.path("s1_" + table).queryParam("sessionid", lensSessionId)
+        .request(mediaType).get(XNativeTable.class);
+      assertEquals(alteredNativeTable.getStorageDescriptor().getFieldDelimiter(), "#");
+
       // Finally, drop the fact table
       result = target().path("metastore").path("facts").path(table)
         .queryParam("cascade", "true")


[07/50] [abbrv] incubator-lens git commit: LENS-430: Fixes Mailing List link in docs (Amareshwari Sriramadasu via prongs)

Posted by am...@apache.org.
LENS-430: Fixes Mailing List link in docs (Amareshwari Sriramadasu via prongs)


Project: http://git-wip-us.apache.org/repos/asf/incubator-lens/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-lens/commit/7f6634a8
Tree: http://git-wip-us.apache.org/repos/asf/incubator-lens/tree/7f6634a8
Diff: http://git-wip-us.apache.org/repos/asf/incubator-lens/diff/7f6634a8

Branch: refs/heads/current-release-line
Commit: 7f6634a82b7eeba0b7d5ea3478ef225b9f32a7a3
Parents: 44f4faf
Author: Amareshwari Sriramadasu <am...@apache.org>
Authored: Thu Mar 26 19:08:30 2015 +0530
Committer: Rajat Khandelwal <pr...@apache.org>
Committed: Thu Mar 26 19:08:30 2015 +0530

----------------------------------------------------------------------
 src/site/apt/developer/contribute.apt | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/7f6634a8/src/site/apt/developer/contribute.apt
----------------------------------------------------------------------
diff --git a/src/site/apt/developer/contribute.apt b/src/site/apt/developer/contribute.apt
index bcce161..821ddab 100644
--- a/src/site/apt/developer/contribute.apt
+++ b/src/site/apt/developer/contribute.apt
@@ -493,7 +493,7 @@ Developer Documentation : How to contribute to Apache Lens?
 
 * Stay involved
 
-  Contributors should join the Lens {{{./mail-lists.html}mailing lists}}. In particular, the commit list (to see
+  Contributors should join the Lens {{{../mail-lists.html}mailing lists}}. In particular, the commit list (to see
   changes as they are made), the dev list (to join discussions of changes) and the user list (to help others). Also
   refer to {{{http://www.apache.org/dev/contributors.html} Apache contributors guide}} and
   {{{http://www.apache.org/foundation/voting.html} Apache voting process}}.


[36/50] [abbrv] incubator-lens git commit: LENS-480 : Fix NPE while running Hive MR jobs (amareshwari)

Posted by am...@apache.org.
LENS-480 : Fix NPE while running Hive MR jobs (amareshwari)


Project: http://git-wip-us.apache.org/repos/asf/incubator-lens/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-lens/commit/8750e721
Tree: http://git-wip-us.apache.org/repos/asf/incubator-lens/tree/8750e721
Diff: http://git-wip-us.apache.org/repos/asf/incubator-lens/diff/8750e721

Branch: refs/heads/current-release-line
Commit: 8750e72169fa9b7c01b376bf165e9abcc9206e8d
Parents: 44f4664
Author: Amareshwari Sriramadasu <am...@apache.org>
Authored: Thu Apr 9 06:46:45 2015 +0530
Committer: Amareshwari Sriramadasu <am...@apache.org>
Committed: Thu Apr 9 06:46:45 2015 +0530

----------------------------------------------------------------------
 .../org/apache/lens/server/LensApplication.java |  4 +--
 .../java/org/apache/lens/server/LensServer.java |  6 +---
 .../org/apache/lens/server/LensServerConf.java  | 31 +++++++++++++++++---
 .../org/apache/lens/server/LensServices.java    |  2 --
 .../lens/server/LensServletContextListener.java |  2 +-
 .../server/query/QueryExecutionServiceImpl.java |  3 +-
 .../org/apache/lens/server/LensJerseyTest.java  |  4 +--
 .../org/apache/lens/server/TestLensServer.java  |  2 +-
 .../apache/lens/server/TestServiceProvider.java |  2 +-
 .../lens/server/query/TestEventService.java     |  2 +-
 .../lens/server/query/TestQueryService.java     | 16 ++++++++++
 .../server/session/TestSessionClassLoaders.java |  2 +-
 .../lens/server/session/TestSessionExpiry.java  |  2 +-
 .../server/stats/TestLogStatisticsStore.java    |  2 +-
 .../TestStatisticsLogPartitionHandler.java      |  4 +--
 .../lens/server/user/TestUserConfigLoader.java  |  2 +-
 .../src/test/resources/hivedriver-site.xml      |  5 ++++
 .../src/test/resources/jdbcdriver-site.xml      |  5 ++++
 18 files changed, 70 insertions(+), 26 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/8750e721/lens-server/src/main/java/org/apache/lens/server/LensApplication.java
----------------------------------------------------------------------
diff --git a/lens-server/src/main/java/org/apache/lens/server/LensApplication.java b/lens-server/src/main/java/org/apache/lens/server/LensApplication.java
index dd3a4b3..cb452e8 100644
--- a/lens-server/src/main/java/org/apache/lens/server/LensApplication.java
+++ b/lens-server/src/main/java/org/apache/lens/server/LensApplication.java
@@ -28,7 +28,7 @@ import org.apache.lens.server.api.LensConfConstants;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.conf.Configuration;
 
 /**
  * The Class LensApplication.
@@ -40,7 +40,7 @@ public class LensApplication extends Application {
   public static final Log LOG = LogFactory.getLog(LensApplication.class);
 
   /** The conf. */
-  public static final HiveConf CONF = LensServerConf.get();
+  public static final Configuration CONF = LensServerConf.getConf();
 
   @Override
   public Set<Class<?>> getClasses() {

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/8750e721/lens-server/src/main/java/org/apache/lens/server/LensServer.java
----------------------------------------------------------------------
diff --git a/lens-server/src/main/java/org/apache/lens/server/LensServer.java b/lens-server/src/main/java/org/apache/lens/server/LensServer.java
index fe348a1..2f83fe1 100644
--- a/lens-server/src/main/java/org/apache/lens/server/LensServer.java
+++ b/lens-server/src/main/java/org/apache/lens/server/LensServer.java
@@ -62,9 +62,6 @@ public class LensServer {
   @Getter
   private final List<HttpServer> serverList = new ArrayList<HttpServer>();
 
-  /** The conf. */
-  final HiveConf conf;
-
   /**
    * This flag indicates that the lens server can run, When this is set to false, main thread bails out.
    */
@@ -87,7 +84,6 @@ public class LensServer {
    * @throws IOException Signals that an I/O exception has occurred.
    */
   private LensServer(HiveConf conf) throws IOException {
-    this.conf = conf;
     startServices(conf);
     String baseURI = conf.get(LensConfConstants.SERVER_BASE_URL, LensConfConstants.DEFAULT_SERVER_BASE_URL);
     HttpServer server = GrizzlyHttpServerFactory.createHttpServer(UriBuilder.fromUri(baseURI).build(), getApp(),
@@ -187,7 +183,7 @@ public class LensServer {
 
     printStartupMessage();
     try {
-      final LensServer thisServer = LensServer.createLensServer(LensServerConf.get());
+      final LensServer thisServer = LensServer.createLensServer(LensServerConf.getHiveConf());
 
       registerShutdownHook(thisServer);
       registerDefaultExceptionHandler();

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/8750e721/lens-server/src/main/java/org/apache/lens/server/LensServerConf.java
----------------------------------------------------------------------
diff --git a/lens-server/src/main/java/org/apache/lens/server/LensServerConf.java b/lens-server/src/main/java/org/apache/lens/server/LensServerConf.java
index 38cd14f..4d333c6 100644
--- a/lens-server/src/main/java/org/apache/lens/server/LensServerConf.java
+++ b/lens-server/src/main/java/org/apache/lens/server/LensServerConf.java
@@ -18,6 +18,7 @@
  */
 package org.apache.lens.server;
 
+import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.conf.HiveConf;
 
 /**
@@ -29,22 +30,44 @@ public final class LensServerConf {
   }
 
   private static final class ConfHolder {
-    public static final HiveConf CONF = new HiveConf();
+    public static final HiveConf HIVE_CONF = new HiveConf();
+    // configuration object which does not load defaults and loads only lens*.xml files.
+    public static final Configuration CONF = new Configuration(false);
 
     static {
+      HIVE_CONF.addResource("lensserver-default.xml");
+      HIVE_CONF.addResource("lens-site.xml");
       CONF.addResource("lensserver-default.xml");
       CONF.addResource("lens-site.xml");
     }
   }
 
   /**
+   * The HiveConf object with lensserver-default.xml and lens-site.xml added.
+   *
    * @return the hive conf
    */
-  public static HiveConf get() {
+  public static HiveConf getHiveConf() {
+    return ConfHolder.HIVE_CONF;
+  }
+
+  /**
+   * The configuration object which does not load any defaults and loads only lens*.xml files. This is passed to
+   * all drivers in configure
+   *
+   * @return the conf
+   */
+  public static Configuration getConf() {
     return ConfHolder.CONF;
   }
 
-  public static HiveConf create() {
-    return new HiveConf(ConfHolder.CONF);
+  /**
+   * Creates a new configuration object from Server HiveConf, Creation should would be called usually from tests
+   * to modify some configurations.
+   *
+   * @return
+   */
+  public static HiveConf createHiveConf() {
+    return new HiveConf(ConfHolder.HIVE_CONF);
   }
 }

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/8750e721/lens-server/src/main/java/org/apache/lens/server/LensServices.java
----------------------------------------------------------------------
diff --git a/lens-server/src/main/java/org/apache/lens/server/LensServices.java b/lens-server/src/main/java/org/apache/lens/server/LensServices.java
index 1484e47..f6cb365 100644
--- a/lens-server/src/main/java/org/apache/lens/server/LensServices.java
+++ b/lens-server/src/main/java/org/apache/lens/server/LensServices.java
@@ -143,8 +143,6 @@ public class LensServices extends CompositeService implements ServiceProvider {
   public synchronized void init(HiveConf hiveConf) {
     if (getServiceState() == STATE.NOTINITED) {
       conf = hiveConf;
-      conf.addResource("lensserver-default.xml");
-      conf.addResource("lens-site.xml");
       conf.setVar(HiveConf.ConfVars.HIVE_SESSION_IMPL_CLASSNAME, LensSessionImpl.class.getCanonicalName());
       serviceMode = conf.getEnum(SERVER_MODE,
         SERVICE_MODE.valueOf(DEFAULT_SERVER_MODE));

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/8750e721/lens-server/src/main/java/org/apache/lens/server/LensServletContextListener.java
----------------------------------------------------------------------
diff --git a/lens-server/src/main/java/org/apache/lens/server/LensServletContextListener.java b/lens-server/src/main/java/org/apache/lens/server/LensServletContextListener.java
index a801e3c..61a0dd8 100644
--- a/lens-server/src/main/java/org/apache/lens/server/LensServletContextListener.java
+++ b/lens-server/src/main/java/org/apache/lens/server/LensServletContextListener.java
@@ -63,7 +63,7 @@ public class LensServletContextListener implements ServletContextListener {
     }
 
     // start up all lens services
-    HiveConf conf = LensServerConf.get();
+    HiveConf conf = LensServerConf.getHiveConf();
     LensServices services = LensServices.get();
     services.init(conf);
     services.start();

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/8750e721/lens-server/src/main/java/org/apache/lens/server/query/QueryExecutionServiceImpl.java
----------------------------------------------------------------------
diff --git a/lens-server/src/main/java/org/apache/lens/server/query/QueryExecutionServiceImpl.java b/lens-server/src/main/java/org/apache/lens/server/query/QueryExecutionServiceImpl.java
index 3cfd03b..d531010 100644
--- a/lens-server/src/main/java/org/apache/lens/server/query/QueryExecutionServiceImpl.java
+++ b/lens-server/src/main/java/org/apache/lens/server/query/QueryExecutionServiceImpl.java
@@ -40,6 +40,7 @@ import org.apache.lens.api.query.*;
 import org.apache.lens.api.query.QueryStatus.Status;
 import org.apache.lens.driver.cube.RewriteUtil;
 import org.apache.lens.driver.hive.HiveDriver;
+import org.apache.lens.server.LensServerConf;
 import org.apache.lens.server.LensService;
 import org.apache.lens.server.LensServices;
 import org.apache.lens.server.api.LensConfConstants;
@@ -304,7 +305,7 @@ public class QueryExecutionServiceImpl extends LensService implements QueryExecu
         try {
           Class<?> clazz = Class.forName(driverClass);
           LensDriver driver = (LensDriver) clazz.newInstance();
-          driver.configure(conf);
+          driver.configure(LensServerConf.getConf());
 
           if (driver instanceof HiveDriver) {
             driver.registerDriverEventListener(driverEventListener);

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/8750e721/lens-server/src/test/java/org/apache/lens/server/LensJerseyTest.java
----------------------------------------------------------------------
diff --git a/lens-server/src/test/java/org/apache/lens/server/LensJerseyTest.java b/lens-server/src/test/java/org/apache/lens/server/LensJerseyTest.java
index 1f3fe68..7913845 100644
--- a/lens-server/src/test/java/org/apache/lens/server/LensJerseyTest.java
+++ b/lens-server/src/test/java/org/apache/lens/server/LensJerseyTest.java
@@ -98,7 +98,7 @@ public abstract class LensJerseyTest extends JerseyTest {
   }
 
   public HiveConf getServerConf() {
-    return LensServerConf.get();
+    return LensServerConf.getHiveConf();
   }
 
   /**
@@ -119,7 +119,7 @@ public abstract class LensJerseyTest extends JerseyTest {
     LensTestUtil.createTestDatabaseResources(new String[]{LensTestUtil.DB_WITH_JARS, LensTestUtil.DB_WITH_JARS_2},
       hiveConf);
 
-    LensServices.get().init(LensServerConf.get());
+    LensServices.get().init(LensServerConf.getHiveConf());
     LensServices.get().start();
 
     // Check if mock service is started

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/8750e721/lens-server/src/test/java/org/apache/lens/server/TestLensServer.java
----------------------------------------------------------------------
diff --git a/lens-server/src/test/java/org/apache/lens/server/TestLensServer.java b/lens-server/src/test/java/org/apache/lens/server/TestLensServer.java
index d7ac06c..eb27f61 100644
--- a/lens-server/src/test/java/org/apache/lens/server/TestLensServer.java
+++ b/lens-server/src/test/java/org/apache/lens/server/TestLensServer.java
@@ -34,7 +34,7 @@ public class TestLensServer {
    */
   @Test
   public void testUIServer() throws IOException {
-    HiveConf conf = new HiveConf(LensServerConf.get());
+    HiveConf conf = LensServerConf.createHiveConf();
     LensServer thisServer = LensServer.createLensServer(conf);
     Assert.assertEquals(thisServer.getServerList().size(), 2);
 

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/8750e721/lens-server/src/test/java/org/apache/lens/server/TestServiceProvider.java
----------------------------------------------------------------------
diff --git a/lens-server/src/test/java/org/apache/lens/server/TestServiceProvider.java b/lens-server/src/test/java/org/apache/lens/server/TestServiceProvider.java
index 5dfe780..af14fd6 100644
--- a/lens-server/src/test/java/org/apache/lens/server/TestServiceProvider.java
+++ b/lens-server/src/test/java/org/apache/lens/server/TestServiceProvider.java
@@ -42,7 +42,7 @@ public class TestServiceProvider extends LensAllApplicationJerseyTest {
    */
   @Test
   public void testServiceProvider() throws Exception {
-    HiveConf conf = LensServerConf.get();
+    HiveConf conf = LensServerConf.getHiveConf();
     Class<? extends ServiceProviderFactory> spfClass = conf.getClass(LensConfConstants.SERVICE_PROVIDER_FACTORY, null,
       ServiceProviderFactory.class);
 

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/8750e721/lens-server/src/test/java/org/apache/lens/server/query/TestEventService.java
----------------------------------------------------------------------
diff --git a/lens-server/src/test/java/org/apache/lens/server/query/TestEventService.java b/lens-server/src/test/java/org/apache/lens/server/query/TestEventService.java
index 3684af1..ec752ff 100644
--- a/lens-server/src/test/java/org/apache/lens/server/query/TestEventService.java
+++ b/lens-server/src/test/java/org/apache/lens/server/query/TestEventService.java
@@ -179,7 +179,7 @@ public class TestEventService {
    */
   @BeforeTest
   public void setup() throws Exception {
-    LensServices.get().init(LensServerConf.get());
+    LensServices.get().init(LensServerConf.getHiveConf());
     LensServices.get().start();
     service = LensServices.get().getService(LensEventService.NAME);
     assertNotNull(service);

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/8750e721/lens-server/src/test/java/org/apache/lens/server/query/TestQueryService.java
----------------------------------------------------------------------
diff --git a/lens-server/src/test/java/org/apache/lens/server/query/TestQueryService.java b/lens-server/src/test/java/org/apache/lens/server/query/TestQueryService.java
index bd2aa4c..0ab68c7 100644
--- a/lens-server/src/test/java/org/apache/lens/server/query/TestQueryService.java
+++ b/lens-server/src/test/java/org/apache/lens/server/query/TestQueryService.java
@@ -1291,6 +1291,22 @@ public class TestQueryService extends LensJerseyTest {
     Assert.assertEquals(queryService.getSession(lensSessionId).getHiveConf().getClassLoader(),
       ctx.getDriverContext().getDriverConf(queryService.getDrivers().iterator().next()).getClassLoader());
     Assert.assertTrue(ctx.isDriverQueryExplicitlySet());
+    for (LensDriver driver : queryService.getDrivers()) {
+      Configuration dconf = ctx.getDriverConf(driver);
+      Assert.assertEquals(dconf.get("test.session.key"), "svalue");
+      // query specific conf
+      Assert.assertEquals(dconf.get("test.query.conf"), "qvalue");
+      // lenssession default should be loaded
+      Assert.assertNotNull(dconf.get("lens.query.enable.persistent.resultset"));
+      // lens site should be loaded
+      Assert.assertEquals(dconf.get("test.lens.site.key"), "gsvalue");
+      // hive default variables should not be set
+      Assert.assertNull(conf.get("hive.exec.local.scratchdir"));
+      // driver site should be loaded
+      Assert.assertEquals(dconf.get("lens.driver.test.key"), "set");
+      // core default should not be loaded
+      Assert.assertNull(dconf.get("fs.default.name"));
+    }
   }
 
   /**

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/8750e721/lens-server/src/test/java/org/apache/lens/server/session/TestSessionClassLoaders.java
----------------------------------------------------------------------
diff --git a/lens-server/src/test/java/org/apache/lens/server/session/TestSessionClassLoaders.java b/lens-server/src/test/java/org/apache/lens/server/session/TestSessionClassLoaders.java
index 6f6208a..7dd4486 100644
--- a/lens-server/src/test/java/org/apache/lens/server/session/TestSessionClassLoaders.java
+++ b/lens-server/src/test/java/org/apache/lens/server/session/TestSessionClassLoaders.java
@@ -43,7 +43,7 @@ import org.testng.annotations.Test;
 public class TestSessionClassLoaders {
   private static final Log LOG = LogFactory.getLog(TestSessionClassLoaders.class);
 
-  private final HiveConf conf = LensServerConf.create();
+  private final HiveConf conf = LensServerConf.createHiveConf();
   private HiveSessionService sessionService;
 
   private static final String DB1 = TestSessionClassLoaders.class.getSimpleName() + "_db1";

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/8750e721/lens-server/src/test/java/org/apache/lens/server/session/TestSessionExpiry.java
----------------------------------------------------------------------
diff --git a/lens-server/src/test/java/org/apache/lens/server/session/TestSessionExpiry.java b/lens-server/src/test/java/org/apache/lens/server/session/TestSessionExpiry.java
index 6a30e90..c1cb9da 100644
--- a/lens-server/src/test/java/org/apache/lens/server/session/TestSessionExpiry.java
+++ b/lens-server/src/test/java/org/apache/lens/server/session/TestSessionExpiry.java
@@ -43,7 +43,7 @@ public class TestSessionExpiry {
    * @throws Exception the exception
    */
   public void testSessionExpiry() throws Exception {
-    HiveConf conf = LensServerConf.get();
+    HiveConf conf = LensServerConf.getHiveConf();
     conf.setVar(HiveConf.ConfVars.HIVE_SESSION_IMPL_CLASSNAME, LensSessionImpl.class.getName());
     conf.setLong(LensConfConstants.SESSION_TIMEOUT_SECONDS, 1L);
     CLIService cliService = new CLIService();

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/8750e721/lens-server/src/test/java/org/apache/lens/server/stats/TestLogStatisticsStore.java
----------------------------------------------------------------------
diff --git a/lens-server/src/test/java/org/apache/lens/server/stats/TestLogStatisticsStore.java b/lens-server/src/test/java/org/apache/lens/server/stats/TestLogStatisticsStore.java
index ce74f9d..c8a0ea5 100644
--- a/lens-server/src/test/java/org/apache/lens/server/stats/TestLogStatisticsStore.java
+++ b/lens-server/src/test/java/org/apache/lens/server/stats/TestLogStatisticsStore.java
@@ -79,7 +79,7 @@ public class TestLogStatisticsStore {
   @Test
   public void testLogStatisticsStore() throws Exception {
     LogStatisticsStore store = new LogStatisticsStore();
-    store.initialize(LensServerConf.get());
+    store.initialize(LensServerConf.getHiveConf());
     // Do some initialization work
     StringWriter writer = new StringWriter();
     Logger l = Logger.getLogger(MyLoggableLens.class);

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/8750e721/lens-server/src/test/java/org/apache/lens/server/stats/TestStatisticsLogPartitionHandler.java
----------------------------------------------------------------------
diff --git a/lens-server/src/test/java/org/apache/lens/server/stats/TestStatisticsLogPartitionHandler.java b/lens-server/src/test/java/org/apache/lens/server/stats/TestStatisticsLogPartitionHandler.java
index 2084ced..f59131b 100644
--- a/lens-server/src/test/java/org/apache/lens/server/stats/TestStatisticsLogPartitionHandler.java
+++ b/lens-server/src/test/java/org/apache/lens/server/stats/TestStatisticsLogPartitionHandler.java
@@ -89,7 +89,7 @@ public class TestStatisticsLogPartitionHandler {
   @Test
   public void testQueryExecutionStatisticsTableCreation() throws Exception {
     QueryExecutionStatistics stats = new QueryExecutionStatistics(System.currentTimeMillis());
-    HiveConf conf = LensServerConf.get();
+    HiveConf conf = LensServerConf.getHiveConf();
     Table t = stats.getHiveTable(conf);
     Hive h = getHiveClient(conf);
     h.createTable(t);
@@ -117,7 +117,7 @@ public class TestStatisticsLogPartitionHandler {
    */
   private HiveConf configureHiveTables() {
     assertNotNull(System.getProperty("hadoop.bin.path"));
-    HiveConf conf = LensServerConf.get();
+    HiveConf conf = LensServerConf.getHiveConf();
     try {
       Hive hive = getHiveClient(conf);
       Database database = new Database();

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/8750e721/lens-server/src/test/java/org/apache/lens/server/user/TestUserConfigLoader.java
----------------------------------------------------------------------
diff --git a/lens-server/src/test/java/org/apache/lens/server/user/TestUserConfigLoader.java b/lens-server/src/test/java/org/apache/lens/server/user/TestUserConfigLoader.java
index 5187f2e..dd9351a 100644
--- a/lens-server/src/test/java/org/apache/lens/server/user/TestUserConfigLoader.java
+++ b/lens-server/src/test/java/org/apache/lens/server/user/TestUserConfigLoader.java
@@ -58,7 +58,7 @@ public class TestUserConfigLoader {
    */
   @BeforeTest(alwaysRun = true)
   public void init() {
-    conf = new HiveConf(LensServerConf.get());
+    conf = new HiveConf(LensServerConf.getHiveConf());
   }
 
   /**

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/8750e721/lens-server/src/test/resources/hivedriver-site.xml
----------------------------------------------------------------------
diff --git a/lens-server/src/test/resources/hivedriver-site.xml b/lens-server/src/test/resources/hivedriver-site.xml
index e4208b4..cd9c655 100644
--- a/lens-server/src/test/resources/hivedriver-site.xml
+++ b/lens-server/src/test/resources/hivedriver-site.xml
@@ -72,4 +72,9 @@
     <value>3</value>
   </property>
 
+  <property>
+    <name>lens.driver.test.key</name>
+    <value>set</value>
+  </property>
+
 </configuration>

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/8750e721/lens-server/src/test/resources/jdbcdriver-site.xml
----------------------------------------------------------------------
diff --git a/lens-server/src/test/resources/jdbcdriver-site.xml b/lens-server/src/test/resources/jdbcdriver-site.xml
index 752372a..1b14f54 100644
--- a/lens-server/src/test/resources/jdbcdriver-site.xml
+++ b/lens-server/src/test/resources/jdbcdriver-site.xml
@@ -47,4 +47,9 @@
     <name>lens.driver.jdbc.explain.keyword</name>
     <value>explain plan for</value>
   </property>
+  <property>
+    <name>lens.driver.test.key</name>
+    <value>set</value>
+  </property>
+
 </configuration>


[24/50] [abbrv] incubator-lens git commit: Lens-465 : Refactor ml packages. (sharad)

Posted by am...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/0f5ea4c7/lens-ml-lib/src/main/java/org/apache/lens/ml/spark/models/LogitRegressionClassificationModel.java
----------------------------------------------------------------------
diff --git a/lens-ml-lib/src/main/java/org/apache/lens/ml/spark/models/LogitRegressionClassificationModel.java b/lens-ml-lib/src/main/java/org/apache/lens/ml/spark/models/LogitRegressionClassificationModel.java
deleted file mode 100644
index 1c5152b..0000000
--- a/lens-ml-lib/src/main/java/org/apache/lens/ml/spark/models/LogitRegressionClassificationModel.java
+++ /dev/null
@@ -1,37 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.lens.ml.spark.models;
-
-import org.apache.spark.mllib.classification.LogisticRegressionModel;
-
-/**
- * The Class LogitRegressionClassificationModel.
- */
-public class LogitRegressionClassificationModel extends BaseSparkClassificationModel<LogisticRegressionModel> {
-
-  /**
-   * Instantiates a new logit regression classification model.
-   *
-   * @param modelId the model id
-   * @param model   the model
-   */
-  public LogitRegressionClassificationModel(String modelId, LogisticRegressionModel model) {
-    super(modelId, model);
-  }
-}

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/0f5ea4c7/lens-ml-lib/src/main/java/org/apache/lens/ml/spark/models/NaiveBayesClassificationModel.java
----------------------------------------------------------------------
diff --git a/lens-ml-lib/src/main/java/org/apache/lens/ml/spark/models/NaiveBayesClassificationModel.java b/lens-ml-lib/src/main/java/org/apache/lens/ml/spark/models/NaiveBayesClassificationModel.java
deleted file mode 100644
index 8f4552c..0000000
--- a/lens-ml-lib/src/main/java/org/apache/lens/ml/spark/models/NaiveBayesClassificationModel.java
+++ /dev/null
@@ -1,37 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.lens.ml.spark.models;
-
-import org.apache.spark.mllib.classification.NaiveBayesModel;
-
-/**
- * The Class NaiveBayesClassificationModel.
- */
-public class NaiveBayesClassificationModel extends BaseSparkClassificationModel<NaiveBayesModel> {
-
-  /**
-   * Instantiates a new naive bayes classification model.
-   *
-   * @param modelId the model id
-   * @param model   the model
-   */
-  public NaiveBayesClassificationModel(String modelId, NaiveBayesModel model) {
-    super(modelId, model);
-  }
-}

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/0f5ea4c7/lens-ml-lib/src/main/java/org/apache/lens/ml/spark/models/SVMClassificationModel.java
----------------------------------------------------------------------
diff --git a/lens-ml-lib/src/main/java/org/apache/lens/ml/spark/models/SVMClassificationModel.java b/lens-ml-lib/src/main/java/org/apache/lens/ml/spark/models/SVMClassificationModel.java
deleted file mode 100644
index 4e504fb..0000000
--- a/lens-ml-lib/src/main/java/org/apache/lens/ml/spark/models/SVMClassificationModel.java
+++ /dev/null
@@ -1,37 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.lens.ml.spark.models;
-
-import org.apache.spark.mllib.classification.SVMModel;
-
-/**
- * The Class SVMClassificationModel.
- */
-public class SVMClassificationModel extends BaseSparkClassificationModel<SVMModel> {
-
-  /**
-   * Instantiates a new SVM classification model.
-   *
-   * @param modelId the model id
-   * @param model   the model
-   */
-  public SVMClassificationModel(String modelId, SVMModel model) {
-    super(modelId, model);
-  }
-}

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/0f5ea4c7/lens-ml-lib/src/main/java/org/apache/lens/ml/spark/models/SparkDecisionTreeModel.java
----------------------------------------------------------------------
diff --git a/lens-ml-lib/src/main/java/org/apache/lens/ml/spark/models/SparkDecisionTreeModel.java b/lens-ml-lib/src/main/java/org/apache/lens/ml/spark/models/SparkDecisionTreeModel.java
deleted file mode 100644
index 657070b..0000000
--- a/lens-ml-lib/src/main/java/org/apache/lens/ml/spark/models/SparkDecisionTreeModel.java
+++ /dev/null
@@ -1,75 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.lens.ml.spark.models;
-
-import org.apache.lens.ml.spark.DoubleValueMapper;
-
-import org.apache.spark.api.java.JavaRDD;
-import org.apache.spark.mllib.classification.ClassificationModel;
-import org.apache.spark.mllib.linalg.Vector;
-import org.apache.spark.mllib.tree.model.DecisionTreeModel;
-import org.apache.spark.rdd.RDD;
-
-/**
- * This class is created because the Spark decision tree model doesn't extend ClassificationModel.
- */
-public class SparkDecisionTreeModel implements ClassificationModel {
-
-  /** The model. */
-  private final DecisionTreeModel model;
-
-  /**
-   * Instantiates a new spark decision tree model.
-   *
-   * @param model the model
-   */
-  public SparkDecisionTreeModel(DecisionTreeModel model) {
-    this.model = model;
-  }
-
-  /*
-   * (non-Javadoc)
-   *
-   * @see org.apache.spark.mllib.classification.ClassificationModel#predict(org.apache.spark.rdd.RDD)
-   */
-  @Override
-  public RDD<Object> predict(RDD<Vector> testData) {
-    return model.predict(testData);
-  }
-
-  /*
-   * (non-Javadoc)
-   *
-   * @see org.apache.spark.mllib.classification.ClassificationModel#predict(org.apache.spark.mllib.linalg.Vector)
-   */
-  @Override
-  public double predict(Vector testData) {
-    return model.predict(testData);
-  }
-
-  /*
-   * (non-Javadoc)
-   *
-   * @see org.apache.spark.mllib.classification.ClassificationModel#predict(org.apache.spark.api.java.JavaRDD)
-   */
-  @Override
-  public JavaRDD<Double> predict(JavaRDD<Vector> testData) {
-    return model.predict(testData.rdd()).toJavaRDD().map(new DoubleValueMapper());
-  }
-}

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/0f5ea4c7/lens-ml-lib/src/main/java/org/apache/lens/ml/task/MLTask.java
----------------------------------------------------------------------
diff --git a/lens-ml-lib/src/main/java/org/apache/lens/ml/task/MLTask.java b/lens-ml-lib/src/main/java/org/apache/lens/ml/task/MLTask.java
deleted file mode 100644
index e4bb329..0000000
--- a/lens-ml-lib/src/main/java/org/apache/lens/ml/task/MLTask.java
+++ /dev/null
@@ -1,286 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.lens.ml.task;
-
-import java.util.*;
-
-import org.apache.lens.client.LensMLClient;
-import org.apache.lens.ml.LensML;
-import org.apache.lens.ml.MLTestReport;
-import org.apache.lens.ml.MLUtils;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.hive.conf.HiveConf;
-
-import lombok.Getter;
-import lombok.ToString;
-
-/**
- * Run a complete cycle of train and test (evaluation) for an ML algorithm
- */
-@ToString
-public class MLTask implements Runnable {
-  private static final Log LOG = LogFactory.getLog(MLTask.class);
-
-  public enum State {
-    RUNNING, SUCCESSFUL, FAILED
-  }
-
-  @Getter
-  private State taskState;
-
-  /**
-   * Name of the algo/algorithm.
-   */
-  @Getter
-  private String algorithm;
-
-  /**
-   * Name of the table containing training data.
-   */
-  @Getter
-  private String trainingTable;
-
-  /**
-   * Name of the table containing test data. Optional, if not provided trainingTable itself is
-   * used for testing
-   */
-  @Getter
-  private String testTable;
-
-  /**
-   * Training table partition spec
-   */
-  @Getter
-  private String partitionSpec;
-
-  /**
-   * Name of the column which is a label for supervised algorithms.
-   */
-  @Getter
-  private String labelColumn;
-
-  /**
-   * Names of columns which are features in the training data.
-   */
-  @Getter
-  private List<String> featureColumns;
-
-  /**
-   * Configuration for the example.
-   */
-  @Getter
-  private HiveConf configuration;
-
-  private LensML ml;
-  private String taskID;
-
-  /**
-   * ml client
-   */
-  @Getter
-  private LensMLClient mlClient;
-
-  /**
-   * Output table name
-   */
-  @Getter
-  private String outputTable;
-
-  /**
-   * Extra params passed to the training algorithm
-   */
-  @Getter
-  private Map<String, String> extraParams;
-
-  @Getter
-  private String modelID;
-
-  @Getter
-  private String reportID;
-
-  /**
-   * Use ExampleTask.Builder to create an instance
-   */
-  private MLTask() {
-    // Use builder to construct the example
-    extraParams = new HashMap<String, String>();
-    taskID = UUID.randomUUID().toString();
-  }
-
-  /**
-   * Builder to create an example task
-   */
-  public static class Builder {
-    private MLTask task;
-
-    public Builder() {
-      task = new MLTask();
-    }
-
-    public Builder trainingTable(String trainingTable) {
-      task.trainingTable = trainingTable;
-      return this;
-    }
-
-    public Builder testTable(String testTable) {
-      task.testTable = testTable;
-      return this;
-    }
-
-    public Builder algorithm(String algorithm) {
-      task.algorithm = algorithm;
-      return this;
-    }
-
-    public Builder labelColumn(String labelColumn) {
-      task.labelColumn = labelColumn;
-      return this;
-    }
-
-    public Builder client(LensMLClient client) {
-      task.mlClient = client;
-      return this;
-    }
-
-    public Builder addFeatureColumn(String featureColumn) {
-      if (task.featureColumns == null) {
-        task.featureColumns = new ArrayList<String>();
-      }
-      task.featureColumns.add(featureColumn);
-      return this;
-    }
-
-    public Builder hiveConf(HiveConf hiveConf) {
-      task.configuration = hiveConf;
-      return this;
-    }
-
-
-
-    public Builder extraParam(String param, String value) {
-      task.extraParams.put(param, value);
-      return this;
-    }
-
-    public Builder partitionSpec(String partitionSpec) {
-      task.partitionSpec = partitionSpec;
-      return this;
-    }
-
-    public Builder outputTable(String outputTable) {
-      task.outputTable = outputTable;
-      return this;
-    }
-
-    public MLTask build() {
-      MLTask builtTask = task;
-      task = null;
-      return builtTask;
-    }
-
-  }
-
-  @Override
-  public void run() {
-    taskState = State.RUNNING;
-    LOG.info("Starting " + taskID);
-    try {
-      runTask();
-      taskState = State.SUCCESSFUL;
-      LOG.info("Complete " + taskID);
-    } catch (Exception e) {
-      taskState = State.FAILED;
-      LOG.info("Error running task " + taskID, e);
-    }
-  }
-
-  /**
-   * Train an ML model, with specified algorithm and input data. Do model evaluation using the evaluation data and print
-   * evaluation result
-   *
-   * @throws Exception
-   */
-  private void runTask() throws Exception {
-    if (mlClient != null) {
-      // Connect to a remote Lens server
-      ml = mlClient;
-      LOG.info("Working in client mode. Lens session handle " + mlClient.getSessionHandle().getPublicId());
-    } else {
-      // In server mode session handle has to be passed by the user as a request parameter
-      ml = MLUtils.getMLService();
-      LOG.info("Working in Lens server");
-    }
-
-    String[] algoArgs = buildTrainingArgs();
-    LOG.info("Starting task " + taskID + " algo args: " + Arrays.toString(algoArgs));
-
-    modelID = ml.train(trainingTable, algorithm, algoArgs);
-    printModelMetadata(taskID, modelID);
-
-    LOG.info("Starting test " + taskID);
-    testTable = (testTable != null) ? testTable : trainingTable;
-    MLTestReport testReport = ml.testModel(mlClient.getSessionHandle(), testTable, algorithm, modelID, outputTable);
-    reportID = testReport.getReportID();
-    printTestReport(taskID, testReport);
-    saveTask();
-  }
-
-  // Save task metadata to DB
-  private void saveTask() {
-    LOG.info("Saving task details to DB");
-  }
-
-  private void printTestReport(String exampleID, MLTestReport testReport) {
-    StringBuilder builder = new StringBuilder("Example: ").append(exampleID);
-    builder.append("\n\t");
-    builder.append("EvaluationReport: ").append(testReport.toString());
-    System.out.println(builder.toString());
-  }
-
-  private String[] buildTrainingArgs() {
-    List<String> argList = new ArrayList<String>();
-    argList.add("label");
-    argList.add(labelColumn);
-
-    // Add all the features
-    for (String featureCol : featureColumns) {
-      argList.add("feature");
-      argList.add(featureCol);
-    }
-
-    // Add extra params
-    for (String param : extraParams.keySet()) {
-      argList.add(param);
-      argList.add(extraParams.get(param));
-    }
-
-    return argList.toArray(new String[argList.size()]);
-  }
-
-  // Get the model instance and print its metadat to stdout
-  private void printModelMetadata(String exampleID, String modelID) throws Exception {
-    StringBuilder builder = new StringBuilder("Example: ").append(exampleID);
-    builder.append("\n\t");
-    builder.append("Model: ");
-    builder.append(ml.getModel(algorithm, modelID).toString());
-    System.out.println(builder.toString());
-  }
-}

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/0f5ea4c7/lens-ml-lib/src/main/java/org/apache/lens/rdd/LensRDDClient.java
----------------------------------------------------------------------
diff --git a/lens-ml-lib/src/main/java/org/apache/lens/rdd/LensRDDClient.java b/lens-ml-lib/src/main/java/org/apache/lens/rdd/LensRDDClient.java
index 2c2d28b..ac89eee 100644
--- a/lens-ml-lib/src/main/java/org/apache/lens/rdd/LensRDDClient.java
+++ b/lens-ml-lib/src/main/java/org/apache/lens/rdd/LensRDDClient.java
@@ -29,7 +29,7 @@ import org.apache.lens.api.LensException;
 import org.apache.lens.api.query.*;
 import org.apache.lens.client.LensClient;
 import org.apache.lens.client.LensClientResultSet;
-import org.apache.lens.ml.spark.HiveTableRDD;
+import org.apache.lens.ml.algo.spark.HiveTableRDD;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/0f5ea4c7/lens-ml-lib/src/main/java/org/apache/lens/server/ml/MLApp.java
----------------------------------------------------------------------
diff --git a/lens-ml-lib/src/main/java/org/apache/lens/server/ml/MLApp.java b/lens-ml-lib/src/main/java/org/apache/lens/server/ml/MLApp.java
deleted file mode 100644
index 75d4f03..0000000
--- a/lens-ml-lib/src/main/java/org/apache/lens/server/ml/MLApp.java
+++ /dev/null
@@ -1,60 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.lens.server.ml;
-
-import java.util.HashSet;
-import java.util.Set;
-
-import javax.ws.rs.ApplicationPath;
-import javax.ws.rs.core.Application;
-
-import org.glassfish.jersey.filter.LoggingFilter;
-import org.glassfish.jersey.media.multipart.MultiPartFeature;
-
-@ApplicationPath("/ml")
-public class MLApp extends Application {
-
-  private final Set<Class<?>> classes;
-
-  /**
-   * Pass additional classes when running in test mode
-   *
-   * @param additionalClasses
-   */
-  public MLApp(Class<?>... additionalClasses) {
-    classes = new HashSet<Class<?>>();
-
-    // register root resource
-    classes.add(MLServiceResource.class);
-    classes.add(MultiPartFeature.class);
-    classes.add(LoggingFilter.class);
-    for (Class<?> cls : additionalClasses) {
-      classes.add(cls);
-    }
-
-  }
-
-  /**
-   * Get classes for this resource
-   */
-  @Override
-  public Set<Class<?>> getClasses() {
-    return classes;
-  }
-}

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/0f5ea4c7/lens-ml-lib/src/main/java/org/apache/lens/server/ml/MLService.java
----------------------------------------------------------------------
diff --git a/lens-ml-lib/src/main/java/org/apache/lens/server/ml/MLService.java b/lens-ml-lib/src/main/java/org/apache/lens/server/ml/MLService.java
deleted file mode 100644
index 0dac605..0000000
--- a/lens-ml-lib/src/main/java/org/apache/lens/server/ml/MLService.java
+++ /dev/null
@@ -1,27 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.lens.server.ml;
-
-import org.apache.lens.ml.LensML;
-
-/**
- * The Interface MLService.
- */
-public interface MLService extends LensML {
-}

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/0f5ea4c7/lens-ml-lib/src/main/java/org/apache/lens/server/ml/MLServiceImpl.java
----------------------------------------------------------------------
diff --git a/lens-ml-lib/src/main/java/org/apache/lens/server/ml/MLServiceImpl.java b/lens-ml-lib/src/main/java/org/apache/lens/server/ml/MLServiceImpl.java
deleted file mode 100644
index 0e8e9aa..0000000
--- a/lens-ml-lib/src/main/java/org/apache/lens/server/ml/MLServiceImpl.java
+++ /dev/null
@@ -1,324 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.lens.server.ml;
-
-import java.util.List;
-import java.util.Map;
-
-import org.apache.lens.api.LensConf;
-import org.apache.lens.api.LensException;
-import org.apache.lens.api.LensSessionHandle;
-import org.apache.lens.api.query.LensQuery;
-import org.apache.lens.api.query.QueryHandle;
-import org.apache.lens.api.query.QueryStatus;
-import org.apache.lens.ml.*;
-import org.apache.lens.server.api.LensConfConstants;
-import org.apache.lens.server.api.ServiceProvider;
-import org.apache.lens.server.api.ServiceProviderFactory;
-import org.apache.lens.server.api.query.QueryExecutionService;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.ql.exec.FunctionRegistry;
-import org.apache.hive.service.CompositeService;
-
-/**
- * The Class MLServiceImpl.
- */
-public class MLServiceImpl extends CompositeService implements MLService {
-
-  /** The Constant LOG. */
-  public static final Log LOG = LogFactory.getLog(LensMLImpl.class);
-
-  /** The ml. */
-  private LensMLImpl ml;
-
-  /** The service provider. */
-  private ServiceProvider serviceProvider;
-
-  /** The service provider factory. */
-  private ServiceProviderFactory serviceProviderFactory;
-
-  /**
-   * Instantiates a new ML service impl.
-   */
-  public MLServiceImpl() {
-    this(NAME);
-  }
-
-  /**
-   * Instantiates a new ML service impl.
-   *
-   * @param name the name
-   */
-  public MLServiceImpl(String name) {
-    super(name);
-  }
-
-  @Override
-  public List<String> getAlgorithms() {
-    return ml.getAlgorithms();
-  }
-
-  /*
-   * (non-Javadoc)
-   *
-   * @see org.apache.lens.ml.LensML#getAlgoForName(java.lang.String)
-   */
-  @Override
-  public MLAlgo getAlgoForName(String algorithm) throws LensException {
-    return ml.getAlgoForName(algorithm);
-  }
-
-  /*
-   * (non-Javadoc)
-   *
-   * @see org.apache.lens.ml.LensML#train(java.lang.String, java.lang.String, java.lang.String[])
-   */
-  @Override
-  public String train(String table, String algorithm, String[] args) throws LensException {
-    return ml.train(table, algorithm, args);
-  }
-
-  /*
-   * (non-Javadoc)
-   *
-   * @see org.apache.lens.ml.LensML#getModels(java.lang.String)
-   */
-  @Override
-  public List<String> getModels(String algorithm) throws LensException {
-    return ml.getModels(algorithm);
-  }
-
-  /*
-   * (non-Javadoc)
-   *
-   * @see org.apache.lens.ml.LensML#getModel(java.lang.String, java.lang.String)
-   */
-  @Override
-  public MLModel getModel(String algorithm, String modelId) throws LensException {
-    return ml.getModel(algorithm, modelId);
-  }
-
-  private ServiceProvider getServiceProvider() {
-    if (serviceProvider == null) {
-      serviceProvider = serviceProviderFactory.getServiceProvider();
-    }
-    return serviceProvider;
-  }
-
-  /**
-   * Gets the service provider factory.
-   *
-   * @param conf the conf
-   * @return the service provider factory
-   */
-  private ServiceProviderFactory getServiceProviderFactory(HiveConf conf) {
-    Class<?> spfClass = conf.getClass(LensConfConstants.SERVICE_PROVIDER_FACTORY, ServiceProviderFactory.class);
-    try {
-      return (ServiceProviderFactory) spfClass.newInstance();
-    } catch (InstantiationException e) {
-      throw new RuntimeException(e);
-    } catch (IllegalAccessException e) {
-      throw new RuntimeException(e);
-    }
-  }
-
-  /*
-   * (non-Javadoc)
-   *
-   * @see org.apache.hive.service.CompositeService#init(org.apache.hadoop.hive.conf.HiveConf)
-   */
-  @Override
-  public synchronized void init(HiveConf hiveConf) {
-    ml = new LensMLImpl(hiveConf);
-    ml.init(hiveConf);
-    super.init(hiveConf);
-    serviceProviderFactory = getServiceProviderFactory(hiveConf);
-    LOG.info("Inited ML service");
-  }
-
-  /*
-   * (non-Javadoc)
-   *
-   * @see org.apache.hive.service.CompositeService#start()
-   */
-  @Override
-  public synchronized void start() {
-    ml.start();
-    super.start();
-    LOG.info("Started ML service");
-  }
-
-  /*
-   * (non-Javadoc)
-   *
-   * @see org.apache.hive.service.CompositeService#stop()
-   */
-  @Override
-  public synchronized void stop() {
-    ml.stop();
-    super.stop();
-    LOG.info("Stopped ML service");
-  }
-
-  /**
-   * Clear models.
-   */
-  public void clearModels() {
-    ModelLoader.clearCache();
-  }
-
-  /*
-   * (non-Javadoc)
-   *
-   * @see org.apache.lens.ml.LensML#getModelPath(java.lang.String, java.lang.String)
-   */
-  @Override
-  public String getModelPath(String algorithm, String modelID) {
-    return ml.getModelPath(algorithm, modelID);
-  }
-
-  /*
-   * (non-Javadoc)
-   *
-   * @see org.apache.lens.ml.LensML#testModel(org.apache.lens.api.LensSessionHandle, java.lang.String, java.lang.String,
-   * java.lang.String)
-   */
-  @Override
-  public MLTestReport testModel(LensSessionHandle sessionHandle, String table, String algorithm, String modelID,
-    String outputTable) throws LensException {
-    return ml.testModel(sessionHandle, table, algorithm, modelID, new DirectQueryRunner(sessionHandle), outputTable);
-  }
-
-  /*
-   * (non-Javadoc)
-   *
-   * @see org.apache.lens.ml.LensML#getTestReports(java.lang.String)
-   */
-  @Override
-  public List<String> getTestReports(String algorithm) throws LensException {
-    return ml.getTestReports(algorithm);
-  }
-
-  /*
-   * (non-Javadoc)
-   *
-   * @see org.apache.lens.ml.LensML#getTestReport(java.lang.String, java.lang.String)
-   */
-  @Override
-  public MLTestReport getTestReport(String algorithm, String reportID) throws LensException {
-    return ml.getTestReport(algorithm, reportID);
-  }
-
-  /*
-   * (non-Javadoc)
-   *
-   * @see org.apache.lens.ml.LensML#predict(java.lang.String, java.lang.String, java.lang.Object[])
-   */
-  @Override
-  public Object predict(String algorithm, String modelID, Object[] features) throws LensException {
-    return ml.predict(algorithm, modelID, features);
-  }
-
-  /*
-   * (non-Javadoc)
-   *
-   * @see org.apache.lens.ml.LensML#deleteModel(java.lang.String, java.lang.String)
-   */
-  @Override
-  public void deleteModel(String algorithm, String modelID) throws LensException {
-    ml.deleteModel(algorithm, modelID);
-  }
-
-  /*
-   * (non-Javadoc)
-   *
-   * @see org.apache.lens.ml.LensML#deleteTestReport(java.lang.String, java.lang.String)
-   */
-  @Override
-  public void deleteTestReport(String algorithm, String reportID) throws LensException {
-    ml.deleteTestReport(algorithm, reportID);
-  }
-
-  /**
-   * Run the test model query directly in the current lens server process.
-   */
-  private class DirectQueryRunner extends QueryRunner {
-
-    /**
-     * Instantiates a new direct query runner.
-     *
-     * @param sessionHandle the session handle
-     */
-    public DirectQueryRunner(LensSessionHandle sessionHandle) {
-      super(sessionHandle);
-    }
-
-    /*
-     * (non-Javadoc)
-     *
-     * @see org.apache.lens.ml.TestQueryRunner#runQuery(java.lang.String)
-     */
-    @Override
-    public QueryHandle runQuery(String testQuery) throws LensException {
-      FunctionRegistry.registerTemporaryFunction("predict", HiveMLUDF.class);
-      LOG.info("Registered predict UDF");
-      // Run the query in query executions service
-      QueryExecutionService queryService = (QueryExecutionService) getServiceProvider().getService("query");
-
-      LensConf queryConf = new LensConf();
-      queryConf.addProperty(LensConfConstants.QUERY_PERSISTENT_RESULT_SET, false + "");
-      queryConf.addProperty(LensConfConstants.QUERY_PERSISTENT_RESULT_INDRIVER, false + "");
-
-      QueryHandle testQueryHandle = queryService.executeAsync(sessionHandle, testQuery, queryConf, queryName);
-
-      // Wait for test query to complete
-      LensQuery query = queryService.getQuery(sessionHandle, testQueryHandle);
-      LOG.info("Submitted query " + testQueryHandle.getHandleId());
-      while (!query.getStatus().isFinished()) {
-        try {
-          Thread.sleep(500);
-        } catch (InterruptedException e) {
-          throw new LensException(e);
-        }
-
-        query = queryService.getQuery(sessionHandle, testQueryHandle);
-      }
-
-      if (query.getStatus().getStatus() != QueryStatus.Status.SUCCESSFUL) {
-        throw new LensException("Failed to run test query: " + testQueryHandle.getHandleId() + " reason= "
-          + query.getStatus().getErrorMessage());
-      }
-
-      return testQueryHandle;
-    }
-  }
-
-  /*
-   * (non-Javadoc)
-   *
-   * @see org.apache.lens.ml.LensML#getAlgoParamDescription(java.lang.String)
-   */
-  @Override
-  public Map<String, String> getAlgoParamDescription(String algorithm) {
-    return ml.getAlgoParamDescription(algorithm);
-  }
-}

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/0f5ea4c7/lens-ml-lib/src/main/java/org/apache/lens/server/ml/MLServiceResource.java
----------------------------------------------------------------------
diff --git a/lens-ml-lib/src/main/java/org/apache/lens/server/ml/MLServiceResource.java b/lens-ml-lib/src/main/java/org/apache/lens/server/ml/MLServiceResource.java
deleted file mode 100644
index c0b32d3..0000000
--- a/lens-ml-lib/src/main/java/org/apache/lens/server/ml/MLServiceResource.java
+++ /dev/null
@@ -1,415 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.lens.server.ml;
-
-import static org.apache.commons.lang.StringUtils.isBlank;
-
-import java.util.ArrayList;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-
-import javax.ws.rs.*;
-import javax.ws.rs.core.*;
-
-import org.apache.lens.api.LensException;
-import org.apache.lens.api.LensSessionHandle;
-import org.apache.lens.api.StringList;
-import org.apache.lens.api.ml.ModelMetadata;
-import org.apache.lens.api.ml.TestReport;
-import org.apache.lens.ml.MLModel;
-import org.apache.lens.ml.MLTestReport;
-import org.apache.lens.ml.ModelLoader;
-import org.apache.lens.server.api.LensConfConstants;
-import org.apache.lens.server.api.ServiceProvider;
-import org.apache.lens.server.api.ServiceProviderFactory;
-
-import org.apache.commons.lang.StringUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.hive.conf.HiveConf;
-
-import org.glassfish.jersey.media.multipart.FormDataParam;
-
-/**
- * Machine Learning service.
- */
-@Path("/ml")
-@Produces({MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML})
-public class MLServiceResource {
-
-  /** The Constant LOG. */
-  public static final Log LOG = LogFactory.getLog(MLServiceResource.class);
-
-  /** The ml service. */
-  MLService mlService;
-
-  /** The service provider. */
-  ServiceProvider serviceProvider;
-
-  /** The service provider factory. */
-  ServiceProviderFactory serviceProviderFactory;
-
-  private static final HiveConf HIVE_CONF;
-
-  /**
-   * Message indicating if ML service is up
-   */
-  public static final String ML_UP_MESSAGE = "ML service is up";
-
-  static {
-    HIVE_CONF = new HiveConf();
-    // Add default config so that we know the service provider implementation
-    HIVE_CONF.addResource("lensserver-default.xml");
-    HIVE_CONF.addResource("lens-site.xml");
-  }
-
-  /**
-   * Instantiates a new ML service resource.
-   */
-  public MLServiceResource() {
-    serviceProviderFactory = getServiceProviderFactory(HIVE_CONF);
-  }
-
-  private ServiceProvider getServiceProvider() {
-    if (serviceProvider == null) {
-      serviceProvider = serviceProviderFactory.getServiceProvider();
-    }
-    return serviceProvider;
-  }
-
-  /**
-   * Gets the service provider factory.
-   *
-   * @param conf the conf
-   * @return the service provider factory
-   */
-  private ServiceProviderFactory getServiceProviderFactory(HiveConf conf) {
-    Class<?> spfClass = conf.getClass(LensConfConstants.SERVICE_PROVIDER_FACTORY, ServiceProviderFactory.class);
-    try {
-      return (ServiceProviderFactory) spfClass.newInstance();
-    } catch (InstantiationException e) {
-      throw new RuntimeException(e);
-    } catch (IllegalAccessException e) {
-      throw new RuntimeException(e);
-    }
-  }
-
-  private MLService getMlService() {
-    if (mlService == null) {
-      mlService = (MLService) getServiceProvider().getService(MLService.NAME);
-    }
-    return mlService;
-  }
-
-  /**
-   * Indicates if ML resource is up
-   *
-   * @return
-   */
-  @GET
-  public String mlResourceUp() {
-    return ML_UP_MESSAGE;
-  }
-
-  /**
-   * Get a list of algos available
-   *
-   * @return
-   */
-  @GET
-  @Path("algos")
-  public StringList getAlgoNames() {
-    List<String> algos = getMlService().getAlgorithms();
-    StringList result = new StringList(algos);
-    return result;
-  }
-
-  /**
-   * Gets the human readable param description of an algorithm
-   *
-   * @param algorithm the algorithm
-   * @return the param description
-   */
-  @GET
-  @Path("algos/{algorithm}")
-  public StringList getParamDescription(@PathParam("algorithm") String algorithm) {
-    Map<String, String> paramDesc = getMlService().getAlgoParamDescription(algorithm);
-    if (paramDesc == null) {
-      throw new NotFoundException("Param description not found for " + algorithm);
-    }
-
-    List<String> descriptions = new ArrayList<String>();
-    for (String key : paramDesc.keySet()) {
-      descriptions.add(key + " : " + paramDesc.get(key));
-    }
-    return new StringList(descriptions);
-  }
-
-  /**
-   * Get model ID list for a given algorithm.
-   *
-   * @param algorithm algorithm name
-   * @return the models for algo
-   * @throws LensException the lens exception
-   */
-  @GET
-  @Path("models/{algorithm}")
-  public StringList getModelsForAlgo(@PathParam("algorithm") String algorithm) throws LensException {
-    List<String> models = getMlService().getModels(algorithm);
-    if (models == null || models.isEmpty()) {
-      throw new NotFoundException("No models found for algorithm " + algorithm);
-    }
-    return new StringList(models);
-  }
-
-  /**
-   * Get metadata of the model given algorithm and model ID.
-   *
-   * @param algorithm algorithm name
-   * @param modelID   model ID
-   * @return model metadata
-   * @throws LensException the lens exception
-   */
-  @GET
-  @Path("models/{algorithm}/{modelID}")
-  public ModelMetadata getModelMetadata(@PathParam("algorithm") String algorithm, @PathParam("modelID") String modelID)
-    throws LensException {
-    MLModel model = getMlService().getModel(algorithm, modelID);
-    if (model == null) {
-      throw new NotFoundException("Model not found " + modelID + ", algo=" + algorithm);
-    }
-
-    ModelMetadata meta = new ModelMetadata(model.getId(), model.getTable(), model.getAlgoName(), StringUtils.join(
-      model.getParams(), ' '), model.getCreatedAt().toString(), getMlService().getModelPath(algorithm, modelID),
-      model.getLabelColumn(), StringUtils.join(model.getFeatureColumns(), ","));
-    return meta;
-  }
-
-  /**
-   * Delete a model given model ID and algorithm name.
-   *
-   * @param algorithm the algorithm
-   * @param modelID   the model id
-   * @return confirmation text
-   * @throws LensException the lens exception
-   */
-  @DELETE
-  @Consumes({MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML, MediaType.TEXT_PLAIN})
-  @Path("models/{algorithm}/{modelID}")
-  public String deleteModel(@PathParam("algorithm") String algorithm, @PathParam("modelID") String modelID)
-    throws LensException {
-    getMlService().deleteModel(algorithm, modelID);
-    return "DELETED model=" + modelID + " algorithm=" + algorithm;
-  }
-
-  /**
-   * Train a model given an algorithm name and algorithm parameters
-   * <p>
-   * Following parameters are mandatory and must be passed as part of the form
-   * <p/>
-   * <ol>
-   * <li>table - input Hive table to load training data from</li>
-   * <li>label - name of the labelled column</li>
-   * <li>feature - one entry per feature column. At least one feature column is required</li>
-   * </ol>
-   * <p/>
-   * </p>
-   *
-   * @param algorithm algorithm name
-   * @param form      form data
-   * @return if model is successfully trained, the model ID will be returned
-   * @throws LensException the lens exception
-   */
-  @POST
-  @Consumes(MediaType.APPLICATION_FORM_URLENCODED)
-  @Path("{algorithm}/train")
-  public String train(@PathParam("algorithm") String algorithm, MultivaluedMap<String, String> form)
-    throws LensException {
-
-    // Check if algo is valid
-    if (getMlService().getAlgoForName(algorithm) == null) {
-      throw new NotFoundException("Algo for algo: " + algorithm + " not found");
-    }
-
-    if (isBlank(form.getFirst("table"))) {
-      throw new BadRequestException("table parameter is rquired");
-    }
-
-    String table = form.getFirst("table");
-
-    if (isBlank(form.getFirst("label"))) {
-      throw new BadRequestException("label parameter is required");
-    }
-
-    // Check features
-    List<String> featureNames = form.get("feature");
-    if (featureNames.size() < 1) {
-      throw new BadRequestException("At least one feature is required");
-    }
-
-    List<String> algoArgs = new ArrayList<String>();
-    Set<Map.Entry<String, List<String>>> paramSet = form.entrySet();
-
-    for (Map.Entry<String, List<String>> e : paramSet) {
-      String p = e.getKey();
-      List<String> values = e.getValue();
-      if ("algorithm".equals(p) || "table".equals(p)) {
-        continue;
-      } else if ("feature".equals(p)) {
-        for (String feature : values) {
-          algoArgs.add("feature");
-          algoArgs.add(feature);
-        }
-      } else if ("label".equals(p)) {
-        algoArgs.add("label");
-        algoArgs.add(values.get(0));
-      } else {
-        algoArgs.add(p);
-        algoArgs.add(values.get(0));
-      }
-    }
-    LOG.info("Training table " + table + " with algo " + algorithm + " params=" + algoArgs.toString());
-    String modelId = getMlService().train(table, algorithm, algoArgs.toArray(new String[]{}));
-    LOG.info("Done training " + table + " modelid = " + modelId);
-    return modelId;
-  }
-
-  /**
-   * Clear model cache (for admin use).
-   *
-   * @return OK if the cache was cleared
-   */
-  @DELETE
-  @Path("clearModelCache")
-  @Produces(MediaType.TEXT_PLAIN)
-  public Response clearModelCache() {
-    ModelLoader.clearCache();
-    LOG.info("Cleared model cache");
-    return Response.ok("Cleared cache", MediaType.TEXT_PLAIN_TYPE).build();
-  }
-
-  /**
-   * Run a test on a model for an algorithm.
-   *
-   * @param algorithm algorithm name
-   * @param modelID   model ID
-   * @param table     Hive table to run test on
-   * @param session   Lens session ID. This session ID will be used to run the test query
-   * @return Test report ID
-   * @throws LensException the lens exception
-   */
-  @POST
-  @Path("test/{table}/{algorithm}/{modelID}")
-  @Consumes(MediaType.MULTIPART_FORM_DATA)
-  public String test(@PathParam("algorithm") String algorithm, @PathParam("modelID") String modelID,
-    @PathParam("table") String table, @FormDataParam("sessionid") LensSessionHandle session,
-    @FormDataParam("outputTable") String outputTable) throws LensException {
-    MLTestReport testReport = getMlService().testModel(session, table, algorithm, modelID, outputTable);
-    return testReport.getReportID();
-  }
-
-  /**
-   * Get list of reports for a given algorithm.
-   *
-   * @param algoritm the algoritm
-   * @return the reports for algorithm
-   * @throws LensException the lens exception
-   */
-  @GET
-  @Path("reports/{algorithm}")
-  public StringList getReportsForAlgorithm(@PathParam("algorithm") String algoritm) throws LensException {
-    List<String> reports = getMlService().getTestReports(algoritm);
-    if (reports == null || reports.isEmpty()) {
-      throw new NotFoundException("No test reports found for " + algoritm);
-    }
-    return new StringList(reports);
-  }
-
-  /**
-   * Get a single test report given the algorithm name and report id.
-   *
-   * @param algorithm the algorithm
-   * @param reportID  the report id
-   * @return the test report
-   * @throws LensException the lens exception
-   */
-  @GET
-  @Path("reports/{algorithm}/{reportID}")
-  public TestReport getTestReport(@PathParam("algorithm") String algorithm, @PathParam("reportID") String reportID)
-    throws LensException {
-    MLTestReport report = getMlService().getTestReport(algorithm, reportID);
-
-    if (report == null) {
-      throw new NotFoundException("Test report: " + reportID + " not found for algorithm " + algorithm);
-    }
-
-    TestReport result = new TestReport(report.getTestTable(), report.getOutputTable(), report.getOutputColumn(),
-      report.getLabelColumn(), StringUtils.join(report.getFeatureColumns(), ","), report.getAlgorithm(),
-      report.getModelID(), report.getReportID(), report.getLensQueryID());
-    return result;
-  }
-
-  /**
-   * DELETE a report given the algorithm name and report ID.
-   *
-   * @param algorithm the algorithm
-   * @param reportID  the report id
-   * @return the string
-   * @throws LensException the lens exception
-   */
-  @DELETE
-  @Path("reports/{algorithm}/{reportID}")
-  @Consumes({MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML, MediaType.TEXT_PLAIN})
-  public String deleteTestReport(@PathParam("algorithm") String algorithm, @PathParam("reportID") String reportID)
-    throws LensException {
-    getMlService().deleteTestReport(algorithm, reportID);
-    return "DELETED report=" + reportID + " algorithm=" + algorithm;
-  }
-
-  /**
-   * Predict.
-   *
-   * @param algorithm the algorithm
-   * @param modelID   the model id
-   * @param uriInfo   the uri info
-   * @return the string
-   * @throws LensException the lens exception
-   */
-  @GET
-  @Path("/predict/{algorithm}/{modelID}")
-  @Produces({MediaType.APPLICATION_ATOM_XML, MediaType.APPLICATION_JSON})
-  public String predict(@PathParam("algorithm") String algorithm, @PathParam("modelID") String modelID,
-    @Context UriInfo uriInfo) throws LensException {
-    // Load the model instance
-    MLModel<?> model = getMlService().getModel(algorithm, modelID);
-
-    // Get input feature names
-    MultivaluedMap<String, String> params = uriInfo.getQueryParameters();
-    String[] features = new String[model.getFeatureColumns().size()];
-    // Assuming that feature name parameters are same
-    int i = 0;
-    for (String feature : model.getFeatureColumns()) {
-      features[i++] = params.getFirst(feature);
-    }
-
-    // TODO needs a 'prediction formatter'
-    return getMlService().predict(algorithm, modelID, features).toString();
-  }
-}

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/0f5ea4c7/lens-ml-lib/src/test/java/org/apache/lens/ml/ExampleUtils.java
----------------------------------------------------------------------
diff --git a/lens-ml-lib/src/test/java/org/apache/lens/ml/ExampleUtils.java b/lens-ml-lib/src/test/java/org/apache/lens/ml/ExampleUtils.java
new file mode 100644
index 0000000..9fe1ea0
--- /dev/null
+++ b/lens-ml-lib/src/test/java/org/apache/lens/ml/ExampleUtils.java
@@ -0,0 +1,101 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.lens.ml;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.metastore.TableType;
+import org.apache.hadoop.hive.metastore.api.FieldSchema;
+import org.apache.hadoop.hive.ql.metadata.Hive;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.metadata.Table;
+import org.apache.hadoop.hive.ql.plan.AddPartitionDesc;
+import org.apache.hadoop.hive.serde.serdeConstants;
+import org.apache.hadoop.mapred.TextInputFormat;
+
+/**
+ * The Class ExampleUtils.
+ */
+public final class ExampleUtils {
+  private ExampleUtils() {
+  }
+
+  private static final Log LOG = LogFactory.getLog(ExampleUtils.class);
+
+  /**
+   * Creates the example table.
+   *
+   * @param conf           the conf
+   * @param database       the database
+   * @param tableName      the table name
+   * @param sampleDataFile the sample data file
+   * @param labelColumn    the label column
+   * @param features       the features
+   * @throws HiveException the hive exception
+   */
+  public static void createTable(HiveConf conf, String database, String tableName, String sampleDataFile,
+    String labelColumn, Map<String, String> tableParams, String... features) throws HiveException {
+
+    Path dataFilePath = new Path(sampleDataFile);
+    Path partDir = dataFilePath.getParent();
+
+    // Create table
+    List<FieldSchema> columns = new ArrayList<FieldSchema>();
+
+    // Label is optional. Not used for unsupervised models.
+    // If present, label will be the first column, followed by features
+    if (labelColumn != null) {
+      columns.add(new FieldSchema(labelColumn, "double", "Labelled Column"));
+    }
+
+    for (String feature : features) {
+      columns.add(new FieldSchema(feature, "double", "Feature " + feature));
+    }
+
+    Table tbl = Hive.get(conf).newTable(database + "." + tableName);
+    tbl.setTableType(TableType.MANAGED_TABLE);
+    tbl.getTTable().getSd().setCols(columns);
+    tbl.getTTable().getParameters().putAll(tableParams);
+    tbl.setInputFormatClass(TextInputFormat.class);
+    tbl.setSerdeParam(serdeConstants.LINE_DELIM, "\n");
+    tbl.setSerdeParam(serdeConstants.FIELD_DELIM, " ");
+
+    List<FieldSchema> partCols = new ArrayList<FieldSchema>(1);
+    partCols.add(new FieldSchema("dummy_partition_col", "string", ""));
+    tbl.setPartCols(partCols);
+
+    Hive.get(conf).createTable(tbl, false);
+    LOG.info("Created table " + tableName);
+
+    // Add partition for the data file
+    AddPartitionDesc partitionDesc = new AddPartitionDesc(database, tableName, false);
+    Map<String, String> partSpec = new HashMap<String, String>();
+    partSpec.put("dummy_partition_col", "dummy_val");
+    partitionDesc.addPartition(partSpec, partDir.toUri().toString());
+    Hive.get(conf).createPartitions(partitionDesc);
+    LOG.info(tableName + ": Added partition " + partDir.toUri().toString());
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/0f5ea4c7/lens-ml-lib/src/test/java/org/apache/lens/ml/TestMLResource.java
----------------------------------------------------------------------
diff --git a/lens-ml-lib/src/test/java/org/apache/lens/ml/TestMLResource.java b/lens-ml-lib/src/test/java/org/apache/lens/ml/TestMLResource.java
index f712481..8b7e3f3 100644
--- a/lens-ml-lib/src/test/java/org/apache/lens/ml/TestMLResource.java
+++ b/lens-ml-lib/src/test/java/org/apache/lens/ml/TestMLResource.java
@@ -33,15 +33,16 @@ import javax.ws.rs.core.UriBuilder;
 import org.apache.lens.client.LensClient;
 import org.apache.lens.client.LensClientConfig;
 import org.apache.lens.client.LensMLClient;
-import org.apache.lens.ml.spark.algos.DecisionTreeAlgo;
-import org.apache.lens.ml.spark.algos.LogisticRegressionAlgo;
-import org.apache.lens.ml.spark.algos.NaiveBayesAlgo;
-import org.apache.lens.ml.spark.algos.SVMAlgo;
-import org.apache.lens.ml.task.MLTask;
+import org.apache.lens.ml.algo.spark.dt.DecisionTreeAlgo;
+import org.apache.lens.ml.algo.spark.lr.LogisticRegressionAlgo;
+import org.apache.lens.ml.algo.spark.nb.NaiveBayesAlgo;
+import org.apache.lens.ml.algo.spark.svm.SVMAlgo;
+import org.apache.lens.ml.impl.MLTask;
+import org.apache.lens.ml.impl.MLUtils;
+import org.apache.lens.ml.server.MLApp;
+import org.apache.lens.ml.server.MLServiceResource;
 import org.apache.lens.server.LensJerseyTest;
 import org.apache.lens.server.api.LensConfConstants;
-import org.apache.lens.server.ml.MLApp;
-import org.apache.lens.server.ml.MLServiceResource;
 import org.apache.lens.server.query.QueryServiceResource;
 import org.apache.lens.server.session.SessionResource;
 

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/0f5ea4c7/lens-ml-lib/src/test/java/org/apache/lens/ml/TestMLRunner.java
----------------------------------------------------------------------
diff --git a/lens-ml-lib/src/test/java/org/apache/lens/ml/TestMLRunner.java b/lens-ml-lib/src/test/java/org/apache/lens/ml/TestMLRunner.java
index d7f2f8f..655b55e 100644
--- a/lens-ml-lib/src/test/java/org/apache/lens/ml/TestMLRunner.java
+++ b/lens-ml-lib/src/test/java/org/apache/lens/ml/TestMLRunner.java
@@ -26,23 +26,24 @@ import javax.ws.rs.core.UriBuilder;
 import org.apache.lens.client.LensClient;
 import org.apache.lens.client.LensClientConfig;
 import org.apache.lens.client.LensMLClient;
-import org.apache.lens.ml.task.MLTask;
+import org.apache.lens.ml.impl.MLRunner;
+import org.apache.lens.ml.impl.MLTask;
+import org.apache.lens.ml.server.MLApp;
 import org.apache.lens.server.LensJerseyTest;
 import org.apache.lens.server.api.LensConfConstants;
 import org.apache.lens.server.metastore.MetastoreResource;
-import org.apache.lens.server.ml.MLApp;
 import org.apache.lens.server.query.QueryServiceResource;
 import org.apache.lens.server.session.SessionResource;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.api.Database;
 import org.apache.hadoop.hive.ql.metadata.Hive;
 
 import org.glassfish.jersey.client.ClientConfig;
 import org.glassfish.jersey.media.multipart.MultiPartFeature;
+
 import org.testng.Assert;
 import org.testng.annotations.AfterTest;
 import org.testng.annotations.BeforeTest;

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/0f5ea4c7/lens-ml-lib/src/test/resources/lens-site.xml
----------------------------------------------------------------------
diff --git a/lens-ml-lib/src/test/resources/lens-site.xml b/lens-ml-lib/src/test/resources/lens-site.xml
index 9ce4703..2e1ddab 100644
--- a/lens-ml-lib/src/test/resources/lens-site.xml
+++ b/lens-ml-lib/src/test/resources/lens-site.xml
@@ -103,7 +103,7 @@
 
   <property>
     <name>lens.server.ml.ws.resource.impl</name>
-    <value>org.apache.lens.server.ml.MLServiceResource</value>
+    <value>org.apache.lens.ml.server.MLServiceResource</value>
     <description>Implementation class for ML Service Resource</description>
   </property>
 
@@ -138,13 +138,13 @@
 
   <property>
     <name>lens.server.ml.service.impl</name>
-    <value>org.apache.lens.server.ml.MLServiceImpl</value>
+    <value>org.apache.lens.ml.server.MLServiceImpl</value>
     <description>Implementation class for ML service</description>
   </property>
 
   <property>
     <name>lens.ml.drivers</name>
-    <value>org.apache.lens.ml.spark.SparkMLDriver</value>
+    <value>org.apache.lens.ml.algo.spark.SparkMLDriver</value>
   </property>
 
   <property>

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/0f5ea4c7/tools/conf-pseudo-distr/server/lens-site.xml
----------------------------------------------------------------------
diff --git a/tools/conf-pseudo-distr/server/lens-site.xml b/tools/conf-pseudo-distr/server/lens-site.xml
index f11c6d6..ce3e753 100644
--- a/tools/conf-pseudo-distr/server/lens-site.xml
+++ b/tools/conf-pseudo-distr/server/lens-site.xml
@@ -59,19 +59,19 @@
 
 <property>
   <name>lens.server.ml.ws.resource.impl</name>
-  <value>org.apache.lens.server.ml.MLServiceResource</value>
+  <value>org.apache.lens.ml.server.MLServiceResource</value>
   <description>Implementation class for ML Service Resource</description>
 </property>
 
 <property>
   <name>lens.server.ml.service.impl</name>
-  <value>org.apache.lens.server.ml.MLServiceImpl</value>
+  <value>org.apache.lens.ml.server.MLServiceImpl</value>
   <description>Implementation class for ML service</description>
 </property>
 
 <property>
   <name>lens.ml.drivers</name>
-  <value>org.apache.lens.ml.spark.SparkMLDriver</value>
+  <value>org.apache.lens.ml.algo.spark.SparkMLDriver</value>
 </property>
 
 <property>


[08/50] [abbrv] incubator-lens git commit: LENS-402: In Query End Notification mail, Send status message along with of errorMessage (Amareshwari Sriramadasu via prongs)

Posted by am...@apache.org.
LENS-402: In Query End Notification mail, Send status message along with of errorMessage (Amareshwari Sriramadasu via prongs)


Project: http://git-wip-us.apache.org/repos/asf/incubator-lens/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-lens/commit/36764bb7
Tree: http://git-wip-us.apache.org/repos/asf/incubator-lens/tree/36764bb7
Diff: http://git-wip-us.apache.org/repos/asf/incubator-lens/diff/36764bb7

Branch: refs/heads/current-release-line
Commit: 36764bb7414f395da0b725c46ab11e954e3dbab0
Parents: 7f6634a
Author: Amareshwari Sriramadasu <am...@apache.org>
Authored: Thu Mar 26 19:11:15 2015 +0530
Committer: Rajat Khandelwal <pr...@apache.org>
Committed: Thu Mar 26 19:11:15 2015 +0530

----------------------------------------------------------------------
 .../org/apache/lens/driver/hive/HiveDriver.java |   3 +-
 .../org/apache/lens/driver/jdbc/JDBCDriver.java |   3 +-
 .../lens/server/query/QueryEndNotifier.java     |  10 +-
 .../lens/server/query/ResultFormatter.java      |   2 +-
 .../server/query/TestQueryEndEmailNotifier.java | 293 +++++++++++++++++++
 .../lens/server/query/TestQueryService.java     |  63 ----
 6 files changed, 307 insertions(+), 67 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/36764bb7/lens-driver-hive/src/main/java/org/apache/lens/driver/hive/HiveDriver.java
----------------------------------------------------------------------
diff --git a/lens-driver-hive/src/main/java/org/apache/lens/driver/hive/HiveDriver.java b/lens-driver-hive/src/main/java/org/apache/lens/driver/hive/HiveDriver.java
index 9e3c723..1dc87b6 100644
--- a/lens-driver-hive/src/main/java/org/apache/lens/driver/hive/HiveDriver.java
+++ b/lens-driver-hive/src/main/java/org/apache/lens/driver/hive/HiveDriver.java
@@ -544,7 +544,8 @@ public class HiveDriver implements LensDriver {
         break;
       case ERROR:
         context.getDriverStatus().setState(DriverQueryState.FAILED);
-        context.getDriverStatus().setStatusMessage(
+        context.getDriverStatus().setStatusMessage("Query execution failed!");
+        context.getDriverStatus().setErrorMessage(
           "Query failed with errorCode:" + opStatus.getOperationException().getErrorCode() + " with errorMessage: "
             + opStatus.getOperationException().getMessage());
         break;

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/36764bb7/lens-driver-jdbc/src/main/java/org/apache/lens/driver/jdbc/JDBCDriver.java
----------------------------------------------------------------------
diff --git a/lens-driver-jdbc/src/main/java/org/apache/lens/driver/jdbc/JDBCDriver.java b/lens-driver-jdbc/src/main/java/org/apache/lens/driver/jdbc/JDBCDriver.java
index 35a9067..121b56b 100644
--- a/lens-driver-jdbc/src/main/java/org/apache/lens/driver/jdbc/JDBCDriver.java
+++ b/lens-driver-jdbc/src/main/java/org/apache/lens/driver/jdbc/JDBCDriver.java
@@ -922,7 +922,8 @@ public class JDBCDriver implements LensDriver {
         context.getDriverStatus().setStatusMessage(context.getQueryHandle() + " cancelled");
       } else if (ctx.getQueryResult() != null && ctx.getQueryResult().error != null) {
         context.getDriverStatus().setState(DriverQueryState.FAILED);
-        context.getDriverStatus().setStatusMessage(ctx.getQueryResult().error.getMessage());
+        context.getDriverStatus().setStatusMessage("Query execution failed!");
+        context.getDriverStatus().setErrorMessage(ctx.getQueryResult().error.getMessage());
       } else {
         context.getDriverStatus().setState(DriverQueryState.SUCCESSFUL);
         context.getDriverStatus().setStatusMessage(context.getQueryHandle() + " successful");

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/36764bb7/lens-server/src/main/java/org/apache/lens/server/query/QueryEndNotifier.java
----------------------------------------------------------------------
diff --git a/lens-server/src/main/java/org/apache/lens/server/query/QueryEndNotifier.java b/lens-server/src/main/java/org/apache/lens/server/query/QueryEndNotifier.java
index 70f9dba..1c6f186 100644
--- a/lens-server/src/main/java/org/apache/lens/server/query/QueryEndNotifier.java
+++ b/lens-server/src/main/java/org/apache/lens/server/query/QueryEndNotifier.java
@@ -38,6 +38,7 @@ import org.apache.lens.server.api.metrics.MetricsService;
 import org.apache.lens.server.api.query.QueryContext;
 import org.apache.lens.server.api.query.QueryEnded;
 
+import org.apache.commons.lang3.StringUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hive.conf.HiveConf;
@@ -153,15 +154,22 @@ public class QueryEndNotifier extends AsyncEventListener<QueryEnded> {
       msgBuilder.append("/httpresultset");
       break;
     case FAILED:
-      msgBuilder.append(queryContext.getStatus().getErrorMessage());
+      msgBuilder.append(queryContext.getStatus().getStatusMessage());
+      if (!StringUtils.isBlank(queryContext.getStatus().getErrorMessage())) {
+        msgBuilder.append("\n Reason:\n");
+        msgBuilder.append(queryContext.getStatus().getErrorMessage());
+      }
       break;
     case CANCELED:
+      msgBuilder.append(queryContext.getStatus().getStatusMessage());
+      break;
     case CLOSED:
     default:
       break;
     }
     return msgBuilder.toString();
   }
+
   @Data
   public static class Email {
     private final String from;

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/36764bb7/lens-server/src/main/java/org/apache/lens/server/query/ResultFormatter.java
----------------------------------------------------------------------
diff --git a/lens-server/src/main/java/org/apache/lens/server/query/ResultFormatter.java b/lens-server/src/main/java/org/apache/lens/server/query/ResultFormatter.java
index 0b215d8..b40f949 100644
--- a/lens-server/src/main/java/org/apache/lens/server/query/ResultFormatter.java
+++ b/lens-server/src/main/java/org/apache/lens/server/query/ResultFormatter.java
@@ -134,7 +134,7 @@ public class ResultFormatter extends AsyncEventListener<QueryExecuted> {
       metricsService.incrCounter(ResultFormatter.class, "formatting-errors");
       LOG.warn("Exception while formatting result for " + queryHandle, e);
       try {
-        queryService.setFailedStatus(ctx, "Result formatting failed!", e.getLocalizedMessage());
+        queryService.setFailedStatus(ctx, "Result formatting failed!", e.getMessage());
       } catch (LensException e1) {
         LOG.error("Exception while setting failure for " + queryHandle, e1);
       }

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/36764bb7/lens-server/src/test/java/org/apache/lens/server/query/TestQueryEndEmailNotifier.java
----------------------------------------------------------------------
diff --git a/lens-server/src/test/java/org/apache/lens/server/query/TestQueryEndEmailNotifier.java b/lens-server/src/test/java/org/apache/lens/server/query/TestQueryEndEmailNotifier.java
new file mode 100644
index 0000000..5726602
--- /dev/null
+++ b/lens-server/src/test/java/org/apache/lens/server/query/TestQueryEndEmailNotifier.java
@@ -0,0 +1,293 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.lens.server.query;
+
+import java.util.*;
+
+import javax.ws.rs.client.Entity;
+import javax.ws.rs.client.WebTarget;
+import javax.ws.rs.core.Application;
+import javax.ws.rs.core.MediaType;
+
+import org.apache.lens.api.LensConf;
+import org.apache.lens.api.LensSessionHandle;
+import org.apache.lens.api.query.*;
+import org.apache.lens.api.query.QueryStatus.Status;
+import org.apache.lens.server.LensJerseyTest;
+import org.apache.lens.server.LensServices;
+import org.apache.lens.server.LensTestUtil;
+import org.apache.lens.server.api.LensConfConstants;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hive.conf.HiveConf;
+
+import org.glassfish.jersey.client.ClientConfig;
+import org.glassfish.jersey.media.multipart.FormDataBodyPart;
+import org.glassfish.jersey.media.multipart.FormDataContentDisposition;
+import org.glassfish.jersey.media.multipart.FormDataMultiPart;
+import org.glassfish.jersey.media.multipart.MultiPartFeature;
+import org.subethamail.wiser.Wiser;
+import org.subethamail.wiser.WiserMessage;
+import org.testng.Assert;
+import org.testng.annotations.AfterTest;
+import org.testng.annotations.BeforeTest;
+import org.testng.annotations.Test;
+
+/**
+ * The query completion email notifier
+ */
+@Test(groups = "unit-test")
+public class TestQueryEndEmailNotifier extends LensJerseyTest {
+
+  /** The Constant LOG. */
+  public static final Log LOG = LogFactory.getLog(TestQueryEndEmailNotifier.class);
+
+  /** The query service. */
+  QueryExecutionServiceImpl queryService;
+
+  /** The lens session id. */
+  LensSessionHandle lensSessionId;
+
+  /** The wiser. */
+  private Wiser wiser;
+
+  /*
+   * (non-Javadoc)
+   *
+   * @see org.glassfish.jersey.test.JerseyTest#setUp()
+   */
+  @BeforeTest
+  public void setUp() throws Exception {
+    super.setUp();
+    wiser = new Wiser();
+    wiser.setHostname("localhost");
+    wiser.setPort(25000);
+    queryService = (QueryExecutionServiceImpl) LensServices.get().getService("query");
+    Map<String, String> sessionconf = new HashMap<String, String>();
+    sessionconf.put("test.session.key", "svalue");
+    sessionconf.put(LensConfConstants.QUERY_MAIL_NOTIFY, "true");
+    sessionconf.put(LensConfConstants.QUERY_RESULT_EMAIL_CC, "foo1@localhost,foo2@localhost,foo3@localhost");
+    lensSessionId = queryService.openSession("foo@localhost", "bar", sessionconf); // @localhost should be removed
+    // automatically
+    createTable(TEST_TABLE);
+    loadData(TEST_TABLE, TEST_DATA_FILE);
+  }
+
+  /*
+   * (non-Javadoc)
+   *
+   * @see org.glassfish.jersey.test.JerseyTest#tearDown()
+   */
+  @AfterTest
+  public void tearDown() throws Exception {
+    dropTable(TEST_TABLE);
+    queryService.closeSession(lensSessionId);
+    super.tearDown();
+  }
+
+  /*
+   * (non-Javadoc)
+   *
+   * @see org.glassfish.jersey.test.JerseyTest#configure()
+   */
+  @Override
+  protected Application configure() {
+    return new QueryApp();
+  }
+
+  /*
+   * (non-Javadoc)
+   *
+   * @see org.glassfish.jersey.test.JerseyTest#configureClient(org.glassfish.jersey.client.ClientConfig)
+   */
+  @Override
+  protected void configureClient(ClientConfig config) {
+    config.register(MultiPartFeature.class);
+  }
+
+  /** The test table. */
+  public static final String TEST_TABLE = "EMAIL_NOTIFIER_TEST_TABLE";
+
+  /** The Constant TEST_DATA_FILE. */
+  public static final String TEST_DATA_FILE = "./testdata/testdata2.data";
+
+  /**
+   * Creates the table.
+   *
+   * @param tblName the tbl name
+   * @throws InterruptedException the interrupted exception
+   */
+  private void createTable(String tblName) throws InterruptedException {
+    LensTestUtil.createTable(tblName, target(), lensSessionId);
+  }
+
+  /**
+   * Load data.
+   *
+   * @param tblName      the tbl name
+   * @param testDataFile the test data file
+   * @throws InterruptedException the interrupted exception
+   */
+  private void loadData(String tblName, final String testDataFile) throws InterruptedException {
+    LensTestUtil.loadData(tblName, testDataFile, target(), lensSessionId);
+  }
+
+  /**
+   * Drop table.
+   *
+   * @param tblName the tbl name
+   * @throws InterruptedException the interrupted exception
+   */
+  private void dropTable(String tblName) throws InterruptedException {
+    LensTestUtil.dropTable(tblName, target(), lensSessionId);
+  }
+
+  private QueryHandle launchAndWaitForQuery(LensConf conf, String query, Status expectedStatus)
+    throws InterruptedException {
+    final WebTarget target = target().path("queryapi/queries");
+    final FormDataMultiPart mp = new FormDataMultiPart();
+    mp.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("sessionid").build(), lensSessionId,
+      MediaType.APPLICATION_XML_TYPE));
+    mp.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("query").build(), query));
+    mp.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("operation").build(), "execute"));
+    mp.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("conf").fileName("conf").build(), conf,
+      MediaType.APPLICATION_XML_TYPE));
+    final QueryHandle handle = target.request().post(Entity.entity(mp, MediaType.MULTIPART_FORM_DATA_TYPE),
+      QueryHandle.class);
+
+    Assert.assertNotNull(handle);
+    LensQuery ctx = target.path(handle.toString()).queryParam("sessionid", lensSessionId).request()
+      .get(LensQuery.class);
+    QueryStatus stat = ctx.getStatus();
+    while (!stat.isFinished()) {
+      ctx = target.path(handle.toString()).queryParam("sessionid", lensSessionId).request().get(LensQuery.class);
+      stat = ctx.getStatus();
+      Thread.sleep(1000);
+    }
+    Assert.assertEquals(ctx.getStatus().getStatus(), expectedStatus);
+    return handle;
+  }
+  /**
+   * Test launch fail.
+   *
+   * @throws InterruptedException the interrupted exception
+   */
+  @Test
+  public void testEmailNotification() throws InterruptedException {
+    wiser.start();
+    LensConf conf = new LensConf();
+    // launch failure
+    QueryHandle handle = launchAndWaitForQuery(conf, "select ID from non_exist_table", QueryStatus.Status.FAILED);
+    List<WiserMessage> messages = new ArrayList<WiserMessage>();
+    for (int i = 0; i < 20; i++) {
+      messages = wiser.getMessages();
+      if (messages.size() > 0) {
+        break;
+      }
+      Thread.sleep(10000);
+    }
+
+    Assert.assertEquals(messages.size(), 4);
+    Assert.assertTrue(messages.get(0).toString().contains(handle.toString()));
+    Assert.assertEquals(messages.size(), 4);
+    Assert.assertTrue(messages.get(0).toString().contains(handle.toString()));
+    Assert.assertTrue(messages.get(0).toString().contains("Launching query failed"));
+    Assert.assertTrue(messages.get(0).toString().contains("Reason"));
+
+    // rewriter failure
+    handle = launchAndWaitForQuery(conf, "cube select ID from nonexist", QueryStatus.Status.FAILED);
+    messages = new ArrayList<WiserMessage>();
+    for (int i = 0; i < 20; i++) {
+      messages = wiser.getMessages();
+      if (messages.size() > 4) {
+        break;
+      }
+      Thread.sleep(10000);
+    }
+
+    Assert.assertEquals(messages.size(), 8);
+    Assert.assertTrue(messages.get(4).toString().contains(handle.toString()));
+    Assert.assertTrue(messages.get(4).toString().contains("Launching query failed"));
+    Assert.assertTrue(messages.get(4).toString().contains("Reason"));
+
+    // formatting failure
+    conf = new LensConf();
+    conf.addProperty(LensConfConstants.QUERY_PERSISTENT_RESULT_SET, "true");
+    conf.addProperty(LensConfConstants.QUERY_PERSISTENT_RESULT_INDRIVER, "false");
+    conf.addProperty(LensConfConstants.QUERY_OUTPUT_SERDE, "NonexistentSerde.class");
+    handle = launchAndWaitForQuery(conf, "select ID, IDSTR from " + TEST_TABLE,
+      QueryStatus.Status.FAILED);
+    messages = new ArrayList<WiserMessage>();
+    for (int i = 0; i < 20; i++) {
+      messages = wiser.getMessages();
+      if (messages.size() > 8) {
+        break;
+      }
+      Thread.sleep(10000);
+    }
+
+    Assert.assertEquals(messages.size(), 12);
+    Assert.assertTrue(messages.get(8).toString().contains(handle.toString()));
+    Assert.assertTrue(messages.get(8).toString().contains("Result formatting failed!"));
+    Assert.assertTrue(messages.get(8).toString().contains("Reason"));
+
+    // execution failure
+    conf = new LensConf();
+    conf.addProperty(LensConfConstants.QUERY_PERSISTENT_RESULT_INDRIVER, "true");
+    conf.addProperty(HiveConf.ConfVars.COMPRESSRESULT.name(), "true");
+    conf.addProperty("mapred.compress.map.output", "true");
+    conf.addProperty("mapred.map.output.compression.codec", "nonexisting");
+    handle = launchAndWaitForQuery(conf, "select count(ID) from " + TEST_TABLE, QueryStatus.Status.FAILED);
+    messages = new ArrayList<WiserMessage>();
+    for (int i = 0; i < 20; i++) {
+      messages = wiser.getMessages();
+      if (messages.size() > 12) {
+        break;
+      }
+      Thread.sleep(10000);
+    }
+
+    Assert.assertEquals(messages.size(), 16);
+    Assert.assertTrue(messages.get(12).toString().contains(handle.toString()));
+    Assert.assertTrue(messages.get(12).toString().contains("Query execution failed!"));
+    Assert.assertTrue(messages.get(12).toString().contains("Reason"));
+
+    // successful query
+    conf = new LensConf();
+    conf.addProperty(LensConfConstants.QUERY_PERSISTENT_RESULT_SET, "true");
+    conf.addProperty(LensConfConstants.QUERY_PERSISTENT_RESULT_INDRIVER, "true");
+    handle = launchAndWaitForQuery(conf, "select ID, IDSTR from " + TEST_TABLE, QueryStatus.Status.SUCCESSFUL);
+    messages = new ArrayList<WiserMessage>();
+    for (int i = 0; i < 20; i++) {
+      messages = wiser.getMessages();
+      if (messages.size() > 16) {
+        break;
+      }
+      Thread.sleep(10000);
+    }
+
+    Assert.assertEquals(messages.size(), 20);
+    Assert.assertTrue(messages.get(16).toString().contains(handle.toString()));
+    Assert.assertTrue(messages.get(16).toString().contains("Query  SUCCESSFUL"));
+    Assert.assertTrue(messages.get(16).toString().contains("Result available at"));
+
+    wiser.stop();
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/36764bb7/lens-server/src/test/java/org/apache/lens/server/query/TestQueryService.java
----------------------------------------------------------------------
diff --git a/lens-server/src/test/java/org/apache/lens/server/query/TestQueryService.java b/lens-server/src/test/java/org/apache/lens/server/query/TestQueryService.java
index 14a4eb2..e3e3d4b 100644
--- a/lens-server/src/test/java/org/apache/lens/server/query/TestQueryService.java
+++ b/lens-server/src/test/java/org/apache/lens/server/query/TestQueryService.java
@@ -66,8 +66,6 @@ import org.glassfish.jersey.media.multipart.FormDataBodyPart;
 import org.glassfish.jersey.media.multipart.FormDataContentDisposition;
 import org.glassfish.jersey.media.multipart.FormDataMultiPart;
 import org.glassfish.jersey.media.multipart.MultiPartFeature;
-import org.subethamail.wiser.Wiser;
-import org.subethamail.wiser.WiserMessage;
 import org.testng.Assert;
 import org.testng.annotations.AfterTest;
 import org.testng.annotations.BeforeTest;
@@ -93,9 +91,6 @@ public class TestQueryService extends LensJerseyTest {
   /** The lens session id. */
   LensSessionHandle lensSessionId;
 
-  /** The wiser. */
-  private Wiser wiser;
-
   /*
    * (non-Javadoc)
    *
@@ -104,9 +99,6 @@ public class TestQueryService extends LensJerseyTest {
   @BeforeTest
   public void setUp() throws Exception {
     super.setUp();
-    wiser = new Wiser();
-    wiser.setHostname("localhost");
-    wiser.setPort(25000);
     queryService = (QueryExecutionServiceImpl) LensServices.get().getService("query");
     metricsSvc = (MetricsService) LensServices.get().getService(MetricsService.NAME);
     Map<String, String> sessionconf = new HashMap<String, String>();
@@ -779,61 +771,6 @@ public class TestQueryService extends LensJerseyTest {
   }
 
   /**
-   * Test notification.
-   *
-   * @throws IOException          Signals that an I/O exception has occurred.
-   * @throws InterruptedException the interrupted exception
-   */
-  @Test
-  public void testNotification() throws IOException, InterruptedException {
-    wiser.start();
-    final WebTarget target = target().path("queryapi/queries");
-    final FormDataMultiPart mp2 = new FormDataMultiPart();
-    LensConf conf = new LensConf();
-    conf.addProperty(LensConfConstants.QUERY_PERSISTENT_RESULT_INDRIVER, "false");
-    conf.addProperty(LensConfConstants.QUERY_MAIL_NOTIFY, "true");
-    conf.addProperty(LensConfConstants.QUERY_RESULT_EMAIL_CC, "foo1@localhost,foo2@localhost,foo3@localhost");
-    mp2.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("sessionid").build(), lensSessionId,
-      MediaType.APPLICATION_XML_TYPE));
-    mp2.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("query").build(), "select ID, IDSTR from "
-      + TEST_TABLE));
-    mp2.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("operation").build(), "execute"));
-    mp2.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("conf").fileName("conf").build(), conf,
-      MediaType.APPLICATION_XML_TYPE));
-    final QueryHandle handle = target.request().post(Entity.entity(mp2, MediaType.MULTIPART_FORM_DATA_TYPE),
-      QueryHandle.class);
-
-    Assert.assertNotNull(handle);
-
-    LensQuery ctx = target.path(handle.toString()).queryParam("sessionid", lensSessionId).request()
-      .get(LensQuery.class);
-    Assert.assertTrue(ctx.getStatus().getStatus().equals(Status.QUEUED)
-      || ctx.getStatus().getStatus().equals(Status.LAUNCHED) || ctx.getStatus().getStatus().equals(Status.RUNNING)
-      || ctx.getStatus().getStatus().equals(Status.SUCCESSFUL));
-
-    // wait till the query finishes
-    QueryStatus stat = ctx.getStatus();
-    while (!stat.isFinished()) {
-      ctx = target.path(handle.toString()).queryParam("sessionid", lensSessionId).request().get(LensQuery.class);
-      stat = ctx.getStatus();
-      Thread.sleep(1000);
-    }
-    Assert.assertEquals(ctx.getStatus().getStatus(), QueryStatus.Status.SUCCESSFUL);
-    List<WiserMessage> messages = new ArrayList<WiserMessage>();
-    for (int i = 0; i < 20; i++) {
-      messages = wiser.getMessages();
-      if (messages.size() > 0) {
-        break;
-      }
-      Thread.sleep(10000);
-    }
-
-    Assert.assertEquals(messages.size(), 4);
-    Assert.assertTrue(messages.get(0).toString().contains(handle.toString()));
-    wiser.stop();
-  }
-
-  /**
    * Validate persisted result.
    *
    * @param handle        the handle


[18/50] [abbrv] incubator-lens git commit: LENS-469 : Remove locking on HiveDriver.updateStatus (Jaideep Dhok via amareshwari)

Posted by am...@apache.org.
LENS-469 : Remove locking on HiveDriver.updateStatus (Jaideep Dhok via amareshwari)


Project: http://git-wip-us.apache.org/repos/asf/incubator-lens/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-lens/commit/72691f12
Tree: http://git-wip-us.apache.org/repos/asf/incubator-lens/tree/72691f12
Diff: http://git-wip-us.apache.org/repos/asf/incubator-lens/diff/72691f12

Branch: refs/heads/current-release-line
Commit: 72691f12f7b153be209dea09b02bbab34b1d41a4
Parents: e3e45ae
Author: Jaideep Dhok <jd...@apache.org>
Authored: Tue Mar 31 10:10:53 2015 +0530
Committer: Amareshwari Sriramadasu <am...@apache.org>
Committed: Tue Mar 31 10:10:53 2015 +0530

----------------------------------------------------------------------
 .../org/apache/lens/driver/hive/HiveDriver.java | 92 ++++++++------------
 1 file changed, 38 insertions(+), 54 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/72691f12/lens-driver-hive/src/main/java/org/apache/lens/driver/hive/HiveDriver.java
----------------------------------------------------------------------
diff --git a/lens-driver-hive/src/main/java/org/apache/lens/driver/hive/HiveDriver.java b/lens-driver-hive/src/main/java/org/apache/lens/driver/hive/HiveDriver.java
index 11ab47a..3edce4d 100644
--- a/lens-driver-hive/src/main/java/org/apache/lens/driver/hive/HiveDriver.java
+++ b/lens-driver-hive/src/main/java/org/apache/lens/driver/hive/HiveDriver.java
@@ -26,6 +26,7 @@ import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
+import java.util.concurrent.ConcurrentHashMap;
 import java.util.concurrent.DelayQueue;
 import java.util.concurrent.Delayed;
 import java.util.concurrent.TimeUnit;
@@ -104,18 +105,15 @@ public class HiveDriver implements LensDriver {
   private HiveConf hiveConf;
 
   /** The hive handles. */
-  private Map<QueryHandle, OperationHandle> hiveHandles = new HashMap<QueryHandle, OperationHandle>();
+  private Map<QueryHandle, OperationHandle> hiveHandles = new ConcurrentHashMap<QueryHandle, OperationHandle>();
 
   /** The session lock. */
   private final Lock sessionLock;
 
-  /** The connection lock. */
-  private final Lock connectionLock;
-
   // connections need to be separate for each user and each thread
   /** The thread connections. */
-  private final Map<String, Map<Long, ExpirableConnection>> threadConnections =
-    new HashMap<String, Map<Long, ExpirableConnection>>();
+  private final Map<String, ExpirableConnection> threadConnections =
+    new HashMap<String, ExpirableConnection>();
 
   /** The thrift conn expiry queue. */
   private final DelayQueue<ExpirableConnection> thriftConnExpiryQueue = new DelayQueue<ExpirableConnection>();
@@ -305,7 +303,6 @@ public class HiveDriver implements LensDriver {
    */
   public HiveDriver() throws LensException {
     this.sessionLock = new ReentrantLock();
-    this.connectionLock = new ReentrantLock();
     lensToHiveSession = new HashMap<String, SessionHandle>();
     resourcesAddedForSession = new HashMap<SessionHandle, Boolean>();
     connectionExpiryThread.setDaemon(true);
@@ -532,7 +529,7 @@ public class HiveDriver implements LensDriver {
    * @see org.apache.lens.server.api.driver.LensDriver#updateStatus(org.apache.lens.server.api.query.QueryContext)
    */
   @Override
-  public synchronized void updateStatus(QueryContext context) throws LensException {
+  public void updateStatus(QueryContext context) throws LensException {
     LOG.debug("GetStatus: " + context.getQueryHandle());
     if (context.getDriverStatus().isFinished()) {
       return;
@@ -677,6 +674,9 @@ public class HiveDriver implements LensDriver {
    */
   @Override
   public void closeQuery(QueryHandle handle) throws LensException {
+    if (handle == null) {
+      return;
+    }
     LOG.info("CloseQuery: " + handle);
     OperationHandle opHandle = hiveHandles.remove(handle);
     if (opHandle != null) {
@@ -758,41 +758,32 @@ public class HiveDriver implements LensDriver {
       }
       return embeddedConnection.getClient();
     } else {
-      connectionLock.lock();
-      try {
-        String user = hiveConf.getVar(HiveConf.ConfVars.HIVE_SERVER2_THRIFT_CLIENT_USER);
-        if (SessionState.get() != null && SessionState.get().getUserName() != null) {
-          user = SessionState.get().getUserName();
-        }
-        Map<Long, ExpirableConnection> userThreads = threadConnections.get(user.toLowerCase());
-        if (userThreads == null) {
-          userThreads = new HashMap<Long, ExpirableConnection>();
-          threadConnections.put(user.toLowerCase(), userThreads);
+      String user = hiveConf.getVar(HiveConf.ConfVars.HIVE_SERVER2_THRIFT_CLIENT_USER);
+      if (SessionState.get() != null && SessionState.get().getUserName() != null) {
+        user = SessionState.get().getUserName();
+      }
+
+      String connectionKey = user.toLowerCase() + Thread.currentThread().getId();
+      ExpirableConnection connection = threadConnections.get(connectionKey);
+      if (connection == null || connection.isExpired()) {
+        try {
+          ThriftConnection tconn = connectionClass.newInstance();
+          tconn.init(hiveConf, user);
+          connection = new ExpirableConnection(tconn, connectionExpiryTimeout);
+          thriftConnExpiryQueue.offer(connection);
+          threadConnections.put(connectionKey, connection);
+          LOG.info("New thrift connection " + connectionClass + " for thread:" + Thread.currentThread().getId()
+            + " for user:" + user + " connection ID=" + connection.getConnId());
+        } catch (Exception e) {
+          throw new LensException(e);
         }
-        ExpirableConnection connection = userThreads.get(Thread.currentThread().getId());
-        if (connection == null || connection.isExpired()) {
-          try {
-            ThriftConnection tconn = connectionClass.newInstance();
-            tconn.init(hiveConf, user);
-            connection = new ExpirableConnection(tconn, connectionExpiryTimeout);
-            thriftConnExpiryQueue.offer(connection);
-            userThreads.put(Thread.currentThread().getId(), connection);
-            LOG.info("New thrift connection " + connectionClass + " for thread:" + Thread.currentThread().getId()
-              + " for user:" + user + " connection ID=" + connection.getConnId());
-          } catch (Exception e) {
-            throw new LensException(e);
-          }
-        } else {
-          synchronized (thriftConnExpiryQueue) {
-            thriftConnExpiryQueue.remove(connection);
-            thriftConnExpiryQueue.offer(connection);
-          }
+      } else {
+        synchronized (thriftConnExpiryQueue) {
+          thriftConnExpiryQueue.remove(connection);
+          thriftConnExpiryQueue.offer(connection);
         }
-        return connection.getConnection().getClient();
-      } finally {
-        connectionLock.unlock();
       }
-
+      return connection.getConnection().getClient();
     }
   }
 
@@ -1220,22 +1211,15 @@ public class HiveDriver implements LensDriver {
    * Close all connections.
    */
   private void closeAllConnections() {
-    connectionLock.lock();
-    try {
-      synchronized (thriftConnExpiryQueue) {
-        for (Map<Long, ExpirableConnection> connections : threadConnections.values()) {
-          for (ExpirableConnection connection : connections.values()) {
-            try {
-              connection.getConnection().close();
-            } catch (Exception ce) {
-              LOG.warn("Error closing connection to hive server");
-            }
-          }
+    synchronized (thriftConnExpiryQueue) {
+      for (ExpirableConnection connection : threadConnections.values()) {
+        try {
+          connection.getConnection().close();
+        } catch (Exception ce) {
+          LOG.warn("Error closing connection to hive server");
         }
-        threadConnections.clear();
       }
-    } finally {
-      connectionLock.unlock();
+      threadConnections.clear();
     }
   }
 


[23/50] [abbrv] incubator-lens git commit: LENS-480 : Priority is not getting set for the HIVE MR jobs

Posted by am...@apache.org.
LENS-480 : Priority is not getting set for the HIVE MR jobs


Project: http://git-wip-us.apache.org/repos/asf/incubator-lens/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-lens/commit/278e0e85
Tree: http://git-wip-us.apache.org/repos/asf/incubator-lens/tree/278e0e85
Diff: http://git-wip-us.apache.org/repos/asf/incubator-lens/diff/278e0e85

Branch: refs/heads/current-release-line
Commit: 278e0e857ccc02f7b673147e5fe0fafa28ea2245
Parents: 019eb94
Author: Amareshwari Sriramadasu <am...@apache.org>
Authored: Fri Apr 3 20:54:42 2015 +0530
Committer: jdhok <jd...@apache.org>
Committed: Fri Apr 3 20:54:42 2015 +0530

----------------------------------------------------------------------
 .../org/apache/lens/driver/hive/HiveDriver.java  | 17 ++++++++++-------
 .../apache/lens/driver/hive/TestHiveDriver.java  | 19 ++++++++++++++-----
 .../lens/driver/hive/TestRemoteHiveDriver.java   | 16 ++++++++--------
 3 files changed, 32 insertions(+), 20 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/278e0e85/lens-driver-hive/src/main/java/org/apache/lens/driver/hive/HiveDriver.java
----------------------------------------------------------------------
diff --git a/lens-driver-hive/src/main/java/org/apache/lens/driver/hive/HiveDriver.java b/lens-driver-hive/src/main/java/org/apache/lens/driver/hive/HiveDriver.java
index a230515..cc944b7 100644
--- a/lens-driver-hive/src/main/java/org/apache/lens/driver/hive/HiveDriver.java
+++ b/lens-driver-hive/src/main/java/org/apache/lens/driver/hive/HiveDriver.java
@@ -457,9 +457,10 @@ public class HiveDriver implements LensDriver {
   public LensResultSet execute(QueryContext ctx) throws LensException {
     try {
       addPersistentPath(ctx);
-      ctx.getConf().set("mapred.job.name", ctx.getQueryHandle().toString());
+      Configuration qdconf = ctx.getDriverConf(this);
+      qdconf.set("mapred.job.name", ctx.getQueryHandle().toString());
       OperationHandle op = getClient().executeStatement(getSession(ctx), ctx.getSelectedDriverQuery(),
-        ctx.getSelectedDriverConf().getValByRegex(".*"));
+        qdconf.getValByRegex(".*"));
       LOG.info("The hive operation handle: " + op);
       ctx.setDriverOpHandle(op.toString());
       hiveHandles.put(ctx.getQueryHandle(), op);
@@ -494,14 +495,15 @@ public class HiveDriver implements LensDriver {
   public void executeAsync(QueryContext ctx) throws LensException {
     try {
       addPersistentPath(ctx);
-      ctx.getConf().set("mapred.job.name", ctx.getQueryHandle().toString());
+      Configuration qdconf = ctx.getDriverConf(this);
+      qdconf.set("mapred.job.name", ctx.getQueryHandle().toString());
       //Query is already explained.
       LOG.info("whetherCalculatePriority: " + whetherCalculatePriority);
       if (whetherCalculatePriority) {
         try {
           // Inside try since non-data fetching queries can also be executed by async method.
           String priority = queryPriorityDecider.decidePriority(ctx).toString();
-          ctx.getSelectedDriverConf().set("mapred.job.priority", priority);
+          qdconf.set("mapred.job.priority", priority);
           LOG.info("set priority to " + priority);
         } catch (Exception e) {
           // not failing query launch when setting priority fails
@@ -511,7 +513,7 @@ public class HiveDriver implements LensDriver {
         }
       }
       OperationHandle op = getClient().executeStatementAsync(getSession(ctx), ctx.getSelectedDriverQuery(),
-        ctx.getConf().getValByRegex(".*"));
+        qdconf.getValByRegex(".*"));
       ctx.setDriverOpHandle(op.toString());
       LOG.info("QueryHandle: " + ctx.getQueryHandle() + " HiveHandle:" + op);
       hiveHandles.put(ctx.getQueryHandle(), op);
@@ -823,7 +825,8 @@ public class HiveDriver implements LensDriver {
    */
   void addPersistentPath(QueryContext context) throws IOException {
     String hiveQuery;
-    boolean addInsertOverwrite = context.getConf().getBoolean(
+    Configuration qdconf = context.getDriverConf(this);
+    boolean addInsertOverwrite = qdconf.getBoolean(
       LensConfConstants.QUERY_ADD_INSERT_OVEWRITE, LensConfConstants.DEFAULT_ADD_INSERT_OVEWRITE);
     if (context.isDriverPersistent() && addInsertOverwrite
       && (context.getSelectedDriverQuery().startsWith("SELECT")
@@ -835,7 +838,7 @@ public class HiveDriver implements LensDriver {
       StringBuilder builder = new StringBuilder("INSERT OVERWRITE DIRECTORY ");
       context.setHdfsoutPath(resultSetPath.makeQualified(resultSetPath.getFileSystem(context.getConf())).toString());
       builder.append('"').append(resultSetPath).append("\" ");
-      String outputDirFormat = context.getConf().get(LensConfConstants.QUERY_OUTPUT_DIRECTORY_FORMAT);
+      String outputDirFormat = qdconf.get(LensConfConstants.QUERY_OUTPUT_DIRECTORY_FORMAT);
       if (outputDirFormat != null) {
         builder.append(outputDirFormat);
       }

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/278e0e85/lens-driver-hive/src/test/java/org/apache/lens/driver/hive/TestHiveDriver.java
----------------------------------------------------------------------
diff --git a/lens-driver-hive/src/test/java/org/apache/lens/driver/hive/TestHiveDriver.java b/lens-driver-hive/src/test/java/org/apache/lens/driver/hive/TestHiveDriver.java
index 36594e9..dfc178f 100644
--- a/lens-driver-hive/src/test/java/org/apache/lens/driver/hive/TestHiveDriver.java
+++ b/lens-driver-hive/src/test/java/org/apache/lens/driver/hive/TestHiveDriver.java
@@ -120,7 +120,7 @@ public class TestHiveDriver {
     conf.addResource("hivedriver-site.xml");
     conf.setClass(HiveDriver.HIVE_CONNECTION_CLASS, EmbeddedThriftConnection.class, ThriftConnection.class);
     conf.set("hive.lock.manager", "org.apache.hadoop.hive.ql.lockmgr.EmbeddedLockManager");
-    conf.setBoolean(HiveDriver.HS2_CALCULATE_PRIORITY, false);
+    conf.setBoolean(HiveDriver.HS2_CALCULATE_PRIORITY, true);
     driver = new HiveDriver();
     driver.configure(conf);
     drivers = new ArrayList<LensDriver>() {
@@ -143,6 +143,13 @@ public class TestHiveDriver {
     return context;
   }
 
+  protected QueryContext createContext(final String query, Configuration conf, LensDriver driver) throws LensException {
+    QueryContext context = new QueryContext(query, "testuser", new LensConf(), conf, Arrays.asList(driver));
+    // session id has to be set before calling setDriverQueriesAndPlans
+    context.setLensSessionIdentifier(sessionid);
+    return context;
+  }
+
   protected QueryContext createContext(PreparedQueryContext query, Configuration conf) {
     QueryContext context = new QueryContext(query, "testuser", new LensConf(), conf);
     context.setLensSessionIdentifier(sessionid);
@@ -273,6 +280,7 @@ public class TestHiveDriver {
     String select = "SELECT ID FROM test_execute";
     QueryContext context = createContext(select, conf);
     resultSet = driver.execute(context);
+    Assert.assertNotNull(context.getDriverConf(driver).get("mapred.job.name"));
     validateInMemoryResult(resultSet);
     conf.setBoolean(LensConfConstants.QUERY_PERSISTENT_RESULT_INDRIVER, true);
     context = createContext(select, conf);
@@ -387,21 +395,22 @@ public class TestHiveDriver {
     // Now run a command that would fail
     String expectFail = "SELECT ID FROM test_execute_sync";
     conf.setBoolean(LensConfConstants.QUERY_PERSISTENT_RESULT_INDRIVER, true);
-    QueryContext context = createContext(expectFail, conf);
-    context.getConf().set("hive.exec.driver.run.hooks", FailHook.class.getCanonicalName());
+    Configuration failConf = new Configuration(conf);
+    failConf.set("hive.exec.driver.run.hooks", FailHook.class.getCanonicalName());
+    QueryContext context = createContext(expectFail, failConf);
     driver.executeAsync(context);
     Assert.assertEquals(1, driver.getHiveHandleSize());
     validateExecuteAsync(context, DriverQueryState.FAILED, true, false);
     Assert.assertEquals(1, driver.getHiveHandleSize());
     driver.closeQuery(context.getQueryHandle());
     Assert.assertEquals(0, driver.getHiveHandleSize());
-
-    conf.set("hive.exec.driver.run.hooks", "");
     // Async select query
     String select = "SELECT ID FROM test_execute_sync";
     conf.setBoolean(LensConfConstants.QUERY_PERSISTENT_RESULT_INDRIVER, false);
     context = createContext(select, conf);
     driver.executeAsync(context);
+    Assert.assertNotNull(context.getDriverConf(driver).get("mapred.job.name"));
+    Assert.assertNotNull(context.getDriverConf(driver).get("mapred.job.priority"));
     Assert.assertEquals(1, driver.getHiveHandleSize());
     validateExecuteAsync(context, DriverQueryState.SUCCESSFUL, false, false);
     driver.closeQuery(context.getQueryHandle());

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/278e0e85/lens-driver-hive/src/test/java/org/apache/lens/driver/hive/TestRemoteHiveDriver.java
----------------------------------------------------------------------
diff --git a/lens-driver-hive/src/test/java/org/apache/lens/driver/hive/TestRemoteHiveDriver.java b/lens-driver-hive/src/test/java/org/apache/lens/driver/hive/TestRemoteHiveDriver.java
index 5c70a89..b9fb247 100644
--- a/lens-driver-hive/src/test/java/org/apache/lens/driver/hive/TestRemoteHiveDriver.java
+++ b/lens-driver-hive/src/test/java/org/apache/lens/driver/hive/TestRemoteHiveDriver.java
@@ -134,7 +134,7 @@ public class TestRemoteHiveDriver extends TestHiveDriver {
     conf = new HiveConf(remoteConf);
     conf.addResource("hivedriver-site.xml");
     driver = new HiveDriver();
-    conf.setBoolean(HiveDriver.HS2_CALCULATE_PRIORITY, false);
+    conf.setBoolean(HiveDriver.HS2_CALCULATE_PRIORITY, true);
     driver.configure(conf);
     drivers = new ArrayList<LensDriver>() {
       {
@@ -158,7 +158,7 @@ public class TestRemoteHiveDriver extends TestHiveDriver {
     thConf.setLong(HiveDriver.HS2_CONNECTION_EXPIRY_DELAY, 10000);
     final HiveDriver thrDriver = new HiveDriver();
     thrDriver.configure(thConf);
-    QueryContext ctx = createContext("USE " + dataBase, conf);
+    QueryContext ctx = createContext("USE " + dataBase, conf, thrDriver);
     thrDriver.execute(ctx);
 
     // Launch a select query
@@ -171,7 +171,7 @@ public class TestRemoteHiveDriver extends TestHiveDriver {
     for (int q = 0; q < QUERIES; q++) {
       final QueryContext qctx;
       try {
-        qctx = createContext("SELECT * FROM test_multithreads", conf);
+        qctx = createContext("SELECT * FROM test_multithreads", conf, thrDriver);
         thrDriver.executeAsync(qctx);
       } catch (LensException e) {
         errCount.incrementAndGet();
@@ -247,7 +247,7 @@ public class TestRemoteHiveDriver extends TestHiveDriver {
 
     driverConf.setBoolean(LensConfConstants.QUERY_ADD_INSERT_OVEWRITE, false);
     driverConf.setBoolean(LensConfConstants.QUERY_PERSISTENT_RESULT_INDRIVER, false);
-    QueryContext ctx = createContext("USE " + dataBase, driverConf);
+    QueryContext ctx = createContext("USE " + dataBase, driverConf, oldDriver);
     oldDriver.execute(ctx);
     Assert.assertEquals(0, oldDriver.getHiveHandleSize());
 
@@ -255,20 +255,20 @@ public class TestRemoteHiveDriver extends TestHiveDriver {
 
     // Create some ops with a driver
     String createTable = "CREATE TABLE IF NOT EXISTS " + tableName + "(ID STRING)";
-    ctx = createContext(createTable, driverConf);
+    ctx = createContext(createTable, driverConf, oldDriver);
     oldDriver.execute(ctx);
 
     // Load some data into the table
     String dataLoad = "LOAD DATA LOCAL INPATH '" + TEST_DATA_FILE + "' OVERWRITE INTO TABLE " + tableName;
-    ctx = createContext(dataLoad, driverConf);
+    ctx = createContext(dataLoad, driverConf, oldDriver);
     oldDriver.execute(ctx);
 
     driverConf.setBoolean(LensConfConstants.QUERY_ADD_INSERT_OVEWRITE, true);
     driverConf.setBoolean(LensConfConstants.QUERY_PERSISTENT_RESULT_INDRIVER, true);
     // Fire two queries
-    QueryContext ctx1 = createContext("SELECT * FROM " + tableName, driverConf);
+    QueryContext ctx1 = createContext("SELECT * FROM " + tableName, driverConf, oldDriver);
     oldDriver.executeAsync(ctx1);
-    QueryContext ctx2 = createContext("SELECT ID FROM " + tableName, driverConf);
+    QueryContext ctx2 = createContext("SELECT ID FROM " + tableName, driverConf, oldDriver);
     oldDriver.executeAsync(ctx2);
     Assert.assertEquals(2, oldDriver.getHiveHandleSize());
 


[02/50] [abbrv] incubator-lens git commit: LENS-435 : TestQueryRunner renamed to QueryRunner. (sharad)

Posted by am...@apache.org.
LENS-435 : TestQueryRunner renamed to QueryRunner. (sharad)


Project: http://git-wip-us.apache.org/repos/asf/incubator-lens/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-lens/commit/b01f726c
Tree: http://git-wip-us.apache.org/repos/asf/incubator-lens/tree/b01f726c
Diff: http://git-wip-us.apache.org/repos/asf/incubator-lens/diff/b01f726c

Branch: refs/heads/current-release-line
Commit: b01f726cfbaee779be5a0b729a197a6458830a4f
Parents: 8d18591
Author: Sharad Agarwal <sh...@flipkarts-MacBook-Pro.local>
Authored: Tue Mar 24 14:37:27 2015 +0530
Committer: Sharad Agarwal <sh...@flipkarts-MacBook-Pro.local>
Committed: Tue Mar 24 14:37:27 2015 +0530

----------------------------------------------------------------------
 .../java/org/apache/lens/ml/QueryRunner.java    | 56 ++++++++++++++++++++
 .../org/apache/lens/ml/TestQueryRunner.java     | 56 --------------------
 2 files changed, 56 insertions(+), 56 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/b01f726c/lens-ml-lib/src/main/java/org/apache/lens/ml/QueryRunner.java
----------------------------------------------------------------------
diff --git a/lens-ml-lib/src/main/java/org/apache/lens/ml/QueryRunner.java b/lens-ml-lib/src/main/java/org/apache/lens/ml/QueryRunner.java
new file mode 100644
index 0000000..56f9a88
--- /dev/null
+++ b/lens-ml-lib/src/main/java/org/apache/lens/ml/QueryRunner.java
@@ -0,0 +1,56 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.lens.ml;
+
+import org.apache.lens.api.LensException;
+import org.apache.lens.api.LensSessionHandle;
+import org.apache.lens.api.query.QueryHandle;
+
+import lombok.Getter;
+import lombok.Setter;
+
+/**
+ * Run a query against a Lens server.
+ */
+public abstract class QueryRunner {
+
+  /** The session handle. */
+  protected final LensSessionHandle sessionHandle;
+
+  @Getter @Setter
+  protected String queryName;
+
+  /**
+   * Instantiates a new query runner.
+   *
+   * @param sessionHandle the session handle
+   */
+  public QueryRunner(LensSessionHandle sessionHandle) {
+    this.sessionHandle = sessionHandle;
+  }
+
+  /**
+   * Run query.
+   *
+   * @param query the query
+   * @return the query handle
+   * @throws LensException the lens exception
+   */
+  public abstract QueryHandle runQuery(String query) throws LensException;
+}

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/b01f726c/lens-ml-lib/src/main/java/org/apache/lens/ml/TestQueryRunner.java
----------------------------------------------------------------------
diff --git a/lens-ml-lib/src/main/java/org/apache/lens/ml/TestQueryRunner.java b/lens-ml-lib/src/main/java/org/apache/lens/ml/TestQueryRunner.java
deleted file mode 100644
index 56f9a88..0000000
--- a/lens-ml-lib/src/main/java/org/apache/lens/ml/TestQueryRunner.java
+++ /dev/null
@@ -1,56 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.lens.ml;
-
-import org.apache.lens.api.LensException;
-import org.apache.lens.api.LensSessionHandle;
-import org.apache.lens.api.query.QueryHandle;
-
-import lombok.Getter;
-import lombok.Setter;
-
-/**
- * Run a query against a Lens server.
- */
-public abstract class QueryRunner {
-
-  /** The session handle. */
-  protected final LensSessionHandle sessionHandle;
-
-  @Getter @Setter
-  protected String queryName;
-
-  /**
-   * Instantiates a new query runner.
-   *
-   * @param sessionHandle the session handle
-   */
-  public QueryRunner(LensSessionHandle sessionHandle) {
-    this.sessionHandle = sessionHandle;
-  }
-
-  /**
-   * Run query.
-   *
-   * @param query the query
-   * @return the query handle
-   * @throws LensException the lens exception
-   */
-  public abstract QueryHandle runQuery(String query) throws LensException;
-}


[20/50] [abbrv] incubator-lens git commit: LENS-470 : Add commons-collection dependency (Jaideep Dhok via amareshwari)

Posted by am...@apache.org.
LENS-470 : Add commons-collection dependency (Jaideep Dhok via amareshwari)


Project: http://git-wip-us.apache.org/repos/asf/incubator-lens/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-lens/commit/1820fe08
Tree: http://git-wip-us.apache.org/repos/asf/incubator-lens/tree/1820fe08
Diff: http://git-wip-us.apache.org/repos/asf/incubator-lens/diff/1820fe08

Branch: refs/heads/current-release-line
Commit: 1820fe0882084520f1f855e2ae4b58bdd4d41101
Parents: d180c2c
Author: Jaideep Dhok <jd...@apache.org>
Authored: Tue Mar 31 18:18:10 2015 +0530
Committer: Amareshwari Sriramadasu <am...@apache.org>
Committed: Tue Mar 31 18:18:10 2015 +0530

----------------------------------------------------------------------
 lens-server/pom.xml | 4 ++++
 pom.xml             | 6 ++++++
 2 files changed, 10 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/1820fe08/lens-server/pom.xml
----------------------------------------------------------------------
diff --git a/lens-server/pom.xml b/lens-server/pom.xml
index 748be1d..efac8b5 100644
--- a/lens-server/pom.xml
+++ b/lens-server/pom.xml
@@ -190,6 +190,10 @@
       <artifactId>subethasmtp</artifactId>
     </dependency>
     <dependency>
+      <groupId>commons-collections</groupId>
+      <artifactId>commons-collections</artifactId>
+    </dependency>
+    <dependency>
       <groupId>commons-dbcp</groupId>
       <artifactId>commons-dbcp</artifactId>
     </dependency>

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/1820fe08/pom.xml
----------------------------------------------------------------------
diff --git a/pom.xml b/pom.xml
index a5dd80f..f306ee0 100644
--- a/pom.xml
+++ b/pom.xml
@@ -53,6 +53,7 @@
     <slf4j.version>1.7.6</slf4j.version>
     <log4j.version>1.2.16</log4j.version>
     <commons.lang.version>2.4</commons.lang.version>
+    <commons.collections.version>3.2.1</commons.collections.version>
     <joda.time.version>2.0</joda.time.version>
     <guava.version>13.0.1</guava.version>
     <lombok.version>1.12.4</lombok.version>
@@ -679,6 +680,11 @@
         <version>${commons.lang.version}</version>
       </dependency>
       <dependency>
+        <groupId>commons-collections</groupId>
+        <artifactId>commons-collections</artifactId>
+        <version>${commons.collections.version}</version>
+      </dependency>
+      <dependency>
         <groupId>joda-time</groupId>
         <artifactId>joda-time</artifactId>
         <version>${joda.time.version}</version>


[30/50] [abbrv] incubator-lens git commit: LENS-412 : Fix error messaging for update periods available (Rajat Khandelwal via amareshwari)

Posted by am...@apache.org.
LENS-412 : Fix error messaging for update periods available (Rajat Khandelwal via amareshwari)


Project: http://git-wip-us.apache.org/repos/asf/incubator-lens/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-lens/commit/5e492d93
Tree: http://git-wip-us.apache.org/repos/asf/incubator-lens/tree/5e492d93
Diff: http://git-wip-us.apache.org/repos/asf/incubator-lens/diff/5e492d93

Branch: refs/heads/current-release-line
Commit: 5e492d93f013ad533b0865ed74155ed7c5940224
Parents: 0f5ea4c
Author: Rajat Khandelwal <pr...@apache.org>
Authored: Tue Apr 7 11:01:54 2015 +0530
Committer: Amareshwari Sriramadasu <am...@apache.org>
Committed: Tue Apr 7 11:01:54 2015 +0530

----------------------------------------------------------------------
 .../lens/cube/metadata/CubeFactTable.java       |  17 +--
 .../org/apache/lens/cube/parse/DateUtil.java    | 153 ++++++++++---------
 .../lens/cube/parse/StorageTableResolver.java   |   7 +-
 .../org/apache/lens/cube/parse/TimeRange.java   |   6 +
 .../lens/cube/parse/TestCubeRewriter.java       |   2 +-
 .../apache/lens/cube/parse/TestDateUtil.java    |  91 +++++++----
 6 files changed, 152 insertions(+), 124 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/5e492d93/lens-cube/src/main/java/org/apache/lens/cube/metadata/CubeFactTable.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/metadata/CubeFactTable.java b/lens-cube/src/main/java/org/apache/lens/cube/metadata/CubeFactTable.java
index cb84f66..9daccec 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/metadata/CubeFactTable.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/metadata/CubeFactTable.java
@@ -183,22 +183,7 @@ public final class CubeFactTable extends AbstractCubeTable {
     for (UpdatePeriod i : updatePeriods) {
       if (UpdatePeriod.YEARLY == i || UpdatePeriod.QUARTERLY == i || UpdatePeriod.MONTHLY == i
         || UpdatePeriod.WEEKLY == i) {
-        int intervals = 0;
-        switch (i) {
-        case YEARLY:
-          intervals = DateUtil.getYearsBetween(from, to);
-          break;
-        case QUARTERLY:
-          intervals = DateUtil.getQuartersBetween(from, to);
-          break;
-        case MONTHLY:
-          intervals = DateUtil.getMonthsBetween(from, to);
-          break;
-        case WEEKLY:
-          intervals = DateUtil.getWeeksBetween(from, to);
-          break;
-        }
-
+        int intervals = DateUtil.getTimeDiff(from, to, i);
         if (intervals > 0) {
           if (cmp.compare(i, max) > 0) {
             max = i;

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/5e492d93/lens-cube/src/main/java/org/apache/lens/cube/parse/DateUtil.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/DateUtil.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/DateUtil.java
index 1094e44..a4988d4 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/DateUtil.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/DateUtil.java
@@ -23,6 +23,7 @@ import java.text.ParseException;
 import java.text.SimpleDateFormat;
 import java.util.Calendar;
 import java.util.Date;
+import java.util.Set;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
 
@@ -34,6 +35,8 @@ import org.apache.hadoop.hive.ql.ErrorMsg;
 import org.apache.hadoop.hive.ql.parse.SemanticException;
 import org.apache.log4j.Logger;
 
+import lombok.Data;
+
 public final class DateUtil {
   private DateUtil() {
 
@@ -287,15 +290,18 @@ public final class DateUtil {
     return calendar.getActualMaximum(Calendar.DAY_OF_MONTH);
   }
 
-  public static int getMonthsBetween(Date from, Date to) {
+  public static CoveringInfo getMonthlyCoveringInfo(Date from, Date to) {
     // Move 'from' to end of month, unless its the first day of month
+    boolean coverable = true;
     if (!from.equals(DateUtils.truncate(from, Calendar.MONTH))) {
       from = DateUtils.addMonths(DateUtils.truncate(from, Calendar.MONTH), 1);
+      coverable = false;
     }
 
-    // Move 'to' to beginning of month, unless its the last day of the month
-    if (!to.equals(DateUtils.round(to, Calendar.MONTH))) {
+    // Move 'to' to beginning of next month, unless its the first day of the month
+    if (!to.equals(DateUtils.truncate(to, Calendar.MONTH))) {
       to = DateUtils.truncate(to, Calendar.MONTH);
+      coverable = false;
     }
 
     int months = 0;
@@ -303,77 +309,55 @@ public final class DateUtil {
       from = DateUtils.addMonths(from, 1);
       months++;
     }
-    return months;
+    return new CoveringInfo(months, coverable);
   }
 
-  public static int getQuartersBetween(Date from, Date to) {
-    int months = getMonthsBetween(from, to);
-    if (months < 3) {
-      return 0;
+  public static CoveringInfo getQuarterlyCoveringInfo(Date from, Date to) {
+    CoveringInfo monthlyCoveringInfo = getMonthlyCoveringInfo(from, to);
+    if (monthlyCoveringInfo.getCountBetween() < 3) {
+      return new CoveringInfo(0, false);
+    }
+    boolean coverable = monthlyCoveringInfo.isCoverable();
+    if (!from.equals(DateUtils.truncate(from, Calendar.MONTH))) {
+      from = DateUtils.addMonths(DateUtils.truncate(from, Calendar.MONTH), 1);
+      coverable = false;
     }
-
     Calendar cal = Calendar.getInstance();
     cal.setTime(from);
     int fromMonth = cal.get(Calendar.MONTH);
-    int fromYear = cal.get(Calendar.YEAR);
 
     // Get the start date of the quarter
-    int qtrStartMonth;
-    if (fromMonth % 3 == 0) {
-      qtrStartMonth = fromMonth;
-    } else {
-      qtrStartMonth = fromMonth - (fromMonth % 3);
+    int beginOffset = (3 - fromMonth % 3) % 3;
+    int endOffset = (monthlyCoveringInfo.getCountBetween() - beginOffset) % 3;
+    if (beginOffset > 0 || endOffset > 0) {
+      coverable = false;
     }
+    return new CoveringInfo((monthlyCoveringInfo.getCountBetween() - beginOffset - endOffset) / 3, coverable);
+  }
 
-    cal.clear();
-    cal.set(Calendar.MONTH, qtrStartMonth);
-    cal.set(Calendar.YEAR, fromYear);
-    cal.set(Calendar.DAY_OF_MONTH, 1);
-    Date fromQtrStartDate = cal.getTime();
-
-    int moveUp = 0;
-    if (fromQtrStartDate.before(from)) {
-      moveUp = 3 - (fromMonth % 3);
-    }
 
-    if (months % 3 != 0) {
-      months = months - (months % 3);
+  public static CoveringInfo getYearlyCoveringInfo(Date from, Date to) {
+    CoveringInfo monthlyCoveringInfo = getMonthlyCoveringInfo(from, to);
+    if (monthlyCoveringInfo.getCountBetween() < 12) {
+      return new CoveringInfo(0, false);
     }
-    return (months - moveUp) / 3;
-  }
-
-  public static int getYearsBetween(Date from, Date to) {
-    int months = getMonthsBetween(from, to);
-    if (months < 12) {
-      return 0;
+    boolean coverable = monthlyCoveringInfo.isCoverable();
+    if (!from.equals(DateUtils.truncate(from, Calendar.MONTH))) {
+      from = DateUtils.addMonths(DateUtils.truncate(from, Calendar.MONTH), 1);
+      coverable = false;
     }
-
-    // Get start of year for 'from' date
     Calendar cal = Calendar.getInstance();
     cal.setTime(from);
     int fromMonth = cal.get(Calendar.MONTH);
-    int fromYear = cal.get(Calendar.YEAR);
-
-    cal.clear();
-    cal.set(Calendar.MONTH, Calendar.JANUARY);
-    cal.set(Calendar.YEAR, fromYear);
-    cal.set(Calendar.DAY_OF_MONTH, 1);
-
-    Date yearStartDate = cal.getTime();
-
-    int moveUp = 0;
-    if (yearStartDate.before(from)) {
-      moveUp = 12 - (fromMonth % 12);
-    }
-
-    if (months % 12 != 0) {
-      months = months - (months % 12);
+    int beginOffset = (12 - fromMonth % 12) % 12;
+    int endOffset = (monthlyCoveringInfo.getCountBetween() - beginOffset) % 12;
+    if (beginOffset > 0 || endOffset > 0) {
+      coverable = false;
     }
-
-    return (months - moveUp) / 12;
+    return new CoveringInfo((monthlyCoveringInfo.getCountBetween() - beginOffset - endOffset) / 12, coverable);
   }
 
-  public static int getWeeksBetween(Date from, Date to) {
+  public static CoveringInfo getWeeklyCoveringInfo(Date from, Date to) {
     int dayDiff = 0;
     Date tmpFrom = from;
     while (tmpFrom.before(to)) {
@@ -382,7 +366,7 @@ public final class DateUtil {
     }
 
     if (dayDiff < 7) {
-      return 0;
+      return new CoveringInfo(0, false);
     }
 
     Calendar cal = Calendar.getInstance();
@@ -397,40 +381,65 @@ public final class DateUtil {
     cal.set(Calendar.DAY_OF_WEEK, Calendar.SUNDAY);
     int maxDayInWeek = cal.getActualMaximum(Calendar.DAY_OF_WEEK);
     Date fromWeekStartDate = cal.getTime();
-
+    boolean coverable = dayDiff % 7 == 0;
     if (fromWeekStartDate.before(from)) {
       // Count from the start of next week
       dayDiff -= (maxDayInWeek - (fromDay - Calendar.SUNDAY));
+      coverable = false;
     }
 
-    return dayDiff / 7;
+    return new CoveringInfo(dayDiff / 7, coverable);
   }
 
-  static long getTimeDiff(Date from, Date to, UpdatePeriod interval) {
-    long diff = to.getTime() - from.getTime();
+  static CoveringInfo getCoveringInfo(Date from, Date to, UpdatePeriod interval) {
     switch (interval) {
     case SECONDLY:
-      return diff / 1000;
+      return getMilliSecondCoveringInfo(from, to, 1000);
     case MINUTELY:
-      return diff / (1000 * 60);
+      return getMilliSecondCoveringInfo(from, to, 1000 * 60);
     case HOURLY:
-      return diff / (1000 * 60 * 60);
+      return getMilliSecondCoveringInfo(from, to, 1000 * 60 * 60);
     case DAILY:
-      return diff / (1000 * 60 * 60 * 24);
+      return getMilliSecondCoveringInfo(from, to, 1000 * 60 * 60 * 24);
     case WEEKLY:
-      // return diff/(1000 * 60 * 60 * 24 * 7);
-      return getWeeksBetween(from, to);
+      return getWeeklyCoveringInfo(from, to);
     case MONTHLY:
-      // return (long) (diff/(60 * 60 * 1000 * 24 * 30.41666666));
-      return getMonthsBetween(from, to);
+      return getMonthlyCoveringInfo(from, to);
     case QUARTERLY:
-      return getQuartersBetween(from, to);
+      return getQuarterlyCoveringInfo(from, to);
     case YEARLY:
-      // return (diff/(60 * 60 * 1000 * 24 * 365));
-      return getYearsBetween(from, to);
+      return getYearlyCoveringInfo(from, to);
     default:
-      return -1;
+      return new CoveringInfo(0, false);
+    }
+  }
+
+  private static CoveringInfo getMilliSecondCoveringInfo(Date from, Date to, int millisInInterval) {
+    long diff = to.getTime() - from.getTime();
+    return new CoveringInfo((int) (diff / millisInInterval), diff % millisInInterval == 0);
+  }
+
+  static boolean isCoverableBy(Date from, Date to, Set<UpdatePeriod> intervals) {
+    for (UpdatePeriod period : intervals) {
+      if (getCoveringInfo(from, to, period).isCoverable()) {
+        return true;
+      }
     }
+    return false;
   }
 
+  public static int getTimeDiff(Date fromDate, Date toDate, UpdatePeriod updatePeriod) {
+    return getCoveringInfo(fromDate, toDate, updatePeriod).getCountBetween();
+  }
+
+  @Data
+  public static class CoveringInfo {
+    int countBetween;
+    boolean coverable;
+
+    public CoveringInfo(int countBetween, boolean coverable) {
+      this.countBetween = countBetween;
+      this.coverable = coverable;
+    }
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/5e492d93/lens-cube/src/main/java/org/apache/lens/cube/parse/StorageTableResolver.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/StorageTableResolver.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/StorageTableResolver.java
index 6e63483..96ca82c 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/StorageTableResolver.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/StorageTableResolver.java
@@ -351,7 +351,7 @@ class StorageTableResolver implements ContextRewriter {
          * 2. All Storage tables were skipped for some reasons.
          * 3. Storage tables do not have the update period for the timerange queried.
          */
-        if (!nonExistingParts.isEmpty()) {
+        if (failOnPartialData && !nonExistingParts.isEmpty()) {
           cubeql.addFactPruningMsgs(cfact.fact, CandidateTablePruneCause.missingPartitions(nonExistingParts));
         } else if (!skipStorageCauses.isEmpty()) {
           CandidateTablePruneCause cause = CandidateTablePruneCause.noCandidateStorages(skipStorageCauses);
@@ -405,8 +405,9 @@ class StorageTableResolver implements ContextRewriter {
     Map<UpdatePeriod, RangesPartitionTimeline> nonExistingParts)
     throws Exception {
     Set<FactPartition> partitions = new TreeSet<FactPartition>();
-    if (getPartitions(fact, range.getFromDate(), range.getToDate(), range.getPartitionColumn(), partitions,
-      updatePeriods, addNonExistingParts, skipStorageCauses, nonExistingParts)) {
+    if (range.isCoverableBy(updatePeriods)
+      && getPartitions(fact, range.getFromDate(), range.getToDate(), range.getPartitionColumn(), partitions,
+        updatePeriods, addNonExistingParts, skipStorageCauses, nonExistingParts)) {
       return partitions;
     } else {
       return new TreeSet<FactPartition>();

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/5e492d93/lens-cube/src/main/java/org/apache/lens/cube/parse/TimeRange.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/TimeRange.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/TimeRange.java
index ecd7b76..612cdf9 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/TimeRange.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/TimeRange.java
@@ -20,6 +20,7 @@ package org.apache.lens.cube.parse;
 
 import java.util.Calendar;
 import java.util.Date;
+import java.util.TreeSet;
 
 import org.apache.lens.cube.metadata.UpdatePeriod;
 
@@ -40,6 +41,11 @@ public class TimeRange {
   private ASTNode parent;
   private int childIndex;
 
+  public boolean isCoverableBy(TreeSet<UpdatePeriod> updatePeriods) {
+    return DateUtil.isCoverableBy(fromDate, toDate, updatePeriods);
+  }
+
+
   public static class TimeRangeBuilder {
     private final TimeRange range;
 

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/5e492d93/lens-cube/src/test/java/org/apache/lens/cube/parse/TestCubeRewriter.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestCubeRewriter.java b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestCubeRewriter.java
index f9a3762..6c65953 100644
--- a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestCubeRewriter.java
+++ b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestCubeRewriter.java
@@ -839,7 +839,7 @@ public class TestCubeRewriter extends TestQueryRewrite {
     Assert.assertEquals(pruneCauses.getDetails().get("testfact").iterator().next().getCause(),
       CandidateTablePruneCode.MISSING_PARTITIONS);
     Assert.assertEquals(pruneCauses.getDetails().get("testfactmonthly").iterator().next().getCause(),
-      CandidateTablePruneCode.MISSING_PARTITIONS);
+      CandidateTablePruneCode.NO_FACT_UPDATE_PERIODS_FOR_GIVEN_RANGE);
     Assert.assertEquals(pruneCauses.getDetails().get("testfact2").iterator().next().getCause(),
         CandidateTablePruneCode.MISSING_PARTITIONS);
     Assert.assertEquals(pruneCauses.getDetails().get("testfact2_raw").iterator().next().getCause(),

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/5e492d93/lens-cube/src/test/java/org/apache/lens/cube/parse/TestDateUtil.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestDateUtil.java b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestDateUtil.java
index fa2503e..9efd1f7 100644
--- a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestDateUtil.java
+++ b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestDateUtil.java
@@ -16,7 +16,6 @@
  * specific language governing permissions and limitations
  * under the License.
  */
-
 package org.apache.lens.cube.parse;
 
 import static org.apache.lens.cube.parse.DateUtil.resolveDate;
@@ -62,93 +61,121 @@ public class TestDateUtil {
   @Test
   public void testMonthsBetween() throws Exception {
     int i = 0;
-    Assert.assertEquals(1, DateUtil.getMonthsBetween(pairs[i], DateUtils.round(pairs[i + 1], Calendar.MONTH)),
+    Assert.assertEquals(DateUtil.getMonthlyCoveringInfo(pairs[i], DateUtils.round(pairs[i + 1], Calendar.MONTH)),
+      new DateUtil.CoveringInfo(1, true),
       "2013-Jan-01 to 2013-Jan-31");
 
     i += 2;
-    Assert.assertEquals(5, DateUtil.getMonthsBetween(pairs[i], DateUtils.round(pairs[i + 1], Calendar.MONTH)),
+    Assert.assertEquals(DateUtil.getMonthlyCoveringInfo(pairs[i], DateUtils.round(pairs[i + 1], Calendar.MONTH)),
+      new DateUtil.CoveringInfo(5, true),
       "2013-Jan-01 to 2013-May-31");
 
     i += 2;
-    Assert.assertEquals(12, DateUtil.getMonthsBetween(pairs[i], DateUtils.round(pairs[i + 1], Calendar.MONTH)),
+    Assert.assertEquals(DateUtil.getMonthlyCoveringInfo(pairs[i], DateUtils.round(pairs[i + 1], Calendar.MONTH)),
+      new DateUtil.CoveringInfo(12, true),
       "2013-Jan-01 to 2013-Dec-31");
 
     i += 2;
-    Assert.assertEquals(2, DateUtil.getMonthsBetween(pairs[i], pairs[i + 1]), "2013-Feb-01 to 2013-Apr-25");
+    Assert.assertEquals(DateUtil.getMonthlyCoveringInfo(pairs[i], pairs[i + 1]), new DateUtil.CoveringInfo(2, false),
+      "2013-Feb-01 to 2013-Apr-25");
 
     i += 2;
-    Assert.assertEquals(12, DateUtil.getMonthsBetween(pairs[i], pairs[i + 1]), "2012-Feb-01 to 2013-Feb-01");
+    Assert.assertEquals(DateUtil.getMonthlyCoveringInfo(pairs[i], pairs[i + 1]), new DateUtil.CoveringInfo(12, true),
+      "2012-Feb-01 to 2013-Feb-01");
 
     i += 2;
-    Assert.assertEquals(24, DateUtil.getMonthsBetween(pairs[i], pairs[i + 1]), "2011-Feb-01 to 2013-Feb-01");
+    Assert.assertEquals(DateUtil.getMonthlyCoveringInfo(pairs[i], pairs[i + 1]), new DateUtil.CoveringInfo(24, true),
+      "2011-Feb-01 to 2013-Feb-01");
 
     i += 2;
-    Assert.assertEquals(0, DateUtil.getMonthsBetween(pairs[i], pairs[i + 1]), "2013-Jan-02 to 2013-Feb-02");
+    Assert.assertEquals(DateUtil.getMonthlyCoveringInfo(pairs[i], pairs[i + 1]), new DateUtil.CoveringInfo(0, false),
+      "2013-Jan-02 to 2013-Feb-02");
 
     i += 2;
-    Assert.assertEquals(1, DateUtil.getMonthsBetween(pairs[i], pairs[i + 1]), "2013-Jan-02 to 2013-Mar-02");
+    Assert.assertEquals(DateUtil.getMonthlyCoveringInfo(pairs[i], pairs[i + 1]), new DateUtil.CoveringInfo(1, false),
+      "2013-Jan-02 to 2013-Mar-02");
   }
 
   @Test
   public void testQuartersBetween() throws Exception {
     int i = 0;
-    Assert.assertEquals(0, DateUtil.getQuartersBetween(pairs[i], pairs[i + 1]), "2013-Jan-01 to 2013-Jan-31");
+    Assert.assertEquals(DateUtil.getQuarterlyCoveringInfo(pairs[i], pairs[i + 1]), new DateUtil.CoveringInfo(0, false),
+      "2013-Jan-01 to 2013-Jan-31");
 
     i += 2;
-    Assert.assertEquals(1, DateUtil.getQuartersBetween(pairs[i], pairs[i + 1]), "2013-Jan-01 to 2013-May-31");
+    Assert.assertEquals(DateUtil.getQuarterlyCoveringInfo(pairs[i], pairs[i + 1]), new DateUtil.CoveringInfo(1, false),
+      "2013-Jan-01 to 2013-May-31");
 
     i += 2;
-    Assert.assertEquals(4, DateUtil.getQuartersBetween(pairs[i], DateUtils.round(pairs[i + 1], Calendar.MONTH)),
+    Assert.assertEquals(DateUtil.getQuarterlyCoveringInfo(pairs[i], DateUtils.round(pairs[i + 1], Calendar.MONTH)),
+      new DateUtil.CoveringInfo(4, true),
       "2013-Jan-01 to 2013-Dec-31");
 
     i += 2;
-    Assert.assertEquals(0, DateUtil.getQuartersBetween(pairs[i], pairs[i + 1]), "2013-Feb-01 to 2013-Apr-25");
+    Assert.assertEquals(DateUtil.getQuarterlyCoveringInfo(pairs[i], pairs[i + 1]), new DateUtil.CoveringInfo(0, false),
+      "2013-Feb-01 to 2013-Apr-25");
 
     i += 2;
-    Assert.assertEquals(3, DateUtil.getQuartersBetween(pairs[i], pairs[i + 1]), "2012-Feb-01 to 2013-Feb-01");
+    Assert.assertEquals(DateUtil.getQuarterlyCoveringInfo(pairs[i], pairs[i + 1]), new DateUtil.CoveringInfo(3, false),
+      "2012-Feb-01 to 2013-Feb-01");
 
     i += 2;
-    Assert.assertEquals(7, DateUtil.getQuartersBetween(pairs[i], pairs[i + 1]), "2011-Feb-01 to 2013-Feb-01");
+    Assert.assertEquals(DateUtil.getQuarterlyCoveringInfo(pairs[i], pairs[i + 1]), new DateUtil.CoveringInfo(7, false),
+      "2011-Feb-01 to 2013-Feb-01");
   }
 
   @Test
   public void testYearsBetween() throws Exception {
     int i = 0;
-    Assert.assertEquals(0, DateUtil.getYearsBetween(pairs[i], pairs[i + 1]), "" + pairs[i] + "->" + pairs[i + 1]);
+    Assert.assertEquals(DateUtil.getYearlyCoveringInfo(pairs[i], pairs[i + 1]), new DateUtil.CoveringInfo(0, false),
+      "" + pairs[i] + "->" + pairs[i + 1]);
 
     i += 2;
-    Assert.assertEquals(0, DateUtil.getYearsBetween(pairs[i], pairs[i + 1]), "" + pairs[i] + "->" + pairs[i + 1]);
+    Assert.assertEquals(DateUtil.getYearlyCoveringInfo(pairs[i], pairs[i + 1]), new DateUtil.CoveringInfo(0, false),
+      "" + pairs[i] + "->" + pairs[i + 1]);
 
     i += 2;
-    Assert.assertEquals(1, DateUtil.getYearsBetween(pairs[i], DateUtils.round(pairs[i + 1], Calendar.MONTH)), ""
-      + pairs[i] + "->" + pairs[i + 1]);
+    Assert.assertEquals(DateUtil.getYearlyCoveringInfo(pairs[i], DateUtils.round(pairs[i + 1], Calendar.MONTH)),
+      new DateUtil.CoveringInfo(1, true), ""
+        + pairs[i] + "->" + pairs[i + 1]);
 
     i += 2;
-    Assert.assertEquals(0, DateUtil.getYearsBetween(pairs[i], pairs[i + 1]), "" + pairs[i] + "->" + pairs[i + 1]);
+    Assert.assertEquals(DateUtil.getYearlyCoveringInfo(pairs[i], pairs[i + 1]), new DateUtil.CoveringInfo(0, false),
+      "" + pairs[i] + "->" + pairs[i + 1]);
 
     i += 2;
-    Assert.assertEquals(0, DateUtil.getYearsBetween(pairs[i], pairs[i + 1]), "" + pairs[i] + "->" + pairs[i + 1]);
+    Assert.assertEquals(DateUtil.getYearlyCoveringInfo(pairs[i], pairs[i + 1]), new DateUtil.CoveringInfo(0, false),
+      "" + pairs[i] + "->" + pairs[i + 1]);
 
     i += 2;
-    Assert.assertEquals(1, DateUtil.getYearsBetween(pairs[i], pairs[i + 1]), "" + pairs[i] + "->" + pairs[i + 1]);
+    Assert.assertEquals(DateUtil.getYearlyCoveringInfo(pairs[i], pairs[i + 1]), new DateUtil.CoveringInfo(1, false),
+      "" + pairs[i] + "->" + pairs[i + 1]);
   }
 
   @Test
   public void testWeeksBetween() throws Exception {
-    int weeks = DateUtil.getWeeksBetween(DATE_FMT.parse("2013-May-26"), DATE_FMT.parse("2013-Jun-2"));
-    Assert.assertEquals(1, weeks, "2013-May-26 to 2013-Jun-2");
+    DateUtil.CoveringInfo weeks;
+
+    weeks = DateUtil.getWeeklyCoveringInfo(DATE_FMT.parse("2013-May-26"), DATE_FMT.parse("2013-Jun-2"));
+    Assert.assertEquals(weeks, new DateUtil.CoveringInfo(1, true), "2013-May-26 to 2013-Jun-2");
+
+    weeks = DateUtil.getWeeklyCoveringInfo(DATE_FMT.parse("2013-May-27"), DATE_FMT.parse("2013-Jun-3"));
+    Assert.assertEquals(weeks, new DateUtil.CoveringInfo(0, false), "2013-May-26 to 2013-Jun-2");
+
+    weeks = DateUtil.getWeeklyCoveringInfo(DATE_FMT.parse("2013-May-27"), DATE_FMT.parse("2013-Jun-9"));
+    Assert.assertEquals(weeks, new DateUtil.CoveringInfo(1, false), "2013-May-26 to 2013-Jun-2");
 
-    weeks = DateUtil.getWeeksBetween(DATE_FMT.parse("2013-May-27"), DATE_FMT.parse("2013-Jun-1"));
-    Assert.assertEquals(0, weeks, "2013-May-27 to 2013-Jun-1");
+    weeks = DateUtil.getWeeklyCoveringInfo(DATE_FMT.parse("2013-May-27"), DATE_FMT.parse("2013-Jun-1"));
+    Assert.assertEquals(weeks, new DateUtil.CoveringInfo(0, false), "2013-May-27 to 2013-Jun-1");
 
-    weeks = DateUtil.getWeeksBetween(DATE_FMT.parse("2013-May-25"), DATE_FMT.parse("2013-Jun-2"));
-    Assert.assertEquals(1, weeks, "2013-May-25 to 2013-Jun-1");
+    weeks = DateUtil.getWeeklyCoveringInfo(DATE_FMT.parse("2013-May-25"), DATE_FMT.parse("2013-Jun-2"));
+    Assert.assertEquals(weeks, new DateUtil.CoveringInfo(1, false), "2013-May-25 to 2013-Jun-1");
 
-    weeks = DateUtil.getWeeksBetween(DATE_FMT.parse("2013-May-26"), DATE_FMT.parse("2013-Jun-9"));
-    Assert.assertEquals(2, weeks, "2013-May-26 to 2013-Jun-8");
+    weeks = DateUtil.getWeeklyCoveringInfo(DATE_FMT.parse("2013-May-26"), DATE_FMT.parse("2013-Jun-9"));
+    Assert.assertEquals(weeks, new DateUtil.CoveringInfo(2, true), "2013-May-26 to 2013-Jun-8");
 
-    weeks = DateUtil.getWeeksBetween(DATE_FMT.parse("2013-May-26"), DATE_FMT.parse("2013-Jun-10"));
-    Assert.assertEquals(2, weeks, "2013-May-26 to 2013-Jun-10");
+    weeks = DateUtil.getWeeklyCoveringInfo(DATE_FMT.parse("2013-May-26"), DATE_FMT.parse("2013-Jun-10"));
+    Assert.assertEquals(weeks, new DateUtil.CoveringInfo(2, false), "2013-May-26 to 2013-Jun-10");
   }
 
   @Test


[15/50] [abbrv] incubator-lens git commit: LENS-440 : Remove rewriter cache and synchronization on rewrite in JDBCDruver (amareshwari)

Posted by am...@apache.org.
LENS-440 : Remove rewriter cache and synchronization on rewrite in JDBCDruver (amareshwari)


Project: http://git-wip-us.apache.org/repos/asf/incubator-lens/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-lens/commit/4a5c02a0
Tree: http://git-wip-us.apache.org/repos/asf/incubator-lens/tree/4a5c02a0
Diff: http://git-wip-us.apache.org/repos/asf/incubator-lens/diff/4a5c02a0

Branch: refs/heads/current-release-line
Commit: 4a5c02a02ae85ecab7d52d9f59c6238923d41593
Parents: 56c5813
Author: Amareshwari Sriramadasu <am...@apache.org>
Authored: Tue Mar 31 10:03:09 2015 +0530
Committer: Amareshwari Sriramadasu <am...@apache.org>
Committed: Tue Mar 31 10:03:09 2015 +0530

----------------------------------------------------------------------
 .../lens/driver/jdbc/ColumnarSQLRewriter.java   |  2 +-
 .../org/apache/lens/driver/jdbc/JDBCDriver.java | 36 +++++++-------------
 2 files changed, 14 insertions(+), 24 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/4a5c02a0/lens-driver-jdbc/src/main/java/org/apache/lens/driver/jdbc/ColumnarSQLRewriter.java
----------------------------------------------------------------------
diff --git a/lens-driver-jdbc/src/main/java/org/apache/lens/driver/jdbc/ColumnarSQLRewriter.java b/lens-driver-jdbc/src/main/java/org/apache/lens/driver/jdbc/ColumnarSQLRewriter.java
index d5dc9a3..749e0dd 100644
--- a/lens-driver-jdbc/src/main/java/org/apache/lens/driver/jdbc/ColumnarSQLRewriter.java
+++ b/lens-driver-jdbc/src/main/java/org/apache/lens/driver/jdbc/ColumnarSQLRewriter.java
@@ -1026,7 +1026,7 @@ public class ColumnarSQLRewriter implements QueryRewriter {
    * @see org.apache.lens.server.api.query.QueryRewriter#rewrite(java.lang.String, org.apache.hadoop.conf.Configuration)
    */
   @Override
-  public synchronized String rewrite(String query, Configuration conf, HiveConf metastoreConf) throws LensException {
+  public String rewrite(String query, Configuration conf, HiveConf metastoreConf) throws LensException {
     this.query = query;
     StringBuilder mergedQuery;
     rewrittenQuery.setLength(0);

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/4a5c02a0/lens-driver-jdbc/src/main/java/org/apache/lens/driver/jdbc/JDBCDriver.java
----------------------------------------------------------------------
diff --git a/lens-driver-jdbc/src/main/java/org/apache/lens/driver/jdbc/JDBCDriver.java b/lens-driver-jdbc/src/main/java/org/apache/lens/driver/jdbc/JDBCDriver.java
index 92f7b96..2346b52 100644
--- a/lens-driver-jdbc/src/main/java/org/apache/lens/driver/jdbc/JDBCDriver.java
+++ b/lens-driver-jdbc/src/main/java/org/apache/lens/driver/jdbc/JDBCDriver.java
@@ -82,9 +82,6 @@ public class JDBCDriver implements LensDriver {
   /** The query context map. */
   private ConcurrentHashMap<QueryHandle, JdbcQueryContext> queryContextMap;
 
-  /** The rewriter cache. */
-  private ConcurrentHashMap<Class<? extends QueryRewriter>, QueryRewriter> rewriterCache;
-
   /** The conf. */
   private Configuration conf;
 
@@ -418,7 +415,6 @@ public class JDBCDriver implements LensDriver {
    */
   protected void init(Configuration conf) throws LensException {
     queryContextMap = new ConcurrentHashMap<QueryHandle, JdbcQueryContext>();
-    rewriterCache = new ConcurrentHashMap<Class<? extends QueryRewriter>, QueryRewriter>();
     asyncQueryPool = Executors.newCachedThreadPool(new ThreadFactory() {
       @Override
       public Thread newThread(Runnable runnable) {
@@ -466,23 +462,18 @@ public class JDBCDriver implements LensDriver {
    * @return the query rewriter
    * @throws LensException the lens exception
    */
-  protected synchronized QueryRewriter getQueryRewriter() throws LensException {
+  protected QueryRewriter getQueryRewriter() throws LensException {
     QueryRewriter rewriter;
     Class<? extends QueryRewriter> queryRewriterClass = conf.getClass(JDBC_QUERY_REWRITER_CLASS,
       DummyQueryRewriter.class, QueryRewriter.class);
-    if (rewriterCache.containsKey(queryRewriterClass)) {
-      rewriter = rewriterCache.get(queryRewriterClass);
-    } else {
-      try {
-        rewriter = queryRewriterClass.newInstance();
-        LOG.info("Initialized :" + queryRewriterClass);
-      } catch (Exception e) {
-        LOG.error("Unable to create rewriter object", e);
-        throw new LensException(e);
-      }
-      rewriter.init(conf);
-      rewriterCache.put(queryRewriterClass, rewriter);
+    try {
+      rewriter = queryRewriterClass.newInstance();
+      LOG.info("Initialized :" + queryRewriterClass);
+    } catch (Exception e) {
+      LOG.error("Unable to create rewriter object", e);
+      throw new LensException(e);
     }
+    rewriter.init(conf);
     return rewriter;
   }
 
@@ -633,7 +624,7 @@ public class JDBCDriver implements LensDriver {
       throw new NullPointerException("Null driver query for " + pContext.getUserQuery());
     }
     boolean validateThroughPrepare = pContext.getDriverConf(this).getBoolean(JDBC_VALIDATE_THROUGH_PREPARE,
-        DEFAULT_JDBC_VALIDATE_THROUGH_PREPARE);
+      DEFAULT_JDBC_VALIDATE_THROUGH_PREPARE);
     if (validateThroughPrepare) {
       PreparedStatement stmt = null;
       // Estimate queries need to get connection from estimate pool to make sure
@@ -686,7 +677,7 @@ public class JDBCDriver implements LensDriver {
       tmpConf.set(JDBC_POOL_IDLE_TIME, getKeyOrFallBack(tmpConf, getEstimateKey(JDBC_POOL_IDLE_TIME),
         JDBC_POOL_IDLE_TIME));
       tmpConf.set(JDBC_MAX_STATEMENTS_PER_CONNECTION, getKeyOrFallBack(tmpConf,
-          getEstimateKey(JDBC_MAX_STATEMENTS_PER_CONNECTION), JDBC_MAX_STATEMENTS_PER_CONNECTION));
+        getEstimateKey(JDBC_MAX_STATEMENTS_PER_CONNECTION), JDBC_MAX_STATEMENTS_PER_CONNECTION));
       tmpConf.set(JDBC_GET_CONNECTION_TIMEOUT, getKeyOrFallBack(tmpConf,
         getEstimateKey(JDBC_GET_CONNECTION_TIMEOUT), JDBC_GET_CONNECTION_TIMEOUT));
 
@@ -710,7 +701,7 @@ public class JDBCDriver implements LensDriver {
   }
 
   private final Map<QueryPrepareHandle, PreparedStatement> preparedQueries =
-      new HashMap<QueryPrepareHandle, PreparedStatement>();
+    new HashMap<QueryPrepareHandle, PreparedStatement>();
 
   /**
    * Internally prepare the query
@@ -905,9 +896,8 @@ public class JDBCDriver implements LensDriver {
    * @throws LensException the lens exception
    */
   @Override
-  public void registerForCompletionNotification(
-    QueryHandle handle, long timeoutMillis, QueryCompletionListener listener)
-    throws LensException {
+  public void registerForCompletionNotification(QueryHandle handle, long timeoutMillis,
+    QueryCompletionListener listener) throws LensException {
     checkConfigured();
     getQueryContext(handle).setListener(listener);
   }


[48/50] [abbrv] incubator-lens git commit: LENS-441 : Add command to get all dimtables of a dimension and get all facts of a cube (Rajat Khandelwal via amareshwari)

Posted by am...@apache.org.
LENS-441 : Add command to get all dimtables of a dimension and get all facts of a cube (Rajat Khandelwal via amareshwari)


Project: http://git-wip-us.apache.org/repos/asf/incubator-lens/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-lens/commit/309f62cc
Tree: http://git-wip-us.apache.org/repos/asf/incubator-lens/tree/309f62cc
Diff: http://git-wip-us.apache.org/repos/asf/incubator-lens/diff/309f62cc

Branch: refs/heads/current-release-line
Commit: 309f62cc0863615383ccb69f815bf55887831414
Parents: e3486e0
Author: Rajat Khandelwal <pr...@apache.org>
Authored: Wed Apr 15 06:03:20 2015 -0500
Committer: Amareshwari Sriramadasu <am...@apache.org>
Committed: Wed Apr 15 06:03:20 2015 -0500

----------------------------------------------------------------------
 .../commands/LensDimensionTableCommands.java    |  5 +-
 .../lens/cli/commands/LensFactCommands.java     |  5 +-
 .../cli/TestLensDimensionTableCommands.java     | 47 ++++++++++++--
 .../apache/lens/cli/TestLensFactCommands.java   | 22 +++++--
 lens-cli/src/test/resources/dim_table2.xml      |  2 +-
 .../java/org/apache/lens/client/LensClient.java |  9 ++-
 .../apache/lens/client/LensMetadataClient.java  | 34 ++++++++--
 .../lens/cube/metadata/CubeMetastoreClient.java | 39 ++++++-----
 .../lens/cube/parse/CandidateTableResolver.java |  2 +-
 .../cube/metadata/TestCubeMetastoreClient.java  | 12 ++--
 .../api/metastore/CubeMetastoreService.java     | 19 ++----
 .../metastore/CubeMetastoreServiceImpl.java     | 68 +++++++++-----------
 .../server/metastore/MetastoreResource.java     | 39 +++++++++--
 13 files changed, 202 insertions(+), 101 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/309f62cc/lens-cli/src/main/java/org/apache/lens/cli/commands/LensDimensionTableCommands.java
----------------------------------------------------------------------
diff --git a/lens-cli/src/main/java/org/apache/lens/cli/commands/LensDimensionTableCommands.java b/lens-cli/src/main/java/org/apache/lens/cli/commands/LensDimensionTableCommands.java
index c61be09..c3b7f37 100644
--- a/lens-cli/src/main/java/org/apache/lens/cli/commands/LensDimensionTableCommands.java
+++ b/lens-cli/src/main/java/org/apache/lens/cli/commands/LensDimensionTableCommands.java
@@ -45,8 +45,9 @@ public class LensDimensionTableCommands extends BaseLensCommand implements Comma
    * @return the string
    */
   @CliCommand(value = "show dimtables", help = "show list of dimension tables in database")
-  public String showDimensionTables() {
-    List<String> dims = getClient().getAllDimensionTables();
+  public String showDimensionTables(
+    @CliOption(key = {"", "dimension"}, mandatory = false, help = "<optional dimension name>") String dimensionName) {
+    List<String> dims = getClient().getAllDimensionTables(dimensionName);
     if (dims != null) {
       return Joiner.on("\n").join(dims);
     } else {

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/309f62cc/lens-cli/src/main/java/org/apache/lens/cli/commands/LensFactCommands.java
----------------------------------------------------------------------
diff --git a/lens-cli/src/main/java/org/apache/lens/cli/commands/LensFactCommands.java b/lens-cli/src/main/java/org/apache/lens/cli/commands/LensFactCommands.java
index a69d361..1e8abf9 100644
--- a/lens-cli/src/main/java/org/apache/lens/cli/commands/LensFactCommands.java
+++ b/lens-cli/src/main/java/org/apache/lens/cli/commands/LensFactCommands.java
@@ -45,8 +45,9 @@ public class LensFactCommands extends BaseLensCommand implements CommandMarker {
    * @return the string
    */
   @CliCommand(value = "show facts", help = "display list of fact tables in database")
-  public String showFacts() {
-    List<String> facts = getClient().getAllFactTables();
+  public String showFacts(
+    @CliOption(key = {"", "cube"}, mandatory = false, help = "<optional cube name>") String cubeName) {
+    List<String> facts = getClient().getAllFactTables(cubeName);
     if (facts != null) {
       return Joiner.on("\n").join(facts);
     } else {

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/309f62cc/lens-cli/src/test/java/org/apache/lens/cli/TestLensDimensionTableCommands.java
----------------------------------------------------------------------
diff --git a/lens-cli/src/test/java/org/apache/lens/cli/TestLensDimensionTableCommands.java b/lens-cli/src/test/java/org/apache/lens/cli/TestLensDimensionTableCommands.java
index 5430c7f..5d8d453 100644
--- a/lens-cli/src/test/java/org/apache/lens/cli/TestLensDimensionTableCommands.java
+++ b/lens-cli/src/test/java/org/apache/lens/cli/TestLensDimensionTableCommands.java
@@ -22,6 +22,9 @@ import java.io.*;
 import java.net.URISyntaxException;
 import java.net.URL;
 
+import javax.ws.rs.NotFoundException;
+
+import org.apache.lens.cli.commands.LensDimensionCommands;
 import org.apache.lens.cli.commands.LensDimensionTableCommands;
 import org.apache.lens.client.LensClient;
 
@@ -43,6 +46,7 @@ public class TestLensDimensionTableCommands extends LensCliApplicationTest {
 
   /** The command. */
   private static LensDimensionTableCommands command = null;
+  private static LensDimensionCommands dimensionCommand = null;
 
   private static LensDimensionTableCommands getCommand() {
     if (command == null) {
@@ -53,6 +57,16 @@ public class TestLensDimensionTableCommands extends LensCliApplicationTest {
     return command;
   }
 
+  private static LensDimensionCommands getDimensionCommand() {
+    if (dimensionCommand == null) {
+      LensClient client = new LensClient();
+      dimensionCommand = new LensDimensionCommands();
+      dimensionCommand.setClient(client);
+    }
+    return dimensionCommand;
+  }
+
+
   /**
    * Test dim table commands.
    *
@@ -61,11 +75,23 @@ public class TestLensDimensionTableCommands extends LensCliApplicationTest {
    */
   @Test
   public void testDimTableCommands() throws IOException, URISyntaxException {
+    createDimension();
     addDim1Table("dim_table2", "dim_table2.xml", DIM_LOCAL);
     updateDim1Table();
     testDimStorageActions();
     testDimPartitionActions();
     dropDim1Table();
+    dropDimension();
+  }
+
+  private void dropDimension() {
+    getDimensionCommand().dropDimension("test_dim");
+  }
+
+  private void createDimension() throws URISyntaxException {
+    URL dimensionSpec = TestLensDimensionTableCommands.class.getClassLoader().getResource("test-dimension.xml");
+    getDimensionCommand().createDimension(new File(dimensionSpec.toURI()).getAbsolutePath());
+
   }
 
   /**
@@ -79,7 +105,9 @@ public class TestLensDimensionTableCommands extends LensCliApplicationTest {
   public static synchronized void addDim1Table(String tableName, String specName, String storageName)
     throws IOException {
     LensDimensionTableCommands command = getCommand();
-    String dimList = command.showDimensionTables();
+    String dimList = command.showDimensionTables(null);
+    Assert.assertEquals(command.showDimensionTables("test_dim"), dimList);
+
     // add local storage before adding fact table
     TestLensStorageCommands.addLocalStorage(storageName);
     URL dimSpec = TestLensDimensionTableCommands.class.getClassLoader().getResource(specName);
@@ -91,7 +119,18 @@ public class TestLensDimensionTableCommands extends LensCliApplicationTest {
       Assert.fail("Unable to create dimtable" + e.getMessage());
     }
 
-    dimList = command.showDimensionTables();
+    dimList = command.showDimensionTables(null);
+    Assert.assertEquals(command.showDimensionTables("test_dim"), dimList);
+    try {
+      Assert.assertEquals(command.showDimensionTables("blah"), dimList);
+      Assert.fail();
+    } catch (NotFoundException e) {
+    }
+    try {
+      Assert.assertEquals(command.showDimensionTables("dim_table2"), dimList);
+      Assert.fail();
+    } catch (NotFoundException e) {
+    }
     Assert.assertTrue(dimList.contains(tableName), "dim_table table should be found");
   }
 
@@ -220,10 +259,10 @@ public class TestLensDimensionTableCommands extends LensCliApplicationTest {
    */
   public static void dropDim1Table() {
     LensDimensionTableCommands command = getCommand();
-    String dimList = command.showDimensionTables();
+    String dimList = command.showDimensionTables(null);
     Assert.assertEquals("dim_table2", dimList, "dim_table table should be found");
     command.dropDimensionTable("dim_table2", false);
-    dimList = command.showDimensionTables();
+    dimList = command.showDimensionTables(null);
     Assert.assertEquals("No Dimensions Found", dimList, "Dim tables should not be found");
     TestLensStorageCommands.dropStorage(DIM_LOCAL);
   }

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/309f62cc/lens-cli/src/test/java/org/apache/lens/cli/TestLensFactCommands.java
----------------------------------------------------------------------
diff --git a/lens-cli/src/test/java/org/apache/lens/cli/TestLensFactCommands.java b/lens-cli/src/test/java/org/apache/lens/cli/TestLensFactCommands.java
index 244b9ec..b906f18 100644
--- a/lens-cli/src/test/java/org/apache/lens/cli/TestLensFactCommands.java
+++ b/lens-cli/src/test/java/org/apache/lens/cli/TestLensFactCommands.java
@@ -22,6 +22,8 @@ import java.io.*;
 import java.net.URISyntaxException;
 import java.net.URL;
 
+import javax.ws.rs.NotFoundException;
+
 import org.apache.lens.cli.commands.LensCubeCommands;
 import org.apache.lens.cli.commands.LensFactCommands;
 import org.apache.lens.client.LensClient;
@@ -98,7 +100,8 @@ public class TestLensFactCommands extends LensCliApplicationTest {
    */
   public static void addFact1Table() throws IOException {
     LensFactCommands command = getCommand();
-    String factList = command.showFacts();
+    String factList = command.showFacts(null);
+    Assert.assertEquals(command.showFacts("sample_cube"), factList);
     Assert.assertEquals("No Facts Found", factList, "Fact tables should not be found");
     // add local storage before adding fact table
     TestLensStorageCommands.addLocalStorage(FACT_LOCAL);
@@ -108,7 +111,18 @@ public class TestLensFactCommands extends LensCliApplicationTest {
     } catch (Exception e) {
       Assert.fail("Unable to create fact table" + e.getMessage());
     }
-    factList = command.showFacts();
+    factList = command.showFacts(null);
+    Assert.assertEquals(command.showFacts("sample_cube"), factList);
+    try {
+      Assert.assertEquals(command.showFacts("blah"), factList);
+      Assert.fail();
+    } catch (NotFoundException e) {
+    }
+    try {
+      Assert.assertEquals(command.showFacts("fact1"), factList);
+      Assert.fail();
+    } catch (NotFoundException e) {
+    }
     Assert.assertEquals("fact1", factList, "Fact1 table should be found");
   }
 
@@ -257,10 +271,10 @@ public class TestLensFactCommands extends LensCliApplicationTest {
    */
   public static void dropFact1Table() {
     LensFactCommands command = getCommand();
-    String factList = command.showFacts();
+    String factList = command.showFacts(null);
     Assert.assertEquals("fact1", factList, "Fact1 table should be found");
     command.dropFact("fact1", false);
-    factList = command.showFacts();
+    factList = command.showFacts(null);
     Assert.assertEquals("No Facts Found", factList, "Fact tables should not be found");
     TestLensStorageCommands.dropStorage(FACT_LOCAL);
   }

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/309f62cc/lens-cli/src/test/resources/dim_table2.xml
----------------------------------------------------------------------
diff --git a/lens-cli/src/test/resources/dim_table2.xml b/lens-cli/src/test/resources/dim_table2.xml
index 3631065..1d6e138 100644
--- a/lens-cli/src/test/resources/dim_table2.xml
+++ b/lens-cli/src/test/resources/dim_table2.xml
@@ -19,7 +19,7 @@
   under the License.
 
 -->
-<x_dimension_table dimension_name="test_dim2" table_name="dim_table2" weight="0.0" xmlns="uri:lens:cube:0.1"
+<x_dimension_table dimension_name="test_dim" table_name="dim_table2" weight="0.0" xmlns="uri:lens:cube:0.1"
   xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="uri:lens:cube:0.1 cube-0.1.xsd ">
   <columns>
     <column comment="ID" name="id" type="INT" />

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/309f62cc/lens-client/src/main/java/org/apache/lens/client/LensClient.java
----------------------------------------------------------------------
diff --git a/lens-client/src/main/java/org/apache/lens/client/LensClient.java b/lens-client/src/main/java/org/apache/lens/client/LensClient.java
index 449c8ec..016e4ab 100644
--- a/lens-client/src/main/java/org/apache/lens/client/LensClient.java
+++ b/lens-client/src/main/java/org/apache/lens/client/LensClient.java
@@ -212,11 +212,18 @@ public class LensClient {
     return mc.getAllFactTables();
   }
 
-
+  public List<String> getAllFactTables(String cubeName) {
+    LOG.debug("Getting all fact table");
+    return mc.getAllFactTables(cubeName);
+  }
   public List<String> getAllDimensionTables() {
     LOG.debug("Getting all dimension table");
     return mc.getAllDimensionTables();
   }
+  public List<String> getAllDimensionTables(String dimensionName) {
+    LOG.debug("Getting all dimension table");
+    return mc.getAllDimensionTables(dimensionName);
+  }
 
   public List<String> getAllCubes() {
     LOG.debug("Getting all cubes in database");

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/309f62cc/lens-client/src/main/java/org/apache/lens/client/LensMetadataClient.java
----------------------------------------------------------------------
diff --git a/lens-client/src/main/java/org/apache/lens/client/LensMetadataClient.java b/lens-client/src/main/java/org/apache/lens/client/LensMetadataClient.java
index 1e243e8..5e406b5 100644
--- a/lens-client/src/main/java/org/apache/lens/client/LensMetadataClient.java
+++ b/lens-client/src/main/java/org/apache/lens/client/LensMetadataClient.java
@@ -406,25 +406,31 @@ public class LensMetadataClient {
     return result;
   }
 
-  public List<XFactTable> getAllFactTables(String cubeName) {
+  public List<String> getAllFactTables(String cubeName) {
+    if (cubeName == null) {
+      return getAllFactTables();
+    }
     WebTarget target = getMetastoreWebTarget();
-    List<XFactTable> factTables = target.path("cubes").path(cubeName).path("facts")
+    StringList factTables;
+    factTables = target.path("cubes").path(cubeName).path("facts")
       .queryParam("sessionid", this.connection.getSessionHandle())
       .request(MediaType.APPLICATION_XML)
-      .get(new GenericType<List<XFactTable>>() {
-      });
-    return factTables;
+      .get(StringList.class);
+    return factTables.getElements();
   }
 
   public List<String> getAllFactTables() {
     WebTarget target = getMetastoreWebTarget();
-    StringList factTables = target.path("facts")
+    StringList factTables;
+    factTables = target.path("facts")
       .queryParam("sessionid", this.connection.getSessionHandle())
       .request(MediaType.APPLICATION_XML)
       .get(StringList.class);
+
     return factTables.getElements();
   }
 
+
   public APIResult deleteAllFactTables(boolean cascade) {
     WebTarget target = getMetastoreWebTarget();
     APIResult result = target.path("facts")
@@ -636,9 +642,23 @@ public class LensMetadataClient {
   }
 
 
+  public List<String> getAllDimensionTables(String dimensionName) {
+    if (dimensionName == null) {
+      return getAllDimensionTables();
+    }
+    WebTarget target = getMetastoreWebTarget();
+    StringList dimtables;
+    dimtables = target.path("dimensions").path(dimensionName).path("dimtables")
+      .queryParam("sessionid", this.connection.getSessionHandle())
+      .request(MediaType.APPLICATION_XML)
+      .get(StringList.class);
+    return dimtables.getElements();
+  }
+
   public List<String> getAllDimensionTables() {
     WebTarget target = getMetastoreWebTarget();
-    StringList dimtables = target.path("dimtables")
+    StringList dimtables;
+    dimtables = target.path("dimtables")
       .queryParam("sessionid", this.connection.getSessionHandle())
       .request(MediaType.APPLICATION_XML)
       .get(StringList.class);

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/309f62cc/lens-cube/src/main/java/org/apache/lens/cube/metadata/CubeMetastoreClient.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/metadata/CubeMetastoreClient.java b/lens-cube/src/main/java/org/apache/lens/cube/metadata/CubeMetastoreClient.java
index fa56213..324002f 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/metadata/CubeMetastoreClient.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/metadata/CubeMetastoreClient.java
@@ -111,7 +111,7 @@ public class CubeMetastoreClient {
     String partCol = cube.getPartitionColumnOfTimeDim(timeDimension);
     Date max = new Date(Long.MIN_VALUE);
     boolean updated = false;
-    for (CubeFactTable fact : getAllFactTables(cube)) {
+    for (CubeFactTable fact : getAllFacts(cube)) {
       for (String storage : fact.getStorages()) {
         for (UpdatePeriod updatePeriod : fact.getUpdatePeriods().get(storage)) {
           PartitionTimeline timeline = partitionTimelineCache.get(fact.getName(), storage, updatePeriod,
@@ -1380,6 +1380,9 @@ public class CubeMetastoreClient {
    * @throws HiveException
    */
   public CubeInterface getCube(String tableName) throws HiveException {
+    if (tableName == null) {
+      return null;
+    }
     tableName = tableName.trim().toLowerCase();
     CubeInterface cube = allCubes.get(tableName);
     if (cube == null) {
@@ -1408,6 +1411,9 @@ public class CubeMetastoreClient {
    * @throws HiveException
    */
   public Dimension getDimension(String tableName) throws HiveException {
+    if (tableName == null) {
+      return null;
+    }
     tableName = tableName.trim().toLowerCase();
     Dimension dim = allDims.get(tableName);
     if (dim == null) {
@@ -1617,22 +1623,25 @@ public class CubeMetastoreClient {
    * @return List of fact tables
    * @throws HiveException
    */
-  public List<CubeFactTable> getAllFactTables(CubeInterface cube) throws HiveException {
-    if (cube instanceof Cube) {
-      List<CubeFactTable> cubeFacts = new ArrayList<CubeFactTable>();
-      try {
-        for (CubeFactTable fact : getAllFacts()) {
-          if (fact.getCubeName().equalsIgnoreCase(((Cube) cube).getName())) {
-            cubeFacts.add(fact);
-          }
+  public List<CubeFactTable> getAllFacts(CubeInterface cube) throws HiveException {
+    String cubeName = null;
+    if (cube != null) {
+      if (cube instanceof DerivedCube) {
+        cube = ((DerivedCube) cube).getParent();
+      }
+      cubeName = cube.getName();
+    }
+    List<CubeFactTable> cubeFacts = new ArrayList<CubeFactTable>();
+    try {
+      for (CubeFactTable fact : getAllFacts()) {
+        if (cubeName == null || fact.getCubeName().equalsIgnoreCase(cubeName)) {
+          cubeFacts.add(fact);
         }
-      } catch (HiveException e) {
-        throw new HiveException("Could not get all fact tables of " + cube, e);
       }
-      return cubeFacts;
-    } else {
-      return getAllFactTables(((DerivedCube) cube).getParent());
+    } catch (HiveException e) {
+      throw new HiveException("Could not get all fact tables of " + cube, e);
     }
+    return cubeFacts;
   }
 
   /**
@@ -1689,7 +1698,7 @@ public class CubeMetastoreClient {
     List<CubeDimensionTable> dimTables = new ArrayList<CubeDimensionTable>();
     try {
       for (CubeDimensionTable dimTbl : getAllDimensionTables()) {
-        if (dimTbl.getDimName().equalsIgnoreCase(dim.getName().toLowerCase())) {
+        if (dim == null || dimTbl.getDimName().equalsIgnoreCase(dim.getName().toLowerCase())) {
           dimTables.add(dimTbl);
         }
       }

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/309f62cc/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateTableResolver.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateTableResolver.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateTableResolver.java
index a1022fd..6b6a09b 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateTableResolver.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateTableResolver.java
@@ -89,7 +89,7 @@ class CandidateTableResolver implements ContextRewriter {
   private void populateCandidateTables(CubeQueryContext cubeql) throws SemanticException {
     try {
       if (cubeql.getCube() != null) {
-        List<CubeFactTable> factTables = cubeql.getMetastoreClient().getAllFactTables(cubeql.getCube());
+        List<CubeFactTable> factTables = cubeql.getMetastoreClient().getAllFacts(cubeql.getCube());
         if (factTables.isEmpty()) {
           throw new SemanticException(ErrorMsg.NO_CANDIDATE_FACT_AVAILABLE, cubeql.getCube().getName()
             + " does not have any facts");

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/309f62cc/lens-cube/src/test/java/org/apache/lens/cube/metadata/TestCubeMetastoreClient.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/cube/metadata/TestCubeMetastoreClient.java b/lens-cube/src/test/java/org/apache/lens/cube/metadata/TestCubeMetastoreClient.java
index 9ceea48..96f2b9b 100644
--- a/lens-cube/src/test/java/org/apache/lens/cube/metadata/TestCubeMetastoreClient.java
+++ b/lens-cube/src/test/java/org/apache/lens/cube/metadata/TestCubeMetastoreClient.java
@@ -690,7 +690,7 @@ public class TestCubeMetastoreClient {
     addedMsr = altered.getMeasureByName("testaddmsr1");
     Assert.assertNotNull(addedMsr);
     Assert.assertEquals(addedMsr.getType(), "double");
-    Assert.assertTrue(client.getAllFactTables(altered).isEmpty());
+    Assert.assertTrue(client.getAllFacts(altered).isEmpty());
   }
 
   @Test(priority = 2)
@@ -765,8 +765,8 @@ public class TestCubeMetastoreClient {
     Table cubeTbl = client.getHiveTable(factName);
     Assert.assertTrue(client.isFactTable(cubeTbl));
     Assert.assertTrue(client.isFactTableForCube(cubeTbl, CUBE_NAME));
-    Assert.assertEquals(client.getAllFactTables(client.getCube(CUBE_NAME)).get(0).getName(), factName.toLowerCase());
-    Assert.assertEquals(client.getAllFactTables(client.getCube(DERIVED_CUBE_NAME)).get(0).getName(),
+    Assert.assertEquals(client.getAllFacts(client.getCube(CUBE_NAME)).get(0).getName(), factName.toLowerCase());
+    Assert.assertEquals(client.getAllFacts(client.getCube(DERIVED_CUBE_NAME)).get(0).getName(),
       factName.toLowerCase());
     CubeFactTable cubeFact2 = new CubeFactTable(cubeTbl);
     Assert.assertTrue(cubeFact.equals(cubeFact2));
@@ -955,13 +955,13 @@ public class TestCubeMetastoreClient {
     client.dropStorageFromFact(factName, c2);
     storageTableName = MetastoreUtil.getFactStorageTableName(factName, c2);
     Assert.assertFalse(client.tableExists(storageTableName));
-    List<CubeFactTable> cubeFacts = client.getAllFactTables(client.getCube(CUBE_NAME));
+    List<CubeFactTable> cubeFacts = client.getAllFacts(client.getCube(CUBE_NAME));
     List<String> cubeFactNames = new ArrayList<String>();
     for (CubeFactTable cfact : cubeFacts) {
       cubeFactNames.add(cfact.getName());
     }
     Assert.assertTrue(cubeFactNames.contains(factName.toLowerCase()));
-    cubeFacts = client.getAllFactTables(client.getCube(DERIVED_CUBE_NAME));
+    cubeFacts = client.getAllFacts(client.getCube(DERIVED_CUBE_NAME));
     cubeFactNames = new ArrayList<String>();
     for (CubeFactTable cfact : cubeFacts) {
       cubeFactNames.add(cfact.getName());
@@ -971,7 +971,7 @@ public class TestCubeMetastoreClient {
     Assert.assertFalse(client.tableExists(MetastoreUtil.getFactStorageTableName(factName, c1)));
     Assert.assertFalse(client.tableExists(MetastoreUtil.getFactStorageTableName(factName, c3)));
     Assert.assertFalse(client.tableExists(factName));
-    cubeFacts = client.getAllFactTables(cube);
+    cubeFacts = client.getAllFacts(cube);
     cubeFactNames = new ArrayList<String>();
     for (CubeFactTable cfact : cubeFacts) {
       cubeFactNames.add(cfact.getName());

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/309f62cc/lens-server-api/src/main/java/org/apache/lens/server/api/metastore/CubeMetastoreService.java
----------------------------------------------------------------------
diff --git a/lens-server-api/src/main/java/org/apache/lens/server/api/metastore/CubeMetastoreService.java b/lens-server-api/src/main/java/org/apache/lens/server/api/metastore/CubeMetastoreService.java
index 9de5f79..090e0d7 100644
--- a/lens-server-api/src/main/java/org/apache/lens/server/api/metastore/CubeMetastoreService.java
+++ b/lens-server-api/src/main/java/org/apache/lens/server/api/metastore/CubeMetastoreService.java
@@ -322,13 +322,15 @@ public interface CubeMetastoreService {
   void dropStorageOfDimTable(LensSessionHandle sessionid, String dimTblName, String storage) throws LensException;
 
   /**
-   * Get all dimension tables
+   * Get all dimension tables. dimensionName is an optional filter of dimension name.
+   * If provided, only the dimension tables belonging to given dimension will be returned
    *
    * @param sessionid
+   * @param dimensionName dimension name to be filtered with. Optional
    * @return
    * @throws LensException
    */
-  List<String> getAllDimTableNames(LensSessionHandle sessionid) throws LensException;
+  List<String> getAllDimTableNames(LensSessionHandle sessionid, String dimensionName) throws LensException;
 
   /**
    * Get all partitions of a dimension table in a storage
@@ -367,16 +369,6 @@ public interface CubeMetastoreService {
     XPartitionList partitions) throws LensException;
 
   /**
-   * Get all facts of cube. Cube can also be a derived cube
-   *
-   * @param sessionid The session id
-   * @param cubeName  The cube name
-   * @return List of FactTable objects
-   * @throws LensException
-   */
-  List<XFactTable> getAllFactsOfCube(LensSessionHandle sessionid, String cubeName) throws LensException;
-
-  /**
    * Get fact table given by name
    *
    * @param sessionid The sessionid
@@ -418,10 +410,11 @@ public interface CubeMetastoreService {
    * Get all fact names
    *
    * @param sessionid The sessionid
+   * @param cubeName optional filter filter facts by cube name.
    * @return List of fact table names
    * @throws LensException
    */
-  List<String> getAllFactNames(LensSessionHandle sessionid) throws LensException;
+  List<String> getAllFactNames(LensSessionHandle sessionid, String cubeName) throws LensException;
 
   /**
    * Get all storages of fact

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/309f62cc/lens-server/src/main/java/org/apache/lens/server/metastore/CubeMetastoreServiceImpl.java
----------------------------------------------------------------------
diff --git a/lens-server/src/main/java/org/apache/lens/server/metastore/CubeMetastoreServiceImpl.java b/lens-server/src/main/java/org/apache/lens/server/metastore/CubeMetastoreServiceImpl.java
index 0e6d057..1a2b5b5 100644
--- a/lens-server/src/main/java/org/apache/lens/server/metastore/CubeMetastoreServiceImpl.java
+++ b/lens-server/src/main/java/org/apache/lens/server/metastore/CubeMetastoreServiceImpl.java
@@ -416,6 +416,28 @@ public class CubeMetastoreServiceImpl extends LensService implements CubeMetasto
   }
 
   @Override
+  public List<String> getAllDimTableNames(LensSessionHandle sessionid, String dimensionName) throws LensException {
+    try {
+      acquire(sessionid);
+      CubeMetastoreClient client = getClient(sessionid);
+      Dimension dimension = client.getDimension(dimensionName);
+      if (dimensionName != null && dimension == null) {
+        throw new LensException("Could not get table: " + dimensionName + " as a dimension");
+      }
+      Collection<CubeDimensionTable> dims = client.getAllDimensionTables(dimension);
+      List<String> dimNames = new ArrayList<String>(dims.size());
+      for (CubeDimensionTable cdt : dims) {
+        dimNames.add(cdt.getName());
+      }
+      return dimNames;
+    } catch (HiveException e) {
+      throw new LensException(e);
+    } finally {
+      release(sessionid);
+    }
+  }
+
+  @Override
   public void dropAllStoragesOfFact(LensSessionHandle sessionid, String factName) throws LensException {
     try {
       acquire(sessionid);
@@ -458,27 +480,6 @@ public class CubeMetastoreServiceImpl extends LensService implements CubeMetasto
   }
 
   @Override
-  public List<XFactTable> getAllFactsOfCube(LensSessionHandle sessionid, String cubeName) throws LensException {
-    try {
-      acquire(sessionid);
-      CubeMetastoreClient msClient = getClient(sessionid);
-      List<CubeFactTable> cubeFacts = msClient.getAllFactTables(msClient.getCube(cubeName));
-      if (cubeFacts != null && !cubeFacts.isEmpty()) {
-        List<XFactTable> facts = new ArrayList<XFactTable>(cubeFacts.size());
-        for (CubeFactTable cft : cubeFacts) {
-          facts.add(JAXBUtils.factTableFromCubeFactTable(cft));
-        }
-        return facts;
-      }
-    } catch (HiveException e) {
-      throw new LensException(e);
-    } finally {
-      release(sessionid);
-    }
-    return null;
-  }
-
-  @Override
   public XFactTable getFactTable(LensSessionHandle sessionid, String fact) throws LensException {
     try {
       acquire(sessionid);
@@ -550,10 +551,15 @@ public class CubeMetastoreServiceImpl extends LensService implements CubeMetasto
   }
 
   @Override
-  public List<String> getAllFactNames(LensSessionHandle sessionid) throws LensException {
+  public List<String> getAllFactNames(LensSessionHandle sessionid, String cubeName) throws LensException {
     try {
       acquire(sessionid);
-      Collection<CubeFactTable> facts = getClient(sessionid).getAllFacts();
+      CubeMetastoreClient client = getClient(sessionid);
+      CubeInterface fact = client.getCube(cubeName);
+      if (cubeName != null && fact == null) {
+        throw new LensException("Could not get table: " + cubeName + " as a cube");
+      }
+      Collection<CubeFactTable> facts = client.getAllFacts(fact);
       List<String> factNames = new ArrayList<String>(facts.size());
       for (CubeFactTable cft : facts) {
         factNames.add(cft.getName());
@@ -566,22 +572,6 @@ public class CubeMetastoreServiceImpl extends LensService implements CubeMetasto
     }
   }
 
-  @Override
-  public List<String> getAllDimTableNames(LensSessionHandle sessionid) throws LensException {
-    try {
-      acquire(sessionid);
-      Collection<CubeDimensionTable> dims = getClient(sessionid).getAllDimensionTables();
-      List<String> dimNames = new ArrayList<String>(dims.size());
-      for (CubeDimensionTable cdt : dims) {
-        dimNames.add(cdt.getName());
-      }
-      return dimNames;
-    } catch (HiveException e) {
-      throw new LensException(e);
-    } finally {
-      release(sessionid);
-    }
-  }
 
   @Override
   public List<String> getStoragesOfFact(LensSessionHandle sessionid, String fact) throws LensException {

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/309f62cc/lens-server/src/main/java/org/apache/lens/server/metastore/MetastoreResource.java
----------------------------------------------------------------------
diff --git a/lens-server/src/main/java/org/apache/lens/server/metastore/MetastoreResource.java b/lens-server/src/main/java/org/apache/lens/server/metastore/MetastoreResource.java
index 06c352e..a6178e2 100644
--- a/lens-server/src/main/java/org/apache/lens/server/metastore/MetastoreResource.java
+++ b/lens-server/src/main/java/org/apache/lens/server/metastore/MetastoreResource.java
@@ -37,6 +37,8 @@ import org.apache.log4j.Logger;
 
 import org.glassfish.jersey.media.multipart.FormDataParam;
 
+import com.google.common.collect.Lists;
+
 /**
  * metastore resource api
  * <p/>
@@ -341,9 +343,13 @@ public class MetastoreResource {
 
 
   private void checkTableNotFound(LensException e, String table) {
+    List<String> messages = Lists.newArrayList();
+    messages.add(e.getMessage());
     if (e.getCause() instanceof HiveException) {
-      HiveException hiveErr = (HiveException) e.getCause();
-      if (hiveErr.getMessage().startsWith("Could not get table")) {
+      messages.add(e.getCause().getMessage());
+    }
+    for (String message : messages) {
+      if (message.startsWith("Could not get table")) {
         throw new NotFoundException("Table not found " + table, e);
       }
     }
@@ -714,6 +720,27 @@ public class MetastoreResource {
   }
 
   /**
+   * Get all dimtables that belong to a dimension in the metastore
+   *
+   * @param sessionid The sessionid in which user is working
+   * @param dimensionName name of the dimension
+   * @return List of {@link XDimensionTable} objects
+   */
+  @GET
+  @Path("/dimensions/{dimName}/dimtables")
+  public StringList getAllDimensionTablesOfDimension(
+    @QueryParam("sessionid") LensSessionHandle sessionid, @PathParam("dimName") String dimensionName)
+    throws LensException {
+    checkSessionId(sessionid);
+    try {
+      return new StringList(getSvc().getAllDimTableNames(sessionid, dimensionName));
+    } catch (LensException exc) {
+      checkTableNotFound(exc, dimensionName);
+      throw exc;
+    }
+  }
+
+  /**
    * Get all facts that belong to a cube in the metastore
    *
    * @param sessionid The sessionid in which user is working
@@ -722,12 +749,12 @@ public class MetastoreResource {
    */
   @GET
   @Path("/cubes/{cubeName}/facts")
-  public List<XFactTable> getAllFactsOfCube(
+  public StringList getAllFactsOfCube(
     @QueryParam("sessionid") LensSessionHandle sessionid, @PathParam("cubeName") String cubeName)
     throws LensException {
     checkSessionId(sessionid);
     try {
-      return getSvc().getAllFactsOfCube(sessionid, cubeName);
+      return new StringList(getSvc().getAllFactNames(sessionid, cubeName));
     } catch (LensException exc) {
       checkTableNotFound(exc, cubeName);
       throw exc;
@@ -744,7 +771,7 @@ public class MetastoreResource {
   @Path("/facts")
   public StringList getAllFacts(@QueryParam("sessionid") LensSessionHandle sessionid) throws LensException {
     checkSessionId(sessionid);
-    return new StringList(getSvc().getAllFactNames(sessionid));
+    return new StringList(getSvc().getAllFactNames(sessionid, null));
   }
 
   /**
@@ -1147,7 +1174,7 @@ public class MetastoreResource {
   @GET
   @Path("/dimtables")
   public StringList getAllDims(@QueryParam("sessionid") LensSessionHandle sessionid) throws LensException {
-    return new StringList(getSvc().getAllDimTableNames(sessionid));
+    return new StringList(getSvc().getAllDimTableNames(sessionid, null));
   }
 
   /**


[25/50] [abbrv] incubator-lens git commit: Lens-465 : Refactor ml packages. (sharad)

Posted by am...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/0f5ea4c7/lens-ml-lib/src/main/java/org/apache/lens/ml/server/MLServiceResource.java
----------------------------------------------------------------------
diff --git a/lens-ml-lib/src/main/java/org/apache/lens/ml/server/MLServiceResource.java b/lens-ml-lib/src/main/java/org/apache/lens/ml/server/MLServiceResource.java
new file mode 100644
index 0000000..f9c954e
--- /dev/null
+++ b/lens-ml-lib/src/main/java/org/apache/lens/ml/server/MLServiceResource.java
@@ -0,0 +1,427 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.lens.ml.server;
+
+import static org.apache.commons.lang.StringUtils.isBlank;
+
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+
+import javax.ws.rs.BadRequestException;
+import javax.ws.rs.Consumes;
+import javax.ws.rs.DELETE;
+import javax.ws.rs.GET;
+import javax.ws.rs.NotFoundException;
+import javax.ws.rs.POST;
+import javax.ws.rs.Path;
+import javax.ws.rs.PathParam;
+import javax.ws.rs.Produces;
+import javax.ws.rs.core.Context;
+import javax.ws.rs.core.MediaType;
+import javax.ws.rs.core.MultivaluedMap;
+import javax.ws.rs.core.Response;
+import javax.ws.rs.core.UriInfo;
+
+import org.apache.lens.api.LensException;
+import org.apache.lens.api.LensSessionHandle;
+import org.apache.lens.api.StringList;
+import org.apache.lens.ml.algo.api.MLModel;
+import org.apache.lens.ml.api.MLTestReport;
+import org.apache.lens.ml.api.ModelMetadata;
+import org.apache.lens.ml.api.TestReport;
+import org.apache.lens.ml.impl.ModelLoader;
+import org.apache.lens.server.api.LensConfConstants;
+import org.apache.lens.server.api.ServiceProvider;
+import org.apache.lens.server.api.ServiceProviderFactory;
+
+import org.apache.commons.lang.StringUtils;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hive.conf.HiveConf;
+
+import org.glassfish.jersey.media.multipart.FormDataParam;
+
+/**
+ * Machine Learning service.
+ */
+@Path("/ml")
+@Produces({MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML})
+public class MLServiceResource {
+
+  /** The Constant LOG. */
+  public static final Log LOG = LogFactory.getLog(MLServiceResource.class);
+
+  /** The ml service. */
+  MLService mlService;
+
+  /** The service provider. */
+  ServiceProvider serviceProvider;
+
+  /** The service provider factory. */
+  ServiceProviderFactory serviceProviderFactory;
+
+  private static final HiveConf HIVE_CONF;
+
+  /**
+   * Message indicating if ML service is up
+   */
+  public static final String ML_UP_MESSAGE = "ML service is up";
+
+  static {
+    HIVE_CONF = new HiveConf();
+    // Add default config so that we know the service provider implementation
+    HIVE_CONF.addResource("lensserver-default.xml");
+    HIVE_CONF.addResource("lens-site.xml");
+  }
+
+  /**
+   * Instantiates a new ML service resource.
+   */
+  public MLServiceResource() {
+    serviceProviderFactory = getServiceProviderFactory(HIVE_CONF);
+  }
+
+  private ServiceProvider getServiceProvider() {
+    if (serviceProvider == null) {
+      serviceProvider = serviceProviderFactory.getServiceProvider();
+    }
+    return serviceProvider;
+  }
+
+  /**
+   * Gets the service provider factory.
+   *
+   * @param conf the conf
+   * @return the service provider factory
+   */
+  private ServiceProviderFactory getServiceProviderFactory(HiveConf conf) {
+    Class<?> spfClass = conf.getClass(LensConfConstants.SERVICE_PROVIDER_FACTORY, ServiceProviderFactory.class);
+    try {
+      return (ServiceProviderFactory) spfClass.newInstance();
+    } catch (InstantiationException e) {
+      throw new RuntimeException(e);
+    } catch (IllegalAccessException e) {
+      throw new RuntimeException(e);
+    }
+  }
+
+  private MLService getMlService() {
+    if (mlService == null) {
+      mlService = (MLService) getServiceProvider().getService(MLService.NAME);
+    }
+    return mlService;
+  }
+
+  /**
+   * Indicates if ML resource is up
+   *
+   * @return
+   */
+  @GET
+  public String mlResourceUp() {
+    return ML_UP_MESSAGE;
+  }
+
+  /**
+   * Get a list of algos available
+   *
+   * @return
+   */
+  @GET
+  @Path("algos")
+  public StringList getAlgoNames() {
+    List<String> algos = getMlService().getAlgorithms();
+    StringList result = new StringList(algos);
+    return result;
+  }
+
+  /**
+   * Gets the human readable param description of an algorithm
+   *
+   * @param algorithm the algorithm
+   * @return the param description
+   */
+  @GET
+  @Path("algos/{algorithm}")
+  public StringList getParamDescription(@PathParam("algorithm") String algorithm) {
+    Map<String, String> paramDesc = getMlService().getAlgoParamDescription(algorithm);
+    if (paramDesc == null) {
+      throw new NotFoundException("Param description not found for " + algorithm);
+    }
+
+    List<String> descriptions = new ArrayList<String>();
+    for (String key : paramDesc.keySet()) {
+      descriptions.add(key + " : " + paramDesc.get(key));
+    }
+    return new StringList(descriptions);
+  }
+
+  /**
+   * Get model ID list for a given algorithm.
+   *
+   * @param algorithm algorithm name
+   * @return the models for algo
+   * @throws LensException the lens exception
+   */
+  @GET
+  @Path("models/{algorithm}")
+  public StringList getModelsForAlgo(@PathParam("algorithm") String algorithm) throws LensException {
+    List<String> models = getMlService().getModels(algorithm);
+    if (models == null || models.isEmpty()) {
+      throw new NotFoundException("No models found for algorithm " + algorithm);
+    }
+    return new StringList(models);
+  }
+
+  /**
+   * Get metadata of the model given algorithm and model ID.
+   *
+   * @param algorithm algorithm name
+   * @param modelID   model ID
+   * @return model metadata
+   * @throws LensException the lens exception
+   */
+  @GET
+  @Path("models/{algorithm}/{modelID}")
+  public ModelMetadata getModelMetadata(@PathParam("algorithm") String algorithm, @PathParam("modelID") String modelID)
+    throws LensException {
+    MLModel model = getMlService().getModel(algorithm, modelID);
+    if (model == null) {
+      throw new NotFoundException("Model not found " + modelID + ", algo=" + algorithm);
+    }
+
+    ModelMetadata meta = new ModelMetadata(model.getId(), model.getTable(), model.getAlgoName(), StringUtils.join(
+      model.getParams(), ' '), model.getCreatedAt().toString(), getMlService().getModelPath(algorithm, modelID),
+      model.getLabelColumn(), StringUtils.join(model.getFeatureColumns(), ","));
+    return meta;
+  }
+
+  /**
+   * Delete a model given model ID and algorithm name.
+   *
+   * @param algorithm the algorithm
+   * @param modelID   the model id
+   * @return confirmation text
+   * @throws LensException the lens exception
+   */
+  @DELETE
+  @Consumes({MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML, MediaType.TEXT_PLAIN})
+  @Path("models/{algorithm}/{modelID}")
+  public String deleteModel(@PathParam("algorithm") String algorithm, @PathParam("modelID") String modelID)
+    throws LensException {
+    getMlService().deleteModel(algorithm, modelID);
+    return "DELETED model=" + modelID + " algorithm=" + algorithm;
+  }
+
+  /**
+   * Train a model given an algorithm name and algorithm parameters
+   * <p>
+   * Following parameters are mandatory and must be passed as part of the form
+   * <p/>
+   * <ol>
+   * <li>table - input Hive table to load training data from</li>
+   * <li>label - name of the labelled column</li>
+   * <li>feature - one entry per feature column. At least one feature column is required</li>
+   * </ol>
+   * <p/>
+   * </p>
+   *
+   * @param algorithm algorithm name
+   * @param form      form data
+   * @return if model is successfully trained, the model ID will be returned
+   * @throws LensException the lens exception
+   */
+  @POST
+  @Consumes(MediaType.APPLICATION_FORM_URLENCODED)
+  @Path("{algorithm}/train")
+  public String train(@PathParam("algorithm") String algorithm, MultivaluedMap<String, String> form)
+    throws LensException {
+
+    // Check if algo is valid
+    if (getMlService().getAlgoForName(algorithm) == null) {
+      throw new NotFoundException("Algo for algo: " + algorithm + " not found");
+    }
+
+    if (isBlank(form.getFirst("table"))) {
+      throw new BadRequestException("table parameter is rquired");
+    }
+
+    String table = form.getFirst("table");
+
+    if (isBlank(form.getFirst("label"))) {
+      throw new BadRequestException("label parameter is required");
+    }
+
+    // Check features
+    List<String> featureNames = form.get("feature");
+    if (featureNames.size() < 1) {
+      throw new BadRequestException("At least one feature is required");
+    }
+
+    List<String> algoArgs = new ArrayList<String>();
+    Set<Map.Entry<String, List<String>>> paramSet = form.entrySet();
+
+    for (Map.Entry<String, List<String>> e : paramSet) {
+      String p = e.getKey();
+      List<String> values = e.getValue();
+      if ("algorithm".equals(p) || "table".equals(p)) {
+        continue;
+      } else if ("feature".equals(p)) {
+        for (String feature : values) {
+          algoArgs.add("feature");
+          algoArgs.add(feature);
+        }
+      } else if ("label".equals(p)) {
+        algoArgs.add("label");
+        algoArgs.add(values.get(0));
+      } else {
+        algoArgs.add(p);
+        algoArgs.add(values.get(0));
+      }
+    }
+    LOG.info("Training table " + table + " with algo " + algorithm + " params=" + algoArgs.toString());
+    String modelId = getMlService().train(table, algorithm, algoArgs.toArray(new String[]{}));
+    LOG.info("Done training " + table + " modelid = " + modelId);
+    return modelId;
+  }
+
+  /**
+   * Clear model cache (for admin use).
+   *
+   * @return OK if the cache was cleared
+   */
+  @DELETE
+  @Path("clearModelCache")
+  @Produces(MediaType.TEXT_PLAIN)
+  public Response clearModelCache() {
+    ModelLoader.clearCache();
+    LOG.info("Cleared model cache");
+    return Response.ok("Cleared cache", MediaType.TEXT_PLAIN_TYPE).build();
+  }
+
+  /**
+   * Run a test on a model for an algorithm.
+   *
+   * @param algorithm algorithm name
+   * @param modelID   model ID
+   * @param table     Hive table to run test on
+   * @param session   Lens session ID. This session ID will be used to run the test query
+   * @return Test report ID
+   * @throws LensException the lens exception
+   */
+  @POST
+  @Path("test/{table}/{algorithm}/{modelID}")
+  @Consumes(MediaType.MULTIPART_FORM_DATA)
+  public String test(@PathParam("algorithm") String algorithm, @PathParam("modelID") String modelID,
+    @PathParam("table") String table, @FormDataParam("sessionid") LensSessionHandle session,
+    @FormDataParam("outputTable") String outputTable) throws LensException {
+    MLTestReport testReport = getMlService().testModel(session, table, algorithm, modelID, outputTable);
+    return testReport.getReportID();
+  }
+
+  /**
+   * Get list of reports for a given algorithm.
+   *
+   * @param algoritm the algoritm
+   * @return the reports for algorithm
+   * @throws LensException the lens exception
+   */
+  @GET
+  @Path("reports/{algorithm}")
+  public StringList getReportsForAlgorithm(@PathParam("algorithm") String algoritm) throws LensException {
+    List<String> reports = getMlService().getTestReports(algoritm);
+    if (reports == null || reports.isEmpty()) {
+      throw new NotFoundException("No test reports found for " + algoritm);
+    }
+    return new StringList(reports);
+  }
+
+  /**
+   * Get a single test report given the algorithm name and report id.
+   *
+   * @param algorithm the algorithm
+   * @param reportID  the report id
+   * @return the test report
+   * @throws LensException the lens exception
+   */
+  @GET
+  @Path("reports/{algorithm}/{reportID}")
+  public TestReport getTestReport(@PathParam("algorithm") String algorithm, @PathParam("reportID") String reportID)
+    throws LensException {
+    MLTestReport report = getMlService().getTestReport(algorithm, reportID);
+
+    if (report == null) {
+      throw new NotFoundException("Test report: " + reportID + " not found for algorithm " + algorithm);
+    }
+
+    TestReport result = new TestReport(report.getTestTable(), report.getOutputTable(), report.getOutputColumn(),
+      report.getLabelColumn(), StringUtils.join(report.getFeatureColumns(), ","), report.getAlgorithm(),
+      report.getModelID(), report.getReportID(), report.getLensQueryID());
+    return result;
+  }
+
+  /**
+   * DELETE a report given the algorithm name and report ID.
+   *
+   * @param algorithm the algorithm
+   * @param reportID  the report id
+   * @return the string
+   * @throws LensException the lens exception
+   */
+  @DELETE
+  @Path("reports/{algorithm}/{reportID}")
+  @Consumes({MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML, MediaType.TEXT_PLAIN})
+  public String deleteTestReport(@PathParam("algorithm") String algorithm, @PathParam("reportID") String reportID)
+    throws LensException {
+    getMlService().deleteTestReport(algorithm, reportID);
+    return "DELETED report=" + reportID + " algorithm=" + algorithm;
+  }
+
+  /**
+   * Predict.
+   *
+   * @param algorithm the algorithm
+   * @param modelID   the model id
+   * @param uriInfo   the uri info
+   * @return the string
+   * @throws LensException the lens exception
+   */
+  @GET
+  @Path("/predict/{algorithm}/{modelID}")
+  @Produces({MediaType.APPLICATION_ATOM_XML, MediaType.APPLICATION_JSON})
+  public String predict(@PathParam("algorithm") String algorithm, @PathParam("modelID") String modelID,
+    @Context UriInfo uriInfo) throws LensException {
+    // Load the model instance
+    MLModel<?> model = getMlService().getModel(algorithm, modelID);
+
+    // Get input feature names
+    MultivaluedMap<String, String> params = uriInfo.getQueryParameters();
+    String[] features = new String[model.getFeatureColumns().size()];
+    // Assuming that feature name parameters are same
+    int i = 0;
+    for (String feature : model.getFeatureColumns()) {
+      features[i++] = params.getFirst(feature);
+    }
+
+    // TODO needs a 'prediction formatter'
+    return getMlService().predict(algorithm, modelID, features).toString();
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/0f5ea4c7/lens-ml-lib/src/main/java/org/apache/lens/ml/spark/ColumnFeatureFunction.java
----------------------------------------------------------------------
diff --git a/lens-ml-lib/src/main/java/org/apache/lens/ml/spark/ColumnFeatureFunction.java b/lens-ml-lib/src/main/java/org/apache/lens/ml/spark/ColumnFeatureFunction.java
deleted file mode 100644
index abdad68..0000000
--- a/lens-ml-lib/src/main/java/org/apache/lens/ml/spark/ColumnFeatureFunction.java
+++ /dev/null
@@ -1,102 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.lens.ml.spark;
-
-import org.apache.hadoop.io.WritableComparable;
-import org.apache.hive.hcatalog.data.HCatRecord;
-import org.apache.log4j.Logger;
-import org.apache.spark.mllib.linalg.Vectors;
-import org.apache.spark.mllib.regression.LabeledPoint;
-
-import com.google.common.base.Preconditions;
-import scala.Tuple2;
-
-/**
- * A feature function that directly maps an HCatRecord to a feature vector. Each column becomes a feature in the vector,
- * with the value of the feature obtained using the value mapper for that column
- */
-public class ColumnFeatureFunction extends FeatureFunction {
-
-  /** The Constant LOG. */
-  public static final Logger LOG = Logger.getLogger(ColumnFeatureFunction.class);
-
-  /** The feature value mappers. */
-  private final FeatureValueMapper[] featureValueMappers;
-
-  /** The feature positions. */
-  private final int[] featurePositions;
-
-  /** The label column pos. */
-  private final int labelColumnPos;
-
-  /** The num features. */
-  private final int numFeatures;
-
-  /** The default labeled point. */
-  private final LabeledPoint defaultLabeledPoint;
-
-  /**
-   * Feature positions and value mappers are parallel arrays. featurePositions[i] gives the position of ith feature in
-   * the HCatRecord, and valueMappers[i] gives the value mapper used to map that feature to a Double value
-   *
-   * @param featurePositions position number of feature column in the HCatRecord
-   * @param valueMappers     mapper for each column position
-   * @param labelColumnPos   position of the label column
-   * @param numFeatures      number of features in the feature vector
-   * @param defaultLabel     default lable to be used for null records
-   */
-  public ColumnFeatureFunction(int[] featurePositions, FeatureValueMapper[] valueMappers, int labelColumnPos,
-    int numFeatures, double defaultLabel) {
-    Preconditions.checkNotNull(valueMappers, "Value mappers argument is required");
-    Preconditions.checkNotNull(featurePositions, "Feature positions are required");
-    Preconditions.checkArgument(valueMappers.length == featurePositions.length,
-      "Mismatch between number of value mappers and feature positions");
-
-    this.featurePositions = featurePositions;
-    this.featureValueMappers = valueMappers;
-    this.labelColumnPos = labelColumnPos;
-    this.numFeatures = numFeatures;
-    defaultLabeledPoint = new LabeledPoint(defaultLabel, Vectors.dense(new double[numFeatures]));
-  }
-
-  /*
-   * (non-Javadoc)
-   *
-   * @see org.apache.lens.ml.spark.FeatureFunction#call(scala.Tuple2)
-   */
-  @Override
-  public LabeledPoint call(Tuple2<WritableComparable, HCatRecord> tuple) throws Exception {
-    HCatRecord record = tuple._2();
-
-    if (record == null) {
-      LOG.info("@@@ Null record");
-      return defaultLabeledPoint;
-    }
-
-    double[] features = new double[numFeatures];
-
-    for (int i = 0; i < numFeatures; i++) {
-      int featurePos = featurePositions[i];
-      features[i] = featureValueMappers[i].call(record.get(featurePos));
-    }
-
-    double label = featureValueMappers[labelColumnPos].call(record.get(labelColumnPos));
-    return new LabeledPoint(label, Vectors.dense(features));
-  }
-}

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/0f5ea4c7/lens-ml-lib/src/main/java/org/apache/lens/ml/spark/DoubleValueMapper.java
----------------------------------------------------------------------
diff --git a/lens-ml-lib/src/main/java/org/apache/lens/ml/spark/DoubleValueMapper.java b/lens-ml-lib/src/main/java/org/apache/lens/ml/spark/DoubleValueMapper.java
deleted file mode 100644
index 781ccd1..0000000
--- a/lens-ml-lib/src/main/java/org/apache/lens/ml/spark/DoubleValueMapper.java
+++ /dev/null
@@ -1,39 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.lens.ml.spark;
-
-/**
- * Directly return input when it is known to be double.
- */
-public class DoubleValueMapper extends FeatureValueMapper {
-
-  /*
-   * (non-Javadoc)
-   *
-   * @see org.apache.lens.ml.spark.FeatureValueMapper#call(java.lang.Object)
-   */
-  @Override
-  public final Double call(Object input) {
-    if (input instanceof Double || input == null) {
-      return input == null ? Double.valueOf(0d) : (Double) input;
-    }
-
-    throw new IllegalArgumentException("Invalid input expecting only doubles, but got " + input);
-  }
-}

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/0f5ea4c7/lens-ml-lib/src/main/java/org/apache/lens/ml/spark/FeatureFunction.java
----------------------------------------------------------------------
diff --git a/lens-ml-lib/src/main/java/org/apache/lens/ml/spark/FeatureFunction.java b/lens-ml-lib/src/main/java/org/apache/lens/ml/spark/FeatureFunction.java
deleted file mode 100644
index affed7b..0000000
--- a/lens-ml-lib/src/main/java/org/apache/lens/ml/spark/FeatureFunction.java
+++ /dev/null
@@ -1,40 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.lens.ml.spark;
-
-import org.apache.hadoop.io.WritableComparable;
-import org.apache.hive.hcatalog.data.HCatRecord;
-import org.apache.spark.api.java.function.Function;
-import org.apache.spark.mllib.regression.LabeledPoint;
-
-import scala.Tuple2;
-
-/**
- * Function to map an HCatRecord to a feature vector usable by MLLib.
- */
-public abstract class FeatureFunction implements Function<Tuple2<WritableComparable, HCatRecord>, LabeledPoint> {
-
-  /*
-   * (non-Javadoc)
-   *
-   * @see org.apache.spark.api.java.function.Function#call(java.lang.Object)
-   */
-  @Override
-  public abstract LabeledPoint call(Tuple2<WritableComparable, HCatRecord> tuple) throws Exception;
-}

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/0f5ea4c7/lens-ml-lib/src/main/java/org/apache/lens/ml/spark/FeatureValueMapper.java
----------------------------------------------------------------------
diff --git a/lens-ml-lib/src/main/java/org/apache/lens/ml/spark/FeatureValueMapper.java b/lens-ml-lib/src/main/java/org/apache/lens/ml/spark/FeatureValueMapper.java
deleted file mode 100644
index b692379..0000000
--- a/lens-ml-lib/src/main/java/org/apache/lens/ml/spark/FeatureValueMapper.java
+++ /dev/null
@@ -1,36 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.lens.ml.spark;
-
-import java.io.Serializable;
-
-import org.apache.spark.api.java.function.Function;
-
-/**
- * Map a feature value to a Double value usable by MLLib.
- */
-public abstract class FeatureValueMapper implements Function<Object, Double>, Serializable {
-
-  /*
-   * (non-Javadoc)
-   *
-   * @see org.apache.spark.api.java.function.Function#call(java.lang.Object)
-   */
-  public abstract Double call(Object input);
-}

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/0f5ea4c7/lens-ml-lib/src/main/java/org/apache/lens/ml/spark/HiveTableRDD.java
----------------------------------------------------------------------
diff --git a/lens-ml-lib/src/main/java/org/apache/lens/ml/spark/HiveTableRDD.java b/lens-ml-lib/src/main/java/org/apache/lens/ml/spark/HiveTableRDD.java
deleted file mode 100644
index 44a8e1d..0000000
--- a/lens-ml-lib/src/main/java/org/apache/lens/ml/spark/HiveTableRDD.java
+++ /dev/null
@@ -1,63 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.lens.ml.spark;
-
-import java.io.IOException;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.io.WritableComparable;
-import org.apache.hive.hcatalog.data.HCatRecord;
-import org.apache.hive.hcatalog.mapreduce.HCatInputFormat;
-import org.apache.spark.api.java.JavaPairRDD;
-import org.apache.spark.api.java.JavaSparkContext;
-
-/**
- * Create a JavaRDD based on a Hive table using HCatInputFormat.
- */
-public final class HiveTableRDD {
-  private HiveTableRDD() {
-  }
-
-  public static final Log LOG = LogFactory.getLog(HiveTableRDD.class);
-
-  /**
-   * Creates the hive table rdd.
-   *
-   * @param javaSparkContext the java spark context
-   * @param conf             the conf
-   * @param db               the db
-   * @param table            the table
-   * @param partitionFilter  the partition filter
-   * @return the java pair rdd
-   * @throws IOException Signals that an I/O exception has occurred.
-   */
-  public static JavaPairRDD<WritableComparable, HCatRecord> createHiveTableRDD(JavaSparkContext javaSparkContext,
-    Configuration conf, String db, String table, String partitionFilter) throws IOException {
-
-    HCatInputFormat.setInput(conf, db, table, partitionFilter);
-
-    JavaPairRDD<WritableComparable, HCatRecord> rdd = javaSparkContext.newAPIHadoopRDD(conf,
-      HCatInputFormat.class, // Input
-      WritableComparable.class, // input key class
-      HCatRecord.class); // input value class
-    return rdd;
-  }
-}

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/0f5ea4c7/lens-ml-lib/src/main/java/org/apache/lens/ml/spark/SparkMLDriver.java
----------------------------------------------------------------------
diff --git a/lens-ml-lib/src/main/java/org/apache/lens/ml/spark/SparkMLDriver.java b/lens-ml-lib/src/main/java/org/apache/lens/ml/spark/SparkMLDriver.java
deleted file mode 100644
index 1e452f1..0000000
--- a/lens-ml-lib/src/main/java/org/apache/lens/ml/spark/SparkMLDriver.java
+++ /dev/null
@@ -1,275 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.lens.ml.spark;
-
-import java.io.File;
-import java.io.FilenameFilter;
-import java.util.ArrayList;
-import java.util.List;
-
-import org.apache.lens.api.LensConf;
-import org.apache.lens.api.LensException;
-import org.apache.lens.ml.Algorithms;
-import org.apache.lens.ml.MLAlgo;
-import org.apache.lens.ml.MLDriver;
-import org.apache.lens.ml.spark.algos.*;
-
-import org.apache.commons.lang.StringUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.spark.SparkConf;
-import org.apache.spark.api.java.JavaSparkContext;
-
-/**
- * The Class SparkMLDriver.
- */
-public class SparkMLDriver implements MLDriver {
-
-  /** The Constant LOG. */
-  public static final Log LOG = LogFactory.getLog(SparkMLDriver.class);
-
-  /** The owns spark context. */
-  private boolean ownsSparkContext = true;
-
-  /**
-   * The Enum SparkMasterMode.
-   */
-  private enum SparkMasterMode {
-    // Embedded mode used in tests
-    /** The embedded. */
-    EMBEDDED,
-    // Yarn client and Yarn cluster modes are used when deploying the app to Yarn cluster
-    /** The yarn client. */
-    YARN_CLIENT,
-
-    /** The yarn cluster. */
-    YARN_CLUSTER
-  }
-
-  /** The algorithms. */
-  private final Algorithms algorithms = new Algorithms();
-
-  /** The client mode. */
-  private SparkMasterMode clientMode = SparkMasterMode.EMBEDDED;
-
-  /** The is started. */
-  private boolean isStarted;
-
-  /** The spark conf. */
-  private SparkConf sparkConf;
-
-  /** The spark context. */
-  private JavaSparkContext sparkContext;
-
-  /**
-   * Use spark context.
-   *
-   * @param jsc the jsc
-   */
-  public void useSparkContext(JavaSparkContext jsc) {
-    ownsSparkContext = false;
-    this.sparkContext = jsc;
-  }
-
-  /*
-   * (non-Javadoc)
-   *
-   * @see org.apache.lens.ml.MLDriver#isAlgoSupported(java.lang.String)
-   */
-  @Override
-  public boolean isAlgoSupported(String name) {
-    return algorithms.isAlgoSupported(name);
-  }
-
-  /*
-   * (non-Javadoc)
-   *
-   * @see org.apache.lens.ml.MLDriver#getAlgoInstance(java.lang.String)
-   */
-  @Override
-  public MLAlgo getAlgoInstance(String name) throws LensException {
-    checkStarted();
-
-    if (!isAlgoSupported(name)) {
-      return null;
-    }
-
-    MLAlgo algo = null;
-    try {
-      algo = algorithms.getAlgoForName(name);
-      if (algo instanceof BaseSparkAlgo) {
-        ((BaseSparkAlgo) algo).setSparkContext(sparkContext);
-      }
-    } catch (LensException exc) {
-      LOG.error("Error creating algo object", exc);
-    }
-    return algo;
-  }
-
-  /**
-   * Register algos.
-   */
-  private void registerAlgos() {
-    algorithms.register(NaiveBayesAlgo.class);
-    algorithms.register(SVMAlgo.class);
-    algorithms.register(LogisticRegressionAlgo.class);
-    algorithms.register(DecisionTreeAlgo.class);
-  }
-
-  /*
-   * (non-Javadoc)
-   *
-   * @see org.apache.lens.ml.MLDriver#init(org.apache.lens.api.LensConf)
-   */
-  @Override
-  public void init(LensConf conf) throws LensException {
-    sparkConf = new SparkConf();
-    registerAlgos();
-    for (String key : conf.getProperties().keySet()) {
-      if (key.startsWith("lens.ml.sparkdriver.")) {
-        sparkConf.set(key.substring("lens.ml.sparkdriver.".length()), conf.getProperties().get(key));
-      }
-    }
-
-    String sparkAppMaster = sparkConf.get("spark.master");
-    if ("yarn-client".equalsIgnoreCase(sparkAppMaster)) {
-      clientMode = SparkMasterMode.YARN_CLIENT;
-    } else if ("yarn-cluster".equalsIgnoreCase(sparkAppMaster)) {
-      clientMode = SparkMasterMode.YARN_CLUSTER;
-    } else if ("local".equalsIgnoreCase(sparkAppMaster) || StringUtils.isBlank(sparkAppMaster)) {
-      clientMode = SparkMasterMode.EMBEDDED;
-    } else {
-      throw new IllegalArgumentException("Invalid master mode " + sparkAppMaster);
-    }
-
-    if (clientMode == SparkMasterMode.YARN_CLIENT || clientMode == SparkMasterMode.YARN_CLUSTER) {
-      String sparkHome = System.getenv("SPARK_HOME");
-      if (StringUtils.isNotBlank(sparkHome)) {
-        sparkConf.setSparkHome(sparkHome);
-      }
-
-      // If SPARK_HOME is not set, SparkConf can read from the Lens-site.xml or System properties.
-      if (StringUtils.isBlank(sparkConf.get("spark.home"))) {
-        throw new IllegalArgumentException("Spark home is not set");
-      }
-
-      LOG.info("Spark home is set to " + sparkConf.get("spark.home"));
-    }
-
-    sparkConf.setAppName("lens-ml");
-  }
-
-  /*
-   * (non-Javadoc)
-   *
-   * @see org.apache.lens.ml.MLDriver#start()
-   */
-  @Override
-  public void start() throws LensException {
-    if (sparkContext == null) {
-      sparkContext = new JavaSparkContext(sparkConf);
-    }
-
-    // Adding jars to spark context is only required when running in yarn-client mode
-    if (clientMode != SparkMasterMode.EMBEDDED) {
-      // TODO Figure out only necessary set of JARs to be added for HCatalog
-      // Add hcatalog and hive jars
-      String hiveLocation = System.getenv("HIVE_HOME");
-
-      if (StringUtils.isBlank(hiveLocation)) {
-        throw new LensException("HIVE_HOME is not set");
-      }
-
-      LOG.info("HIVE_HOME at " + hiveLocation);
-
-      File hiveLibDir = new File(hiveLocation, "lib");
-      FilenameFilter jarFileFilter = new FilenameFilter() {
-        @Override
-        public boolean accept(File file, String s) {
-          return s.endsWith(".jar");
-        }
-      };
-
-      List<String> jarFiles = new ArrayList<String>();
-      // Add hive jars
-      for (File jarFile : hiveLibDir.listFiles(jarFileFilter)) {
-        jarFiles.add(jarFile.getAbsolutePath());
-        LOG.info("Adding HIVE jar " + jarFile.getAbsolutePath());
-        sparkContext.addJar(jarFile.getAbsolutePath());
-      }
-
-      // Add hcatalog jars
-      File hcatalogDir = new File(hiveLocation + "/hcatalog/share/hcatalog");
-      for (File jarFile : hcatalogDir.listFiles(jarFileFilter)) {
-        jarFiles.add(jarFile.getAbsolutePath());
-        LOG.info("Adding HCATALOG jar " + jarFile.getAbsolutePath());
-        sparkContext.addJar(jarFile.getAbsolutePath());
-      }
-
-      // Add the current jar
-      String[] lensSparkLibJars = JavaSparkContext.jarOfClass(SparkMLDriver.class);
-      for (String lensSparkJar : lensSparkLibJars) {
-        LOG.info("Adding Lens JAR " + lensSparkJar);
-        sparkContext.addJar(lensSparkJar);
-      }
-    }
-
-    isStarted = true;
-    LOG.info("Created Spark context for app: '" + sparkContext.appName() + "', Spark master: " + sparkContext.master());
-  }
-
-  /*
-   * (non-Javadoc)
-   *
-   * @see org.apache.lens.ml.MLDriver#stop()
-   */
-  @Override
-  public void stop() throws LensException {
-    if (!isStarted) {
-      LOG.warn("Spark driver was not started");
-      return;
-    }
-    isStarted = false;
-    if (ownsSparkContext) {
-      sparkContext.stop();
-    }
-    LOG.info("Stopped spark context " + this);
-  }
-
-  @Override
-  public List<String> getAlgoNames() {
-    return algorithms.getAlgorithmNames();
-  }
-
-  /**
-   * Check started.
-   *
-   * @throws LensException the lens exception
-   */
-  public void checkStarted() throws LensException {
-    if (!isStarted) {
-      throw new LensException("Spark driver is not started yet");
-    }
-  }
-
-  public JavaSparkContext getSparkContext() {
-    return sparkContext;
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/0f5ea4c7/lens-ml-lib/src/main/java/org/apache/lens/ml/spark/TableTrainingSpec.java
----------------------------------------------------------------------
diff --git a/lens-ml-lib/src/main/java/org/apache/lens/ml/spark/TableTrainingSpec.java b/lens-ml-lib/src/main/java/org/apache/lens/ml/spark/TableTrainingSpec.java
deleted file mode 100644
index e569b1e..0000000
--- a/lens-ml-lib/src/main/java/org/apache/lens/ml/spark/TableTrainingSpec.java
+++ /dev/null
@@ -1,433 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.lens.ml.spark;
-
-import java.io.IOException;
-import java.io.Serializable;
-import java.util.ArrayList;
-import java.util.List;
-
-import org.apache.lens.api.LensException;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.io.WritableComparable;
-import org.apache.hive.hcatalog.data.HCatRecord;
-import org.apache.hive.hcatalog.data.schema.HCatFieldSchema;
-import org.apache.hive.hcatalog.data.schema.HCatSchema;
-import org.apache.hive.hcatalog.mapreduce.HCatInputFormat;
-import org.apache.spark.api.java.JavaPairRDD;
-import org.apache.spark.api.java.JavaRDD;
-import org.apache.spark.api.java.JavaSparkContext;
-import org.apache.spark.api.java.function.Function;
-import org.apache.spark.mllib.regression.LabeledPoint;
-import org.apache.spark.rdd.RDD;
-
-import com.google.common.base.Preconditions;
-import lombok.Getter;
-import lombok.ToString;
-
-/**
- * The Class TableTrainingSpec.
- */
-@ToString
-public class TableTrainingSpec implements Serializable {
-
-  /** The Constant LOG. */
-  public static final Log LOG = LogFactory.getLog(TableTrainingSpec.class);
-
-  /** The training rdd. */
-  @Getter
-  private transient RDD<LabeledPoint> trainingRDD;
-
-  /** The testing rdd. */
-  @Getter
-  private transient RDD<LabeledPoint> testingRDD;
-
-  /** The database. */
-  @Getter
-  private String database;
-
-  /** The table. */
-  @Getter
-  private String table;
-
-  /** The partition filter. */
-  @Getter
-  private String partitionFilter;
-
-  /** The feature columns. */
-  @Getter
-  private List<String> featureColumns;
-
-  /** The label column. */
-  @Getter
-  private String labelColumn;
-
-  /** The conf. */
-  @Getter
-  private transient HiveConf conf;
-
-  // By default all samples are considered for training
-  /** The split training. */
-  private boolean splitTraining;
-
-  /** The training fraction. */
-  private double trainingFraction = 1.0;
-
-  /** The label pos. */
-  int labelPos;
-
-  /** The feature positions. */
-  int[] featurePositions;
-
-  /** The num features. */
-  int numFeatures;
-
-  /** The labeled rdd. */
-  transient JavaRDD<LabeledPoint> labeledRDD;
-
-  /**
-   * New builder.
-   *
-   * @return the table training spec builder
-   */
-  public static TableTrainingSpecBuilder newBuilder() {
-    return new TableTrainingSpecBuilder();
-  }
-
-  /**
-   * The Class TableTrainingSpecBuilder.
-   */
-  public static class TableTrainingSpecBuilder {
-
-    /** The spec. */
-    final TableTrainingSpec spec;
-
-    /**
-     * Instantiates a new table training spec builder.
-     */
-    public TableTrainingSpecBuilder() {
-      spec = new TableTrainingSpec();
-    }
-
-    /**
-     * Hive conf.
-     *
-     * @param conf the conf
-     * @return the table training spec builder
-     */
-    public TableTrainingSpecBuilder hiveConf(HiveConf conf) {
-      spec.conf = conf;
-      return this;
-    }
-
-    /**
-     * Database.
-     *
-     * @param db the db
-     * @return the table training spec builder
-     */
-    public TableTrainingSpecBuilder database(String db) {
-      spec.database = db;
-      return this;
-    }
-
-    /**
-     * Table.
-     *
-     * @param table the table
-     * @return the table training spec builder
-     */
-    public TableTrainingSpecBuilder table(String table) {
-      spec.table = table;
-      return this;
-    }
-
-    /**
-     * Partition filter.
-     *
-     * @param partFilter the part filter
-     * @return the table training spec builder
-     */
-    public TableTrainingSpecBuilder partitionFilter(String partFilter) {
-      spec.partitionFilter = partFilter;
-      return this;
-    }
-
-    /**
-     * Label column.
-     *
-     * @param labelColumn the label column
-     * @return the table training spec builder
-     */
-    public TableTrainingSpecBuilder labelColumn(String labelColumn) {
-      spec.labelColumn = labelColumn;
-      return this;
-    }
-
-    /**
-     * Feature columns.
-     *
-     * @param featureColumns the feature columns
-     * @return the table training spec builder
-     */
-    public TableTrainingSpecBuilder featureColumns(List<String> featureColumns) {
-      spec.featureColumns = featureColumns;
-      return this;
-    }
-
-    /**
-     * Builds the.
-     *
-     * @return the table training spec
-     */
-    public TableTrainingSpec build() {
-      return spec;
-    }
-
-    /**
-     * Training fraction.
-     *
-     * @param trainingFraction the training fraction
-     * @return the table training spec builder
-     */
-    public TableTrainingSpecBuilder trainingFraction(double trainingFraction) {
-      Preconditions.checkArgument(trainingFraction >= 0 && trainingFraction <= 1.0,
-        "Training fraction shoule be between 0 and 1");
-      spec.trainingFraction = trainingFraction;
-      spec.splitTraining = true;
-      return this;
-    }
-  }
-
-  /**
-   * The Class DataSample.
-   */
-  public static class DataSample implements Serializable {
-
-    /** The labeled point. */
-    private final LabeledPoint labeledPoint;
-
-    /** The sample. */
-    private final double sample;
-
-    /**
-     * Instantiates a new data sample.
-     *
-     * @param labeledPoint the labeled point
-     */
-    public DataSample(LabeledPoint labeledPoint) {
-      sample = Math.random();
-      this.labeledPoint = labeledPoint;
-    }
-  }
-
-  /**
-   * The Class TrainingFilter.
-   */
-  public static class TrainingFilter implements Function<DataSample, Boolean> {
-
-    /** The training fraction. */
-    private double trainingFraction;
-
-    /**
-     * Instantiates a new training filter.
-     *
-     * @param fraction the fraction
-     */
-    public TrainingFilter(double fraction) {
-      trainingFraction = fraction;
-    }
-
-    /*
-     * (non-Javadoc)
-     *
-     * @see org.apache.spark.api.java.function.Function#call(java.lang.Object)
-     */
-    @Override
-    public Boolean call(DataSample v1) throws Exception {
-      return v1.sample <= trainingFraction;
-    }
-  }
-
-  /**
-   * The Class TestingFilter.
-   */
-  public static class TestingFilter implements Function<DataSample, Boolean> {
-
-    /** The training fraction. */
-    private double trainingFraction;
-
-    /**
-     * Instantiates a new testing filter.
-     *
-     * @param fraction the fraction
-     */
-    public TestingFilter(double fraction) {
-      trainingFraction = fraction;
-    }
-
-    /*
-     * (non-Javadoc)
-     *
-     * @see org.apache.spark.api.java.function.Function#call(java.lang.Object)
-     */
-    @Override
-    public Boolean call(DataSample v1) throws Exception {
-      return v1.sample > trainingFraction;
-    }
-  }
-
-  /**
-   * The Class GetLabeledPoint.
-   */
-  public static class GetLabeledPoint implements Function<DataSample, LabeledPoint> {
-
-    /*
-     * (non-Javadoc)
-     *
-     * @see org.apache.spark.api.java.function.Function#call(java.lang.Object)
-     */
-    @Override
-    public LabeledPoint call(DataSample v1) throws Exception {
-      return v1.labeledPoint;
-    }
-  }
-
-  /**
-   * Validate.
-   *
-   * @return true, if successful
-   */
-  boolean validate() {
-    List<HCatFieldSchema> columns;
-    try {
-      HCatInputFormat.setInput(conf, database == null ? "default" : database, table, partitionFilter);
-      HCatSchema tableSchema = HCatInputFormat.getTableSchema(conf);
-      columns = tableSchema.getFields();
-    } catch (IOException exc) {
-      LOG.error("Error getting table info " + toString(), exc);
-      return false;
-    }
-
-    LOG.info(table + " columns " + columns.toString());
-
-    boolean valid = false;
-    if (columns != null && !columns.isEmpty()) {
-      // Check labeled column
-      List<String> columnNames = new ArrayList<String>();
-      for (HCatFieldSchema col : columns) {
-        columnNames.add(col.getName());
-      }
-
-      // Need at least one feature column and one label column
-      valid = columnNames.contains(labelColumn) && columnNames.size() > 1;
-
-      if (valid) {
-        labelPos = columnNames.indexOf(labelColumn);
-
-        // Check feature columns
-        if (featureColumns == null || featureColumns.isEmpty()) {
-          // feature columns are not provided, so all columns except label column are feature columns
-          featurePositions = new int[columnNames.size() - 1];
-          int p = 0;
-          for (int i = 0; i < columnNames.size(); i++) {
-            if (i == labelPos) {
-              continue;
-            }
-            featurePositions[p++] = i;
-          }
-
-          columnNames.remove(labelPos);
-          featureColumns = columnNames;
-        } else {
-          // Feature columns were provided, verify all feature columns are present in the table
-          valid = columnNames.containsAll(featureColumns);
-          if (valid) {
-            // Get feature positions
-            featurePositions = new int[featureColumns.size()];
-            for (int i = 0; i < featureColumns.size(); i++) {
-              featurePositions[i] = columnNames.indexOf(featureColumns.get(i));
-            }
-          }
-        }
-        numFeatures = featureColumns.size();
-      }
-    }
-
-    return valid;
-  }
-
-  /**
-   * Creates the rd ds.
-   *
-   * @param sparkContext the spark context
-   * @throws LensException the lens exception
-   */
-  public void createRDDs(JavaSparkContext sparkContext) throws LensException {
-    // Validate the spec
-    if (!validate()) {
-      throw new LensException("Table spec not valid: " + toString());
-    }
-
-    LOG.info("Creating RDDs with spec " + toString());
-
-    // Get the RDD for table
-    JavaPairRDD<WritableComparable, HCatRecord> tableRDD;
-    try {
-      tableRDD = HiveTableRDD.createHiveTableRDD(sparkContext, conf, database, table, partitionFilter);
-    } catch (IOException e) {
-      throw new LensException(e);
-    }
-
-    // Map into trainable RDD
-    // TODO: Figure out a way to use custom value mappers
-    FeatureValueMapper[] valueMappers = new FeatureValueMapper[numFeatures];
-    final DoubleValueMapper doubleMapper = new DoubleValueMapper();
-    for (int i = 0; i < numFeatures; i++) {
-      valueMappers[i] = doubleMapper;
-    }
-
-    ColumnFeatureFunction trainPrepFunction = new ColumnFeatureFunction(featurePositions, valueMappers, labelPos,
-      numFeatures, 0);
-    labeledRDD = tableRDD.map(trainPrepFunction);
-
-    if (splitTraining) {
-      // We have to split the RDD between a training RDD and a testing RDD
-      LOG.info("Splitting RDD for table " + database + "." + table + " with split fraction " + trainingFraction);
-      JavaRDD<DataSample> sampledRDD = labeledRDD.map(new Function<LabeledPoint, DataSample>() {
-        @Override
-        public DataSample call(LabeledPoint v1) throws Exception {
-          return new DataSample(v1);
-        }
-      });
-
-      trainingRDD = sampledRDD.filter(new TrainingFilter(trainingFraction)).map(new GetLabeledPoint()).rdd();
-      testingRDD = sampledRDD.filter(new TestingFilter(trainingFraction)).map(new GetLabeledPoint()).rdd();
-    } else {
-      LOG.info("Using same RDD for train and test");
-      trainingRDD = labeledRDD.rdd();
-      testingRDD = trainingRDD;
-    }
-    LOG.info("Generated RDDs");
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/0f5ea4c7/lens-ml-lib/src/main/java/org/apache/lens/ml/spark/algos/BaseSparkAlgo.java
----------------------------------------------------------------------
diff --git a/lens-ml-lib/src/main/java/org/apache/lens/ml/spark/algos/BaseSparkAlgo.java b/lens-ml-lib/src/main/java/org/apache/lens/ml/spark/algos/BaseSparkAlgo.java
deleted file mode 100644
index 22cda6d..0000000
--- a/lens-ml-lib/src/main/java/org/apache/lens/ml/spark/algos/BaseSparkAlgo.java
+++ /dev/null
@@ -1,290 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.lens.ml.spark.algos;
-
-import java.lang.reflect.Field;
-import java.util.*;
-
-import org.apache.lens.api.LensConf;
-import org.apache.lens.api.LensException;
-import org.apache.lens.ml.AlgoParam;
-import org.apache.lens.ml.Algorithm;
-import org.apache.lens.ml.MLAlgo;
-import org.apache.lens.ml.MLModel;
-
-import org.apache.lens.ml.spark.TableTrainingSpec;
-import org.apache.lens.ml.spark.models.BaseSparkClassificationModel;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.spark.api.java.JavaSparkContext;
-import org.apache.spark.mllib.regression.LabeledPoint;
-import org.apache.spark.rdd.RDD;
-
-/**
- * The Class BaseSparkAlgo.
- */
-public abstract class BaseSparkAlgo implements MLAlgo {
-
-  /** The Constant LOG. */
-  public static final Log LOG = LogFactory.getLog(BaseSparkAlgo.class);
-
-  /** The name. */
-  private final String name;
-
-  /** The description. */
-  private final String description;
-
-  /** The spark context. */
-  protected JavaSparkContext sparkContext;
-
-  /** The params. */
-  protected Map<String, String> params;
-
-  /** The conf. */
-  protected transient LensConf conf;
-
-  /** The training fraction. */
-  @AlgoParam(name = "trainingFraction", help = "% of dataset to be used for training", defaultValue = "0")
-  protected double trainingFraction;
-
-  /** The use training fraction. */
-  private boolean useTrainingFraction;
-
-  /** The label. */
-  @AlgoParam(name = "label", help = "Name of column which is used as a training label for supervised learning")
-  protected String label;
-
-  /** The partition filter. */
-  @AlgoParam(name = "partition", help = "Partition filter used to create create HCatInputFormats")
-  protected String partitionFilter;
-
-  /** The features. */
-  @AlgoParam(name = "feature", help = "Column name(s) which are to be used as sample features")
-  protected List<String> features;
-
-  /**
-   * Instantiates a new base spark algo.
-   *
-   * @param name        the name
-   * @param description the description
-   */
-  public BaseSparkAlgo(String name, String description) {
-    this.name = name;
-    this.description = description;
-  }
-
-  public void setSparkContext(JavaSparkContext sparkContext) {
-    this.sparkContext = sparkContext;
-  }
-
-  @Override
-  public LensConf getConf() {
-    return conf;
-  }
-
-  /*
-   * (non-Javadoc)
-   *
-   * @see org.apache.lens.ml.MLAlgo#configure(org.apache.lens.api.LensConf)
-   */
-  @Override
-  public void configure(LensConf configuration) {
-    this.conf = configuration;
-  }
-
-  /*
-   * (non-Javadoc)
-   *
-   * @see org.apache.lens.ml.MLAlgo#train(org.apache.lens.api.LensConf, java.lang.String, java.lang.String,
-   * java.lang.String, java.lang.String[])
-   */
-  @Override
-  public MLModel<?> train(LensConf conf, String db, String table, String modelId, String... params)
-    throws LensException {
-    parseParams(params);
-
-    TableTrainingSpec.TableTrainingSpecBuilder builder = TableTrainingSpec.newBuilder().hiveConf(toHiveConf(conf))
-      .database(db).table(table).partitionFilter(partitionFilter).featureColumns(features).labelColumn(label);
-
-    if (useTrainingFraction) {
-      builder.trainingFraction(trainingFraction);
-    }
-
-    TableTrainingSpec spec = builder.build();
-    LOG.info("Training " + " with " + features.size() + " features");
-
-    spec.createRDDs(sparkContext);
-
-    RDD<LabeledPoint> trainingRDD = spec.getTrainingRDD();
-    BaseSparkClassificationModel<?> model = trainInternal(modelId, trainingRDD);
-    model.setTable(table);
-    model.setParams(Arrays.asList(params));
-    model.setLabelColumn(label);
-    model.setFeatureColumns(features);
-    return model;
-  }
-
-  /**
-   * To hive conf.
-   *
-   * @param conf the conf
-   * @return the hive conf
-   */
-  protected HiveConf toHiveConf(LensConf conf) {
-    HiveConf hiveConf = new HiveConf();
-    for (String key : conf.getProperties().keySet()) {
-      hiveConf.set(key, conf.getProperties().get(key));
-    }
-    return hiveConf;
-  }
-
-  /**
-   * Parses the params.
-   *
-   * @param args the args
-   */
-  public void parseParams(String[] args) {
-    if (args.length % 2 != 0) {
-      throw new IllegalArgumentException("Invalid number of params " + args.length);
-    }
-
-    params = new LinkedHashMap<String, String>();
-
-    for (int i = 0; i < args.length; i += 2) {
-      if ("f".equalsIgnoreCase(args[i]) || "feature".equalsIgnoreCase(args[i])) {
-        if (features == null) {
-          features = new ArrayList<String>();
-        }
-        features.add(args[i + 1]);
-      } else if ("l".equalsIgnoreCase(args[i]) || "label".equalsIgnoreCase(args[i])) {
-        label = args[i + 1];
-      } else {
-        params.put(args[i].replaceAll("\\-+", ""), args[i + 1]);
-      }
-    }
-
-    if (params.containsKey("trainingFraction")) {
-      // Get training Fraction
-      String trainingFractionStr = params.get("trainingFraction");
-      try {
-        trainingFraction = Double.parseDouble(trainingFractionStr);
-        useTrainingFraction = true;
-      } catch (NumberFormatException nfe) {
-        throw new IllegalArgumentException("Invalid training fraction", nfe);
-      }
-    }
-
-    if (params.containsKey("partition") || params.containsKey("p")) {
-      partitionFilter = params.containsKey("partition") ? params.get("partition") : params.get("p");
-    }
-
-    parseAlgoParams(params);
-  }
-
-  /**
-   * Gets the param value.
-   *
-   * @param param      the param
-   * @param defaultVal the default val
-   * @return the param value
-   */
-  public double getParamValue(String param, double defaultVal) {
-    if (params.containsKey(param)) {
-      try {
-        return Double.parseDouble(params.get(param));
-      } catch (NumberFormatException nfe) {
-        LOG.warn("Couldn't parse param value: " + param + " as double.");
-      }
-    }
-    return defaultVal;
-  }
-
-  /**
-   * Gets the param value.
-   *
-   * @param param      the param
-   * @param defaultVal the default val
-   * @return the param value
-   */
-  public int getParamValue(String param, int defaultVal) {
-    if (params.containsKey(param)) {
-      try {
-        return Integer.parseInt(params.get(param));
-      } catch (NumberFormatException nfe) {
-        LOG.warn("Couldn't parse param value: " + param + " as integer.");
-      }
-    }
-    return defaultVal;
-  }
-
-  public String getName() {
-    return name;
-  }
-
-  public String getDescription() {
-    return description;
-  }
-
-  public Map<String, String> getArgUsage() {
-    Map<String, String> usage = new LinkedHashMap<String, String>();
-    Class<?> clz = this.getClass();
-    // Put class name and description as well as part of the usage
-    Algorithm algorithm = clz.getAnnotation(Algorithm.class);
-    if (algorithm != null) {
-      usage.put("Algorithm Name", algorithm.name());
-      usage.put("Algorithm Description", algorithm.description());
-    }
-
-    // Get all algo params including base algo params
-    while (clz != null) {
-      for (Field field : clz.getDeclaredFields()) {
-        AlgoParam param = field.getAnnotation(AlgoParam.class);
-        if (param != null) {
-          usage.put("[param] " + param.name(), param.help() + " Default Value = " + param.defaultValue());
-        }
-      }
-
-      if (clz.equals(BaseSparkAlgo.class)) {
-        break;
-      }
-      clz = clz.getSuperclass();
-    }
-    return usage;
-  }
-
-  /**
-   * Parses the algo params.
-   *
-   * @param params the params
-   */
-  public abstract void parseAlgoParams(Map<String, String> params);
-
-  /**
-   * Train internal.
-   *
-   * @param modelId     the model id
-   * @param trainingRDD the training rdd
-   * @return the base spark classification model
-   * @throws LensException the lens exception
-   */
-  protected abstract BaseSparkClassificationModel trainInternal(String modelId, RDD<LabeledPoint> trainingRDD)
-    throws LensException;
-}

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/0f5ea4c7/lens-ml-lib/src/main/java/org/apache/lens/ml/spark/algos/DecisionTreeAlgo.java
----------------------------------------------------------------------
diff --git a/lens-ml-lib/src/main/java/org/apache/lens/ml/spark/algos/DecisionTreeAlgo.java b/lens-ml-lib/src/main/java/org/apache/lens/ml/spark/algos/DecisionTreeAlgo.java
deleted file mode 100644
index a6d66c5..0000000
--- a/lens-ml-lib/src/main/java/org/apache/lens/ml/spark/algos/DecisionTreeAlgo.java
+++ /dev/null
@@ -1,109 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.lens.ml.spark.algos;
-
-import java.util.Map;
-
-import org.apache.lens.api.LensException;
-import org.apache.lens.ml.AlgoParam;
-import org.apache.lens.ml.Algorithm;
-import org.apache.lens.ml.spark.models.BaseSparkClassificationModel;
-import org.apache.lens.ml.spark.models.DecisionTreeClassificationModel;
-import org.apache.lens.ml.spark.models.SparkDecisionTreeModel;
-
-import org.apache.spark.mllib.regression.LabeledPoint;
-import org.apache.spark.mllib.tree.DecisionTree$;
-import org.apache.spark.mllib.tree.configuration.Algo$;
-import org.apache.spark.mllib.tree.impurity.Entropy$;
-import org.apache.spark.mllib.tree.impurity.Gini$;
-import org.apache.spark.mllib.tree.impurity.Impurity;
-import org.apache.spark.mllib.tree.impurity.Variance$;
-import org.apache.spark.mllib.tree.model.DecisionTreeModel;
-import org.apache.spark.rdd.RDD;
-
-import scala.Enumeration;
-
-/**
- * The Class DecisionTreeAlgo.
- */
-@Algorithm(name = "spark_decision_tree", description = "Spark Decision Tree classifier algo")
-public class DecisionTreeAlgo extends BaseSparkAlgo {
-
-  /** The algo. */
-  @AlgoParam(name = "algo", help = "Decision tree algorithm. Allowed values are 'classification' and 'regression'")
-  private Enumeration.Value algo;
-
-  /** The decision tree impurity. */
-  @AlgoParam(name = "impurity", help = "Impurity measure used by the decision tree. "
-    + "Allowed values are 'gini', 'entropy' and 'variance'")
-  private Impurity decisionTreeImpurity;
-
-  /** The max depth. */
-  @AlgoParam(name = "maxDepth", help = "Max depth of the decision tree. Integer values expected.",
-    defaultValue = "100")
-  private int maxDepth;
-
-  /**
-   * Instantiates a new decision tree algo.
-   *
-   * @param name        the name
-   * @param description the description
-   */
-  public DecisionTreeAlgo(String name, String description) {
-    super(name, description);
-  }
-
-  /*
-   * (non-Javadoc)
-   *
-   * @see org.apache.lens.ml.spark.algos.BaseSparkAlgo#parseAlgoParams(java.util.Map)
-   */
-  @Override
-  public void parseAlgoParams(Map<String, String> params) {
-    String dtreeAlgoName = params.get("algo");
-    if ("classification".equalsIgnoreCase(dtreeAlgoName)) {
-      algo = Algo$.MODULE$.Classification();
-    } else if ("regression".equalsIgnoreCase(dtreeAlgoName)) {
-      algo = Algo$.MODULE$.Regression();
-    }
-
-    String impurity = params.get("impurity");
-    if ("gini".equals(impurity)) {
-      decisionTreeImpurity = Gini$.MODULE$;
-    } else if ("entropy".equals(impurity)) {
-      decisionTreeImpurity = Entropy$.MODULE$;
-    } else if ("variance".equals(impurity)) {
-      decisionTreeImpurity = Variance$.MODULE$;
-    }
-
-    maxDepth = getParamValue("maxDepth", 100);
-  }
-
-  /*
-   * (non-Javadoc)
-   *
-   * @see org.apache.lens.ml.spark.algos.BaseSparkAlgo#trainInternal(java.lang.String, org.apache.spark.rdd.RDD)
-   */
-  @Override
-  protected BaseSparkClassificationModel trainInternal(String modelId, RDD<LabeledPoint> trainingRDD)
-    throws LensException {
-    DecisionTreeModel model = DecisionTree$.MODULE$.train(trainingRDD, algo, decisionTreeImpurity, maxDepth);
-    return new DecisionTreeClassificationModel(modelId, new SparkDecisionTreeModel(model));
-  }
-}

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/0f5ea4c7/lens-ml-lib/src/main/java/org/apache/lens/ml/spark/algos/KMeansAlgo.java
----------------------------------------------------------------------
diff --git a/lens-ml-lib/src/main/java/org/apache/lens/ml/spark/algos/KMeansAlgo.java b/lens-ml-lib/src/main/java/org/apache/lens/ml/spark/algos/KMeansAlgo.java
deleted file mode 100644
index 7ca5a79..0000000
--- a/lens-ml-lib/src/main/java/org/apache/lens/ml/spark/algos/KMeansAlgo.java
+++ /dev/null
@@ -1,163 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.lens.ml.spark.algos;
-
-import java.util.List;
-
-import org.apache.lens.api.LensConf;
-import org.apache.lens.api.LensException;
-import org.apache.lens.ml.*;
-import org.apache.lens.ml.spark.HiveTableRDD;
-import org.apache.lens.ml.spark.models.KMeansClusteringModel;
-
-import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.metastore.api.FieldSchema;
-import org.apache.hadoop.hive.ql.metadata.Hive;
-import org.apache.hadoop.hive.ql.metadata.Table;
-import org.apache.hadoop.io.WritableComparable;
-import org.apache.hive.hcatalog.data.HCatRecord;
-import org.apache.spark.api.java.JavaPairRDD;
-import org.apache.spark.api.java.JavaRDD;
-import org.apache.spark.api.java.JavaSparkContext;
-import org.apache.spark.api.java.function.Function;
-import org.apache.spark.mllib.clustering.KMeans;
-import org.apache.spark.mllib.clustering.KMeansModel;
-import org.apache.spark.mllib.linalg.Vector;
-import org.apache.spark.mllib.linalg.Vectors;
-
-import scala.Tuple2;
-
-/**
- * The Class KMeansAlgo.
- */
-@Algorithm(name = "spark_kmeans_algo", description = "Spark MLLib KMeans algo")
-public class KMeansAlgo implements MLAlgo {
-
-  /** The conf. */
-  private transient LensConf conf;
-
-  /** The spark context. */
-  private JavaSparkContext sparkContext;
-
-  /** The part filter. */
-  @AlgoParam(name = "partition", help = "Partition filter to be used while constructing table RDD")
-  private String partFilter = null;
-
-  /** The k. */
-  @AlgoParam(name = "k", help = "Number of cluster")
-  private int k;
-
-  /** The max iterations. */
-  @AlgoParam(name = "maxIterations", help = "Maximum number of iterations", defaultValue = "100")
-  private int maxIterations = 100;
-
-  /** The runs. */
-  @AlgoParam(name = "runs", help = "Number of parallel run", defaultValue = "1")
-  private int runs = 1;
-
-  /** The initialization mode. */
-  @AlgoParam(name = "initializationMode",
-    help = "initialization model, either \"random\" or \"k-means||\" (default).", defaultValue = "k-means||")
-  private String initializationMode = "k-means||";
-
-  @Override
-  public String getName() {
-    return getClass().getAnnotation(Algorithm.class).name();
-  }
-
-  @Override
-  public String getDescription() {
-    return getClass().getAnnotation(Algorithm.class).description();
-  }
-
-  /*
-   * (non-Javadoc)
-   *
-   * @see org.apache.lens.ml.MLAlgo#configure(org.apache.lens.api.LensConf)
-   */
-  @Override
-  public void configure(LensConf configuration) {
-    this.conf = configuration;
-  }
-
-  @Override
-  public LensConf getConf() {
-    return conf;
-  }
-
-  /*
-   * (non-Javadoc)
-   *
-   * @see org.apache.lens.ml.MLAlgo#train(org.apache.lens.api.LensConf, java.lang.String, java.lang.String,
-   * java.lang.String, java.lang.String[])
-   */
-  @Override
-  public MLModel train(LensConf conf, String db, String table, String modelId, String... params) throws LensException {
-    List<String> features = AlgoArgParser.parseArgs(this, params);
-    final int[] featurePositions = new int[features.size()];
-    final int NUM_FEATURES = features.size();
-
-    JavaPairRDD<WritableComparable, HCatRecord> rdd = null;
-    try {
-      // Map feature names to positions
-      Table tbl = Hive.get(toHiveConf(conf)).getTable(db, table);
-      List<FieldSchema> allCols = tbl.getAllCols();
-      int f = 0;
-      for (int i = 0; i < tbl.getAllCols().size(); i++) {
-        String colName = allCols.get(i).getName();
-        if (features.contains(colName)) {
-          featurePositions[f++] = i;
-        }
-      }
-
-      rdd = HiveTableRDD.createHiveTableRDD(sparkContext, toHiveConf(conf), db, table, partFilter);
-      JavaRDD<Vector> trainableRDD = rdd.map(new Function<Tuple2<WritableComparable, HCatRecord>, Vector>() {
-        @Override
-        public Vector call(Tuple2<WritableComparable, HCatRecord> v1) throws Exception {
-          HCatRecord hCatRecord = v1._2();
-          double[] arr = new double[NUM_FEATURES];
-          for (int i = 0; i < NUM_FEATURES; i++) {
-            Object val = hCatRecord.get(featurePositions[i]);
-            arr[i] = val == null ? 0d : (Double) val;
-          }
-          return Vectors.dense(arr);
-        }
-      });
-
-      KMeansModel model = KMeans.train(trainableRDD.rdd(), k, maxIterations, runs, initializationMode);
-      return new KMeansClusteringModel(modelId, model);
-    } catch (Exception e) {
-      throw new LensException("KMeans algo failed for " + db + "." + table, e);
-    }
-  }
-
-  /**
-   * To hive conf.
-   *
-   * @param conf the conf
-   * @return the hive conf
-   */
-  private HiveConf toHiveConf(LensConf conf) {
-    HiveConf hiveConf = new HiveConf();
-    for (String key : conf.getProperties().keySet()) {
-      hiveConf.set(key, conf.getProperties().get(key));
-    }
-    return hiveConf;
-  }
-}

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/0f5ea4c7/lens-ml-lib/src/main/java/org/apache/lens/ml/spark/algos/LogisticRegressionAlgo.java
----------------------------------------------------------------------
diff --git a/lens-ml-lib/src/main/java/org/apache/lens/ml/spark/algos/LogisticRegressionAlgo.java b/lens-ml-lib/src/main/java/org/apache/lens/ml/spark/algos/LogisticRegressionAlgo.java
deleted file mode 100644
index 106b3c5..0000000
--- a/lens-ml-lib/src/main/java/org/apache/lens/ml/spark/algos/LogisticRegressionAlgo.java
+++ /dev/null
@@ -1,86 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.lens.ml.spark.algos;
-
-import java.util.Map;
-
-import org.apache.lens.api.LensException;
-import org.apache.lens.ml.AlgoParam;
-import org.apache.lens.ml.Algorithm;
-import org.apache.lens.ml.spark.models.BaseSparkClassificationModel;
-import org.apache.lens.ml.spark.models.LogitRegressionClassificationModel;
-
-import org.apache.spark.mllib.classification.LogisticRegressionModel;
-import org.apache.spark.mllib.classification.LogisticRegressionWithSGD;
-import org.apache.spark.mllib.regression.LabeledPoint;
-import org.apache.spark.rdd.RDD;
-
-/**
- * The Class LogisticRegressionAlgo.
- */
-@Algorithm(name = "spark_logistic_regression", description = "Spark logistic regression algo")
-public class LogisticRegressionAlgo extends BaseSparkAlgo {
-
-  /** The iterations. */
-  @AlgoParam(name = "iterations", help = "Max number of iterations", defaultValue = "100")
-  private int iterations;
-
-  /** The step size. */
-  @AlgoParam(name = "stepSize", help = "Step size", defaultValue = "1.0d")
-  private double stepSize;
-
-  /** The min batch fraction. */
-  @AlgoParam(name = "minBatchFraction", help = "Fraction for batched learning", defaultValue = "1.0d")
-  private double minBatchFraction;
-
-  /**
-   * Instantiates a new logistic regression algo.
-   *
-   * @param name        the name
-   * @param description the description
-   */
-  public LogisticRegressionAlgo(String name, String description) {
-    super(name, description);
-  }
-
-  /*
-   * (non-Javadoc)
-   *
-   * @see org.apache.lens.ml.spark.algos.BaseSparkAlgo#parseAlgoParams(java.util.Map)
-   */
-  @Override
-  public void parseAlgoParams(Map<String, String> params) {
-    iterations = getParamValue("iterations", 100);
-    stepSize = getParamValue("stepSize", 1.0d);
-    minBatchFraction = getParamValue("minBatchFraction", 1.0d);
-  }
-
-  /*
-   * (non-Javadoc)
-   *
-   * @see org.apache.lens.ml.spark.algos.BaseSparkAlgo#trainInternal(java.lang.String, org.apache.spark.rdd.RDD)
-   */
-  @Override
-  protected BaseSparkClassificationModel trainInternal(String modelId, RDD<LabeledPoint> trainingRDD)
-    throws LensException {
-    LogisticRegressionModel lrModel = LogisticRegressionWithSGD.train(trainingRDD, iterations, stepSize,
-      minBatchFraction);
-    return new LogitRegressionClassificationModel(modelId, lrModel);
-  }
-}

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/0f5ea4c7/lens-ml-lib/src/main/java/org/apache/lens/ml/spark/algos/NaiveBayesAlgo.java
----------------------------------------------------------------------
diff --git a/lens-ml-lib/src/main/java/org/apache/lens/ml/spark/algos/NaiveBayesAlgo.java b/lens-ml-lib/src/main/java/org/apache/lens/ml/spark/algos/NaiveBayesAlgo.java
deleted file mode 100644
index f7652d1..0000000
--- a/lens-ml-lib/src/main/java/org/apache/lens/ml/spark/algos/NaiveBayesAlgo.java
+++ /dev/null
@@ -1,73 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.lens.ml.spark.algos;
-
-import java.util.Map;
-
-import org.apache.lens.api.LensException;
-import org.apache.lens.ml.AlgoParam;
-import org.apache.lens.ml.Algorithm;
-import org.apache.lens.ml.spark.models.BaseSparkClassificationModel;
-import org.apache.lens.ml.spark.models.NaiveBayesClassificationModel;
-
-import org.apache.spark.mllib.classification.NaiveBayes;
-import org.apache.spark.mllib.regression.LabeledPoint;
-import org.apache.spark.rdd.RDD;
-
-/**
- * The Class NaiveBayesAlgo.
- */
-@Algorithm(name = "spark_naive_bayes", description = "Spark Naive Bayes classifier algo")
-public class NaiveBayesAlgo extends BaseSparkAlgo {
-
-  /** The lambda. */
-  @AlgoParam(name = "lambda", help = "Lambda parameter for naive bayes learner", defaultValue = "1.0d")
-  private double lambda = 1.0;
-
-  /**
-   * Instantiates a new naive bayes algo.
-   *
-   * @param name        the name
-   * @param description the description
-   */
-  public NaiveBayesAlgo(String name, String description) {
-    super(name, description);
-  }
-
-  /*
-   * (non-Javadoc)
-   *
-   * @see org.apache.lens.ml.spark.algos.BaseSparkAlgo#parseAlgoParams(java.util.Map)
-   */
-  @Override
-  public void parseAlgoParams(Map<String, String> params) {
-    lambda = getParamValue("lambda", 1.0d);
-  }
-
-  /*
-   * (non-Javadoc)
-   *
-   * @see org.apache.lens.ml.spark.algos.BaseSparkAlgo#trainInternal(java.lang.String, org.apache.spark.rdd.RDD)
-   */
-  @Override
-  protected BaseSparkClassificationModel trainInternal(String modelId, RDD<LabeledPoint> trainingRDD)
-    throws LensException {
-    return new NaiveBayesClassificationModel(modelId, NaiveBayes.train(trainingRDD, lambda));
-  }
-}

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/0f5ea4c7/lens-ml-lib/src/main/java/org/apache/lens/ml/spark/algos/SVMAlgo.java
----------------------------------------------------------------------
diff --git a/lens-ml-lib/src/main/java/org/apache/lens/ml/spark/algos/SVMAlgo.java b/lens-ml-lib/src/main/java/org/apache/lens/ml/spark/algos/SVMAlgo.java
deleted file mode 100644
index 09251b7..0000000
--- a/lens-ml-lib/src/main/java/org/apache/lens/ml/spark/algos/SVMAlgo.java
+++ /dev/null
@@ -1,90 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.lens.ml.spark.algos;
-
-import java.util.Map;
-
-import org.apache.lens.api.LensException;
-import org.apache.lens.ml.AlgoParam;
-import org.apache.lens.ml.Algorithm;
-import org.apache.lens.ml.spark.models.BaseSparkClassificationModel;
-import org.apache.lens.ml.spark.models.SVMClassificationModel;
-
-import org.apache.spark.mllib.classification.SVMModel;
-import org.apache.spark.mllib.classification.SVMWithSGD;
-import org.apache.spark.mllib.regression.LabeledPoint;
-import org.apache.spark.rdd.RDD;
-
-/**
- * The Class SVMAlgo.
- */
-@Algorithm(name = "spark_svm", description = "Spark SVML classifier algo")
-public class SVMAlgo extends BaseSparkAlgo {
-
-  /** The min batch fraction. */
-  @AlgoParam(name = "minBatchFraction", help = "Fraction for batched learning", defaultValue = "1.0d")
-  private double minBatchFraction;
-
-  /** The reg param. */
-  @AlgoParam(name = "regParam", help = "regularization parameter for gradient descent", defaultValue = "1.0d")
-  private double regParam;
-
-  /** The step size. */
-  @AlgoParam(name = "stepSize", help = "Iteration step size", defaultValue = "1.0d")
-  private double stepSize;
-
-  /** The iterations. */
-  @AlgoParam(name = "iterations", help = "Number of iterations", defaultValue = "100")
-  private int iterations;
-
-  /**
-   * Instantiates a new SVM algo.
-   *
-   * @param name        the name
-   * @param description the description
-   */
-  public SVMAlgo(String name, String description) {
-    super(name, description);
-  }
-
-  /*
-   * (non-Javadoc)
-   *
-   * @see org.apache.lens.ml.spark.algos.BaseSparkAlgo#parseAlgoParams(java.util.Map)
-   */
-  @Override
-  public void parseAlgoParams(Map<String, String> params) {
-    minBatchFraction = getParamValue("minBatchFraction", 1.0);
-    regParam = getParamValue("regParam", 1.0);
-    stepSize = getParamValue("stepSize", 1.0);
-    iterations = getParamValue("iterations", 100);
-  }
-
-  /*
-   * (non-Javadoc)
-   *
-   * @see org.apache.lens.ml.spark.algos.BaseSparkAlgo#trainInternal(java.lang.String, org.apache.spark.rdd.RDD)
-   */
-  @Override
-  protected BaseSparkClassificationModel trainInternal(String modelId, RDD<LabeledPoint> trainingRDD)
-    throws LensException {
-    SVMModel svmModel = SVMWithSGD.train(trainingRDD, iterations, stepSize, regParam, minBatchFraction);
-    return new SVMClassificationModel(modelId, svmModel);
-  }
-}

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/0f5ea4c7/lens-ml-lib/src/main/java/org/apache/lens/ml/spark/models/BaseSparkClassificationModel.java
----------------------------------------------------------------------
diff --git a/lens-ml-lib/src/main/java/org/apache/lens/ml/spark/models/BaseSparkClassificationModel.java b/lens-ml-lib/src/main/java/org/apache/lens/ml/spark/models/BaseSparkClassificationModel.java
deleted file mode 100644
index deee1b7..0000000
--- a/lens-ml-lib/src/main/java/org/apache/lens/ml/spark/models/BaseSparkClassificationModel.java
+++ /dev/null
@@ -1,65 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.lens.ml.spark.models;
-
-import org.apache.lens.ml.ClassifierBaseModel;
-
-import org.apache.spark.mllib.classification.ClassificationModel;
-import org.apache.spark.mllib.linalg.Vectors;
-
-/**
- * The Class BaseSparkClassificationModel.
- *
- * @param <MODEL> the generic type
- */
-public class BaseSparkClassificationModel<MODEL extends ClassificationModel> extends ClassifierBaseModel {
-
-  /** The model id. */
-  private final String modelId;
-
-  /** The spark model. */
-  private final MODEL sparkModel;
-
-  /**
-   * Instantiates a new base spark classification model.
-   *
-   * @param modelId the model id
-   * @param model   the model
-   */
-  public BaseSparkClassificationModel(String modelId, MODEL model) {
-    this.modelId = modelId;
-    this.sparkModel = model;
-  }
-
-  /*
-   * (non-Javadoc)
-   *
-   * @see org.apache.lens.ml.MLModel#predict(java.lang.Object[])
-   */
-  @Override
-  public Double predict(Object... args) {
-    return sparkModel.predict(Vectors.dense(getFeatureVector(args)));
-  }
-
-  @Override
-  public String getId() {
-    return modelId;
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/0f5ea4c7/lens-ml-lib/src/main/java/org/apache/lens/ml/spark/models/DecisionTreeClassificationModel.java
----------------------------------------------------------------------
diff --git a/lens-ml-lib/src/main/java/org/apache/lens/ml/spark/models/DecisionTreeClassificationModel.java b/lens-ml-lib/src/main/java/org/apache/lens/ml/spark/models/DecisionTreeClassificationModel.java
deleted file mode 100644
index 0460024..0000000
--- a/lens-ml-lib/src/main/java/org/apache/lens/ml/spark/models/DecisionTreeClassificationModel.java
+++ /dev/null
@@ -1,35 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.lens.ml.spark.models;
-
-/**
- * The Class DecisionTreeClassificationModel.
- */
-public class DecisionTreeClassificationModel extends BaseSparkClassificationModel<SparkDecisionTreeModel> {
-
-  /**
-   * Instantiates a new decision tree classification model.
-   *
-   * @param modelId the model id
-   * @param model   the model
-   */
-  public DecisionTreeClassificationModel(String modelId, SparkDecisionTreeModel model) {
-    super(modelId, model);
-  }
-}

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/0f5ea4c7/lens-ml-lib/src/main/java/org/apache/lens/ml/spark/models/KMeansClusteringModel.java
----------------------------------------------------------------------
diff --git a/lens-ml-lib/src/main/java/org/apache/lens/ml/spark/models/KMeansClusteringModel.java b/lens-ml-lib/src/main/java/org/apache/lens/ml/spark/models/KMeansClusteringModel.java
deleted file mode 100644
index 959d9f4..0000000
--- a/lens-ml-lib/src/main/java/org/apache/lens/ml/spark/models/KMeansClusteringModel.java
+++ /dev/null
@@ -1,67 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.lens.ml.spark.models;
-
-import org.apache.lens.ml.MLModel;
-
-import org.apache.spark.mllib.clustering.KMeansModel;
-import org.apache.spark.mllib.linalg.Vectors;
-
-/**
- * The Class KMeansClusteringModel.
- */
-public class KMeansClusteringModel extends MLModel<Integer> {
-
-  /** The model. */
-  private final KMeansModel model;
-
-  /** The model id. */
-  private final String modelId;
-
-  /**
-   * Instantiates a new k means clustering model.
-   *
-   * @param modelId the model id
-   * @param model   the model
-   */
-  public KMeansClusteringModel(String modelId, KMeansModel model) {
-    this.model = model;
-    this.modelId = modelId;
-  }
-
-  /*
-   * (non-Javadoc)
-   *
-   * @see org.apache.lens.ml.MLModel#predict(java.lang.Object[])
-   */
-  @Override
-  public Integer predict(Object... args) {
-    // Convert the params to array of double
-    double[] arr = new double[args.length];
-    for (int i = 0; i < args.length; i++) {
-      if (args[i] != null) {
-        arr[i] = (Double) args[i];
-      } else {
-        arr[i] = 0d;
-      }
-    }
-
-    return model.predict(Vectors.dense(arr));
-  }
-}


[46/50] [abbrv] incubator-lens git commit: LENS-496: Populate sample data for sales cube and add example queries

Posted by am...@apache.org.
LENS-496: Populate sample data for sales cube and add example queries


Project: http://git-wip-us.apache.org/repos/asf/incubator-lens/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-lens/commit/9b97c19f
Tree: http://git-wip-us.apache.org/repos/asf/incubator-lens/tree/9b97c19f
Diff: http://git-wip-us.apache.org/repos/asf/incubator-lens/diff/9b97c19f

Branch: refs/heads/current-release-line
Commit: 9b97c19f9ccb1cbb092fe1719cc6a22835cb31b6
Parents: cddfc84
Author: Amareshwari Sriramadasu <am...@apache.org>
Authored: Tue Apr 14 16:59:06 2015 +0530
Committer: Rajat Khandelwal <ra...@gmail.com>
Committed: Tue Apr 14 16:59:06 2015 +0530

----------------------------------------------------------------------
 lens-dist/src/main/assembly/bin-dist.xml        | 110 +++++++++++++++++++
 .../lens/examples/PopulateSampleMetastore.java  |  78 ++++++++-----
 .../apache/lens/examples/SampleMetastore.java   |   1 +
 .../src/main/resources/city-local-part.xml      |  28 +++++
 .../src/main/resources/city-local.data          |   4 +
 lens-examples/src/main/resources/city_table.xml |   2 +-
 .../src/main/resources/cube-queries.sql         |  18 +++
 .../src/main/resources/customer-local-part.xml  |  29 +++++
 .../src/main/resources/customer-local.data      |   4 +
 .../src/main/resources/db-storage-schema.sql    |  32 +++++-
 .../src/main/resources/dimension-queries.sql    |   8 ++
 .../main/resources/product-local-clothes.data   |   1 +
 .../src/main/resources/product-local-food.data  |   1 +
 .../src/main/resources/product-local-parts.xml  |  60 ++++++++++
 .../resources/product-local-stationary.data     |   2 +
 .../main/resources/product-local-wearables.data |   1 +
 .../src/main/resources/product_db_table.xml     |  47 ++++++++
 .../src/main/resources/product_table.xml        |   9 --
 .../resources/sales-aggr-fact1-local-parts.xml  |  48 ++++++++
 .../main/resources/sales-aggr-fact1-local1.data |   2 +
 .../main/resources/sales-aggr-fact1-local2.data |   2 +
 .../main/resources/sales-aggr-fact1-local3.data |   2 +
 .../resources/sales-aggr-fact1-mydb-parts.xml   |  34 ++++++
 .../src/main/resources/sales-aggr-fact1.xml     |   2 -
 .../resources/sales-aggr-fact2-local-parts.xml  |  42 +++++++
 .../main/resources/sales-aggr-fact2-local1.data |   2 +
 .../main/resources/sales-aggr-fact2-local2.data |   2 +
 .../main/resources/sales-aggr-fact2-local3.data |   2 +
 .../resources/sales-aggr-fact2-mydb-parts.xml   |  34 ++++++
 lens-examples/src/main/resources/sales-cube.xml |  15 ++-
 .../src/main/resources/sales-raw-fact.xml       |   1 -
 .../main/resources/sales-raw-local-parts.xml    |  40 +++++++
 .../src/main/resources/sales-raw-local1.data    |   2 +
 .../src/main/resources/sales-raw-local2.data    |   3 +
 34 files changed, 621 insertions(+), 47 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/9b97c19f/lens-dist/src/main/assembly/bin-dist.xml
----------------------------------------------------------------------
diff --git a/lens-dist/src/main/assembly/bin-dist.xml b/lens-dist/src/main/assembly/bin-dist.xml
index 3554425..b7b74d5 100644
--- a/lens-dist/src/main/assembly/bin-dist.xml
+++ b/lens-dist/src/main/assembly/bin-dist.xml
@@ -172,6 +172,76 @@
 
     <fileSet>
       <directory>../lens-examples/src/main/resources/</directory>
+      <outputDirectory>/client/examples/data/product_local_clothes</outputDirectory>
+      <includes>
+        <include>product-local-clothes.data</include>
+      </includes>
+    </fileSet>
+
+    <fileSet>
+      <directory>../lens-examples/src/main/resources/</directory>
+      <outputDirectory>/client/examples/data/product_local_food</outputDirectory>
+      <includes>
+        <include>product-local-food.data</include>
+      </includes>
+    </fileSet>
+    <fileSet>
+      <directory>../lens-examples/src/main/resources/</directory>
+      <outputDirectory>/client/examples/data/product_local_stationary</outputDirectory>
+      <includes>
+        <include>product-local-stationary.data</include>
+      </includes>
+    </fileSet>
+    <fileSet>
+      <directory>../lens-examples/src/main/resources/</directory>
+      <outputDirectory>/client/examples/data/product_local_wearables</outputDirectory>
+      <includes>
+        <include>product-local-wearables.data</include>
+      </includes>
+    </fileSet>
+
+    <fileSet>
+      <directory>../lens-examples/src/main/resources/</directory>
+      <outputDirectory>/client/examples/data/customer_local</outputDirectory>
+      <includes>
+        <include>customer-local.data</include>
+      </includes>
+    </fileSet>
+
+    <fileSet>
+      <directory>../lens-examples/src/main/resources/</directory>
+      <outputDirectory>/client/examples/data/city_local</outputDirectory>
+      <includes>
+        <include>city-local.data</include>
+      </includes>
+    </fileSet>
+
+    <fileSet>
+      <directory>../lens-examples/src/main/resources/</directory>
+      <outputDirectory>/client/examples/data/sales_aggr_fact1_local1</outputDirectory>
+      <includes>
+        <include>sales-aggr-fact1-local1*.data</include>
+      </includes>
+    </fileSet>
+
+    <fileSet>
+      <directory>../lens-examples/src/main/resources/</directory>
+      <outputDirectory>/client/examples/data/sales_aggr_fact1_local2</outputDirectory>
+      <includes>
+        <include>sales-aggr-fact1-local2*.data</include>
+      </includes>
+    </fileSet>
+
+    <fileSet>
+      <directory>../lens-examples/src/main/resources/</directory>
+      <outputDirectory>/client/examples/data/sales_aggr_fact1_local3</outputDirectory>
+      <includes>
+        <include>sales-aggr-fact1-local3*.data</include>
+      </includes>
+    </fileSet>
+
+    <fileSet>
+      <directory>../lens-examples/src/main/resources/</directory>
       <outputDirectory>/client/examples/data/fact1_local1</outputDirectory>
       <includes>
         <include>fact1-local1*.data</include>
@@ -220,6 +290,46 @@
 
     <fileSet>
       <directory>../lens-examples/src/main/resources/</directory>
+      <outputDirectory>/client/examples/data/sales_aggr_fact2_local1</outputDirectory>
+      <includes>
+        <include>sales-aggr-fact2-local1*.data</include>
+      </includes>
+    </fileSet>
+
+    <fileSet>
+      <directory>../lens-examples/src/main/resources/</directory>
+      <outputDirectory>/client/examples/data/sales_aggr_fact2_local2</outputDirectory>
+      <includes>
+        <include>sales-aggr-fact2-local2*.data</include>
+      </includes>
+    </fileSet>
+
+    <fileSet>
+      <directory>../lens-examples/src/main/resources/</directory>
+      <outputDirectory>/client/examples/data/sales_aggr_fact2_local3</outputDirectory>
+      <includes>
+        <include>sales-aggr-fact2-local3*.data</include>
+      </includes>
+    </fileSet>
+
+    <fileSet>
+      <directory>../lens-examples/src/main/resources/</directory>
+      <outputDirectory>/client/examples/data/sales_raw_local1</outputDirectory>
+      <includes>
+        <include>sales-raw-local1*.data</include>
+      </includes>
+    </fileSet>
+
+    <fileSet>
+      <directory>../lens-examples/src/main/resources/</directory>
+      <outputDirectory>/client/examples/data/sales_raw_local2</outputDirectory>
+      <includes>
+        <include>sales-raw-local2*.data</include>
+      </includes>
+    </fileSet>
+
+    <fileSet>
+      <directory>../lens-examples/src/main/resources/</directory>
       <outputDirectory>/client/examples/data/raw_local1</outputDirectory>
       <includes>
         <include>raw-local1*.data</include>

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/9b97c19f/lens-examples/src/main/java/org/apache/lens/examples/PopulateSampleMetastore.java
----------------------------------------------------------------------
diff --git a/lens-examples/src/main/java/org/apache/lens/examples/PopulateSampleMetastore.java b/lens-examples/src/main/java/org/apache/lens/examples/PopulateSampleMetastore.java
index 0b2f37c..7883b15 100644
--- a/lens-examples/src/main/java/org/apache/lens/examples/PopulateSampleMetastore.java
+++ b/lens-examples/src/main/java/org/apache/lens/examples/PopulateSampleMetastore.java
@@ -24,6 +24,7 @@ import javax.xml.bind.JAXBException;
 
 import org.apache.lens.api.APIResult;
 import org.apache.lens.api.metastore.XPartition;
+import org.apache.lens.api.metastore.XPartitionList;
 import org.apache.lens.client.LensClientSingletonWrapper;
 import org.apache.lens.client.LensMetadataClient;
 
@@ -57,7 +58,9 @@ public class PopulateSampleMetastore {
       if (populate != null) {
         populate.close();
       }
-
+    }
+    if (populate.retCode != 0) {
+      System.exit(populate.retCode);
     }
   }
 
@@ -67,42 +70,45 @@ public class PopulateSampleMetastore {
   }
 
   public void populateDimTables() throws JAXBException, IOException {
-    XPartition partition = (XPartition) SampleMetastore.readFromXML("dim1-local-part.xml");
+    createDimTablePartition("dim1-local-part.xml", "dim_table", "local");
+    createDimTablePartition("dim2-local-part.xml", "dim_table2", "local");
+    createDimTablePartition("dim4-local-part.xml", "dim_table4", "local");
+    createDimTablePartitions("product-local-parts.xml", "product_table", "local");
+    createDimTablePartition("city-local-part.xml", "city_table", "local");
+    createDimTablePartition("customer-local-part.xml", "customer_table", "local");
+  }
+
+  private void createDimTablePartition(String fileName, String dimTable, String storage)
+    throws JAXBException, IOException {
+    XPartition partition = (XPartition) SampleMetastore.readFromXML(fileName);
     String partLocation = partition.getLocation();
     if (!partLocation.startsWith("/")) {
       partition.setLocation("file://" + System.getProperty("lens.home") + "/" + partLocation);
     }
-    result = metaClient.addPartitionToDimensionTable("dim_table", "local", partition);
+    result = metaClient.addPartitionToDimensionTable(dimTable, storage, partition);
     if (result.getStatus().equals(APIResult.Status.FAILED)) {
-      System.out.println("Adding partition from:dim1-local-part.xml failed");
+      System.err.println("Adding partition from:"+ fileName + " failed");
       retCode = 1;
     } else {
-      System.out.println("Added partition from:dim1-local-part.xml");
-    }
-    partition = (XPartition) SampleMetastore.readFromXML("dim2-local-part.xml");
-    partLocation = partition.getLocation();
-    if (!partLocation.startsWith("/")) {
-      partition.setLocation("file://" + System.getProperty("lens.home") + "/" + partLocation);
-    }
-    result = metaClient.addPartitionToDimensionTable("dim_table2", "local", partition);
-    if (result.getStatus().equals(APIResult.Status.FAILED)) {
-      System.out.println("Adding partition from:dim2-local-part.xml failed");
-      retCode = 1;
-    } else {
-      System.out.println("Added partition from:dim2-local-part.xml");
+      System.out.println("Added partition from:" + fileName);
     }
+  }
 
-    partition = (XPartition) SampleMetastore.readFromXML("dim4-local-part.xml");
-    partLocation = partition.getLocation();
-    if (!partLocation.startsWith("/")) {
-      partition.setLocation("file://" + System.getProperty("lens.home") + "/" + partLocation);
+  private void createDimTablePartitions(String fileName, String dimTable, String storage)
+    throws JAXBException, IOException {
+    XPartitionList partitionList = (XPartitionList) SampleMetastore.readFromXML(fileName);
+    for (XPartition partition : partitionList.getPartition()) {
+      String partLocation = partition.getLocation();
+      if (!partLocation.startsWith("/")) {
+        partition.setLocation("file://" + System.getProperty("lens.home") + "/" + partLocation);
+      }
     }
-    result = metaClient.addPartitionToDimensionTable("dim_table4", "local", partition);
+    result = metaClient.addPartitionsToDimensionTable(dimTable, storage, partitionList);
     if (result.getStatus().equals(APIResult.Status.FAILED)) {
-      System.out.println("Adding partition from:dim4-local-part.xml failed");
+      System.err.println("Adding partitions from:" + fileName + " failed");
       retCode = 1;
     } else {
-      System.out.println("Added partition from:dim4-local-part.xml");
+      System.out.println("Added partitions from:" + fileName);
     }
   }
 
@@ -114,13 +120,30 @@ public class PopulateSampleMetastore {
     }
     result = metaClient.addPartitionToFactTable(fact, storage, partition);
     if (result.getStatus().equals(APIResult.Status.FAILED)) {
-      System.out.println("Adding partition from:" + fileName + " failed");
+      System.err.println("Adding partition from:" + fileName + " failed");
       retCode = 1;
     } else {
       System.out.println("Added partition from:" + fileName);
     }
   }
 
+  private void createFactPartitions(String fileName, String fact, String storage) throws JAXBException, IOException {
+    XPartitionList partitionList = (XPartitionList) SampleMetastore.readFromXML(fileName);
+    for (XPartition partition : partitionList.getPartition()) {
+      String partLocation = partition.getLocation();
+      if (!partLocation.startsWith("/")) {
+        partition.setLocation("file://" + System.getProperty("lens.home") + "/" + partLocation);
+      }
+    }
+    result = metaClient.addPartitionsToFactTable(fact, storage, partitionList);
+    if (result.getStatus().equals(APIResult.Status.FAILED)) {
+      System.err.println("Adding partitions from:" + fileName + " failed");
+      retCode = 1;
+    } else {
+      System.out.println("Added partitions from:" + fileName);
+    }
+  }
+
   public void populateFactTables() throws JAXBException, IOException {
     createFactPartition("fact1-local-part1.xml", "fact1", "local");
     createFactPartition("fact1-local-part2.xml", "fact1", "local");
@@ -134,6 +157,11 @@ public class PopulateSampleMetastore {
     createFactPartition("raw-local-part4.xml", "rawfact", "local");
     createFactPartition("raw-local-part5.xml", "rawfact", "local");
     createFactPartition("raw-local-part6.xml", "rawfact", "local");
+    createFactPartitions("sales-raw-local-parts.xml", "sales_raw_fact", "local");
+    createFactPartitions("sales-aggr-fact1-local-parts.xml", "sales_aggr_fact1", "local");
+    createFactPartitions("sales-aggr-fact2-local-parts.xml", "sales_aggr_fact2", "local");
+    createFactPartitions("sales-aggr-fact1-mydb-parts.xml", "sales_aggr_fact1", "mydb");
+    createFactPartitions("sales-aggr-fact2-mydb-parts.xml", "sales_aggr_fact2", "mydb");
   }
 
 }

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/9b97c19f/lens-examples/src/main/java/org/apache/lens/examples/SampleMetastore.java
----------------------------------------------------------------------
diff --git a/lens-examples/src/main/java/org/apache/lens/examples/SampleMetastore.java b/lens-examples/src/main/java/org/apache/lens/examples/SampleMetastore.java
index df67ed7..2f28e5d 100644
--- a/lens-examples/src/main/java/org/apache/lens/examples/SampleMetastore.java
+++ b/lens-examples/src/main/java/org/apache/lens/examples/SampleMetastore.java
@@ -137,6 +137,7 @@ public class SampleMetastore {
     createDimTable("city_table.xml");
     createDimTable("city_subset.xml");
     createDimTable("product_table.xml");
+    createDimTable("product_db_table.xml");
     createDimTable("customer_table.xml");
   }
 

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/9b97c19f/lens-examples/src/main/resources/city-local-part.xml
----------------------------------------------------------------------
diff --git a/lens-examples/src/main/resources/city-local-part.xml b/lens-examples/src/main/resources/city-local-part.xml
new file mode 100644
index 0000000..528b880
--- /dev/null
+++ b/lens-examples/src/main/resources/city-local-part.xml
@@ -0,0 +1,28 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+
+  Licensed to the Apache Software Foundation (ASF) under one
+  or more contributor license agreements. See the NOTICE file
+  distributed with this work for additional information
+  regarding copyright ownership. The ASF licenses this file
+  to you under the Apache License, Version 2.0 (the
+  "License"); you may not use this file except in compliance
+  with the License. You may obtain a copy of the License at
+
+  http://www.apache.org/licenses/LICENSE-2.0
+
+  Unless required by applicable law or agreed to in writing,
+  software distributed under the License is distributed on an
+  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+  KIND, either express or implied. See the License for the
+  specific language governing permissions and limitations
+  under the License.
+
+-->
+<x_partition fact_or_dimension_table_name="city_table" location="examples/data/city_local" update_period="HOURLY"
+  xmlns="uri:lens:cube:0.1" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+  xsi:schemaLocation="uri:lens:cube:0.1 cube-0.1.xsd ">
+  <time_partition_spec>
+    <part_spec_element key="dt" value="2014-03-27T12:00:00"/>
+  </time_partition_spec>
+</x_partition>

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/9b97c19f/lens-examples/src/main/resources/city-local.data
----------------------------------------------------------------------
diff --git a/lens-examples/src/main/resources/city-local.data b/lens-examples/src/main/resources/city-local.data
new file mode 100644
index 0000000..a7a2d5e
--- /dev/null
+++ b/lens-examples/src/main/resources/city-local.data
@@ -0,0 +1,4 @@
+1,Bangalore,Parks:Temples:Software companies,10000000
+2,Hyderabad,Minars:Museums:Software companies,11000000
+3,Austin,Lakes:University:Museums:Software companies,800000
+4,San Francisco,Golden gate bridge:Beaches:Software companies,800000

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/9b97c19f/lens-examples/src/main/resources/city_table.xml
----------------------------------------------------------------------
diff --git a/lens-examples/src/main/resources/city_table.xml b/lens-examples/src/main/resources/city_table.xml
index 5ce18b6..27be305 100644
--- a/lens-examples/src/main/resources/city_table.xml
+++ b/lens-examples/src/main/resources/city_table.xml
@@ -36,7 +36,7 @@
         <update_period>HOURLY</update_period>
       </update_periods>
       <storage_name>local</storage_name>
-      <table_desc external="true" field_delimiter="," table_location="/tmp/examples/city">
+      <table_desc external="true" field_delimiter="," collection_delimiter=":" table_location="/tmp/examples/city">
         <part_cols>
           <column comment="Time column" name="dt" type="STRING"/>
         </part_cols>

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/9b97c19f/lens-examples/src/main/resources/cube-queries.sql
----------------------------------------------------------------------
diff --git a/lens-examples/src/main/resources/cube-queries.sql b/lens-examples/src/main/resources/cube-queries.sql
index 8fee5a8..7a0b542 100644
--- a/lens-examples/src/main/resources/cube-queries.sql
+++ b/lens-examples/src/main/resources/cube-queries.sql
@@ -85,3 +85,21 @@ drop table temp3
 create table temp3(name string, msr4 float) ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe' WITH SERDEPROPERTIES ('serialization.null.format'='-NA-','field.delim'=','  ) STORED AS TEXTFILE
 insert overwrite table temp3 cube select sample_dim.name, measure4 from sample_cube where time_range_in(dt, '2014-06-24-23', '2014-06-25-00')
 select * from temp3
+cube select product_id, store_sales from sales where time_range_in(order_time, '2015-04-11-00', '2015-04-13-00')
+cube select product_id, store_sales from sales where time_range_in(order_time, '2015-04-11-00', '2015-04-13-01')
+cube select product_id, store_sales from sales where time_range_in(order_time, '2015-04-12-00', '2015-04-13-00')
+cube select product_id, store_sales from sales where time_range_in(order_time, '2015-04-13-00', '2015-04-13-02')
+cube select product_id, store_sales from sales where time_range_in(delivery_time, '2015-04-11-00', '2015-04-13-00')
+cube select product_id, store_sales from sales where time_range_in(delivery_time, '2015-04-12-00', '2015-04-13-00')
+cube select promotion_sales, store_sales from sales where time_range_in(order_time, '2015-04-13-00', '2015-04-13-02')
+cube select promotion_sales, store_sales from sales where time_range_in(order_time, '2015-04-13-00', '2015-04-13-01')
+cube select customer_city_name, store_sales from sales where time_range_in(delivery_time, '2015-04-12-00', '2015-04-13-00')
+cube select customer_city_name, store_sales from sales where time_range_in(delivery_time, '2015-04-11-00', '2015-04-13-00')
+cube select customer_city_name, delivery_city.name, production_city.name, store_sales from sales where time_range_in(delivery_time, '2015-04-11-00', '2015-04-13-00')
+cube select product_details.color, store_sales from sales where time_range_in(order_time, '2015-04-11-00', '2015-04-13-00') and product_details.category='Stationary'
+cube select product_details.category, store_sales from sales where time_range_in(order_time, '2015-04-11-00', '2015-04-13-01')
+cube select product_details.color, store_sales from sales where time_range_in(order_time, '2015-04-12-00', '2015-04-13-00')and product_details.category='Stationary'
+cube select product_details.category, store_sales from sales where time_range_in(order_time, '2015-04-11-00', '2015-04-13-01')
+cube select product_details.category, store_sales from sales where time_range_in(order_time, '2015-04-12-00', '2015-04-13-00')
+cube select product_details.color, store_sales from sales where time_range_in(delivery_time, '2015-04-11-00', '2015-04-13-00') and product_details.category='Stationary'
+cube select product_details.color, store_sales from sales where time_range_in(delivery_time, '2015-04-12-00', '2015-04-13-00') and product_details.category='Stationary'

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/9b97c19f/lens-examples/src/main/resources/customer-local-part.xml
----------------------------------------------------------------------
diff --git a/lens-examples/src/main/resources/customer-local-part.xml b/lens-examples/src/main/resources/customer-local-part.xml
new file mode 100644
index 0000000..2b9e137
--- /dev/null
+++ b/lens-examples/src/main/resources/customer-local-part.xml
@@ -0,0 +1,29 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+
+  Licensed to the Apache Software Foundation (ASF) under one
+  or more contributor license agreements. See the NOTICE file
+  distributed with this work for additional information
+  regarding copyright ownership. The ASF licenses this file
+  to you under the Apache License, Version 2.0 (the
+  "License"); you may not use this file except in compliance
+  with the License. You may obtain a copy of the License at
+
+  http://www.apache.org/licenses/LICENSE-2.0
+
+  Unless required by applicable law or agreed to in writing,
+  software distributed under the License is distributed on an
+  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+  KIND, either express or implied. See the License for the
+  specific language governing permissions and limitations
+  under the License.
+
+-->
+<x_partition fact_or_dimension_table_name="customer_table" location="examples/data/customer_local"
+  update_period="HOURLY"
+  xmlns="uri:lens:cube:0.1" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+  xsi:schemaLocation="uri:lens:cube:0.1 cube-0.1.xsd ">
+  <time_partition_spec>
+    <part_spec_element key="dt" value="2014-03-27T12:00:00"/>
+  </time_partition_spec>
+</x_partition>

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/9b97c19f/lens-examples/src/main/resources/customer-local.data
----------------------------------------------------------------------
diff --git a/lens-examples/src/main/resources/customer-local.data b/lens-examples/src/main/resources/customer-local.data
new file mode 100644
index 0000000..619a0e4
--- /dev/null
+++ b/lens-examples/src/main/resources/customer-local.data
@@ -0,0 +1,4 @@
+1,Ramu,Male,25,1,Good
+2,Meena,Female,30,2,Good
+3,JohnX,Male,25,3,Bad
+4,Anju,Female,35,4,Good
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/9b97c19f/lens-examples/src/main/resources/db-storage-schema.sql
----------------------------------------------------------------------
diff --git a/lens-examples/src/main/resources/db-storage-schema.sql b/lens-examples/src/main/resources/db-storage-schema.sql
index 653d2fd..bbb62e8 100644
--- a/lens-examples/src/main/resources/db-storage-schema.sql
+++ b/lens-examples/src/main/resources/db-storage-schema.sql
@@ -30,21 +30,41 @@ CREATE TABLE mydb_dim_table4 (id integer, name varchar(255), detail varchar(255)
 
 insert into mydb_dim_table4(id, name, detail, d2id) values (1,'first','this is one',11)
 insert into mydb_dim_table4(id, name, detail, d2id) values (2,'second','this is two',12)
-
-
 insert into mydb_dim_table4(id, name, detail, d2id) values (3,'third','this is three',12)
 
 DROP TABLE IF EXISTS mydb_sales_aggr_fact2
 CREATE TABLE mydb_sales_aggr_fact2 (order_time timestamp, delivery_time timestamp, product_id integer, promotion_id integer, customer_city_id integer, production_city_id integer, delivery_city_id integer, unit_sales double, store_sales double, store_cost double)
 
+INSERT INTO mydb_sales_aggr_fact2(order_time, delivery_time, product_id, promotion_id, customer_city_id, production_city_id, delivery_city_id, unit_sales, store_sales, store_cost) values ('2015-04-12 00:00:00','2015-04-12 00:00:00',1,1,1,1,1,1,5,0)
+INSERT INTO mydb_sales_aggr_fact2(order_time, delivery_time, product_id, promotion_id, customer_city_id, production_city_id, delivery_city_id, unit_sales, store_sales, store_cost) values ('2015-04-12 00:00:00','2015-04-12 00:00:00',2,1,2,2,2,1,8,2)
+
 DROP TABLE IF EXISTS mydb_sales_aggr_fact1
-CREATE TABLE mydb_sales_aggr_fact1 (order_time timestamp, delivery_time timestamp, customer_id integer, product_id integer, promotion_id integer, customer_city_id integer, production_city_id integer, delivery_city_id integer, unit_sales double, store_sales double, store_cost double, average_line_item_price float, average_line_item_discount float, max_line_item_price float, max_line_item_discount float)
+CREATE TABLE mydb_sales_aggr_fact1 (order_time timestamp, delivery_time timestamp, customer_id integer, product_id integer, promotion_id integer, customer_city_id integer, production_city_id integer, delivery_city_id integer, unit_sales double, store_sales double, store_cost double, max_line_item_price float, max_line_item_discount float)
+
+INSERT INTO mydb_sales_aggr_fact1 (order_time, delivery_time, customer_id, product_id, promotion_id, customer_city_id, production_city_id, delivery_city_id, unit_sales, store_sales, store_cost, max_line_item_price, max_line_item_discount) values ('2015-04-12 00:00:00','2015-04-12 00:00:00',1,1,1,1,1,1,1,5,0,5,0)
+INSERT INTO mydb_sales_aggr_fact1 (order_time, delivery_time, customer_id, product_id, promotion_id, customer_city_id, production_city_id, delivery_city_id, unit_sales, store_sales, store_cost, max_line_item_price, max_line_item_discount) values ('2015-04-12 00:00:00','2015-04-12 00:00:00',2,2,1,2,2,2,1,8,2,10,2)
 
-DROP TABLE IF EXISTS mydb_product_table
-CREATE TABLE mydb_product_table (id integer, SKU_number integer, description varchar(255), color varchar(50), category varchar(255), weight float, manufacturer varchar(255))
+DROP TABLE IF EXISTS mydb_product_db_table
+CREATE TABLE mydb_product_db_table (id integer, SKU_number integer, description varchar(255), color varchar(50), category varchar(255), weight float, manufacturer varchar(255))
+
+INSERT INTO mydb_product_db_table (id, SKU_number, description, color, category, weight, manufacturer) values (1,111,'Book','White','Stationary',200,'BookCompany')
+INSERT INTO mydb_product_db_table (id, SKU_number, description, color, category, weight, manufacturer) values (2,222,'Pen','Blue','Stationary',50,'BookCompany')
+INSERT INTO mydb_product_db_table (id, SKU_number, description, color, category, weight, manufacturer) values (3,333,'Shirt','Purple','Clothes',200,'StylistCompany')
+INSERT INTO mydb_product_db_table (id, SKU_number, description, color, category, weight, manufacturer) values (4,444,'Shoes','Blue','Wearables',1000,'StylistCompany')
+INSERT INTO mydb_product_db_table (id, SKU_number, description, color, category, weight, manufacturer) values (5,555,'Chocolates','Brown','Food',500,'ChocoManufacturer')
 
 DROP TABLE IF EXISTS mydb_customer_table
-CREATE TABLE mydb_customer_table (id integer, name varchar(255), description varchar(255), gender varchar(50), age integer, city_id integer, customer_credit_status varchar(255))
+CREATE TABLE mydb_customer_table (id integer, name varchar(255), gender varchar(50), age integer, city_id integer, customer_credit_status varchar(255))
+
+INSERT INTO mydb_customer_table (id, name, gender, age, city_id, customer_credit_status) values (1,'Ramu','Male',25,1,'Good')
+INSERT INTO mydb_customer_table (id, name, gender, age, city_id, customer_credit_status) values (2,'Meena','Female',30,2,'Good')
+INSERT INTO mydb_customer_table (id, name, gender, age, city_id, customer_credit_status) values (3,'JohnX','Male',25,3,'Bad')
+INSERT INTO mydb_customer_table (id, name, gender, age, city_id, customer_credit_status) values (4,'Anju','Female',35,4,'Good')
 
 DROP TABLE IF EXISTS mydb_city_subset
 CREATE TABLE mydb_city_subset (id integer, name varchar(255))
+
+INSERT INTO mydb_city_subset (id, name) values (1, 'Bangalore')
+INSERT INTO mydb_city_subset (id, name) values (2, 'Hyderabad')
+INSERT INTO mydb_city_subset (id, name) values (3, 'Austin')
+INSERT INTO mydb_city_subset (id, name) values (4, 'San Fransisco')

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/9b97c19f/lens-examples/src/main/resources/dimension-queries.sql
----------------------------------------------------------------------
diff --git a/lens-examples/src/main/resources/dimension-queries.sql b/lens-examples/src/main/resources/dimension-queries.sql
index 5807336..7aff639 100644
--- a/lens-examples/src/main/resources/dimension-queries.sql
+++ b/lens-examples/src/main/resources/dimension-queries.sql
@@ -69,3 +69,11 @@ drop table temp3
 create table temp3(id int, name string) ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe' WITH SERDEPROPERTIES ('serialization.null.format'='-NA-','field.delim'=','  ) STORED AS TEXTFILE
 insert overwrite table temp3 cube select id,name from sample_dim
 select * from temp3
+cube select name from city
+cube select name from city where population > 1000000
+cube select name, poi from city
+cube select distinct category from product
+cube select id, description from product where weight > 100
+cube select category, count(1) as `Number of products` from product
+cube select name, customer_city_name from customer
+cube select customer.name, customer_city.population from customer

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/9b97c19f/lens-examples/src/main/resources/product-local-clothes.data
----------------------------------------------------------------------
diff --git a/lens-examples/src/main/resources/product-local-clothes.data b/lens-examples/src/main/resources/product-local-clothes.data
new file mode 100644
index 0000000..b85e329
--- /dev/null
+++ b/lens-examples/src/main/resources/product-local-clothes.data
@@ -0,0 +1 @@
+3,333,Shirt,Purple,200,StylistCompany
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/9b97c19f/lens-examples/src/main/resources/product-local-food.data
----------------------------------------------------------------------
diff --git a/lens-examples/src/main/resources/product-local-food.data b/lens-examples/src/main/resources/product-local-food.data
new file mode 100644
index 0000000..78fe205
--- /dev/null
+++ b/lens-examples/src/main/resources/product-local-food.data
@@ -0,0 +1 @@
+5,555,Chocolates,Brown,500,ChocoManufacturer
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/9b97c19f/lens-examples/src/main/resources/product-local-parts.xml
----------------------------------------------------------------------
diff --git a/lens-examples/src/main/resources/product-local-parts.xml b/lens-examples/src/main/resources/product-local-parts.xml
new file mode 100644
index 0000000..426dec0
--- /dev/null
+++ b/lens-examples/src/main/resources/product-local-parts.xml
@@ -0,0 +1,60 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+
+  Licensed to the Apache Software Foundation (ASF) under one
+  or more contributor license agreements. See the NOTICE file
+  distributed with this work for additional information
+  regarding copyright ownership. The ASF licenses this file
+  to you under the Apache License, Version 2.0 (the
+  "License"); you may not use this file except in compliance
+  with the License. You may obtain a copy of the License at
+
+  http://www.apache.org/licenses/LICENSE-2.0
+
+  Unless required by applicable law or agreed to in writing,
+  software distributed under the License is distributed on an
+  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+  KIND, either express or implied. See the License for the
+  specific language governing permissions and limitations
+  under the License.
+
+-->
+<x_partition_list xmlns="uri:lens:cube:0.1" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+  xsi:schemaLocation="uri:lens:cube:0.1 cube-0.1.xsd ">
+  <partition fact_or_dimension_table_name="product_table" location="examples/data/product_local_clothes"
+    update_period="HOURLY">
+    <non_time_partition_spec>
+      <part_spec_element key="category" value="Clothes" />
+    </non_time_partition_spec>
+    <time_partition_spec>
+      <part_spec_element key="dt" value="2014-03-27T12:00:00" />
+    </time_partition_spec>
+  </partition>
+  <partition fact_or_dimension_table_name="product_table" location="examples/data/product_local_food"
+    update_period="HOURLY">
+    <non_time_partition_spec>
+      <part_spec_element key="category" value="Food" />
+    </non_time_partition_spec>
+    <time_partition_spec>
+      <part_spec_element key="dt" value="2014-03-27T12:00:00" />
+    </time_partition_spec>
+  </partition>
+  <partition fact_or_dimension_table_name="product_table" location="examples/data/product_local_stationary"
+    update_period="HOURLY">
+    <non_time_partition_spec>
+      <part_spec_element key="category" value="Stationary" />
+    </non_time_partition_spec>
+    <time_partition_spec>
+      <part_spec_element key="dt" value="2014-03-27T12:00:00" />
+    </time_partition_spec>
+  </partition>
+  <partition fact_or_dimension_table_name="product_table" location="examples/data/product_local_wearables"
+    update_period="HOURLY">
+    <non_time_partition_spec>
+      <part_spec_element key="category" value="Wearables" />
+    </non_time_partition_spec>
+    <time_partition_spec>
+      <part_spec_element key="dt" value="2014-03-27T12:00:00" />
+    </time_partition_spec>
+  </partition>
+</x_partition_list>

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/9b97c19f/lens-examples/src/main/resources/product-local-stationary.data
----------------------------------------------------------------------
diff --git a/lens-examples/src/main/resources/product-local-stationary.data b/lens-examples/src/main/resources/product-local-stationary.data
new file mode 100644
index 0000000..9b994af
--- /dev/null
+++ b/lens-examples/src/main/resources/product-local-stationary.data
@@ -0,0 +1,2 @@
+1,111,Book,White,200,BookCompany
+2,222,Pen,Blue,50,BookCompany
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/9b97c19f/lens-examples/src/main/resources/product-local-wearables.data
----------------------------------------------------------------------
diff --git a/lens-examples/src/main/resources/product-local-wearables.data b/lens-examples/src/main/resources/product-local-wearables.data
new file mode 100644
index 0000000..16ff905
--- /dev/null
+++ b/lens-examples/src/main/resources/product-local-wearables.data
@@ -0,0 +1 @@
+4,444,Shoes,Blue,1000,StylistCompany

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/9b97c19f/lens-examples/src/main/resources/product_db_table.xml
----------------------------------------------------------------------
diff --git a/lens-examples/src/main/resources/product_db_table.xml b/lens-examples/src/main/resources/product_db_table.xml
new file mode 100644
index 0000000..d961c44
--- /dev/null
+++ b/lens-examples/src/main/resources/product_db_table.xml
@@ -0,0 +1,47 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+
+  Licensed to the Apache Software Foundation (ASF) under one
+  or more contributor license agreements. See the NOTICE file
+  distributed with this work for additional information
+  regarding copyright ownership. The ASF licenses this file
+  to you under the Apache License, Version 2.0 (the
+  "License"); you may not use this file except in compliance
+  with the License. You may obtain a copy of the License at
+
+  http://www.apache.org/licenses/LICENSE-2.0
+
+  Unless required by applicable law or agreed to in writing,
+  software distributed under the License is distributed on an
+  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+  KIND, either express or implied. See the License for the
+  specific language governing permissions and limitations
+  under the License.
+
+-->
+<x_dimension_table dimension_name="product" table_name="product_db_table" weight="0.0" xmlns="uri:lens:cube:0.1"
+  xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="uri:lens:cube:0.1 cube-0.1.xsd ">
+  <columns>
+    <column comment="ID" name="id" type="INT"/>
+    <column comment="SKU_number" name="SKU_number" type="INT"/>
+    <column comment="" name="description" type="STRING"/>
+    <column comment="" name="color" type="STRING"/>
+    <column comment="Category" name="category" type="STRING"/>
+    <column comment="" name="weight" type="FLOAT"/>
+    <column comment="" name="manufacturer" type="STRING"/>
+  </columns>
+  <properties>
+    <property name="dim4.prop" value="d1"/>
+  </properties>
+  <storage_tables>
+    <storage_table>
+      <storage_name>mydb</storage_name>
+      <table_desc external="true" table_location="/tmp/db-storage.db"
+        storage_handler_name="org.apache.lens.storage.db.DBStorageHandler">
+        <table_parameters>
+          <property name="lens.metastore.native.db.name" value="default"/>
+        </table_parameters>
+      </table_desc>
+    </storage_table>
+  </storage_tables>
+</x_dimension_table>

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/9b97c19f/lens-examples/src/main/resources/product_table.xml
----------------------------------------------------------------------
diff --git a/lens-examples/src/main/resources/product_table.xml b/lens-examples/src/main/resources/product_table.xml
index f7e804f..86a1115 100644
--- a/lens-examples/src/main/resources/product_table.xml
+++ b/lens-examples/src/main/resources/product_table.xml
@@ -47,14 +47,5 @@
         <time_part_cols>dt</time_part_cols>
       </table_desc>
     </storage_table>
-    <storage_table>
-      <storage_name>mydb</storage_name>
-      <table_desc external="true" field_delimiter="," table_location="/tmp/db-storage.db"
-        storage_handler_name="org.apache.lens.storage.db.DBStorageHandler">
-        <table_parameters>
-          <property name="lens.metastore.native.db.name" value="default"/>
-        </table_parameters>
-      </table_desc>
-    </storage_table>
   </storage_tables>
 </x_dimension_table>

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/9b97c19f/lens-examples/src/main/resources/sales-aggr-fact1-local-parts.xml
----------------------------------------------------------------------
diff --git a/lens-examples/src/main/resources/sales-aggr-fact1-local-parts.xml b/lens-examples/src/main/resources/sales-aggr-fact1-local-parts.xml
new file mode 100644
index 0000000..3bba7fa
--- /dev/null
+++ b/lens-examples/src/main/resources/sales-aggr-fact1-local-parts.xml
@@ -0,0 +1,48 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+
+  Licensed to the Apache Software Foundation (ASF) under one
+  or more contributor license agreements. See the NOTICE file
+  distributed with this work for additional information
+  regarding copyright ownership. The ASF licenses this file
+  to you under the Apache License, Version 2.0 (the
+  "License"); you may not use this file except in compliance
+  with the License. You may obtain a copy of the License at
+
+  http://www.apache.org/licenses/LICENSE-2.0
+
+  Unless required by applicable law or agreed to in writing,
+  software distributed under the License is distributed on an
+  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+  KIND, either express or implied. See the License for the
+  specific language governing permissions and limitations
+  under the License.
+
+-->
+<x_partition_list xmlns="uri:lens:cube:0.1"
+  xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="uri:lens:cube:0.1 cube-0.1.xsd ">
+  <partition fact_or_dimension_table_name="sales_aggr_fact1" location="examples/data/sales_aggr_fact1_local1"
+    update_period="DAILY">
+    <time_partition_spec>
+      <part_spec_element key="pt" value="2015-04-12T00:00:00"/>
+      <part_spec_element key="ot" value="2015-04-11T00:00:00"/>
+      <part_spec_element key="dt" value="2015-04-11T00:00:00"/>
+    </time_partition_spec>
+  </partition>
+  <partition fact_or_dimension_table_name="sales_aggr_fact1" location="examples/data/sales_aggr_fact1_local2"
+    update_period="DAILY">
+    <time_partition_spec>
+      <part_spec_element key="pt" value="2015-04-13T00:00:00"/>
+      <part_spec_element key="ot" value="2015-04-12T00:00:00"/>
+      <part_spec_element key="dt" value="2015-04-12T00:00:00"/>
+    </time_partition_spec>
+  </partition>
+  <partition fact_or_dimension_table_name="sales_aggr_fact1" location="examples/data/sales_aggr_fact1_local3"
+    update_period="HOURLY">
+    <time_partition_spec>
+      <part_spec_element key="pt" value="2015-04-13T05:00:00"/>
+      <part_spec_element key="ot" value="2015-04-13T00:00:00"/>
+      <part_spec_element key="dt" value="2015-04-13T04:00:00"/>
+    </time_partition_spec>
+  </partition>
+</x_partition_list>

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/9b97c19f/lens-examples/src/main/resources/sales-aggr-fact1-local1.data
----------------------------------------------------------------------
diff --git a/lens-examples/src/main/resources/sales-aggr-fact1-local1.data b/lens-examples/src/main/resources/sales-aggr-fact1-local1.data
new file mode 100644
index 0000000..859f326
--- /dev/null
+++ b/lens-examples/src/main/resources/sales-aggr-fact1-local1.data
@@ -0,0 +1,2 @@
+2015-04-11 00:00:00,2015-04-11 00:00:00,1,1,1,1,1,1,1,5,0,5,0
+2015-04-11 00:00:00,2015-04-11 00:00:00,2,2,1,2,2,2,1,8,2,10,2
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/9b97c19f/lens-examples/src/main/resources/sales-aggr-fact1-local2.data
----------------------------------------------------------------------
diff --git a/lens-examples/src/main/resources/sales-aggr-fact1-local2.data b/lens-examples/src/main/resources/sales-aggr-fact1-local2.data
new file mode 100644
index 0000000..6fd01f7
--- /dev/null
+++ b/lens-examples/src/main/resources/sales-aggr-fact1-local2.data
@@ -0,0 +1,2 @@
+2015-04-12 00:00:00,2015-04-12 00:00:00,1,1,1,1,1,1,1,5,0,5,0
+2015-04-12 00:00:00,2015-04-12 00:00:00,2,2,1,2,2,2,1,8,2,10,2
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/9b97c19f/lens-examples/src/main/resources/sales-aggr-fact1-local3.data
----------------------------------------------------------------------
diff --git a/lens-examples/src/main/resources/sales-aggr-fact1-local3.data b/lens-examples/src/main/resources/sales-aggr-fact1-local3.data
new file mode 100644
index 0000000..2cdbe45
--- /dev/null
+++ b/lens-examples/src/main/resources/sales-aggr-fact1-local3.data
@@ -0,0 +1,2 @@
+2015-04-13 00:00:00,2015-04-13 04:00:00,1,1,1,1,1,1,1,5,0,5,0
+2015-04-13 00:00:00,2015-04-13 04:00:00,2,2,1,2,2,2,1,8,2,10,2
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/9b97c19f/lens-examples/src/main/resources/sales-aggr-fact1-mydb-parts.xml
----------------------------------------------------------------------
diff --git a/lens-examples/src/main/resources/sales-aggr-fact1-mydb-parts.xml b/lens-examples/src/main/resources/sales-aggr-fact1-mydb-parts.xml
new file mode 100644
index 0000000..451e165
--- /dev/null
+++ b/lens-examples/src/main/resources/sales-aggr-fact1-mydb-parts.xml
@@ -0,0 +1,34 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+
+  Licensed to the Apache Software Foundation (ASF) under one
+  or more contributor license agreements. See the NOTICE file
+  distributed with this work for additional information
+  regarding copyright ownership. The ASF licenses this file
+  to you under the Apache License, Version 2.0 (the
+  "License"); you may not use this file except in compliance
+  with the License. You may obtain a copy of the License at
+
+  http://www.apache.org/licenses/LICENSE-2.0
+
+  Unless required by applicable law or agreed to in writing,
+  software distributed under the License is distributed on an
+  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+  KIND, either express or implied. See the License for the
+  specific language governing permissions and limitations
+  under the License.
+
+-->
+<x_partition_list xmlns="uri:lens:cube:0.1"
+  xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="uri:lens:cube:0.1 cube-0.1.xsd ">
+  <partition fact_or_dimension_table_name="sales_aggr_fact1" location="/tmp/examples/data/sales_aggr_fact1_local2"
+    update_period="DAILY"
+    xmlns="uri:lens:cube:0.1"
+    xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="uri:lens:cube:0.1 cube-0.1.xsd ">
+    <time_partition_spec>
+      <part_spec_element key="pt" value="2015-04-13T00:00:00"/>
+      <part_spec_element key="ot" value="2015-04-12T00:00:00"/>
+      <part_spec_element key="dt" value="2015-04-12T00:00:00"/>
+    </time_partition_spec>
+  </partition>
+</x_partition_list>

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/9b97c19f/lens-examples/src/main/resources/sales-aggr-fact1.xml
----------------------------------------------------------------------
diff --git a/lens-examples/src/main/resources/sales-aggr-fact1.xml b/lens-examples/src/main/resources/sales-aggr-fact1.xml
index 8d50554..073052d 100644
--- a/lens-examples/src/main/resources/sales-aggr-fact1.xml
+++ b/lens-examples/src/main/resources/sales-aggr-fact1.xml
@@ -33,8 +33,6 @@
     <column comment="" name="unit_sales" type="BIGINT"/>
     <column comment="" name="store_sales" type="DOUBLE"/>
     <column comment="" name="store_cost" type="DOUBLE"/>
-    <column comment="" name="average_line_item_price" type="FLOAT"/>
-    <column comment="" name="average_line_item_discount" type="FLOAT"/>
     <column comment="" name="max_line_item_price" type="FLOAT"/>
     <column comment="" name="max_line_item_discount" type="FLOAT"/>
   </columns>

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/9b97c19f/lens-examples/src/main/resources/sales-aggr-fact2-local-parts.xml
----------------------------------------------------------------------
diff --git a/lens-examples/src/main/resources/sales-aggr-fact2-local-parts.xml b/lens-examples/src/main/resources/sales-aggr-fact2-local-parts.xml
new file mode 100644
index 0000000..35a8801
--- /dev/null
+++ b/lens-examples/src/main/resources/sales-aggr-fact2-local-parts.xml
@@ -0,0 +1,42 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+
+  Licensed to the Apache Software Foundation (ASF) under one
+  or more contributor license agreements. See the NOTICE file
+  distributed with this work for additional information
+  regarding copyright ownership. The ASF licenses this file
+  to you under the Apache License, Version 2.0 (the
+  "License"); you may not use this file except in compliance
+  with the License. You may obtain a copy of the License at
+
+  http://www.apache.org/licenses/LICENSE-2.0
+
+  Unless required by applicable law or agreed to in writing,
+  software distributed under the License is distributed on an
+  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+  KIND, either express or implied. See the License for the
+  specific language governing permissions and limitations
+  under the License.
+
+-->
+<x_partition_list xmlns="uri:lens:cube:0.1"
+  xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="uri:lens:cube:0.1 cube-0.1.xsd ">
+  <partition fact_or_dimension_table_name="sales_aggr_fact2" location="examples/data/sales_aggr_fact2_local1"
+    update_period="DAILY">
+    <time_partition_spec>
+      <part_spec_element key="dt" value="2015-04-11T00:00:00"/>
+    </time_partition_spec>
+  </partition>
+  <partition fact_or_dimension_table_name="sales_aggr_fact2" location="examples/data/sales_aggr_fact2_local2"
+    update_period="DAILY">
+    <time_partition_spec>
+      <part_spec_element key="dt" value="2015-04-12T00:00:00"/>
+    </time_partition_spec>
+  </partition>
+  <partition fact_or_dimension_table_name="sales_aggr_fact2" location="examples/data/sales_aggr_fact2_local3"
+    update_period="HOURLY">
+    <time_partition_spec>
+      <part_spec_element key="dt" value="2015-04-13T04:00:00"/>
+    </time_partition_spec>
+  </partition>
+</x_partition_list>

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/9b97c19f/lens-examples/src/main/resources/sales-aggr-fact2-local1.data
----------------------------------------------------------------------
diff --git a/lens-examples/src/main/resources/sales-aggr-fact2-local1.data b/lens-examples/src/main/resources/sales-aggr-fact2-local1.data
new file mode 100644
index 0000000..e0ccd5b
--- /dev/null
+++ b/lens-examples/src/main/resources/sales-aggr-fact2-local1.data
@@ -0,0 +1,2 @@
+2015-04-11 00:00:00,2015-04-11 00:00:00,1,1,1,1,1,1,5,0
+2015-04-11 00:00:00,2015-04-11 00:00:00,2,1,2,2,2,1,8,2
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/9b97c19f/lens-examples/src/main/resources/sales-aggr-fact2-local2.data
----------------------------------------------------------------------
diff --git a/lens-examples/src/main/resources/sales-aggr-fact2-local2.data b/lens-examples/src/main/resources/sales-aggr-fact2-local2.data
new file mode 100644
index 0000000..0fe9442
--- /dev/null
+++ b/lens-examples/src/main/resources/sales-aggr-fact2-local2.data
@@ -0,0 +1,2 @@
+2015-04-12 00:00:00,2015-04-12 00:00:00,1,1,1,1,1,1,5,0
+2015-04-12 00:00:00,2015-04-12 00:00:00,2,1,2,2,2,1,8,2
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/9b97c19f/lens-examples/src/main/resources/sales-aggr-fact2-local3.data
----------------------------------------------------------------------
diff --git a/lens-examples/src/main/resources/sales-aggr-fact2-local3.data b/lens-examples/src/main/resources/sales-aggr-fact2-local3.data
new file mode 100644
index 0000000..a0cb704
--- /dev/null
+++ b/lens-examples/src/main/resources/sales-aggr-fact2-local3.data
@@ -0,0 +1,2 @@
+2015-04-13 00:00:00,2015-04-13 04:00:00,1,1,1,1,1,1,5,0
+2015-04-13 00:00:00,2015-04-13 04:00:00,2,1,2,2,2,1,8,2
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/9b97c19f/lens-examples/src/main/resources/sales-aggr-fact2-mydb-parts.xml
----------------------------------------------------------------------
diff --git a/lens-examples/src/main/resources/sales-aggr-fact2-mydb-parts.xml b/lens-examples/src/main/resources/sales-aggr-fact2-mydb-parts.xml
new file mode 100644
index 0000000..ae8a663
--- /dev/null
+++ b/lens-examples/src/main/resources/sales-aggr-fact2-mydb-parts.xml
@@ -0,0 +1,34 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+
+  Licensed to the Apache Software Foundation (ASF) under one
+  or more contributor license agreements. See the NOTICE file
+  distributed with this work for additional information
+  regarding copyright ownership. The ASF licenses this file
+  to you under the Apache License, Version 2.0 (the
+  "License"); you may not use this file except in compliance
+  with the License. You may obtain a copy of the License at
+
+  http://www.apache.org/licenses/LICENSE-2.0
+
+  Unless required by applicable law or agreed to in writing,
+  software distributed under the License is distributed on an
+  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+  KIND, either express or implied. See the License for the
+  specific language governing permissions and limitations
+  under the License.
+
+-->
+<x_partition_list xmlns="uri:lens:cube:0.1"
+  xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="uri:lens:cube:0.1 cube-0.1.xsd ">
+  <partition fact_or_dimension_table_name="sales_aggr_fact2" location="/tmp/examples/data/sales_aggr_fact2_local2"
+    update_period="DAILY"
+    xmlns="uri:lens:cube:0.1"
+    xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="uri:lens:cube:0.1 cube-0.1.xsd ">
+    <time_partition_spec>
+      <part_spec_element key="pt" value="2015-04-13T00:00:00"/>
+      <part_spec_element key="ot" value="2015-04-12T00:00:00"/>
+      <part_spec_element key="dt" value="2015-04-12T00:00:00"/>
+    </time_partition_spec>
+  </partition>
+</x_partition_list>

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/9b97c19f/lens-examples/src/main/resources/sales-cube.xml
----------------------------------------------------------------------
diff --git a/lens-examples/src/main/resources/sales-cube.xml b/lens-examples/src/main/resources/sales-cube.xml
index d4768cc..951ace9 100644
--- a/lens-examples/src/main/resources/sales-cube.xml
+++ b/lens-examples/src/main/resources/sales-cube.xml
@@ -30,9 +30,6 @@
     <measure name="line_item_product_price" type="FLOAT" default_aggr="SUM" display_string="Line item product price"/>
     <measure name="line_item_discount_amount" type="FLOAT" default_aggr="SUM" display_string="Line item discount"/>
     <measure name="line_item_tax" type="FLOAT" default_aggr="SUM" display_string="Line item tax"/>
-    <measure name="average_line_item_price" type="FLOAT" default_aggr="AVG" display_string="Average Line item price"/>
-    <measure name="average_line_item_discount" type="FLOAT" default_aggr="AVG"
-     display_string="Average Line item discount"/>
     <measure name="max_line_item_price" type="FLOAT" default_aggr="MAX" display_string="Maximum Line item price"/>
     <measure name="max_line_item_discount" type="FLOAT" default_aggr="MAX"
      display_string="Maximum Line item discount"/>
@@ -85,6 +82,18 @@
         </path>
       </paths>
     </join_chain>
+    <join_chain name="product_details">
+      <paths>
+        <path>
+          <edges>
+            <edge>
+              <from table="sales" column="product_id" />
+              <to table="product" column="id" />
+            </edge>
+          </edges>
+        </path>
+      </paths>
+    </join_chain>
     <join_chain name="customer_city">
       <paths>
         <path>

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/9b97c19f/lens-examples/src/main/resources/sales-raw-fact.xml
----------------------------------------------------------------------
diff --git a/lens-examples/src/main/resources/sales-raw-fact.xml b/lens-examples/src/main/resources/sales-raw-fact.xml
index 5b4fa18..4c2d571 100644
--- a/lens-examples/src/main/resources/sales-raw-fact.xml
+++ b/lens-examples/src/main/resources/sales-raw-fact.xml
@@ -47,7 +47,6 @@
     <storage_table>
       <update_periods>
         <update_period>HOURLY</update_period>
-        <update_period>DAILY</update_period>
       </update_periods>
       <storage_name>local</storage_name>
       <table_desc external="true" field_delimiter="," table_location="/tmp/examples/rawfact">

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/9b97c19f/lens-examples/src/main/resources/sales-raw-local-parts.xml
----------------------------------------------------------------------
diff --git a/lens-examples/src/main/resources/sales-raw-local-parts.xml b/lens-examples/src/main/resources/sales-raw-local-parts.xml
new file mode 100644
index 0000000..4cbda31
--- /dev/null
+++ b/lens-examples/src/main/resources/sales-raw-local-parts.xml
@@ -0,0 +1,40 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+
+  Licensed to the Apache Software Foundation (ASF) under one
+  or more contributor license agreements. See the NOTICE file
+  distributed with this work for additional information
+  regarding copyright ownership. The ASF licenses this file
+  to you under the Apache License, Version 2.0 (the
+  "License"); you may not use this file except in compliance
+  with the License. You may obtain a copy of the License at
+
+  http://www.apache.org/licenses/LICENSE-2.0
+
+  Unless required by applicable law or agreed to in writing,
+  software distributed under the License is distributed on an
+  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+  KIND, either express or implied. See the License for the
+  specific language governing permissions and limitations
+  under the License.
+
+-->
+<x_partition_list xmlns="uri:lens:cube:0.1"
+  xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="uri:lens:cube:0.1 cube-0.1.xsd ">
+  <partition fact_or_dimension_table_name="sales_raw_fact" location="examples/data/sales_raw_local1"
+    update_period="HOURLY">
+    <time_partition_spec>
+      <part_spec_element key="pt" value="2015-04-13T05:00:00"/>
+      <part_spec_element key="ot" value="2015-04-13T00:00:00"/>
+      <part_spec_element key="dt" value="2015-04-13T04:00:00"/>
+    </time_partition_spec>
+  </partition>
+  <partition fact_or_dimension_table_name="sales_raw_fact" location="examples/data/sales_raw_local2"
+    update_period="HOURLY">
+    <time_partition_spec>
+      <part_spec_element key="pt" value="2015-04-13T06:00:00"/>
+      <part_spec_element key="ot" value="2015-04-13T01:00:00"/>
+      <part_spec_element key="dt" value="2015-04-13T05:00:00"/>
+    </time_partition_spec>
+  </partition>
+</x_partition_list>

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/9b97c19f/lens-examples/src/main/resources/sales-raw-local1.data
----------------------------------------------------------------------
diff --git a/lens-examples/src/main/resources/sales-raw-local1.data b/lens-examples/src/main/resources/sales-raw-local1.data
new file mode 100644
index 0000000..5c8a909
--- /dev/null
+++ b/lens-examples/src/main/resources/sales-raw-local1.data
@@ -0,0 +1,2 @@
+2015-04-13 00:00:00,2015-04-13 04:00:00,1,1,1,1,1,1,1,1,1,5,0,1,5,0,0
+2015-04-13 00:00:00,2015-04-13 04:00:00,2,2,1,1,1,2,2,2,1,8,2,1,10,2,0
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/9b97c19f/lens-examples/src/main/resources/sales-raw-local2.data
----------------------------------------------------------------------
diff --git a/lens-examples/src/main/resources/sales-raw-local2.data b/lens-examples/src/main/resources/sales-raw-local2.data
new file mode 100644
index 0000000..a5d5cc5
--- /dev/null
+++ b/lens-examples/src/main/resources/sales-raw-local2.data
@@ -0,0 +1,3 @@
+2015-04-13 01:00:00,2015-04-13 05:00:00,1,1,1,1,1,1,1,1,1,5,0,1,5,0,0
+2015-04-13 01:00:00,2015-04-13 05:00:00,2,2,2,1,1,2,2,2,1,8,2,1,10,2,0
+2015-04-13 01:00:00,2015-04-13 05:00:00,2,2,3,0,1,2,2,2,1,8,2,1,10,2,0
\ No newline at end of file


[43/50] [abbrv] incubator-lens git commit: LENS-500: Fix scm url in pom

Posted by am...@apache.org.
LENS-500: Fix scm url in pom


Project: http://git-wip-us.apache.org/repos/asf/incubator-lens/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-lens/commit/2391a808
Tree: http://git-wip-us.apache.org/repos/asf/incubator-lens/tree/2391a808
Diff: http://git-wip-us.apache.org/repos/asf/incubator-lens/diff/2391a808

Branch: refs/heads/current-release-line
Commit: 2391a8080fcb8b2cce25cdbc3fa510867a36e5da
Parents: 3aa5fa3
Author: Amareshwari Sriramadasu <am...@apache.org>
Authored: Mon Apr 13 16:18:27 2015 +0530
Committer: Rajat Khandelwal <ra...@gmail.com>
Committed: Mon Apr 13 16:18:27 2015 +0530

----------------------------------------------------------------------
 pom.xml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/2391a808/pom.xml
----------------------------------------------------------------------
diff --git a/pom.xml b/pom.xml
index 1ffb135..d05e55a 100644
--- a/pom.xml
+++ b/pom.xml
@@ -284,7 +284,7 @@
   <scm>
     <connection>scm:git:https://git-wip-us.apache.org/repos/asf/incubator-lens.git</connection>
     <developerConnection>scm:git:https://git-wip-us.apache.org/repos/asf/incubator-lens.git</developerConnection>
-    <url>https://git-wip-us.apache.org/repos/asf/incubator-falcon.git</url>
+    <url>https://git-wip-us.apache.org/repos/asf/incubator-lens.git</url>
     <tag>HEAD</tag>
   </scm>
 


[19/50] [abbrv] incubator-lens git commit: LENS-469 : Make threadConnections a concurrent hashmap (Jaideep Dhok via amareshwari)

Posted by am...@apache.org.
LENS-469 : Make threadConnections a concurrent hashmap (Jaideep Dhok via amareshwari)


Project: http://git-wip-us.apache.org/repos/asf/incubator-lens/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-lens/commit/d180c2c4
Tree: http://git-wip-us.apache.org/repos/asf/incubator-lens/tree/d180c2c4
Diff: http://git-wip-us.apache.org/repos/asf/incubator-lens/diff/d180c2c4

Branch: refs/heads/current-release-line
Commit: d180c2c4f4a2444d5a049075a5630272f5263572
Parents: 72691f1
Author: Jaideep Dhok <jd...@apache.org>
Authored: Tue Mar 31 11:20:30 2015 +0530
Committer: Amareshwari Sriramadasu <am...@apache.org>
Committed: Tue Mar 31 11:20:30 2015 +0530

----------------------------------------------------------------------
 .../src/main/java/org/apache/lens/driver/hive/HiveDriver.java      | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/d180c2c4/lens-driver-hive/src/main/java/org/apache/lens/driver/hive/HiveDriver.java
----------------------------------------------------------------------
diff --git a/lens-driver-hive/src/main/java/org/apache/lens/driver/hive/HiveDriver.java b/lens-driver-hive/src/main/java/org/apache/lens/driver/hive/HiveDriver.java
index 3edce4d..a230515 100644
--- a/lens-driver-hive/src/main/java/org/apache/lens/driver/hive/HiveDriver.java
+++ b/lens-driver-hive/src/main/java/org/apache/lens/driver/hive/HiveDriver.java
@@ -113,7 +113,7 @@ public class HiveDriver implements LensDriver {
   // connections need to be separate for each user and each thread
   /** The thread connections. */
   private final Map<String, ExpirableConnection> threadConnections =
-    new HashMap<String, ExpirableConnection>();
+    new ConcurrentHashMap<String, ExpirableConnection>();
 
   /** The thrift conn expiry queue. */
   private final DelayQueue<ExpirableConnection> thriftConnExpiryQueue = new DelayQueue<ExpirableConnection>();


[33/50] [abbrv] incubator-lens git commit: LENS-462 : Handle the case when a table property becomes large for a timeline (Rajat Khandelwal via amareshwari)

Posted by am...@apache.org.
LENS-462 : Handle the case when a table property becomes large for a timeline (Rajat Khandelwal via amareshwari)


Project: http://git-wip-us.apache.org/repos/asf/incubator-lens/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-lens/commit/21102e6b
Tree: http://git-wip-us.apache.org/repos/asf/incubator-lens/tree/21102e6b
Diff: http://git-wip-us.apache.org/repos/asf/incubator-lens/diff/21102e6b

Branch: refs/heads/current-release-line
Commit: 21102e6b96c3139563ef02449a6857bf48d983c4
Parents: 07492f1
Author: Rajat Khandelwal <pr...@apache.org>
Authored: Tue Apr 7 17:50:48 2015 +0530
Committer: Amareshwari Sriramadasu <am...@apache.org>
Committed: Tue Apr 7 17:50:48 2015 +0530

----------------------------------------------------------------------
 .../lens/cube/metadata/CubeMetastoreClient.java |  95 ++++++++------
 .../lens/cube/metadata/MetastoreUtil.java       |  16 ++-
 .../lens/cube/metadata/TimePartition.java       |  77 ++---------
 .../lens/cube/metadata/TimePartitionRange.java  | 130 +++++++++++++++++++
 .../timeline/EndsAndHolesPartitionTimeline.java |   9 +-
 .../timeline/RangesPartitionTimeline.java       |  27 ++--
 .../timeline/StoreAllPartitionTimeline.java     |   5 +-
 .../lens/cube/parse/StorageTableResolver.java   |   2 +-
 .../lens/cube/metadata/TestTimePartition.java   |  94 ++++++++++++--
 .../timeline/TestPartitionTimelines.java        |  38 ++++--
 .../apache/lens/cube/parse/CubeTestSetup.java   |   6 +-
 11 files changed, 335 insertions(+), 164 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/21102e6b/lens-cube/src/main/java/org/apache/lens/cube/metadata/CubeMetastoreClient.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/metadata/CubeMetastoreClient.java b/lens-cube/src/main/java/org/apache/lens/cube/metadata/CubeMetastoreClient.java
index 1835d2f..11ef7ec 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/metadata/CubeMetastoreClient.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/metadata/CubeMetastoreClient.java
@@ -186,56 +186,72 @@ public class CubeMetastoreClient {
       if (get(storageTableName) == null) {
         synchronized (this) {
           if (get(storageTableName) == null) {
-            log.info("loading timeline from all partitions for storage table: " + storageTableName);
-            // not found in memory, try loading from table properties.
             Table storageTable = getTable(storageTableName);
-            if (!"true".equalsIgnoreCase(
-              storageTable.getParameters().get(MetastoreUtil.getPartitoinTimelineCachePresenceKey()))) {
-              // Not found in table properties either, compute from all partitions of the fact-storage table.
-              // First make sure all combinations of update period and partition column have an entry even
-              // if no partitions exist
-              log.info("loading from all partitions");
-              if (getCubeFact(fact).getUpdatePeriods() != null && getCubeFact(fact).getUpdatePeriods().get(
-                storage) != null) {
-                for (UpdatePeriod updatePeriod : getCubeFact(fact).getUpdatePeriods().get(storage)) {
-                  for (String partCol : getTimePartsOfTable(storageTable)) {
-                    partitionTimelineCache.ensureEntry(storageTableName, updatePeriod, partCol);
-                  }
-                }
-              }
-              // Then add all existing partitions for batch addition in respective timelines.
-              List<String> timeParts = getTimePartsOfTable(storageTable);
-              List<FieldSchema> partCols = storageTable.getPartCols();
-              for (Partition partition : getPartitionsByFilter(storageTableName, null)) {
-                UpdatePeriod period = deduceUpdatePeriod(partition);
-                List<String> values = partition.getValues();
-                for (int i = 0; i < partCols.size(); i++) {
-                  if (timeParts.contains(partCols.get(i).getName())) {
-                    partitionTimelineCache.addForBatchAddition(storageTableName, period, partCols.get(i).getName(),
-                      values.get(i));
-                  }
-                }
+            if ("true".equalsIgnoreCase(storageTable.getParameters().get(
+              MetastoreUtil.getPartitoinTimelineCachePresenceKey()))) {
+              try {
+                loadTimelinesFromTableProperties(fact, storage);
+              } catch (Exception e) {
+                // Ideally this should never come. But since we have another source,
+                // let's piggyback on that for loading timeline
+                log.error("Error while loading timelines from table properties.", e);
+                loadTimelinesFromAllPartitions(fact, storage);
               }
-              // commit all batch addition for the storage table,
-              // which will in-turn commit all batch additions in all it's timelines.
-              commitAllBatchAdditions(storageTableName);
             } else {
-              // found in table properties, load from there.
-              log.info("loading from table properties");
-              for (UpdatePeriod updatePeriod : getCubeFact(fact).getUpdatePeriods().get(storage)) {
-                for (String partCol : getTimePartsOfTable(storageTableName)) {
-                  ensureEntry(storageTableName, updatePeriod, partCol).init(storageTable);
-                }
-              }
+              loadTimelinesFromAllPartitions(fact, storage);
             }
           }
         }
+        log.info("timeline for " + storageTableName + " is: " + get(storageTableName));
       }
       // return the final value from memory
       return get(storageTableName);
       // RESUME CHECKSTYLE CHECK DoubleCheckedLockingCheck
     }
 
+    private void loadTimelinesFromAllPartitions(String fact, String storage) throws HiveException, LensException {
+      // Not found in table properties either, compute from all partitions of the fact-storage table.
+      // First make sure all combinations of update period and partition column have an entry even
+      // if no partitions exist
+      String storageTableName = MetastoreUtil.getStorageTableName(fact, Storage.getPrefix(storage));
+      log.info("loading from all partitions: " + storageTableName);
+      Table storageTable = getTable(storageTableName);
+      if (getCubeFact(fact).getUpdatePeriods() != null && getCubeFact(fact).getUpdatePeriods().get(
+        storage) != null) {
+        for (UpdatePeriod updatePeriod : getCubeFact(fact).getUpdatePeriods().get(storage)) {
+          for (String partCol : getTimePartsOfTable(storageTable)) {
+            ensureEntry(storageTableName, updatePeriod, partCol);
+          }
+        }
+      }
+      // Then add all existing partitions for batch addition in respective timelines.
+      List<String> timeParts = getTimePartsOfTable(storageTable);
+      List<FieldSchema> partCols = storageTable.getPartCols();
+      for (Partition partition : getPartitionsByFilter(storageTableName, null)) {
+        UpdatePeriod period = deduceUpdatePeriod(partition);
+        List<String> values = partition.getValues();
+        for (int i = 0; i < partCols.size(); i++) {
+          if (timeParts.contains(partCols.get(i).getName())) {
+            addForBatchAddition(storageTableName, period, partCols.get(i).getName(), values.get(i));
+          }
+        }
+      }
+      // commit all batch addition for the storage table,
+      // which will in turn commit all batch additions in all its timelines.
+      commitAllBatchAdditions(storageTableName);
+    }
+
+    private void loadTimelinesFromTableProperties(String fact, String storage) throws HiveException, LensException {
+      // found in table properties, load from there.
+      String storageTableName = MetastoreUtil.getStorageTableName(fact, Storage.getPrefix(storage));
+      log.info("loading from table properties: " + storageTableName);
+      for (UpdatePeriod updatePeriod : getCubeFact(fact).getUpdatePeriods().get(storage)) {
+        for (String partCol : getTimePartsOfTable(storageTableName)) {
+          ensureEntry(storageTableName, updatePeriod, partCol).init(getTable(storageTableName));
+        }
+      }
+    }
+
     /**
      * Adds given partition(for storageTable, updatePeriod, partitionColum=partition) for batch addition in an
      * appropriate timeline object. Ignore if partition is not valid.
@@ -277,8 +293,7 @@ public class CubeMetastoreClient {
         get(storageTable).get(updatePeriod).put(partitionColumn, PartitionTimelineFactory.get(
           CubeMetastoreClient.this, storageTable, updatePeriod, partitionColumn));
       }
-      PartitionTimeline ret = get(storageTable).get(updatePeriod).get(partitionColumn);
-      return ret;
+      return get(storageTable).get(updatePeriod).get(partitionColumn);
     }
 
     /**

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/21102e6b/lens-cube/src/main/java/org/apache/lens/cube/metadata/MetastoreUtil.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/metadata/MetastoreUtil.java b/lens-cube/src/main/java/org/apache/lens/cube/metadata/MetastoreUtil.java
index 0be8e5f..203ff58 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/metadata/MetastoreUtil.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/metadata/MetastoreUtil.java
@@ -402,12 +402,18 @@ public class MetastoreUtil {
   }
 
   public static String getNamedStringValue(Map<String, String> props, String key) {
-    int size = Integer.parseInt(props.get(key + ".size"));
-    StringBuilder valueStr = new StringBuilder();
-    for (int i = 0; i < size; i++) {
-      valueStr.append(props.get(key + i));
+    if (props.containsKey(key + ".size")) {
+      int size = Integer.parseInt(props.get(key + ".size"));
+      StringBuilder valueStr = new StringBuilder();
+      for (int i = 0; i < size; i++) {
+        valueStr.append(props.get(key + i));
+      }
+      return valueStr.toString();
+    } else if (props.containsKey(key)) {
+      return props.get(key);
+    } else {
+      return null;
     }
-    return valueStr.toString();
   }
 
   public static String getObjectStr(Collection<?> set) {

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/21102e6b/lens-cube/src/main/java/org/apache/lens/cube/metadata/TimePartition.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/metadata/TimePartition.java b/lens-cube/src/main/java/org/apache/lens/cube/metadata/TimePartition.java
index b948467..8f088c0 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/metadata/TimePartition.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/metadata/TimePartition.java
@@ -21,7 +21,6 @@ package org.apache.lens.cube.metadata;
 import java.text.ParseException;
 import java.util.Calendar;
 import java.util.Date;
-import java.util.Iterator;
 
 import org.apache.lens.api.LensException;
 
@@ -32,7 +31,7 @@ import lombok.NonNull;
 
 /** stores a partition's update period, date and string representation. Provides some utility methods around it */
 @Data
-public class TimePartition implements Comparable<TimePartition> {
+public class TimePartition implements Comparable<TimePartition>, Named {
   private static final String UPDATE_PERIOD_WRONG_ERROR_MESSAGE = "Update period %s not correct for parsing %s";
   private final UpdatePeriod updatePeriod;
   private final Date date;
@@ -89,6 +88,9 @@ public class TimePartition implements Comparable<TimePartition> {
   public TimePartition partitionAtDiff(int increment) {
     Calendar cal = Calendar.getInstance();
     cal.setTime(date);
+    if (getUpdatePeriod().equals(UpdatePeriod.QUARTERLY)) {
+      increment *= 3;
+    }
     cal.add(updatePeriod.calendarField(), increment);
     return new TimePartition(updatePeriod, cal.getTime());
   }
@@ -125,78 +127,21 @@ public class TimePartition implements Comparable<TimePartition> {
     return String.format(UPDATE_PERIOD_WRONG_ERROR_MESSAGE, up, dateString);
   }
 
-  public TimePartitionRange rangeUpto(TimePartition to) {
+  public TimePartitionRange rangeUpto(TimePartition to) throws LensException {
     return new TimePartitionRange(this, to);
   }
 
-  public TimePartitionRange rangeFrom(TimePartition from) {
+  public TimePartitionRange rangeFrom(TimePartition from) throws LensException {
     return new TimePartitionRange(from, this);
   }
 
-  public TimePartitionRange singletonRange() {
+  public TimePartitionRange singletonRange() throws LensException {
     return rangeUpto(next());
   }
 
-  /**
-   * Range of time partition. [begin,end). i.e. inclusive begin and exclusive end.
-   */
-  @Data
-  public static class TimePartitionRange implements Iterable<TimePartition> {
-    private TimePartition begin;
-    private TimePartition end;
-
-    public TimePartitionRange(TimePartition from, TimePartition to) {
-      this.begin = from;
-      this.end = to;
-    }
-
-    @Override
-    public String toString() {
-      return "[" + begin.getDateString() + ", " + end.getDateString() + ")";
-    }
-
-    /**
-     * returns TimePartition objects starting from begin and upto(excluding) end. interval of iteration is the update
-     * period of the partitions. Assumes both partitions have same update period.
-     */
-    @Override
-    public Iterator<TimePartition> iterator() {
-
-      return new Iterator<TimePartition>() {
-        TimePartition current = begin;
-
-        @Override
-        public boolean hasNext() {
-          return current.before(end);
-        }
-
-        @Override
-        public TimePartition next() {
-          TimePartition ret = current;
-          current = current.next();
-          return ret;
-        }
-
-        @Override
-        public void remove() {
-          throw new UnsupportedOperationException("remove not supported");
-        }
-      };
-    }
-
-    /**
-     * @param partition
-     * @return begin <= partition < end
-     */
-    public boolean contains(TimePartition partition) {
-      return !partition.before(begin) && partition.before(end);
-    }
-
-    /**
-     * @return if range is empty range.
-     */
-    public boolean isEmpty() {
-      return begin.equals(end);
-    }
+  @Override
+  public String getName() {
+    return getDateString();
   }
+
 }

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/21102e6b/lens-cube/src/main/java/org/apache/lens/cube/metadata/TimePartitionRange.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/metadata/TimePartitionRange.java b/lens-cube/src/main/java/org/apache/lens/cube/metadata/TimePartitionRange.java
new file mode 100644
index 0000000..86c3453
--- /dev/null
+++ b/lens-cube/src/main/java/org/apache/lens/cube/metadata/TimePartitionRange.java
@@ -0,0 +1,130 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.lens.cube.metadata;
+
+import java.util.Iterator;
+
+import org.apache.lens.api.LensException;
+import org.apache.lens.cube.parse.DateUtil;
+
+import lombok.Data;
+
+/**
+ * Range of time partition. [begin,end). i.e. inclusive begin and exclusive end.
+ */
+@Data
+public class TimePartitionRange implements Iterable<TimePartition>, Named {
+  private TimePartition begin;
+  private TimePartition end;
+
+  public TimePartitionRange(TimePartition begin, TimePartition end) throws LensException {
+    if (end.before(begin)) {
+      throw new LensException("condition of creation of timepartition failed: end>=begin");
+    }
+    if (end.getUpdatePeriod() != begin.getUpdatePeriod()) {
+      throw new LensException("update periods are not same");
+    }
+    this.begin = begin;
+    this.end = end;
+  }
+
+  @Override
+  public String toString() {
+    return "[" + begin.getDateString() + ", " + end.getDateString() + ")";
+  }
+
+  /**
+   * returns TimePartition objects starting from begin and upto(excluding) end. interval of iteration is the update
+   * period of the partitions. Assumes both partitions have same update period.
+   */
+  @Override
+  public Iterator<TimePartition> iterator() {
+
+    return new Iterator<TimePartition>() {
+      TimePartition current = begin;
+
+      @Override
+      public boolean hasNext() {
+        return current.before(end);
+      }
+
+      @Override
+      public TimePartition next() {
+        TimePartition ret = current;
+        current = current.next();
+        return ret;
+      }
+
+      @Override
+      public void remove() {
+        throw new UnsupportedOperationException("remove not supported");
+      }
+    };
+  }
+
+  /**
+   * @param partition
+   * @return begin <= partition < end
+   */
+  public boolean contains(TimePartition partition) {
+    return !partition.before(begin) && partition.before(end);
+  }
+
+  /**
+   * @return if range is empty range.
+   */
+  public boolean isEmpty() {
+    return begin.equals(end);
+  }
+
+  @Override
+  public String getName() {
+    return toString();
+  }
+
+  public static TimePartitionRange parseFrom(UpdatePeriod updatePeriod, String from, String to) throws LensException {
+    boolean incrementFrom = false;
+    boolean incrementTo = false;
+    if (from.charAt(0) == '[') {
+      from = from.substring(1);
+    } else if (from.charAt(0) == '(') {
+      from = from.substring(1);
+      incrementFrom = true;
+    }
+    if (to.charAt(to.length() - 1) == ']') {
+      to = to.substring(0, to.length() - 1);
+      incrementTo = true;
+    } else if (to.charAt(to.length() - 1) == ')') {
+      to = to.substring(0, to.length() - 1);
+    }
+    TimePartition fromPartition = TimePartition.of(updatePeriod, from);
+    TimePartition toPartition = TimePartition.of(updatePeriod, to);
+    if (incrementFrom) {
+      fromPartition = fromPartition.next();
+    }
+    if (incrementTo) {
+      toPartition = toPartition.next();
+    }
+    return new TimePartitionRange(fromPartition, toPartition);
+  }
+
+  public long size() {
+    return DateUtil.getTimeDiff(begin.getDate(), end.getDate(), begin.getUpdatePeriod());
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/21102e6b/lens-cube/src/main/java/org/apache/lens/cube/metadata/timeline/EndsAndHolesPartitionTimeline.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/metadata/timeline/EndsAndHolesPartitionTimeline.java b/lens-cube/src/main/java/org/apache/lens/cube/metadata/timeline/EndsAndHolesPartitionTimeline.java
index 79e8a62..5867587 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/metadata/timeline/EndsAndHolesPartitionTimeline.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/metadata/timeline/EndsAndHolesPartitionTimeline.java
@@ -22,12 +22,11 @@ package org.apache.lens.cube.metadata.timeline;
 import java.util.*;
 
 import org.apache.lens.api.LensException;
+import org.apache.lens.cube.metadata.MetastoreUtil;
 import org.apache.lens.cube.metadata.TimePartition;
 import org.apache.lens.cube.metadata.UpdatePeriod;
 import org.apache.lens.cube.parse.TimeRange;
 
-import org.apache.commons.lang.StringUtils;
-
 import com.google.common.base.Strings;
 import com.google.common.collect.Maps;
 import com.google.common.collect.Sets;
@@ -99,12 +98,12 @@ public class EndsAndHolesPartitionTimeline extends PartitionTimeline {
   @Override
   public Map<String, String> toProperties() {
     HashMap<String, String> ret = Maps.newHashMap();
+    MetastoreUtil.addNameStrings(ret, "holes", holes);
     if (isEmpty()) {
       return ret;
     }
     ret.put("first", first.getDateString());
     ret.put("latest", latest.getDateString());
-    ret.put("holes", StringUtils.join(holes, ","));
     return ret;
   }
 
@@ -115,7 +114,7 @@ public class EndsAndHolesPartitionTimeline extends PartitionTimeline {
     holes.clear();
     String firstStr = properties.get("first");
     String latestStr = properties.get("latest");
-    String holesStr = properties.get("holes");
+    String holesStr = MetastoreUtil.getNamedStringValue(properties, "holes");
     if (!Strings.isNullOrEmpty(firstStr)) {
       first = TimePartition.of(getUpdatePeriod(), firstStr);
     }
@@ -124,7 +123,7 @@ public class EndsAndHolesPartitionTimeline extends PartitionTimeline {
     }
     holes = Sets.newTreeSet();
     if (!Strings.isNullOrEmpty(holesStr)) {
-      for (String hole : properties.get("holes").split("\\s*,\\s*")) {
+      for (String hole : holesStr.split("\\s*,\\s*")) {
         holes.add(TimePartition.of(getUpdatePeriod(), hole));
       }
     }

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/21102e6b/lens-cube/src/main/java/org/apache/lens/cube/metadata/timeline/RangesPartitionTimeline.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/metadata/timeline/RangesPartitionTimeline.java b/lens-cube/src/main/java/org/apache/lens/cube/metadata/timeline/RangesPartitionTimeline.java
index fb2d0a8..8d80f0b 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/metadata/timeline/RangesPartitionTimeline.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/metadata/timeline/RangesPartitionTimeline.java
@@ -25,7 +25,9 @@ import java.util.List;
 import java.util.Map;
 
 import org.apache.lens.api.LensException;
+import org.apache.lens.cube.metadata.MetastoreUtil;
 import org.apache.lens.cube.metadata.TimePartition;
+import org.apache.lens.cube.metadata.TimePartitionRange;
 import org.apache.lens.cube.metadata.UpdatePeriod;
 
 import com.google.common.base.Strings;
@@ -42,7 +44,7 @@ import lombok.ToString;
 @Data
 @ToString(callSuper = true)
 public class RangesPartitionTimeline extends PartitionTimeline {
-  private List<TimePartition.TimePartitionRange> ranges = Lists.newArrayList();
+  private List<TimePartitionRange> ranges = Lists.newArrayList();
 
   public RangesPartitionTimeline(String storageTableName, UpdatePeriod updatePeriod,
     String partCol) {
@@ -103,7 +105,7 @@ public class RangesPartitionTimeline extends PartitionTimeline {
   private void mergeRanges() {
     for (int i = 0; i < ranges.size() - 1; i++) {
       if (ranges.get(i).getEnd().equals(ranges.get(i + 1).getBegin())) {
-        TimePartition.TimePartitionRange removed = ranges.remove(i + 1);
+        TimePartitionRange removed = ranges.remove(i + 1);
         ranges.get(i).setEnd(removed.getEnd());
         i--; // check again at same index
       }
@@ -143,32 +145,21 @@ public class RangesPartitionTimeline extends PartitionTimeline {
   @Override
   public Map<String, String> toProperties() {
     HashMap<String, String> ret = Maps.newHashMap();
-    if (isEmpty()) {
-      return ret;
-    }
-    StringBuilder sb = new StringBuilder();
-    String sep = "";
-    for (TimePartition.TimePartitionRange range : ranges) {
-      sb.append(sep);
-      sep = ",";
-      sb.append(range.getBegin()).append(sep).append(range.getEnd());
-    }
-    ret.put("ranges", sb.toString());
+    MetastoreUtil.addNameStrings(ret, "ranges", ranges);
     return ret;
   }
 
   @Override
   public boolean initFromProperties(Map<String, String> properties) throws LensException {
     ranges.clear();
-    String rangesStr = properties.get("ranges");
+    String rangesStr = MetastoreUtil.getNamedStringValue(properties, "ranges");
     if (!Strings.isNullOrEmpty(rangesStr)) {
       String[] split = rangesStr.split("\\s*,\\s*");
       if (split.length % 2 == 1) {
         throw new LensException("Ranges incomplete");
       }
       for (int i = 0; i < split.length; i += 2) {
-        ranges.add(TimePartition.of(getUpdatePeriod(), split[i]).rangeUpto(TimePartition.of(getUpdatePeriod(),
-          split[i + 1])));
+        ranges.add(TimePartitionRange.parseFrom(getUpdatePeriod(), split[i], split[i + 1]));
       }
     }
     return isConsistent();
@@ -203,7 +194,7 @@ public class RangesPartitionTimeline extends PartitionTimeline {
     if (isEmpty()) {
       return false;
     }
-    for (TimePartition.TimePartitionRange range : ranges) {
+    for (TimePartitionRange range : ranges) {
       if (range.contains(toCheck)) {
         return true;
       }
@@ -215,7 +206,7 @@ public class RangesPartitionTimeline extends PartitionTimeline {
   public Iterator<TimePartition> iterator() {
 
     return new Iterator<TimePartition>() {
-      Iterator<TimePartition.TimePartitionRange> uber = ranges.iterator();
+      Iterator<TimePartitionRange> uber = ranges.iterator();
       Iterator<TimePartition> cur = null;
 
       @Override

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/21102e6b/lens-cube/src/main/java/org/apache/lens/cube/metadata/timeline/StoreAllPartitionTimeline.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/metadata/timeline/StoreAllPartitionTimeline.java b/lens-cube/src/main/java/org/apache/lens/cube/metadata/timeline/StoreAllPartitionTimeline.java
index d6ee0a1..2364400 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/metadata/timeline/StoreAllPartitionTimeline.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/metadata/timeline/StoreAllPartitionTimeline.java
@@ -21,6 +21,7 @@ package org.apache.lens.cube.metadata.timeline;
 import java.util.*;
 
 import org.apache.lens.api.LensException;
+import org.apache.lens.cube.metadata.MetastoreUtil;
 import org.apache.lens.cube.metadata.TimePartition;
 import org.apache.lens.cube.metadata.UpdatePeriod;
 
@@ -70,14 +71,14 @@ public class StoreAllPartitionTimeline extends PartitionTimeline {
   @Override
   public Map<String, String> toProperties() {
     HashMap<String, String> map = Maps.newHashMap();
-    map.put("partitions", StringUtils.join(allPartitions, ","));
+    MetastoreUtil.addNameStrings(map, "partitions", allPartitions);
     return map;
   }
 
   @Override
   public boolean initFromProperties(Map<String, String> properties) throws LensException {
     allPartitions.clear();
-    String partitionsStr = properties.get("partitions");
+    String partitionsStr = MetastoreUtil.getNamedStringValue(properties, "partitions");
     if (partitionsStr == null) {
       return true;
     }

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/21102e6b/lens-cube/src/main/java/org/apache/lens/cube/parse/StorageTableResolver.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/StorageTableResolver.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/StorageTableResolver.java
index 96ca82c..ab2c3f9 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/StorageTableResolver.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/StorageTableResolver.java
@@ -334,7 +334,7 @@ class StorageTableResolver implements ContextRewriter {
       Set<String> nonExistingParts = Sets.newHashSet();
       if (!missingPartitionRanges.isEmpty()) {
         for (UpdatePeriod period : missingPartitionRanges.keySet()) {
-          for (TimePartition.TimePartitionRange range : missingPartitionRanges.get(period).getRanges()) {
+          for (TimePartitionRange range : missingPartitionRanges.get(period).getRanges()) {
             nonExistingParts.add(range.toString());
           }
         }

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/21102e6b/lens-cube/src/test/java/org/apache/lens/cube/metadata/TestTimePartition.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/cube/metadata/TestTimePartition.java b/lens-cube/src/test/java/org/apache/lens/cube/metadata/TestTimePartition.java
index 4c98d84..9d5d08b 100644
--- a/lens-cube/src/test/java/org/apache/lens/cube/metadata/TestTimePartition.java
+++ b/lens-cube/src/test/java/org/apache/lens/cube/metadata/TestTimePartition.java
@@ -18,25 +18,36 @@
  */
 package org.apache.lens.cube.metadata;
 
+import static org.testng.Assert.*;
+
+import java.util.Calendar;
 import java.util.Date;
 
 import org.apache.lens.api.LensException;
 
-import org.testng.Assert;
 import org.testng.annotations.Test;
 
 public class TestTimePartition {
+  public static final Date NOW = new Date();
+
   @Test
   public void test() throws LensException {
-    Date now = new Date();
+    // Test for all update periods
     for (UpdatePeriod up : UpdatePeriod.values()) {
-      String nowStr = up.format().format(now);
-      TimePartition nowPartition = TimePartition.of(up, now);
+      // Normal date object parsable
+      String nowStr = up.format().format(NOW);
+
+      // Create partition by date object or it's string representation -- both should be same.
+      TimePartition nowPartition = TimePartition.of(up, NOW);
       TimePartition nowStrPartition = TimePartition.of(up, nowStr);
-      Assert.assertEquals(nowPartition, nowStrPartition);
-      Assert.assertTrue(nowPartition.next().after(nowPartition));
-      Assert.assertTrue(nowPartition.previous().before(nowPartition));
-      Assert.assertEquals(getLensExceptionFromPartitionParsing(up, "garbage").getMessage(),
+      assertEquals(nowPartition, nowStrPartition);
+
+      // Test next and previous
+      assertTrue(nowPartition.next().after(nowPartition));
+      assertTrue(nowPartition.previous().before(nowPartition));
+
+      // date parse failures should give lens exception
+      assertEquals(getLensExceptionFromPartitionParsing(up, "garbage").getMessage(),
         TimePartition.getWrongUpdatePeriodMessage(up, "garbage"));
       getLensExceptionFromPartitionParsing(up, (Date) null);
       getLensExceptionFromPartitionParsing(up, (String) null);
@@ -48,7 +59,8 @@ public class TestTimePartition {
         if (up.formatStr().equals(up2.formatStr())) {
           continue;
         }
-        Assert.assertEquals(getLensExceptionFromPartitionParsing(up2, nowStr).getMessage(),
+        // Parsing a string representation with different update period should give lens exception.
+        assertEquals(getLensExceptionFromPartitionParsing(up2, nowStr).getMessage(),
           TimePartition.getWrongUpdatePeriodMessage(up2, nowStr));
       }
     }
@@ -57,7 +69,7 @@ public class TestTimePartition {
   private LensException getLensExceptionFromPartitionParsing(UpdatePeriod up, String dateStr) {
     try {
       TimePartition.of(up, dateStr);
-      Assert.fail("Should have thrown LensException");
+      fail("Should have thrown LensException");
     } catch (LensException e) {
       return e;
     }
@@ -67,10 +79,70 @@ public class TestTimePartition {
   private LensException getLensExceptionFromPartitionParsing(UpdatePeriod up, Date date) {
     try {
       TimePartition.of(up, date);
-      Assert.fail("Should have thrown LensException");
+      fail("Should have thrown LensException");
     } catch (LensException e) {
       return e;
     }
     return null; // redundant
   }
+
+
+  public static Date timeAtDiff(Date date, UpdatePeriod period, int d) {
+    Calendar cal = Calendar.getInstance();
+    cal.setTime(date);
+    if (period.equals(UpdatePeriod.QUARTERLY)) {
+      d *= 3;
+    }
+    cal.add(period.calendarField(), d);
+    return cal.getTime();
+  }
+
+  @Test
+  public void testTimeRange() throws LensException {
+    // test for all update periods
+    for (UpdatePeriod up : UpdatePeriod.values()) {
+      // create two partition of different time
+      TimePartition nowPartition = TimePartition.of(up, NOW);
+      TimePartition tenLater = TimePartition.of(up, timeAtDiff(NOW, up, 10));
+
+      // a.upto(b) == b.from(a)
+      TimePartitionRange range = nowPartition.rangeUpto(tenLater);
+      assertEquals(range, tenLater.rangeFrom(nowPartition));
+      // size check
+      assertEquals(range.size(), 10);
+      // test singleton range
+      assertEquals(nowPartition.singletonRange().size(), 1);
+      // test begin belongs to [begin, end) and end doesn't belong
+      assertTrue(range.contains(nowPartition));
+      assertFalse(range.contains(tenLater));
+      // test partition parsing for string arguments.
+      // a,b == [a,b)
+      // Other possible arguments: [a,b), [a,b], (a,b), (a,b]
+      String nowStr = nowPartition.getDateString();
+      String tenLaterStr = tenLater.getDateString();
+      assertEquals(TimePartitionRange.parseFrom(up, nowStr, tenLaterStr), range);
+      assertEquals(TimePartitionRange.parseFrom(up, "[" + nowStr, tenLaterStr + ")"), range);
+      assertEquals(TimePartitionRange.parseFrom(up, "[" + nowStr, tenLaterStr + "]"),
+        nowPartition.rangeUpto(tenLater.next()));
+      assertEquals(TimePartitionRange.parseFrom(up, "(" + nowStr, tenLaterStr + "]"),
+        nowPartition.next().rangeUpto(
+          tenLater.next()));
+      assertEquals(TimePartitionRange.parseFrom(up, "(" + nowStr, tenLaterStr + ")"),
+        nowPartition.next().rangeUpto(tenLater));
+    }
+  }
+
+  @Test(expectedExceptions = LensException.class)
+  public void testPartitionRangeValidity() throws LensException {
+    // begin and end partitions should follow begin <= end
+    TimePartition.of(UpdatePeriod.HOURLY, NOW)
+      .rangeFrom(TimePartition.of(UpdatePeriod.HOURLY, timeAtDiff(NOW, UpdatePeriod.HOURLY, 10)));
+  }
+
+  @Test(expectedExceptions = LensException.class)
+  public void testTimeRangeCreationWithDifferentUpdatePeriod() throws LensException {
+    // begin and end partitions should have same update period for range creation to succeed.
+    TimePartition.of(UpdatePeriod.HOURLY, NOW).rangeUpto(TimePartition.of(UpdatePeriod.DAILY, NOW));
+  }
+
 }

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/21102e6b/lens-cube/src/test/java/org/apache/lens/cube/metadata/timeline/TestPartitionTimelines.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/cube/metadata/timeline/TestPartitionTimelines.java b/lens-cube/src/test/java/org/apache/lens/cube/metadata/timeline/TestPartitionTimelines.java
index 50b75e3..c87050f 100644
--- a/lens-cube/src/test/java/org/apache/lens/cube/metadata/timeline/TestPartitionTimelines.java
+++ b/lens-cube/src/test/java/org/apache/lens/cube/metadata/timeline/TestPartitionTimelines.java
@@ -21,7 +21,7 @@ package org.apache.lens.cube.metadata.timeline;
 import java.util.*;
 
 import org.apache.lens.api.LensException;
-import org.apache.lens.cube.metadata.CubeMetastoreClient;
+import org.apache.lens.cube.metadata.TestTimePartition;
 import org.apache.lens.cube.metadata.TimePartition;
 import org.apache.lens.cube.metadata.UpdatePeriod;
 
@@ -31,11 +31,9 @@ import org.testng.annotations.Test;
 import com.beust.jcommander.internal.Lists;
 
 public class TestPartitionTimelines {
-  CubeMetastoreClient client = null;
   private static final String TABLE_NAME = "storage_fact";
-  private static final UpdatePeriod PERIOD = UpdatePeriod.HOURLY;
+  public static final UpdatePeriod PERIOD = UpdatePeriod.HOURLY;
   private static final String PART_COL = "pt";
-  private static final Date DATE = new Date();
   private static final List<Class<? extends PartitionTimeline>> TIMELINE_IMPLEMENTATIONS = Arrays.asList(
     StoreAllPartitionTimeline.class,
     EndsAndHolesPartitionTimeline.class,
@@ -60,7 +58,8 @@ public class TestPartitionTimelines {
       final List<TimePartition> addedPartitions = Lists.newArrayList();
       for (int i = 0; i < 200; i++) {
         int randomInt = randomGenerator.nextInt(100) - 50;
-        TimePartition part = TimePartition.of(PERIOD, timeAtHourDiff(randomInt));
+        TimePartition part = TimePartition.of(PERIOD, TestTimePartition.timeAtDiff(TestTimePartition.NOW, PERIOD,
+          randomInt));
         addedPartitions.add(part);
         for (PartitionTimeline timeline : timelines) {
           timeline.add(part);
@@ -98,12 +97,6 @@ public class TestPartitionTimelines {
     }
   }
 
-  private Date timeAtHourDiff(int d) {
-    Calendar cal = Calendar.getInstance();
-    cal.setTime(DATE);
-    cal.add(PERIOD.calendarField(), d);
-    return cal.getTime();
-  }
 
   private <T extends PartitionTimeline> T getInstance(Class<T> clz) {
     try {
@@ -116,22 +109,41 @@ public class TestPartitionTimelines {
   }
 
   private <T extends PartitionTimeline> void testPropertiesContract(Class<T> clz) throws LensException {
+    // Make two instances, one to modify, other to validate against
     T inst1 = getInstance(clz);
     T inst2 = getInstance(clz);
+    // whenever we'll init from props, timeline should become empty.
     Map<String, String> props = inst1.toProperties();
     Assert.assertTrue(inst2.initFromProperties(props));
+    // init from props of an empty timeline: should succeed and make the timeline empty
     Assert.assertEquals(inst1, inst2);
     Assert.assertTrue(inst1.isEmpty());
     Assert.assertTrue(inst2.isEmpty());
-    Assert.assertTrue(inst1.add(TimePartition.of(PERIOD, DATE)));
+    // Add single partition and test for non-equivalence
+    Assert.assertTrue(inst1.add(TimePartition.of(PERIOD, TestTimePartition.NOW)));
     Assert.assertFalse(inst1.equals(inst2));
-    Assert.assertTrue(inst2.add(TimePartition.of(PERIOD, DATE)));
+    // add same partition in other timeline, test for equality
+    Assert.assertTrue(inst2.add(TimePartition.of(PERIOD, TestTimePartition.NOW)));
     Assert.assertTrue(inst1.isConsistent());
     Assert.assertTrue(inst2.isConsistent());
     Assert.assertEquals(inst1, inst2);
+    // init with blank properties. Should become empty
     Assert.assertTrue(inst2.initFromProperties(props));
     Assert.assertFalse(inst1.equals(inst2));
+    // init from properties of timeline with single partition.
     Assert.assertTrue(inst2.initFromProperties(inst1.toProperties()));
     Assert.assertEquals(inst1, inst2);
+    // clear timelines
+    inst1.initFromProperties(props);
+    inst2.initFromProperties(props);
+    // Make sparse partition range in one, init other from its properties. Test equality.
+    for (int i = 0; i < 5000; i++) {
+      Assert.assertTrue(inst1.add(TimePartition.of(PERIOD, TestTimePartition.timeAtDiff(TestTimePartition.NOW, PERIOD,
+        i * 2))));
+    }
+    Assert.assertTrue(inst1.isConsistent());
+    inst2.initFromProperties(inst1.toProperties());
+    Assert.assertTrue(inst2.isConsistent());
+    Assert.assertEquals(inst1, inst2);
   }
 }

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/21102e6b/lens-cube/src/test/java/org/apache/lens/cube/parse/CubeTestSetup.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/cube/parse/CubeTestSetup.java b/lens-cube/src/test/java/org/apache/lens/cube/parse/CubeTestSetup.java
index b2ae9b5..62b2c95 100644
--- a/lens-cube/src/test/java/org/apache/lens/cube/parse/CubeTestSetup.java
+++ b/lens-cube/src/test/java/org/apache/lens/cube/parse/CubeTestSetup.java
@@ -1001,10 +1001,10 @@ public class CubeTestSetup {
     Assert.assertEquals(table.getParameters().get(
         MetastoreUtil.getPartitionTimelineStorageClassKey(UpdatePeriod.YEARLY, "ttd2")),
       EndsAndHolesPartitionTimeline.class.getCanonicalName());
-    Assert.assertEquals(table.getParameters().get(
+    Assert.assertEquals(MetastoreUtil.getNamedStringValue(table.getParameters(),
         MetastoreUtil.getPartitionInfoKeyPrefix(UpdatePeriod.HOURLY, "ttd") + "partitions"),
       StringUtils.join(partitions, ","));
-    Assert.assertEquals(table.getParameters().get(
+    Assert.assertEquals(MetastoreUtil.getNamedStringValue(table.getParameters(),
         MetastoreUtil.getPartitionInfoKeyPrefix(UpdatePeriod.HOURLY, "ttd2") + "partitions"),
       StringUtils.join(partitions, ","));
     // Add all hourly partitions for TWO_DAYS_RANGE_BEFORE_4_DAYS
@@ -2045,7 +2045,7 @@ public class CubeTestSetup {
       for (String p : Arrays.asList("et", "it", "pt")) {
         String first = params.get(prefix + up + "." + p + "." + "first");
         String latest = params.get(prefix + up + "." + p + "." + "latest");
-        String holes = params.get(prefix + up + "." + p + "." + "holes");
+        String holes = MetastoreUtil.getNamedStringValue(params, prefix + up + "." + p + "." + "holes");
         String storageClass = params.get(prefix + up + "." + p + "." + "storage.class");
         Assert.assertNotNull(first);
         Assert.assertNotNull(latest);


[44/50] [abbrv] incubator-lens git commit: LENS-455: Allow partition columns to be queried as field in where/select clause.

Posted by am...@apache.org.
LENS-455: Allow partition columns to be queried as field in where/select clause.


Project: http://git-wip-us.apache.org/repos/asf/incubator-lens/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-lens/commit/7586a83c
Tree: http://git-wip-us.apache.org/repos/asf/incubator-lens/tree/7586a83c
Diff: http://git-wip-us.apache.org/repos/asf/incubator-lens/diff/7586a83c

Branch: refs/heads/current-release-line
Commit: 7586a83c8aead80c0b8e21300623ea50eb61ac34
Parents: 2391a80
Author: Rajat Khandelwal <ra...@gmail.com>
Authored: Mon Apr 13 18:53:57 2015 +0530
Committer: Rajat Khandelwal <ra...@gmail.com>
Committed: Mon Apr 13 18:53:57 2015 +0530

----------------------------------------------------------------------
 lens-api/src/main/resources/cube-0.1.xsd        |  6 ++-
 .../lens/cube/metadata/CubeDimensionTable.java  | 34 ++++++++++----
 .../lens/cube/metadata/MetastoreConstants.java  |  1 +
 .../lens/cube/metadata/MetastoreUtil.java       |  4 ++
 .../apache/lens/cube/parse/CandidateDim.java    |  3 +-
 .../cube/parse/CandidateTablePruneCause.java    |  2 +-
 .../cube/metadata/TestCubeMetastoreClient.java  |  8 ----
 .../apache/lens/cube/parse/CubeTestSetup.java   | 30 +++++++++++++
 .../lens/cube/parse/TestCubeRewriter.java       | 47 ++++++++++++++++++--
 .../src/main/resources/product_table.xml        |  3 +-
 10 files changed, 113 insertions(+), 25 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/7586a83c/lens-api/src/main/resources/cube-0.1.xsd
----------------------------------------------------------------------
diff --git a/lens-api/src/main/resources/cube-0.1.xsd b/lens-api/src/main/resources/cube-0.1.xsd
index 1de4258..24d0d64 100644
--- a/lens-api/src/main/resources/cube-0.1.xsd
+++ b/lens-api/src/main/resources/cube-0.1.xsd
@@ -528,7 +528,11 @@
       <xs:element type="x_properties" name="properties" maxOccurs="1" minOccurs="0">
         <xs:annotation>
           <xs:documentation>
-            Dimension table properties
+            Dimension table properties. The properties that should be set are:
+            1.  dimtable.{dim_table_name}.part.cols = comma separated list of partition columns of this dimtable.
+                This would basically be union of all partition columns of all storage tables of the dimtable.
+                Setting this makes that partition column queryable.
+                Time part columns can be skipped as they will generally not be queried.
           </xs:documentation>
         </xs:annotation>
       </xs:element>

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/7586a83c/lens-cube/src/main/java/org/apache/lens/cube/metadata/CubeDimensionTable.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/metadata/CubeDimensionTable.java b/lens-cube/src/main/java/org/apache/lens/cube/metadata/CubeDimensionTable.java
index 33e0482..47a30dd 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/metadata/CubeDimensionTable.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/metadata/CubeDimensionTable.java
@@ -25,6 +25,8 @@ import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.metadata.Table;
 
+import com.google.common.collect.Sets;
+
 public final class CubeDimensionTable extends AbstractCubeTable {
   private String dimName; // dimension name the dimtable belongs to
   private final Map<String, UpdatePeriod> snapshotDumpPeriods = new HashMap<String, UpdatePeriod>();
@@ -54,6 +56,7 @@ public final class CubeDimensionTable extends AbstractCubeTable {
     addProperties();
   }
 
+
   private static Map<String, UpdatePeriod> getSnapshotDumpPeriods(Set<String> storages) {
     Map<String, UpdatePeriod> snapshotDumpPeriods = new HashMap<String, UpdatePeriod>();
     for (String storage : storages) {
@@ -71,6 +74,19 @@ public final class CubeDimensionTable extends AbstractCubeTable {
     }
   }
 
+  public Set<String> getPartCols() {
+    Set<String> partCols = Sets.newHashSet();
+    String partColsStr = getProperties().get(MetastoreUtil.getDimTablePartsKey(getName()));
+    if (partColsStr != null) {
+      for (String s : StringUtils.split(partColsStr, ",")) {
+        if (!StringUtils.isBlank(s)) {
+          partCols.add(s);
+        }
+      }
+    }
+    return partCols;
+  }
+
   @Override
   public CubeTableType getTableType() {
     return CubeTableType.DIM_TABLE;
@@ -79,10 +95,11 @@ public final class CubeDimensionTable extends AbstractCubeTable {
   @Override
   protected void addProperties() {
     super.addProperties();
-    setDimName(getProperties(), getName(), dimName);
-    setSnapshotPeriods(getName(), getProperties(), snapshotDumpPeriods);
+    setDimName(getName(), getProperties(), dimName);
+    setSnapshotDumpPeriods(getName(), getProperties(), snapshotDumpPeriods);
   }
 
+
   public Map<String, UpdatePeriod> getSnapshotDumpPeriods() {
     return snapshotDumpPeriods;
   }
@@ -91,7 +108,7 @@ public final class CubeDimensionTable extends AbstractCubeTable {
     return dimName;
   }
 
-  private static void setSnapshotPeriods(String name, Map<String, String> props,
+  private static void setSnapshotDumpPeriods(String name, Map<String, String> props,
     Map<String, UpdatePeriod> snapshotDumpPeriods) {
     if (snapshotDumpPeriods != null) {
       props.put(MetastoreUtil.getDimensionStorageListKey(name), MetastoreUtil.getStr(snapshotDumpPeriods.keySet()));
@@ -103,7 +120,7 @@ public final class CubeDimensionTable extends AbstractCubeTable {
     }
   }
 
-  private static void setDimName(Map<String, String> props, String dimTblName, String dimName) {
+  private static void setDimName(String dimTblName, Map<String, String> props, String dimName) {
     props.put(MetastoreUtil.getDimNameKey(dimTblName), dimName);
   }
 
@@ -115,8 +132,7 @@ public final class CubeDimensionTable extends AbstractCubeTable {
     String storagesStr = params.get(MetastoreUtil.getDimensionStorageListKey(name));
     if (!StringUtils.isBlank(storagesStr)) {
       Map<String, UpdatePeriod> dumpPeriods = new HashMap<String, UpdatePeriod>();
-      String[] storages = storagesStr.split(",");
-      for (String storage : storages) {
+      for (String storage : StringUtils.split(storagesStr, ",")) {
         String dumpPeriod = params.get(MetastoreUtil.getDimensionDumpPeriodKey(name, storage));
         if (dumpPeriod != null) {
           dumpPeriods.put(storage, UpdatePeriod.valueOf(dumpPeriod));
@@ -178,7 +194,7 @@ public final class CubeDimensionTable extends AbstractCubeTable {
    */
   public void alterUberDim(String newDimName) {
     this.dimName = newDimName;
-    setDimName(getProperties(), getName(), this.dimName);
+    setDimName(getName(), getProperties(), this.dimName);
   }
 
   /**
@@ -198,7 +214,7 @@ public final class CubeDimensionTable extends AbstractCubeTable {
     }
 
     snapshotDumpPeriods.put(storage, period);
-    setSnapshotPeriods(getName(), getProperties(), snapshotDumpPeriods);
+    setSnapshotDumpPeriods(getName(), getProperties(), snapshotDumpPeriods);
   }
 
   @Override
@@ -213,6 +229,6 @@ public final class CubeDimensionTable extends AbstractCubeTable {
 
   void dropStorage(String storage) {
     snapshotDumpPeriods.remove(storage);
-    setSnapshotPeriods(getName(), getProperties(), snapshotDumpPeriods);
+    setSnapshotDumpPeriods(getName(), getProperties(), snapshotDumpPeriods);
   }
 }

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/7586a83c/lens-cube/src/main/java/org/apache/lens/cube/metadata/MetastoreConstants.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/metadata/MetastoreConstants.java b/lens-cube/src/main/java/org/apache/lens/cube/metadata/MetastoreConstants.java
index e25fc81..e7f10ac 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/metadata/MetastoreConstants.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/metadata/MetastoreConstants.java
@@ -42,6 +42,7 @@ public final class MetastoreConstants {
   // Uber dimension constants
   public static final String DIMENSION_PFX = "dimension.";
   public static final String ATTRIBUTES_LIST_SFX = ".attributes.list";
+  public static final String PARTCOLS_SFX = ".part.cols";
   public static final String TIMED_DIMENSION_SFX = ".timed.dimension";
 
   // fact constants

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/7586a83c/lens-cube/src/main/java/org/apache/lens/cube/metadata/MetastoreUtil.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/metadata/MetastoreUtil.java b/lens-cube/src/main/java/org/apache/lens/cube/metadata/MetastoreUtil.java
index 30253d3..bdec4e3 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/metadata/MetastoreUtil.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/metadata/MetastoreUtil.java
@@ -62,6 +62,10 @@ public class MetastoreUtil {
     return getDimPrefix(dimName) + ATTRIBUTES_LIST_SFX;
   }
 
+  public static final String getDimTablePartsKey(String dimtableName) {
+    return getDimensionTablePrefix(dimtableName) + PARTCOLS_SFX;
+  }
+
   public static final String getDimTimedDimensionKey(String dimName) {
     return getDimPrefix(dimName) + TIMED_DIMENSION_SFX;
   }

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/7586a83c/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateDim.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateDim.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateDim.java
index 90d0b6d..64dff16 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateDim.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateDim.java
@@ -29,6 +29,7 @@ import org.apache.lens.cube.metadata.StorageConstants;
 import org.apache.commons.lang.StringUtils;
 import org.apache.hadoop.hive.ql.session.SessionState;
 
+import com.google.common.collect.Sets;
 import lombok.Getter;
 import lombok.Setter;
 
@@ -117,7 +118,7 @@ public class CandidateDim implements CandidateTable {
 
   @Override
   public Collection<String> getColumns() {
-    return dimtable.getAllFieldNames();
+    return Sets.union(dimtable.getAllFieldNames(), dimtable.getPartCols());
   }
 
   @Override

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/7586a83c/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateTablePruneCause.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateTablePruneCause.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateTablePruneCause.java
index bc9ef93..2c191fc 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateTablePruneCause.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateTablePruneCause.java
@@ -240,7 +240,7 @@ public class CandidateTablePruneCause {
     for (Map.Entry<String, SkipStorageCause> entry : storageCauses.entrySet()) {
       String key = entry.getKey();
       key = key.substring(0, (key.indexOf("_") + key.length() + 1) % (key.length() + 1)); // extract the storage part
-      cause.getStorageCauses().put(key, entry.getValue());
+      cause.getStorageCauses().put(key.toLowerCase(), entry.getValue());
     }
     return cause;
   }

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/7586a83c/lens-cube/src/test/java/org/apache/lens/cube/metadata/TestCubeMetastoreClient.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/cube/metadata/TestCubeMetastoreClient.java b/lens-cube/src/test/java/org/apache/lens/cube/metadata/TestCubeMetastoreClient.java
index fe29d25..9ceea48 100644
--- a/lens-cube/src/test/java/org/apache/lens/cube/metadata/TestCubeMetastoreClient.java
+++ b/lens-cube/src/test/java/org/apache/lens/cube/metadata/TestCubeMetastoreClient.java
@@ -1490,10 +1490,6 @@ public class TestCubeMetastoreClient {
     String storageTableName = MetastoreUtil.getFactStorageTableName(cubeFact.getName(), c1);
     List<Partition> parts = client.getPartitionsByFilter(storageTableName, "dt='latest'");
     Assert.assertEquals(parts.size(), 0);
-//    Assert
-//      .assertEquals(TextInputFormat.class.getCanonicalName(), parts.get(0).getInputFormatClass().getCanonicalName());
-//    Assert.assertEquals(parts.get(0).getParameters().get(MetastoreUtil.getLatestPartTimestampKey("dt")),
-//      UpdatePeriod.HOURLY.format().format(now));
     Assert.assertEquals(client.getAllParts(storageTableName).size(), 1);
 
     client.dropPartition(cubeFact.getName(), c1, timeParts, null, UpdatePeriod.HOURLY);
@@ -1577,10 +1573,6 @@ public class TestCubeMetastoreClient {
     Assert.assertEquals(client.getAllParts(storageTableName).size(), 1);
     List<Partition> parts = client.getPartitionsByFilter(storageTableName, "dt='latest'");
     Assert.assertEquals(parts.size(), 0);
-//    Assert
-//      .assertEquals(TextInputFormat.class.getCanonicalName(), parts.get(0).getInputFormatClass().getCanonicalName());
-//    Assert.assertEquals(parts.get(0).getParameters().get(MetastoreUtil.getLatestPartTimestampKey("dt")),
-//      UpdatePeriod.HOURLY.format().format(now));
 
     client.dropPartition(cubeFactWithParts.getName(), c1, timeParts, partSpec, UpdatePeriod.HOURLY);
     Assert.assertFalse(client.factPartitionExists(cubeFactWithParts.getName(), c1, UpdatePeriod.HOURLY, timeParts,

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/7586a83c/lens-cube/src/test/java/org/apache/lens/cube/parse/CubeTestSetup.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/cube/parse/CubeTestSetup.java b/lens-cube/src/test/java/org/apache/lens/cube/parse/CubeTestSetup.java
index 62b2c95..517fe7b 100644
--- a/lens-cube/src/test/java/org/apache/lens/cube/parse/CubeTestSetup.java
+++ b/lens-cube/src/test/java/org/apache/lens/cube/parse/CubeTestSetup.java
@@ -1776,6 +1776,20 @@ public class CubeTestSetup {
     storageTables.put(c1, s1);
 
     client.createCubeDimensionTable(dimName, dimTblName, dimColumns, 0L, dumpPeriods, dimProps, storageTables);
+    dimTblName = "countrytable_partitioned";
+
+    StorageTableDesc s2 = new StorageTableDesc();
+    s2.setInputFormat(TextInputFormat.class.getCanonicalName());
+    s2.setOutputFormat(HiveIgnoreKeyTextOutputFormat.class.getCanonicalName());
+    ArrayList<FieldSchema> partCols = Lists.newArrayList();
+    partCols.add(dimColumns.remove(dimColumns.size() - 2));
+    s2.setPartCols(partCols);
+    dumpPeriods.clear();
+    dumpPeriods.put(c3, UpdatePeriod.HOURLY);
+    storageTables.clear();
+    storageTables.put(c3, s2);
+    dimProps.put(MetastoreUtil.getDimTablePartsKey(dimTblName), partCols.get(0).getName());
+    client.createCubeDimensionTable(dimName, dimTblName, dimColumns, 0L, dumpPeriods, dimProps, storageTables);
   }
 
   private void createStateTable(CubeMetastoreClient client) throws Exception {
@@ -1814,6 +1828,22 @@ public class CubeTestSetup {
     storageTables.put(c1, s1);
 
     client.createCubeDimensionTable(dimName, dimTblName, dimColumns, 0L, dumpPeriods, dimProps, storageTables);
+
+    // In this, country id will be a partition
+    dimTblName = "statetable_partitioned";
+
+    StorageTableDesc s2 = new StorageTableDesc();
+    s2.setInputFormat(TextInputFormat.class.getCanonicalName());
+    s2.setOutputFormat(HiveIgnoreKeyTextOutputFormat.class.getCanonicalName());
+    partCols.add(dimColumns.remove(dimColumns.size() - 1));
+    s2.setPartCols(partCols);
+    s2.setTimePartCols(timePartCols);
+    dumpPeriods.clear();
+    dumpPeriods.put(c3, UpdatePeriod.HOURLY);
+    storageTables.clear();
+    storageTables.put(c3, s2);
+    dimProps.put(MetastoreUtil.getDimTablePartsKey(dimTblName), partCols.get(1).getName());
+    client.createCubeDimensionTable(dimName, dimTblName, dimColumns, 0L, dumpPeriods, dimProps, storageTables);
   }
 
   public void createSources(HiveConf conf, String dbName) throws Exception {

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/7586a83c/lens-cube/src/test/java/org/apache/lens/cube/parse/TestCubeRewriter.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestCubeRewriter.java b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestCubeRewriter.java
index 6c65953..b85f60e 100644
--- a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestCubeRewriter.java
+++ b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestCubeRewriter.java
@@ -408,6 +408,38 @@ public class TestCubeRewriter extends TestQueryRewrite {
   }
 
   @Test
+  public void testPartColAsQueryColumn() throws Exception {
+    Configuration conf = getConf();
+    conf.setBoolean(CubeQueryConfUtil.FAIL_QUERY_ON_PARTIAL_DATA, false);
+    conf.set(CubeQueryConfUtil.DRIVER_SUPPORTED_STORAGES, "C3");
+    conf.setBoolean(CubeQueryConfUtil.DISABLE_AUTO_JOINS, false);
+    String hql, expected;
+    hql = rewrite(
+      "select countrydim.name, msr2 from" + " testCube" + " where countrydim.region = 'asia' and "
+        + TWO_DAYS_RANGE, conf);
+    expected =
+      getExpectedQuery(cubeName, "select countrydim.name, sum(testcube.msr2)" + " FROM ", " JOIN " + getDbName()
+          + "c3_statetable_partitioned statedim ON" + " testCube.stateid = statedim.id and statedim.dt = 'latest' JOIN "
+          + getDbName()
+          + "c3_countrytable_partitioned countrydim on statedim.countryid=countrydim.id and countrydim.dt='latest'",
+        "countrydim.region='asia'",
+        " group by countrydim.name ", null,
+        getWhereForDailyAndHourly2days(cubeName, "C3_testfact"));
+    compareQueries(hql, expected);
+    hql = rewrite(
+      "select statedim.name, statedim.countryid, msr2 from" + " testCube" + " where statedim.countryid = 5 and "
+        + TWO_DAYS_RANGE, conf);
+    expected =
+      getExpectedQuery(cubeName, "select statedim.name, statedim.countryid, sum(testcube.msr2)" + " FROM ",
+        " JOIN " + getDbName()
+          + "c3_statetable_partitioned statedim ON" + " testCube.stateid = statedim.id and statedim.dt = 'latest'",
+        "statedim.countryid=5",
+        " group by statedim.name, statedim.countryid", null,
+        getWhereForDailyAndHourly2days(cubeName, "C3_testfact"));
+    compareQueries(hql, expected);
+  }
+
+  @Test
   public void testCubeJoinQuery() throws Exception {
     // q1
     String hqlQuery =
@@ -833,19 +865,19 @@ public class TestCubeRewriter extends TestQueryRewrite {
 
     Assert.assertEquals(
       pruneCauses.getBrief().substring(0, CandidateTablePruneCode.MISSING_PARTITIONS.errorFormat.length() - 3),
-        CandidateTablePruneCode.MISSING_PARTITIONS.errorFormat.substring(0,
-          CandidateTablePruneCode.MISSING_PARTITIONS.errorFormat.length() - 3));
+      CandidateTablePruneCode.MISSING_PARTITIONS.errorFormat.substring(0,
+        CandidateTablePruneCode.MISSING_PARTITIONS.errorFormat.length() - 3));
 
     Assert.assertEquals(pruneCauses.getDetails().get("testfact").iterator().next().getCause(),
       CandidateTablePruneCode.MISSING_PARTITIONS);
     Assert.assertEquals(pruneCauses.getDetails().get("testfactmonthly").iterator().next().getCause(),
       CandidateTablePruneCode.NO_FACT_UPDATE_PERIODS_FOR_GIVEN_RANGE);
     Assert.assertEquals(pruneCauses.getDetails().get("testfact2").iterator().next().getCause(),
-        CandidateTablePruneCode.MISSING_PARTITIONS);
+      CandidateTablePruneCode.MISSING_PARTITIONS);
     Assert.assertEquals(pruneCauses.getDetails().get("testfact2_raw").iterator().next().getCause(),
       CandidateTablePruneCode.MISSING_PARTITIONS);
     Assert.assertEquals(pruneCauses.getDetails().get("cheapfact").iterator().next().getCause(),
-        CandidateTablePruneCode.NO_CANDIDATE_STORAGES);
+      CandidateTablePruneCode.NO_CANDIDATE_STORAGES);
     Assert.assertEquals(pruneCauses.getDetails().get("summary1,summary2,summary3").iterator().next().getCause(),
       CandidateTablePruneCode.MISSING_PARTITIONS);
     Assert.assertEquals(pruneCauses.getDetails().get("summary4").iterator().next()
@@ -896,6 +928,13 @@ public class TestCubeRewriter extends TestQueryRewrite {
                 }
               }))
           );
+          put("statetable_partitioned", Arrays.asList(CandidateTablePruneCause.noCandidateStorages(
+              new HashMap<String, SkipStorageCause>() {
+                {
+                  put("C3_statetable_partitioned", new SkipStorageCause(SkipStorageCode.UNSUPPORTED));
+                }
+              }))
+          );
         }
       }
     ));

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/7586a83c/lens-examples/src/main/resources/product_table.xml
----------------------------------------------------------------------
diff --git a/lens-examples/src/main/resources/product_table.xml b/lens-examples/src/main/resources/product_table.xml
index 4babca6..f7e804f 100644
--- a/lens-examples/src/main/resources/product_table.xml
+++ b/lens-examples/src/main/resources/product_table.xml
@@ -26,12 +26,12 @@
     <column comment="SKU_number" name="SKU_number" type="INT"/>
     <column comment="" name="description" type="STRING"/>
     <column comment="" name="color" type="STRING"/>
-    <column comment="" name="category" type="STRING"/>
     <column comment="" name="weight" type="FLOAT"/>
     <column comment="" name="manufacturer" type="STRING"/>
   </columns>
   <properties>
     <property name="dim4.prop" value="d1"/>
+    <property name="dimtable.product_table.part.cols" value="category"/>
   </properties>
   <storage_tables>
     <storage_table>
@@ -42,6 +42,7 @@
       <table_desc external="true" field_delimiter="," table_location="/tmp/examples/product">
         <part_cols>
           <column comment="Time column" name="dt" type="STRING"/>
+          <column comment="Category" name="category" type="STRING"/>
         </part_cols>
         <time_part_cols>dt</time_part_cols>
       </table_desc>


[21/50] [abbrv] incubator-lens git commit: LENS-471 : Fix NPE while calculating priority (amareshwari)

Posted by am...@apache.org.
LENS-471 : Fix NPE while calculating priority (amareshwari)


Project: http://git-wip-us.apache.org/repos/asf/incubator-lens/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-lens/commit/511418a6
Tree: http://git-wip-us.apache.org/repos/asf/incubator-lens/tree/511418a6
Diff: http://git-wip-us.apache.org/repos/asf/incubator-lens/diff/511418a6

Branch: refs/heads/current-release-line
Commit: 511418a6be7d2f4e27c28531908d60a38efc19bb
Parents: 1820fe0
Author: Amareshwari Sriramadasu <am...@apache.org>
Authored: Tue Mar 31 18:20:02 2015 +0530
Committer: Amareshwari Sriramadasu <am...@apache.org>
Committed: Tue Mar 31 18:20:02 2015 +0530

----------------------------------------------------------------------
 .../DurationBasedQueryPriorityDecider.java         |  5 ++++-
 .../apache/lens/driver/hive/TestHiveDriver.java    | 17 +++++++++++------
 .../api/query/DriverSelectorQueryContext.java      |  2 +-
 3 files changed, 16 insertions(+), 8 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/511418a6/lens-driver-hive/src/main/java/org/apache/lens/driver/hive/priority/DurationBasedQueryPriorityDecider.java
----------------------------------------------------------------------
diff --git a/lens-driver-hive/src/main/java/org/apache/lens/driver/hive/priority/DurationBasedQueryPriorityDecider.java b/lens-driver-hive/src/main/java/org/apache/lens/driver/hive/priority/DurationBasedQueryPriorityDecider.java
index c0156e5..a26ed0c 100644
--- a/lens-driver-hive/src/main/java/org/apache/lens/driver/hive/priority/DurationBasedQueryPriorityDecider.java
+++ b/lens-driver-hive/src/main/java/org/apache/lens/driver/hive/priority/DurationBasedQueryPriorityDecider.java
@@ -109,7 +109,10 @@ public class DurationBasedQueryPriorityDecider implements QueryPriorityDecider {
   private float getTableWeights(Set<String> tables, AbstractQueryContext queryContext) {
     float weight = 0;
     for (String tblName : tables) {
-      weight += queryContext.getDriverContext().getDriverRewriterPlan(driver).getTableWeight(tblName);
+      Double tblWeight = queryContext.getDriverContext().getDriverRewriterPlan(driver).getTableWeight(tblName);
+      if (tblWeight != null) {
+        weight += tblWeight;
+      }
     }
     return weight == 0 ? 1 : weight;
   }

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/511418a6/lens-driver-hive/src/test/java/org/apache/lens/driver/hive/TestHiveDriver.java
----------------------------------------------------------------------
diff --git a/lens-driver-hive/src/test/java/org/apache/lens/driver/hive/TestHiveDriver.java b/lens-driver-hive/src/test/java/org/apache/lens/driver/hive/TestHiveDriver.java
index f02490b..36594e9 100644
--- a/lens-driver-hive/src/test/java/org/apache/lens/driver/hive/TestHiveDriver.java
+++ b/lens-driver-hive/src/test/java/org/apache/lens/driver/hive/TestHiveDriver.java
@@ -809,6 +809,7 @@ public class TestHiveDriver {
     BufferedReader br = new BufferedReader(new InputStreamReader(
       TestHiveDriver.class.getResourceAsStream("/priority_tests.data")));
     String line;
+    int i = 0;
     while ((line = br.readLine()) != null) {
       String[] kv = line.split("\\s*:\\s*");
 
@@ -834,14 +835,18 @@ public class TestHiveDriver {
             put("table1", partitions);
           }
         });
-      ctx.getDriverContext().getDriverRewriterPlan(driver).getTableWeights().putAll(
-        new HashMap<String, Double>() {
-          {
-            put("table1", 1.0);
-          }
-        });
+      if (i < 1) {
+        // table weights only for first calculation
+        ctx.getDriverContext().getDriverRewriterPlan(driver).getTableWeights().putAll(
+          new HashMap<String, Double>() {
+            {
+              put("table1", 1.0);
+            }
+          });
+      }
       Assert.assertEquals(expected, driver.queryPriorityDecider.decidePriority(ctx));
       Assert.assertEquals(Priority.NORMAL, alwaysNormalPriorityDecider.decidePriority(ctx));
+      i++;
     }
     // test priority without fact partitions
     AbstractQueryContext ctx = createContext("test priority query", conf);

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/511418a6/lens-server-api/src/main/java/org/apache/lens/server/api/query/DriverSelectorQueryContext.java
----------------------------------------------------------------------
diff --git a/lens-server-api/src/main/java/org/apache/lens/server/api/query/DriverSelectorQueryContext.java b/lens-server-api/src/main/java/org/apache/lens/server/api/query/DriverSelectorQueryContext.java
index 2fd592a..44eba2b 100644
--- a/lens-server-api/src/main/java/org/apache/lens/server/api/query/DriverSelectorQueryContext.java
+++ b/lens-server-api/src/main/java/org/apache/lens/server/api/query/DriverSelectorQueryContext.java
@@ -346,13 +346,13 @@ public class DriverSelectorQueryContext {
   void clearTransientStateAfterLaunch() {
     for (DriverQueryContext driverCtx : driverQueryContextMap.values()) {
       driverCtx.driverQueryPlan = null;
-      driverCtx.rewriterPlan = null;
     }
   }
 
   void clearTransientStateAfterCompleted() {
     for (DriverQueryContext driverCtx : driverQueryContextMap.values()) {
       driverCtx.driverSpecificConf = null;
+      driverCtx.rewriterPlan = null;
     }
   }
 }


[32/50] [abbrv] incubator-lens git commit: LENS-428 : Honors LENS_SERVER_HEAP and LENS_CLIENT_HEAP values set (Arshad Matin via amareshwari)

Posted by am...@apache.org.
LENS-428 : Honors LENS_SERVER_HEAP and LENS_CLIENT_HEAP values set (Arshad Matin via amareshwari)


Project: http://git-wip-us.apache.org/repos/asf/incubator-lens/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-lens/commit/07492f18
Tree: http://git-wip-us.apache.org/repos/asf/incubator-lens/tree/07492f18
Diff: http://git-wip-us.apache.org/repos/asf/incubator-lens/diff/07492f18

Branch: refs/heads/current-release-line
Commit: 07492f187f3b14a927b9e72c62c95be625221764
Parents: d597314
Author: Arshad Matin <ar...@gmail.com>
Authored: Tue Apr 7 17:46:35 2015 +0530
Committer: Amareshwari Sriramadasu <am...@apache.org>
Committed: Tue Apr 7 17:46:35 2015 +0530

----------------------------------------------------------------------
 tools/scripts/lens-config.sh | 11 ++++++++---
 1 file changed, 8 insertions(+), 3 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/07492f18/tools/scripts/lens-config.sh
----------------------------------------------------------------------
diff --git a/tools/scripts/lens-config.sh b/tools/scripts/lens-config.sh
index 0e810cb..4470132 100644
--- a/tools/scripts/lens-config.sh
+++ b/tools/scripts/lens-config.sh
@@ -72,16 +72,17 @@ if [ ! -e $JAVA_BIN ] || [ ! -e $JAR_BIN ]; then
   exit 1
 fi
 
-# default the heap size to 1GB
 DEFAULT_JAVA_HEAP_MAX=-Xmx1024m
-LENS_OPTS="$DEFAULT_JAVA_HEAP_MAX $LENS_OPTS"
-
 type="$1"
 shift
 case $type in
   client)
     # set the client class path
     LENSCPPATH=$LENS_CONF:`ls ${BASEDIR}/lib/* 2>/dev/null | tr "\n" ':' 2>/dev/null`
+    if test -z "$LENS_CLIENT_HEAP"
+    then
+      LENS_CLIENT_HEAP=$DEFAULT_JAVA_HEAP_MAX
+    fi
     LENS_OPTS="$LENS_OPTS $LENS_CLIENT_OPTS $LENS_CLIENT_HEAP"
     LENS_LOG_DIR="${LENS_LOG_DIR:-$BASEDIR/logs}"
     export LENS_LOG_DIR    
@@ -89,6 +90,10 @@ case $type in
     export LENS_HOME_DIR    
   ;;
   server)
+    if test -z "$LENS_SERVER_HEAP"
+    then
+      LENS_SERVER_HEAP=$DEFAULT_JAVA_HEAP_MAX
+    fi
     LENS_OPTS="$LENS_OPTS $LENS_SERVER_OPTS $LENS_SERVER_HEAP"
     LENSCPPATH="$LENS_CONF" 
     LENS_EXPANDED_WEBAPP_DIR=${LENS_EXPANDED_WEBAPP_DIR:-${BASEDIR}/webapp}


[34/50] [abbrv] incubator-lens git commit: LENS-457 : Adds Configurability in QueryExecutionServiceImpl (Rajat Khandelwal via amareshwari)

Posted by am...@apache.org.
LENS-457 : Adds Configurability in QueryExecutionServiceImpl (Rajat Khandelwal via amareshwari)


Project: http://git-wip-us.apache.org/repos/asf/incubator-lens/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-lens/commit/e90dc33b
Tree: http://git-wip-us.apache.org/repos/asf/incubator-lens/tree/e90dc33b
Diff: http://git-wip-us.apache.org/repos/asf/incubator-lens/diff/e90dc33b

Branch: refs/heads/current-release-line
Commit: e90dc33bcb54770e5a54d0067337ee52e345622a
Parents: 21102e6
Author: Rajat Khandelwal <pr...@apache.org>
Authored: Thu Apr 9 06:39:31 2015 +0530
Committer: Amareshwari Sriramadasu <am...@apache.org>
Committed: Thu Apr 9 06:39:31 2015 +0530

----------------------------------------------------------------------
 .../lens/server/api/LensConfConstants.java      |   8 +
 .../server/query/QueryExecutionServiceImpl.java |  88 +++++++----
 .../src/main/resources/lensserver-default.xml   |  15 ++
 .../lens/server/query/BlahQueryAcceptor.java    |  37 +++++
 .../lens/server/query/TestQueryService.java     |  16 ++
 lens-server/src/test/resources/lens-site.xml    |   6 +
 src/site/apt/admin/config.apt                   | 150 ++++++++++---------
 7 files changed, 220 insertions(+), 100 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/e90dc33b/lens-server-api/src/main/java/org/apache/lens/server/api/LensConfConstants.java
----------------------------------------------------------------------
diff --git a/lens-server-api/src/main/java/org/apache/lens/server/api/LensConfConstants.java b/lens-server-api/src/main/java/org/apache/lens/server/api/LensConfConstants.java
index 6326b42..07bcae6 100644
--- a/lens-server-api/src/main/java/org/apache/lens/server/api/LensConfConstants.java
+++ b/lens-server-api/src/main/java/org/apache/lens/server/api/LensConfConstants.java
@@ -55,7 +55,15 @@ public final class LensConfConstants {
    * The Constant DRIVER_CLASSES.
    */
   public static final String DRIVER_CLASSES = SERVER_PFX + "drivers";
+  /**
+   * The Constant DRIVER_SELECTOR_CLASS.
+   */
+  public static final String DRIVER_SELECTOR_CLASS = SERVER_PFX + "driver.selector.class";
+  /**
+   * The Constant ACCEPTOR_CLASSES.
+   */
 
+  public static final String ACCEPTOR_CLASSES = SERVER_PFX + "query.acceptors";
   /**
    * The Constant SERVICE_NAMES.
    */

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/e90dc33b/lens-server/src/main/java/org/apache/lens/server/query/QueryExecutionServiceImpl.java
----------------------------------------------------------------------
diff --git a/lens-server/src/main/java/org/apache/lens/server/query/QueryExecutionServiceImpl.java b/lens-server/src/main/java/org/apache/lens/server/query/QueryExecutionServiceImpl.java
index 3e0e0db..3cfd03b 100644
--- a/lens-server/src/main/java/org/apache/lens/server/query/QueryExecutionServiceImpl.java
+++ b/lens-server/src/main/java/org/apache/lens/server/query/QueryExecutionServiceImpl.java
@@ -126,7 +126,7 @@ public class QueryExecutionServiceImpl extends LensService implements QueryExecu
   /**
    * The accepted queries.
    */
-  private PriorityBlockingQueue<QueryContext> acceptedQueries = new PriorityBlockingQueue<QueryContext>();
+  private PriorityBlockingQueue<QueryContext> queuedQueries = new PriorityBlockingQueue<QueryContext>();
 
   /**
    * The launched queries.
@@ -262,7 +262,24 @@ public class QueryExecutionServiceImpl extends LensService implements QueryExecu
   /**
    * Initialize query acceptors and listeners.
    */
-  private void initializeQueryAcceptorsAndListeners() {
+  private void initializeQueryAcceptors() throws LensException {
+    String[] acceptorClasses = conf.getStrings(LensConfConstants.ACCEPTOR_CLASSES);
+    if (acceptorClasses != null) {
+      for (String acceptorClass : acceptorClasses) {
+        try {
+          Class<?> clazz = Class.forName(acceptorClass);
+          QueryAcceptor acceptor = (QueryAcceptor) clazz.newInstance();
+          LOG.info("initialized query acceptor: " + acceptor);
+          queryAcceptors.add(acceptor);
+        } catch (Exception e) {
+          LOG.warn("Could not load the acceptor:" + acceptorClass, e);
+          throw new LensException("Could not load acceptor" + acceptorClass, e);
+        }
+      }
+    }
+  }
+
+  private void initializeListeners() {
     if (conf.getBoolean(LensConfConstants.QUERY_STATE_LOGGER_ENABLED, true)) {
       getEventService().addListenerForType(new QueryStatusLogger(), StatusChange.class);
       LOG.info("Registered query state logger");
@@ -281,7 +298,6 @@ public class QueryExecutionServiceImpl extends LensService implements QueryExecu
    * @throws LensException the lens exception
    */
   private void loadDriversAndSelector() throws LensException {
-    conf.get(LensConfConstants.DRIVER_CLASSES);
     String[] driverClasses = conf.getStrings(LensConfConstants.DRIVER_CLASSES);
     if (driverClasses != null) {
       for (String driverClass : driverClasses) {
@@ -304,7 +320,17 @@ public class QueryExecutionServiceImpl extends LensService implements QueryExecu
     } else {
       throw new LensException("No drivers specified");
     }
-    driverSelector = new MinQueryCostSelector();
+    try {
+      Class<? extends DriverSelector> driverSelectorClass = conf.getClass(LensConfConstants.DRIVER_SELECTOR_CLASS,
+        MinQueryCostSelector.class,
+        DriverSelector.class);
+      LOG.info("Using driver selector class: " + driverSelectorClass.getCanonicalName());
+      driverSelector = driverSelectorClass.newInstance();
+    } catch (Exception e) {
+      throw new LensException("Couldn't instantiate driver selector class. Class name: "
+        + conf.get(LensConfConstants.DRIVER_SELECTOR_CLASS) + ". Please supply a valid value for "
+        + LensConfConstants.DRIVER_SELECTOR_CLASS);
+    }
   }
 
   protected LensEventService getEventService() {
@@ -462,7 +488,7 @@ public class QueryExecutionServiceImpl extends LensService implements QueryExecu
       LOG.info("Starting QuerySubmitter thread");
       while (!pausedForTest && !stopped && !querySubmitter.isInterrupted()) {
         try {
-          QueryContext ctx = acceptedQueries.take();
+          QueryContext ctx = queuedQueries.take();
           synchronized (ctx) {
             if (ctx.getStatus().getStatus().equals(Status.QUEUED)) {
               LOG.info("Launching query:" + ctx.getUserQuery());
@@ -608,7 +634,7 @@ public class QueryExecutionServiceImpl extends LensService implements QueryExecu
     // before would be null in case of server restart
     if (before != null) {
       if (before.getStatus().equals(Status.QUEUED)) {
-        acceptedQueries.remove(ctx);
+        queuedQueries.remove(ctx);
       } else {
         launchedQueries.remove(ctx);
       }
@@ -844,7 +870,12 @@ public class QueryExecutionServiceImpl extends LensService implements QueryExecu
   public synchronized void init(HiveConf hiveConf) {
     super.init(hiveConf);
     this.conf = hiveConf;
-    initializeQueryAcceptorsAndListeners();
+    try {
+      initializeQueryAcceptors();
+    } catch (LensException e) {
+      throw new IllegalStateException("Could not load acceptors");
+    }
+    initializeListeners();
     try {
       loadDriversAndSelector();
     } catch (LensException e) {
@@ -873,7 +904,7 @@ public class QueryExecutionServiceImpl extends LensService implements QueryExecu
     module.addSerializer(ColumnDescriptor.class, new JsonSerializer<ColumnDescriptor>() {
       @Override
       public void serialize(ColumnDescriptor columnDescriptor, JsonGenerator jsonGenerator,
-                            SerializerProvider serializerProvider) throws IOException, JsonProcessingException {
+        SerializerProvider serializerProvider) throws IOException, JsonProcessingException {
         jsonGenerator.writeStartObject();
         jsonGenerator.writeStringField("name", columnDescriptor.getName());
         jsonGenerator.writeStringField("comment", columnDescriptor.getComment());
@@ -997,8 +1028,8 @@ public class QueryExecutionServiceImpl extends LensService implements QueryExecu
   private static final String PARALLEL_CALL_GAUGE = "PARALLEL_ESTIMATE";
 
   /**
-   * Rewrite the query for each driver, and estimate query cost for the rewritten queries.
-   * Finally, select the driver using driver selector.
+   * Rewrite the query for each driver, and estimate query cost for the rewritten queries. Finally, select the driver
+   * using driver selector.
    *
    * @param ctx query context
    * @throws LensException the lens exception
@@ -1089,8 +1120,8 @@ public class QueryExecutionServiceImpl extends LensService implements QueryExecu
   }
 
   /**
-   * Chains driver specific rewrite and estimate of the query in a single runnable, which can be
-   * processed in a background thread
+   * Chains driver specific rewrite and estimate of the query in a single runnable, which can be processed in a
+   * background thread
    */
   public class RewriteEstimateRunnable implements Runnable {
     @Getter
@@ -1181,7 +1212,7 @@ public class QueryExecutionServiceImpl extends LensService implements QueryExecu
       String rejectionCause = acceptor.accept(query, conf, submitOp);
       if (rejectionCause != null) {
         getEventService().notifyEvent(new QueryRejected(System.currentTimeMillis(), query, rejectionCause, null));
-        throw new LensException("Query not accepted because " + cause);
+        throw new BadRequestException("Query not accepted because " + cause);
       }
     }
     getEventService().notifyEvent(new QueryAccepted(System.currentTimeMillis(), null, query, null));
@@ -1231,11 +1262,11 @@ public class QueryExecutionServiceImpl extends LensService implements QueryExecu
         if (resultSet == null) {
           if (ctx.isPersistent() && ctx.getQueryOutputFormatter() != null) {
             resultSets
-            .put(queryHandle,
-              new LensPersistentResult(
-                ctx.getQueryOutputFormatter().getMetadata(),
-                ctx.getQueryOutputFormatter().getFinalOutputPath(),
-                ctx.getQueryOutputFormatter().getNumRows()));
+              .put(queryHandle,
+                new LensPersistentResult(
+                  ctx.getQueryOutputFormatter().getMetadata(),
+                  ctx.getQueryOutputFormatter().getFinalOutputPath(),
+                  ctx.getQueryOutputFormatter().getNumRows()));
           } else if (allQueries.get(queryHandle).isResultAvailableInDriver()) {
             resultSet = allQueries.get(queryHandle).getSelectedDriver().fetchResultSet(allQueries.get(queryHandle));
             resultSets.put(queryHandle, resultSet);
@@ -1471,7 +1502,7 @@ public class QueryExecutionServiceImpl extends LensService implements QueryExecu
     ctx.setLensSessionIdentifier(sessionHandle.getPublicId().toString());
     QueryStatus before = ctx.getStatus();
     ctx.setStatus(new QueryStatus(0.0, QueryStatus.Status.QUEUED, "Query is queued", false, null, null));
-    acceptedQueries.add(ctx);
+    queuedQueries.add(ctx);
     allQueries.put(ctx.getQueryHandle(), ctx);
     fireStatusChangeEvent(ctx, ctx.getStatus(), before);
     LOG.info("Returning handle " + ctx.getQueryHandle().getHandleId());
@@ -2135,7 +2166,7 @@ public class QueryExecutionServiceImpl extends LensService implements QueryExecu
         switch (ctx.getStatus().getStatus()) {
         case NEW:
         case QUEUED:
-          acceptedQueries.add(ctx);
+          queuedQueries.add(ctx);
           break;
         case LAUNCHED:
         case RUNNING:
@@ -2289,7 +2320,7 @@ public class QueryExecutionServiceImpl extends LensService implements QueryExecu
 
   @Override
   public long getQueuedQueriesCount() {
-    return acceptedQueries.size();
+    return queuedQueries.size();
   }
 
   @Override
@@ -2321,6 +2352,7 @@ public class QueryExecutionServiceImpl extends LensService implements QueryExecu
       LOG.warn("Lens session went away for sessionid:" + lensSession);
       return;
     }
+
     try {
       LensSessionImpl session = getSession(sessionHandle);
       acquire(sessionHandle);
@@ -2353,7 +2385,8 @@ public class QueryExecutionServiceImpl extends LensService implements QueryExecu
 
   /**
    * Add session's resources to selected driver if needed
-   * @param ctx the query context
+   *
+   * @param ctx QueryContext for executinf queries
    * @throws LensException
    */
   protected void addSessionResourcesToDriver(final AbstractQueryContext ctx) {
@@ -2412,17 +2445,18 @@ public class QueryExecutionServiceImpl extends LensService implements QueryExecu
 
   /**
    * Add resources to hive driver, returning resources which failed to be added
-   * @param resources collection of resources intented to be added to hive driver
+   *
+   * @param resources     collection of resources intented to be added to hive driver
    * @param sessionHandle
    * @param hiveDriver
    * @return resources which could not be added to hive driver
    */
   private List<ResourceEntry> addResources(Collection<ResourceEntry> resources,
-                                                           LensSessionHandle sessionHandle,
-                                                           HiveDriver hiveDriver) {
+    LensSessionHandle sessionHandle,
+    HiveDriver hiveDriver) {
     List<ResourceEntry> failedResources = new ArrayList<ResourceEntry>();
     for (ResourceEntry res : resources) {
-      try{
+      try {
         addSingleResourceToHive(hiveDriver, res, sessionHandle);
       } catch (LensException exc) {
         failedResources.add(res);
@@ -2434,7 +2468,7 @@ public class QueryExecutionServiceImpl extends LensService implements QueryExecu
   }
 
   private void addSingleResourceToHive(HiveDriver driver, ResourceEntry res,
-                                       LensSessionHandle sessionHandle) throws LensException {
+    LensSessionHandle sessionHandle) throws LensException {
     String sessionIdentifier = sessionHandle.getPublicId().toString();
     String uri = res.getLocation();
     // Hive doesn't and URIs starting with file:/ correctly, so we have to change it to file:///

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/e90dc33b/lens-server/src/main/resources/lensserver-default.xml
----------------------------------------------------------------------
diff --git a/lens-server/src/main/resources/lensserver-default.xml b/lens-server/src/main/resources/lensserver-default.xml
index c153193..1a3f82f 100644
--- a/lens-server/src/main/resources/lensserver-default.xml
+++ b/lens-server/src/main/resources/lensserver-default.xml
@@ -29,6 +29,21 @@
   </property>
 
   <property>
+    <name>lens.server.driver.selector.class</name>
+    <value>org.apache.lens.server.api.driver.MinQueryCostSelector</value>
+    <description>Class for selecting best driver given the query context</description>
+  </property>
+
+  <property>
+    <name>lens.server.query.acceptors</name>
+    <value></value>
+    <description>Query Acceptors configured. Query acceptors are consulted first, before anything happens for the
+      given query. They can either return null or return a messaging indicating why the given query shouldn't be
+      accepted. These can be used to filter out queries at the earliest.
+    </description>
+  </property>
+
+  <property>
     <name>lens.server.servicenames</name>
     <value>session,query,metastore,scheduler,quota</value>
     <description>These services would be started in the specified order when lens-server starts up</description>

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/e90dc33b/lens-server/src/test/java/org/apache/lens/server/query/BlahQueryAcceptor.java
----------------------------------------------------------------------
diff --git a/lens-server/src/test/java/org/apache/lens/server/query/BlahQueryAcceptor.java b/lens-server/src/test/java/org/apache/lens/server/query/BlahQueryAcceptor.java
new file mode 100644
index 0000000..e51817f
--- /dev/null
+++ b/lens-server/src/test/java/org/apache/lens/server/query/BlahQueryAcceptor.java
@@ -0,0 +1,37 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.lens.server.query;
+
+import org.apache.lens.api.LensException;
+import org.apache.lens.api.query.SubmitOp;
+import org.apache.lens.server.api.query.QueryAcceptor;
+
+import org.apache.hadoop.conf.Configuration;
+
+public class BlahQueryAcceptor implements QueryAcceptor {
+  public static final String MSG = "Query can't start with blah";
+
+  @Override
+  public String accept(String query, Configuration conf, SubmitOp submitOp) throws LensException {
+    if (query.toLowerCase().startsWith("blah")) {
+      return MSG;
+    }
+    return null;
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/e90dc33b/lens-server/src/test/java/org/apache/lens/server/query/TestQueryService.java
----------------------------------------------------------------------
diff --git a/lens-server/src/test/java/org/apache/lens/server/query/TestQueryService.java b/lens-server/src/test/java/org/apache/lens/server/query/TestQueryService.java
index bce0ca9..bd2aa4c 100644
--- a/lens-server/src/test/java/org/apache/lens/server/query/TestQueryService.java
+++ b/lens-server/src/test/java/org/apache/lens/server/query/TestQueryService.java
@@ -1516,4 +1516,20 @@ public class TestQueryService extends LensJerseyTest {
         "lens.MethodMetricGauge.TestQueryService-testEstimateGauges-PARALLEL_ESTIMATE")),
       reg.getGauges().keySet().toString());
   }
+  @Test
+  public void testQueryRejection() throws InterruptedException, IOException {
+    final WebTarget target = target().path("queryapi/queries");
+
+    final FormDataMultiPart mp = new FormDataMultiPart();
+    mp.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("sessionid").build(), lensSessionId,
+      MediaType.APPLICATION_XML_TYPE));
+    mp.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("query").build(), "blah select ID from "
+      + TEST_TABLE));
+    mp.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("operation").build(), "execute"));
+    mp.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("conf").fileName("conf").build(), new LensConf(),
+      MediaType.APPLICATION_XML_TYPE));
+
+    Response response = target.request().post(Entity.entity(mp, MediaType.MULTIPART_FORM_DATA_TYPE));
+    Assert.assertEquals(response.getStatus(), 400);
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/e90dc33b/lens-server/src/test/resources/lens-site.xml
----------------------------------------------------------------------
diff --git a/lens-server/src/test/resources/lens-site.xml b/lens-server/src/test/resources/lens-site.xml
index d966f47..3bcbf0e 100644
--- a/lens-server/src/test/resources/lens-site.xml
+++ b/lens-server/src/test/resources/lens-site.xml
@@ -28,6 +28,12 @@
   </property>
 
   <property>
+    <name>lens.server.query.acceptors</name>
+    <value>org.apache.lens.server.query.BlahQueryAcceptor</value>
+    <description>Query Acceptors configured</description>
+  </property>
+
+  <property>
     <name>test.lens.site.key</name>
     <value>gsvalue</value>
   </property>

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/e90dc33b/src/site/apt/admin/config.apt
----------------------------------------------------------------------
diff --git a/src/site/apt/admin/config.apt b/src/site/apt/admin/config.apt
index 2462f85..0ff204b 100644
--- a/src/site/apt/admin/config.apt
+++ b/src/site/apt/admin/config.apt
@@ -63,150 +63,154 @@ Lens server configuration
 *--+--+---+--+
 |18|lens.server.domain|company.com|Server domain. This will be used in authentication and Sending emails to users. For the server, the username will be without domain and domain will come through conf.|
 *--+--+---+--+
-|19|lens.server.drivers|org.apache.lens.driver.hive.HiveDriver|Drivers enabled for this lens server instance|
+|19|lens.server.driver.selector.class|org.apache.lens.server.api.driver.MinQueryCostSelector|Class for selecting best driver given the query context|
 *--+--+---+--+
-|20|lens.server.enable.console.metrics|false|Enable metrics to be reported on console|
+|20|lens.server.drivers|org.apache.lens.driver.hive.HiveDriver|Drivers enabled for this lens server instance|
 *--+--+---+--+
-|21|lens.server.enable.csv.metrics|false|Enable metrics to be reported as csv files in a directory|
+|21|lens.server.enable.console.metrics|false|Enable metrics to be reported on console|
 *--+--+---+--+
-|22|lens.server.enable.ganglia.metrics|false|Enable metrics to be reported on ganglia|
+|22|lens.server.enable.csv.metrics|false|Enable metrics to be reported as csv files in a directory|
 *--+--+---+--+
-|23|lens.server.enable.graphite.metrics|false|Enable metrics to be reported on graphite|
+|23|lens.server.enable.ganglia.metrics|false|Enable metrics to be reported on ganglia|
 *--+--+---+--+
-|24|lens.server.enable.resource.method.metering|false|Whether to Enable metering for all resource methods.|
+|24|lens.server.enable.graphite.metrics|false|Enable metrics to be reported on graphite|
 *--+--+---+--+
-|25|lens.server.estimate.pool.keepalive.millis|60000|Thread keep alive time in milliseconds for the estimate thread pool. If there are no estimate requests for this period,then cached threads are released from the pool.|
+|25|lens.server.enable.resource.method.metering|false|Whether to Enable metering for all resource methods.|
 *--+--+---+--+
-|26|lens.server.estimate.pool.max.threads|100|Maximum number of threads in the estimate thread pool|
+|26|lens.server.estimate.pool.keepalive.millis|60000|Thread keep alive time in milliseconds for the estimate thread pool. If there are no estimate requests for this period,then cached threads are released from the pool.|
 *--+--+---+--+
-|27|lens.server.estimate.pool.min.threads|3|Minimum number of threads in the estimate thread pool|
+|27|lens.server.estimate.pool.max.threads|100|Maximum number of threads in the estimate thread pool|
 *--+--+---+--+
-|28|lens.server.estimate.timeout.millis|300000|Timeout for parallel query estimate calls in milliseconds. A driver needs to comeback with a query estimate within this timeout. If the timeout is reached, only the drivers that have provided an estimate would be considered for query selection. If the timeout is reached and none of the drivers have provided an estimate then estimate calls fails with an exception.|
+|28|lens.server.estimate.pool.min.threads|3|Minimum number of threads in the estimate thread pool|
 *--+--+---+--+
-|29|lens.server.event.service.thread.pool.size| |The size of thread pool for notifying events. The no value is specified, it uses the available processors as the number.|
+|29|lens.server.estimate.timeout.millis|300000|Timeout for parallel query estimate calls in milliseconds. A driver needs to comeback with a query estimate within this timeout. If the timeout is reached, only the drivers that have provided an estimate would be considered for query selection. If the timeout is reached and none of the drivers have provided an estimate then estimate calls fails with an exception.|
 *--+--+---+--+
-|30|lens.server.index.ws.resource.impl|org.apache.lens.server.IndexResource|Implementation class for Index Resource|
+|30|lens.server.event.service.thread.pool.size| |The size of thread pool for notifying events. The no value is specified, it uses the available processors as the number.|
 *--+--+---+--+
-|31|lens.server.mail.from.address|blah@company.com|The from field in the notifier mail to the submitter.|
+|31|lens.server.index.ws.resource.impl|org.apache.lens.server.IndexResource|Implementation class for Index Resource|
 *--+--+---+--+
-|32|lens.server.mail.host|mail-host.company.com|SMTP Host for sending mail|
+|32|lens.server.mail.from.address|blah@company.com|The from field in the notifier mail to the submitter.|
 *--+--+---+--+
-|33|lens.server.mail.port|25|SMTP Port|
+|33|lens.server.mail.host|mail-host.company.com|SMTP Host for sending mail|
 *--+--+---+--+
-|34|lens.server.mail.smtp.connectiontimeout|15000|Socket connection timeout value in milliseconds. This timeout is implemented by java.net.Socket. Default is 15 seconds.|
+|34|lens.server.mail.port|25|SMTP Port|
 *--+--+---+--+
-|35|lens.server.mail.smtp.timeout|30000|Socket read timeout value in milliseconds. This timeout is implemented by java.net.Socket. Default is 30 seconds.|
+|35|lens.server.mail.smtp.connectiontimeout|15000|Socket connection timeout value in milliseconds. This timeout is implemented by java.net.Socket. Default is 15 seconds.|
 *--+--+---+--+
-|36|lens.server.max.finished.queries|100|Maximum number of finished queries which lens server will keep in memory before purging.|
+|36|lens.server.mail.smtp.timeout|30000|Socket read timeout value in milliseconds. This timeout is implemented by java.net.Socket. Default is 30 seconds.|
 *--+--+---+--+
-|37|lens.server.metastore.service.impl|org.apache.lens.server.metastore.CubeMetastoreServiceImpl|Implementation class for metastore service|
+|37|lens.server.max.finished.queries|100|Maximum number of finished queries which lens server will keep in memory before purging.|
 *--+--+---+--+
-|38|lens.server.metastore.ws.resource.impl|org.apache.lens.server.metastore.MetastoreResource|Implementation class for Metastore Resource|
+|38|lens.server.metastore.service.impl|org.apache.lens.server.metastore.CubeMetastoreServiceImpl|Implementation class for metastore service|
 *--+--+---+--+
-|39|lens.server.metrics.csv.directory.path|metrics/|Path of the directory in which to report metrics as separate csv files.|
+|39|lens.server.metastore.ws.resource.impl|org.apache.lens.server.metastore.MetastoreResource|Implementation class for Metastore Resource|
 *--+--+---+--+
-|40|lens.server.metrics.ganglia.host| |The ganglia host name|
+|40|lens.server.metrics.csv.directory.path|metrics/|Path of the directory in which to report metrics as separate csv files.|
 *--+--+---+--+
-|41|lens.server.metrics.ganglia.port| |The ganglia port|
+|41|lens.server.metrics.ganglia.host| |The ganglia host name|
 *--+--+---+--+
-|42|lens.server.metrics.graphite.host| |The graphite host name|
+|42|lens.server.metrics.ganglia.port| |The ganglia port|
 *--+--+---+--+
-|43|lens.server.metrics.graphite.port| |The graphite port|
+|43|lens.server.metrics.graphite.host| |The graphite host name|
 *--+--+---+--+
-|44|lens.server.metrics.reporting.period|10|The reporting period for metrics. The value is in seconds|
+|44|lens.server.metrics.graphite.port| |The graphite port|
 *--+--+---+--+
-|45|lens.server.mode|OPEN|The mode in which server should run. Allowed values are OPEN, READ_ONLY, METASTORE_READONLY, METASTORE_NODROP. OPEN mode will allow all requests. READ_ONLY mode will allow all requests on session resouce and only GET requests on all other resources. METASTORE_READONLY will allow GET on metastore and all other requests in other services. METASTORE_NODROP will not allow DELETE on metastore, will allow all other requests.|
+|45|lens.server.metrics.reporting.period|10|The reporting period for metrics. The value is in seconds|
 *--+--+---+--+
-|46|lens.server.multipart.ws.feature.impl|org.glassfish.jersey.media.multipart.MultiPartFeature|Implementation class for query scheduler resource|
+|46|lens.server.mode|OPEN|The mode in which server should run. Allowed values are OPEN, READ_ONLY, METASTORE_READONLY, METASTORE_NODROP. OPEN mode will allow all requests. READ_ONLY mode will allow all requests on session resouce and only GET requests on all other resources. METASTORE_READONLY will allow GET on metastore and all other requests in other services. METASTORE_NODROP will not allow DELETE on metastore, will allow all other requests.|
 *--+--+---+--+
-|47|lens.server.persist.location|file:///tmp/lensserver|The directory in which lens server will persist its state when it is going down. The location be on any Hadoop compatible file system. Server will read from the location when it is restarted and recovery is enabled. So, Server should have both read and write permissions to the location|
+|47|lens.server.multipart.ws.feature.impl|org.glassfish.jersey.media.multipart.MultiPartFeature|Implementation class for query scheduler resource|
 *--+--+---+--+
-|48|lens.server.query.service.impl|org.apache.lens.server.query.QueryExecutionServiceImpl|Implementation class for query execution service|
+|48|lens.server.persist.location|file:///tmp/lensserver|The directory in which lens server will persist its state when it is going down. The location be on any Hadoop compatible file system. Server will read from the location when it is restarted and recovery is enabled. So, Server should have both read and write permissions to the location|
 *--+--+---+--+
-|49|lens.server.query.state.logger.enabled|true|Disable or enable the query state logger with this config. The location for the logger can be specified in log4j properties for the class org.apache.lens.server.query.QueryExecutionServiceImpl.QueryStatusLogger|
+|49|lens.server.query.acceptors| |Query Acceptors configured. Query acceptors are consulted first, before anything happens for the given query. They can either return null or return a messaging indicating why the given query shouldn't be accepted. These can be used to filter out queries at the earliest.|
 *--+--+---+--+
-|50|lens.server.query.ws.resource.impl|org.apache.lens.server.query.QueryServiceResource|Implementation class for Query Resource|
+|50|lens.server.query.service.impl|org.apache.lens.server.query.QueryExecutionServiceImpl|Implementation class for query execution service|
 *--+--+---+--+
-|51|lens.server.quota.service.impl|org.apache.lens.server.quota.QuotaServiceImpl|Implementation class for quota service|
+|51|lens.server.query.state.logger.enabled|true|Disable or enable the query state logger with this config. The location for the logger can be specified in log4j properties for the class org.apache.lens.server.query.QueryExecutionServiceImpl.QueryStatusLogger|
 *--+--+---+--+
-|52|lens.server.quota.ws.resource.impl|org.apache.lens.server.quota.QuotaResource|Implementation class for Quota Resource|
+|52|lens.server.query.ws.resource.impl|org.apache.lens.server.query.QueryServiceResource|Implementation class for Query Resource|
 *--+--+---+--+
-|53|lens.server.recover.onrestart|true|If the flag is enabled, all the services will be started from last saved state, if disabled all the services will start afresh|
+|53|lens.server.quota.service.impl|org.apache.lens.server.quota.QuotaServiceImpl|Implementation class for quota service|
 *--+--+---+--+
-|54|lens.server.restart.enabled|true|If flag is enabled, all the services will be persisted to persistent location passed.|
+|54|lens.server.quota.ws.resource.impl|org.apache.lens.server.quota.QuotaResource|Implementation class for Quota Resource|
 *--+--+---+--+
-|55|lens.server.scheduler.service.impl|org.apache.lens.server.scheduler.QuerySchedulerServiceImpl|Implementation class for query scheduler service|
+|55|lens.server.recover.onrestart|true|If the flag is enabled, all the services will be started from last saved state, if disabled all the services will start afresh|
 *--+--+---+--+
-|56|lens.server.scheduler.ws.resource.impl|org.apache.lens.server.scheduler.ScheduleResource|Implementation class for query scheduler resource|
+|56|lens.server.restart.enabled|true|If flag is enabled, all the services will be persisted to persistent location passed.|
 *--+--+---+--+
-|57|lens.server.serverMode.ws.filter.impl|org.apache.lens.server.ServerModeFilter|Implementation class for ServerMode Filter|
+|57|lens.server.scheduler.service.impl|org.apache.lens.server.scheduler.QuerySchedulerServiceImpl|Implementation class for query scheduler service|
 *--+--+---+--+
-|58|lens.server.service.provider.factory|org.apache.lens.server.ServiceProviderFactoryImpl|Service provider factory implementation class. This parameter is used to lookup the factory implementation class name that would provide an instance of ServiceProvider. Users should instantiate the class to obtain its instance. Example -- Class spfClass = conf.getClass("lens.server.service.provider.factory", null, ServiceProviderFactory.class); ServiceProviderFactory spf = spfClass.newInstance(); ServiceProvider serviceProvider = spf.getServiceProvider(); -- This is not supposed to be overridden by users.|
+|58|lens.server.scheduler.ws.resource.impl|org.apache.lens.server.scheduler.ScheduleResource|Implementation class for query scheduler resource|
 *--+--+---+--+
-|59|lens.server.servicenames|session,query,metastore,scheduler,quota|These services would be started in the specified order when lens-server starts up|
+|59|lens.server.serverMode.ws.filter.impl|org.apache.lens.server.ServerModeFilter|Implementation class for ServerMode Filter|
 *--+--+---+--+
-|60|lens.server.session.service.impl|org.apache.lens.server.session.HiveSessionService|Implementation class for session service|
+|60|lens.server.service.provider.factory|org.apache.lens.server.ServiceProviderFactoryImpl|Service provider factory implementation class. This parameter is used to lookup the factory implementation class name that would provide an instance of ServiceProvider. Users should instantiate the class to obtain its instance. Example -- Class spfClass = conf.getClass("lens.server.service.provider.factory", null, ServiceProviderFactory.class); ServiceProviderFactory spf = spfClass.newInstance(); ServiceProvider serviceProvider = spf.getServiceProvider(); -- This is not supposed to be overridden by users.|
 *--+--+---+--+
-|61|lens.server.session.timeout.seconds|86400|Lens session timeout in seconds.If there is no activity on the session for this period then the session will be closed.Default timeout is one day.|
+|61|lens.server.servicenames|session,query,metastore,scheduler,quota|These services would be started in the specified order when lens-server starts up|
 *--+--+---+--+
-|62|lens.server.session.ws.resource.impl|org.apache.lens.server.session.SessionResource|Implementation class for Session Resource|
+|62|lens.server.session.service.impl|org.apache.lens.server.session.HiveSessionService|Implementation class for session service|
 *--+--+---+--+
-|63|lens.server.snapshot.interval|300000|Snapshot interval time in miliseconds for saving lens server state.|
+|63|lens.server.session.timeout.seconds|86400|Lens session timeout in seconds.If there is no activity on the session for this period then the session will be closed.Default timeout is one day.|
 *--+--+---+--+
-|64|lens.server.state.persist.out.stream.buffer.size|1048576|Output Stream Buffer Size used in writing lens server state to file system. Size is in bytes.|
+|64|lens.server.session.ws.resource.impl|org.apache.lens.server.session.SessionResource|Implementation class for Session Resource|
 *--+--+---+--+
-|65|lens.server.statistics.db|lensstats|Database to which statistics tables are created and partitions are added.|
+|65|lens.server.snapshot.interval|300000|Snapshot interval time in miliseconds for saving lens server state.|
 *--+--+---+--+
-|66|lens.server.statistics.log.rollover.interval|3600000|Default rate which log statistics store scans for rollups in milliseconds.|
+|66|lens.server.state.persist.out.stream.buffer.size|1048576|Output Stream Buffer Size used in writing lens server state to file system. Size is in bytes.|
 *--+--+---+--+
-|67|lens.server.statistics.store.class|org.apache.lens.server.stats.store.log.LogStatisticsStore|Default implementation of class used to persist Lens Statistics.|
+|67|lens.server.statistics.db|lensstats|Database to which statistics tables are created and partitions are added.|
 *--+--+---+--+
-|68|lens.server.statistics.warehouse.dir|file:///tmp/lens/statistics/warehouse|Default top level location where stats are moved by the log statistics store.|
+|68|lens.server.statistics.log.rollover.interval|3600000|Default rate which log statistics store scans for rollups in milliseconds.|
 *--+--+---+--+
-|69|lens.server.ui.base.uri|http://0.0.0.0:19999/|The base url for the Lens UI Server|
+|69|lens.server.statistics.store.class|org.apache.lens.server.stats.store.log.LogStatisticsStore|Default implementation of class used to persist Lens Statistics.|
 *--+--+---+--+
-|70|lens.server.ui.enable|true|Bringing up the ui server is optional. By default it brings up UI server.|
+|70|lens.server.statistics.warehouse.dir|file:///tmp/lens/statistics/warehouse|Default top level location where stats are moved by the log statistics store.|
 *--+--+---+--+
-|71|lens.server.ui.enable.caching|true|Set this to false to disable static file caching in the UI server|
+|71|lens.server.ui.base.uri|http://0.0.0.0:19999/|The base url for the Lens UI Server|
 *--+--+---+--+
-|72|lens.server.ui.static.dir|webapp/lens-server/static|The base directory to server UI static files from|
+|72|lens.server.ui.enable|true|Bringing up the ui server is optional. By default it brings up UI server.|
 *--+--+---+--+
-|73|lens.server.user.resolver.custom.class|full.package.name.Classname|Required for CUSTOM user resolver. In case the provided implementations are not sufficient for user config resolver, a custom classname can be provided. Class should extend org.apache.lens.server.user.UserConfigLoader|
+|73|lens.server.ui.enable.caching|true|Set this to false to disable static file caching in the UI server|
 *--+--+---+--+
-|74|lens.server.user.resolver.db.keys|lens.session.cluster.user,mapred.job.queue.name|Required for DATABASE and LDAP_BACKED_DATABASE user resolvers. For database based user config loaders, the conf keys that will be loaded from database.|
+|74|lens.server.ui.static.dir|webapp/lens-server/static|The base directory to server UI static files from|
 *--+--+---+--+
-|75|lens.server.user.resolver.db.query|select clusteruser,queue from user_config_table where username=?|Required for DATABASE and LDAP_BACKED_DATABASE user resolvers. For database based user config loader, this query will be run with single argument = logged in user and the result columns will be assigned to lens.server.user.resolver.db.keys in order. For ldap backed database resolver, the argument to this query will be the intermediate values obtained from ldap.|
+|75|lens.server.user.resolver.custom.class|full.package.name.Classname|Required for CUSTOM user resolver. In case the provided implementations are not sufficient for user config resolver, a custom classname can be provided. Class should extend org.apache.lens.server.user.UserConfigLoader|
 *--+--+---+--+
-|76|lens.server.user.resolver.fixed.value| |Required for FIXED user resolver. when lens.server.user.resolver.type=FIXED, This will be the value cluster user will resolve to.|
+|76|lens.server.user.resolver.db.keys|lens.session.cluster.user,mapred.job.queue.name|Required for DATABASE and LDAP_BACKED_DATABASE user resolvers. For database based user config loaders, the conf keys that will be loaded from database.|
 *--+--+---+--+
-|77|lens.server.user.resolver.ldap.bind.dn| |Required for LDAP_BACKED_DATABASE user resolvers. ldap dn for admin binding example: CN=company-it-admin,ou=service-account,ou=company-service-account,dc=dc1,dc=com...|
+|77|lens.server.user.resolver.db.query|select clusteruser,queue from user_config_table where username=?|Required for DATABASE and LDAP_BACKED_DATABASE user resolvers. For database based user config loader, this query will be run with single argument = logged in user and the result columns will be assigned to lens.server.user.resolver.db.keys in order. For ldap backed database resolver, the argument to this query will be the intermediate values obtained from ldap.|
 *--+--+---+--+
-|78|lens.server.user.resolver.ldap.bind.password| |Required for LDAP_BACKED_DATABASE user resolvers. ldap password for admin binding above|
+|78|lens.server.user.resolver.fixed.value| |Required for FIXED user resolver. when lens.server.user.resolver.type=FIXED, This will be the value cluster user will resolve to.|
 *--+--+---+--+
-|79|lens.server.user.resolver.ldap.fields|department|Required for LDAP_BACKED_DATABASE user resolvers. list of fields to be obtained from ldap. These will be cached by the intermediate db.|
+|79|lens.server.user.resolver.ldap.bind.dn| |Required for LDAP_BACKED_DATABASE user resolvers. ldap dn for admin binding example: CN=company-it-admin,ou=service-account,ou=company-service-account,dc=dc1,dc=com...|
 *--+--+---+--+
-|80|lens.server.user.resolver.ldap.intermediate.db.delete.sql|delete from user_department where username=?|Required for LDAP_BACKED_DATABASE user resolvers. query to delete intermediate values from database backing ldap as cache. one argument: logged in user.|
+|80|lens.server.user.resolver.ldap.bind.password| |Required for LDAP_BACKED_DATABASE user resolvers. ldap password for admin binding above|
 *--+--+---+--+
-|81|lens.server.user.resolver.ldap.intermediate.db.insert.sql|insert into user_department (username, department, expiry) values (?, ?, ?)|Required for LDAP_BACKED_DATABASE user resolvers. query to insert intermediate values from database backing ldap as cache. arguments: first logged in user, then all intermediate values, then current time + expiration time|
+|81|lens.server.user.resolver.ldap.fields|department|Required for LDAP_BACKED_DATABASE user resolvers. list of fields to be obtained from ldap. These will be cached by the intermediate db.|
 *--+--+---+--+
-|82|lens.server.user.resolver.ldap.intermediate.db.query|select department from user_department where username=? and expiry>?|Required for LDAP_BACKED_DATABASE user resolvers. query to obtain intermediate values from database backing ldap as cache. two arguments: logged in user and current time.|
+|82|lens.server.user.resolver.ldap.intermediate.db.delete.sql|delete from user_department where username=?|Required for LDAP_BACKED_DATABASE user resolvers. query to delete intermediate values from database backing ldap as cache. one argument: logged in user.|
 *--+--+---+--+
-|83|lens.server.user.resolver.ldap.search.base| |Required for LDAP_BACKED_DATABASE user resolvers. for searching intermediate values for a user, the search keys. example: cn=users,dc=dc1,dc=dc2...|
+|83|lens.server.user.resolver.ldap.intermediate.db.insert.sql|insert into user_department (username, department, expiry) values (?, ?, ?)|Required for LDAP_BACKED_DATABASE user resolvers. query to insert intermediate values from database backing ldap as cache. arguments: first logged in user, then all intermediate values, then current time + expiration time|
 *--+--+---+--+
-|84|lens.server.user.resolver.ldap.search.filter|(&(objectClass=user)(sAMAccountName=%s))|Required for LDAP_BACKED_DATABASE user resolvers. filter pattern for ldap search|
+|84|lens.server.user.resolver.ldap.intermediate.db.query|select department from user_department where username=? and expiry>?|Required for LDAP_BACKED_DATABASE user resolvers. query to obtain intermediate values from database backing ldap as cache. two arguments: logged in user and current time.|
 *--+--+---+--+
-|85|lens.server.user.resolver.ldap.url| |Required for LDAP_BACKED_DATABASE user resolvers. ldap url to connect to.|
+|85|lens.server.user.resolver.ldap.search.base| |Required for LDAP_BACKED_DATABASE user resolvers. for searching intermediate values for a user, the search keys. example: cn=users,dc=dc1,dc=dc2...|
 *--+--+---+--+
-|86|lens.server.user.resolver.propertybased.filename|/path/to/propertyfile|Required for PROPERTYBASED user resolver. when lens.server.user.resolver.type is PROPERTYBASED, then this file will be read and parsed to determine cluster user. Each line should contain username followed by DOT followed by property full name followed by equal-to sign and followed by value. example schema of the file is: user1.lens.server.cluster.user=clusteruser1 user1.mapred.job.queue.name=queue1 *.lens.server.cluster.user=defaultclusteruser *.mapred.job.queue.name=default|
+|86|lens.server.user.resolver.ldap.search.filter|(&(objectClass=user)(sAMAccountName=%s))|Required for LDAP_BACKED_DATABASE user resolvers. filter pattern for ldap search|
 *--+--+---+--+
-|87|lens.server.user.resolver.type|FIXED|Type of user config resolver. allowed values are FIXED, PROPERTYBASED, DATABASE, LDAP_BACKED_DATABASE, CUSTOM.|
+|87|lens.server.user.resolver.ldap.url| |Required for LDAP_BACKED_DATABASE user resolvers. ldap url to connect to.|
 *--+--+---+--+
-|88|lens.server.ws.featurenames|multipart|These JAX-RS Feature(s) would be started in the specified order when lens-server starts up|
+|88|lens.server.user.resolver.propertybased.filename|/path/to/propertyfile|Required for PROPERTYBASED user resolver. when lens.server.user.resolver.type is PROPERTYBASED, then this file will be read and parsed to determine cluster user. Each line should contain username followed by DOT followed by property full name followed by equal-to sign and followed by value. example schema of the file is: user1.lens.server.cluster.user=clusteruser1 user1.mapred.job.queue.name=queue1 *.lens.server.cluster.user=defaultclusteruser *.mapred.job.queue.name=default|
 *--+--+---+--+
-|89|lens.server.ws.filternames|authentication,consistentState,serverMode|These JAX-RS filters would be started in the specified order when lens-server starts up|
+|89|lens.server.user.resolver.type|FIXED|Type of user config resolver. allowed values are FIXED, PROPERTYBASED, DATABASE, LDAP_BACKED_DATABASE, CUSTOM.|
 *--+--+---+--+
-|90|lens.server.ws.listenernames|appevent|These listeners would be called in the specified order when lens-server starts up|
+|90|lens.server.ws.featurenames|multipart|These JAX-RS Feature(s) would be started in the specified order when lens-server starts up|
 *--+--+---+--+
-|91|lens.server.ws.resourcenames|session,metastore,query,quota,scheduler,index|These JAX-RS resources would be started in the specified order when lens-server starts up|
+|91|lens.server.ws.filternames|authentication,consistentState,serverMode|These JAX-RS filters would be started in the specified order when lens-server starts up|
+*--+--+---+--+
+|92|lens.server.ws.listenernames|appevent|These listeners would be called in the specified order when lens-server starts up|
+*--+--+---+--+
+|93|lens.server.ws.resourcenames|session,metastore,query,quota,scheduler,index|These JAX-RS resources would be started in the specified order when lens-server starts up|
 *--+--+---+--+
 The configuration parameters and their default values


[37/50] [abbrv] incubator-lens git commit: LENS-353: Add the findbugs plugin in pom

Posted by am...@apache.org.
LENS-353: Add the findbugs plugin in pom


Project: http://git-wip-us.apache.org/repos/asf/incubator-lens/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-lens/commit/c20120c3
Tree: http://git-wip-us.apache.org/repos/asf/incubator-lens/tree/c20120c3
Diff: http://git-wip-us.apache.org/repos/asf/incubator-lens/diff/c20120c3

Branch: refs/heads/current-release-line
Commit: c20120c34f8149c4d5131dfde60143fddd468e89
Parents: 8750e72
Author: Amareshwari Sriramadasu <am...@apache.org>
Authored: Thu Apr 9 14:46:14 2015 +0530
Committer: Rajat Khandelwal <ra...@gmail.com>
Committed: Thu Apr 9 14:46:14 2015 +0530

----------------------------------------------------------------------
 .../src/main/resources/findbugs-exclude.xml     | 17 +++++++++++++
 pom.xml                                         | 26 ++++++++++++++++++++
 2 files changed, 43 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/c20120c3/checkstyle/src/main/resources/findbugs-exclude.xml
----------------------------------------------------------------------
diff --git a/checkstyle/src/main/resources/findbugs-exclude.xml b/checkstyle/src/main/resources/findbugs-exclude.xml
new file mode 100644
index 0000000..f9c9d52
--- /dev/null
+++ b/checkstyle/src/main/resources/findbugs-exclude.xml
@@ -0,0 +1,17 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+       http://www.apache.org/licenses/LICENSE-2.0
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+ -->
+<FindBugsFilter>
+</FindBugsFilter>

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/c20120c3/pom.xml
----------------------------------------------------------------------
diff --git a/pom.xml b/pom.xml
index 00bca47..7e42144 100644
--- a/pom.xml
+++ b/pom.xml
@@ -96,6 +96,8 @@
     <war.plugin.version>2.1.1</war.plugin.version>
     <license.plugin.version>2.6</license.plugin.version>
     <buildnumber.plugin.version>1.0</buildnumber.plugin.version>
+    <findbugs.plugin.version>3.0.1</findbugs.plugin.version>
+
     <!-- debian -->
     <mvn.deb.build.dir>${project.build.directory}/debian</mvn.deb.build.dir>
 
@@ -454,6 +456,11 @@
           <artifactId>maven-war-plugin</artifactId>
           <version>${war.plugin.version}</version>
         </plugin>
+        <plugin>
+          <groupId>org.codehaus.mojo</groupId>
+          <artifactId>findbugs-maven-plugin</artifactId>
+          <version>${findbugs.plugin.version}</version>
+        </plugin>
       </plugins>
     </pluginManagement>
     <plugins>
@@ -564,6 +571,25 @@
           </execution>
         </executions>
       </plugin>
+      <plugin>
+        <groupId>org.codehaus.mojo</groupId>
+        <artifactId>findbugs-maven-plugin</artifactId>
+        <configuration>
+          <xmlOutput>true</xmlOutput>
+          <excludeFilterFile>checkstyle/src/main/resources/findbugs-exclude.xml</excludeFilterFile>
+          <failOnError>false</failOnError>
+          <skip>${skipCheck}</skip>
+        </configuration>
+        <executions>
+          <execution>
+             <id>findbugs-check</id>
+             <goals>
+               <goal>check</goal>
+             </goals>
+             <phase>verify</phase>
+           </execution>
+        </executions>
+      </plugin>
     </plugins>
 
   </build>


[39/50] [abbrv] incubator-lens git commit: LENS-474 : Add cube latestdate command on cli (Rajat Khandelwal via amareshwari)

Posted by am...@apache.org.
LENS-474 : Add cube latestdate command on cli (Rajat Khandelwal via amareshwari)


Project: http://git-wip-us.apache.org/repos/asf/incubator-lens/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-lens/commit/127c0810
Tree: http://git-wip-us.apache.org/repos/asf/incubator-lens/tree/127c0810
Diff: http://git-wip-us.apache.org/repos/asf/incubator-lens/diff/127c0810

Branch: refs/heads/current-release-line
Commit: 127c081075178a90d32c8743b03959ab0c562ca5
Parents: f0798b1
Author: Rajat Khandelwal <pr...@apache.org>
Authored: Fri Apr 10 06:36:17 2015 +0530
Committer: Amareshwari Sriramadasu <am...@apache.org>
Committed: Fri Apr 10 06:36:17 2015 +0530

----------------------------------------------------------------------
 .../lens/cli/commands/BaseLensCommand.java      | 16 +++++++++++
 .../lens/cli/commands/LensCubeCommands.java     | 19 +++++++++++++
 .../apache/lens/cli/TestLensCubeCommands.java   | 11 ++++++--
 .../apache/lens/cli/TestLensFactCommands.java   | 28 +++++++++++++++++++-
 .../java/org/apache/lens/client/LensClient.java | 10 +++++++
 .../apache/lens/client/LensMetadataClient.java  | 10 +++++++
 6 files changed, 91 insertions(+), 3 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/127c0810/lens-cli/src/main/java/org/apache/lens/cli/commands/BaseLensCommand.java
----------------------------------------------------------------------
diff --git a/lens-cli/src/main/java/org/apache/lens/cli/commands/BaseLensCommand.java b/lens-cli/src/main/java/org/apache/lens/cli/commands/BaseLensCommand.java
index dbb8b39..bf90cdc 100644
--- a/lens-cli/src/main/java/org/apache/lens/cli/commands/BaseLensCommand.java
+++ b/lens-cli/src/main/java/org/apache/lens/cli/commands/BaseLensCommand.java
@@ -19,6 +19,9 @@
 package org.apache.lens.cli.commands;
 
 import java.io.IOException;
+import java.text.DateFormat;
+import java.text.SimpleDateFormat;
+import java.util.Date;
 
 import org.apache.lens.client.LensClient;
 import org.apache.lens.client.LensClientSingletonWrapper;
@@ -49,6 +52,19 @@ public class BaseLensCommand {
 
   /** The is connection active. */
   protected static boolean isConnectionActive;
+  public static final String DATE_FMT = "yyyy-MM-dd'T'HH:mm:ss:SSS";
+
+  public static final ThreadLocal<DateFormat> DATE_PARSER =
+    new ThreadLocal<DateFormat>() {
+      @Override
+      protected SimpleDateFormat initialValue() {
+        return new SimpleDateFormat(DATE_FMT);
+      }
+    };
+
+  public static String formatDate(Date dt) {
+    return DATE_PARSER.get().format(dt);
+  }
 
   static {
     Runtime.getRuntime().addShutdownHook(new Thread() {

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/127c0810/lens-cli/src/main/java/org/apache/lens/cli/commands/LensCubeCommands.java
----------------------------------------------------------------------
diff --git a/lens-cli/src/main/java/org/apache/lens/cli/commands/LensCubeCommands.java b/lens-cli/src/main/java/org/apache/lens/cli/commands/LensCubeCommands.java
index 9d835da..675e830 100644
--- a/lens-cli/src/main/java/org/apache/lens/cli/commands/LensCubeCommands.java
+++ b/lens-cli/src/main/java/org/apache/lens/cli/commands/LensCubeCommands.java
@@ -20,6 +20,7 @@ package org.apache.lens.cli.commands;
 
 import java.io.File;
 import java.io.IOException;
+import java.util.Date;
 import java.util.List;
 
 import org.apache.lens.api.APIResult;
@@ -140,4 +141,22 @@ public class LensCubeCommands extends BaseLensCommand implements CommandMarker {
 
     }
   }
+
+  /**
+   * Describe cube.
+   *
+   * @param specPair &lt;cube name, timePartition&gt;
+   * @return the string
+   */
+  @CliCommand(value = "cube latestdate", help = "cube get latest")
+  public String getLatest(
+    @CliOption(key = {"", "cube"}, mandatory = true, help = "<cube-name> <timePartition>") String specPair) {
+    Iterable<String> parts = Splitter.on(' ').trimResults().omitEmptyStrings().split(specPair);
+    String[] pair = Iterables.toArray(parts, String.class);
+    if (pair.length != 2) {
+      return "Syntax error, please try in following " + "format. cube get latest <cubeName> <timePartition>";
+    }
+    Date dt = getClient().getLatestDateOfCube(pair[0], pair[1]);
+    return dt == null ? "No Data Available" : formatDate(dt);
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/127c0810/lens-cli/src/test/java/org/apache/lens/cli/TestLensCubeCommands.java
----------------------------------------------------------------------
diff --git a/lens-cli/src/test/java/org/apache/lens/cli/TestLensCubeCommands.java b/lens-cli/src/test/java/org/apache/lens/cli/TestLensCubeCommands.java
index 41f2c93..8334317 100644
--- a/lens-cli/src/test/java/org/apache/lens/cli/TestLensCubeCommands.java
+++ b/lens-cli/src/test/java/org/apache/lens/cli/TestLensCubeCommands.java
@@ -54,10 +54,17 @@ public class TestLensCubeCommands extends LensCliApplicationTest {
     Assert.assertFalse(cubeList.contains("sample_cube"));
     command.createCube(new File(cubeSpec.toURI()).getAbsolutePath());
     cubeList = command.showCubes();
+    Assert.assertEquals(command.getLatest("sample_cube dt"), "No Data Available");
     Assert.assertTrue(cubeList.contains("sample_cube"));
 
     testUpdateCommand(new File(cubeSpec.toURI()), command);
     command.dropCube("sample_cube");
+    try {
+      command.getLatest("sample_cube dt");
+      Assert.fail("should have failed as cube doesn't exist");
+    } catch (Exception e) {
+      //pass
+    }
     cubeList = command.showCubes();
     Assert.assertFalse(cubeList.contains("sample_cube"));
   }
@@ -83,8 +90,8 @@ public class TestLensCubeCommands extends LensCliApplicationTest {
     String xmlContent = sb.toString();
 
     xmlContent = xmlContent.replace("<property name=\"sample_cube.prop\" value=\"sample\" />\n",
-        "<property name=\"sample_cube.prop\" value=\"sample\" />"
-            + "\n<property name=\"sample_cube.prop1\" value=\"sample1\" />\n");
+      "<property name=\"sample_cube.prop\" value=\"sample\" />"
+        + "\n<property name=\"sample_cube.prop1\" value=\"sample1\" />\n");
 
     File newFile = new File("/tmp/sample_cube1.xml");
     try {

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/127c0810/lens-cli/src/test/java/org/apache/lens/cli/TestLensFactCommands.java
----------------------------------------------------------------------
diff --git a/lens-cli/src/test/java/org/apache/lens/cli/TestLensFactCommands.java b/lens-cli/src/test/java/org/apache/lens/cli/TestLensFactCommands.java
index f056bb7..244b9ec 100644
--- a/lens-cli/src/test/java/org/apache/lens/cli/TestLensFactCommands.java
+++ b/lens-cli/src/test/java/org/apache/lens/cli/TestLensFactCommands.java
@@ -19,8 +19,10 @@
 package org.apache.lens.cli;
 
 import java.io.*;
+import java.net.URISyntaxException;
 import java.net.URL;
 
+import org.apache.lens.cli.commands.LensCubeCommands;
 import org.apache.lens.cli.commands.LensFactCommands;
 import org.apache.lens.client.LensClient;
 
@@ -42,6 +44,7 @@ public class TestLensFactCommands extends LensCliApplicationTest {
 
   /** The command. */
   private static LensFactCommands command = null;
+  private static LensCubeCommands cubeCommands = null;
 
   /**
    * Test fact commands.
@@ -49,12 +52,25 @@ public class TestLensFactCommands extends LensCliApplicationTest {
    * @throws IOException
    */
   @Test
-  public void testFactCommands() throws IOException {
+  public void testFactCommands() throws IOException, URISyntaxException {
+    createSampleCube();
     addFact1Table();
     updateFact1Table();
     testFactStorageActions();
     testFactPartitionActions();
     dropFact1Table();
+    dropSampleCube();
+  }
+
+  private void createSampleCube() throws URISyntaxException {
+    URL cubeSpec = TestLensCubeCommands.class.getClassLoader().getResource("sample-cube.xml");
+    String cubeList = getCubeCommand().showCubes();
+    Assert.assertFalse(cubeList.contains("sample_cube"));
+    getCubeCommand().createCube(new File(cubeSpec.toURI()).getAbsolutePath());
+  }
+
+  private void dropSampleCube() {
+    getCubeCommand().dropCube("sample_cube");
   }
 
   private static LensFactCommands getCommand() {
@@ -66,6 +82,15 @@ public class TestLensFactCommands extends LensCliApplicationTest {
     return command;
   }
 
+  private static LensCubeCommands getCubeCommand() {
+    if (cubeCommands == null) {
+      LensClient client = new LensClient();
+      cubeCommands = new LensCubeCommands();
+      cubeCommands.setClient(client);
+    }
+    return cubeCommands;
+  }
+
   /**
    * Adds the fact1 table.
    *
@@ -218,6 +243,7 @@ public class TestLensFactCommands extends LensCliApplicationTest {
   }
 
   private void verifyAndDeletePartitions() {
+    Assert.assertEquals(getCubeCommand().getLatest("sample_cube dt"), "2014-03-27T12:00:00:000");
     String result = command.getAllPartitionsOfFact("fact1 " + FACT_LOCAL);
     Assert.assertTrue(result.contains("HOURLY"));
     String dropPartitionsStatus = command.dropAllPartitionsOfFact("fact1 " + FACT_LOCAL);

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/127c0810/lens-client/src/main/java/org/apache/lens/client/LensClient.java
----------------------------------------------------------------------
diff --git a/lens-client/src/main/java/org/apache/lens/client/LensClient.java b/lens-client/src/main/java/org/apache/lens/client/LensClient.java
index 8f1b7e1..b5b4a90 100644
--- a/lens-client/src/main/java/org/apache/lens/client/LensClient.java
+++ b/lens-client/src/main/java/org/apache/lens/client/LensClient.java
@@ -18,6 +18,7 @@
  */
 package org.apache.lens.client;
 
+import java.util.Date;
 import java.util.HashMap;
 import java.util.List;
 
@@ -83,6 +84,10 @@ public class LensClient {
     return conn;
   }
 
+  public Date getLatestDateOfCube(String cubeName, String timePartition) {
+    return mc.getLatestDateOfCube(cubeName, timePartition);
+  }
+
   public static class LensClientResultSetWithStats {
     private final LensClientResultSet resultSet;
     private final LensQuery query;
@@ -437,6 +442,7 @@ public class LensClient {
   public APIResult addPartitionToFact(String table, String storage, String partSpec) {
     return mc.addPartitionToFactTable(table, storage, partSpec);
   }
+
   public APIResult addPartitionsToFact(String table, String storage, String partsSpec) {
     return mc.addPartitionsToFactTable(table, storage, partsSpec);
   }
@@ -444,6 +450,7 @@ public class LensClient {
   public APIResult addPartitionToFact(String table, String storage, XPartition xp) {
     return mc.addPartitionToFactTable(table, storage, xp);
   }
+
   public APIResult addPartitionsToFact(String table, String storage, XPartitionList xpList) {
     return mc.addPartitionsToFactTable(table, storage, xpList);
   }
@@ -451,12 +458,15 @@ public class LensClient {
   public APIResult addPartitionToDim(String table, String storage, String partSpec) {
     return mc.addPartitionToDimensionTable(table, storage, partSpec);
   }
+
   public APIResult addPartitionToDim(String table, String storage, XPartition xp) {
     return mc.addPartitionToDimensionTable(table, storage, xp);
   }
+
   public APIResult addPartitionsToDim(String table, String storage, XPartitionList xpList) {
     return mc.addPartitionsToDimensionTable(table, storage, xpList);
   }
+
   public APIResult addPartitionsToDim(String table, String storage, String partsSpec) {
     return mc.addPartitionsToDimensionTable(table, storage, partsSpec);
   }

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/127c0810/lens-client/src/main/java/org/apache/lens/client/LensMetadataClient.java
----------------------------------------------------------------------
diff --git a/lens-client/src/main/java/org/apache/lens/client/LensMetadataClient.java b/lens-client/src/main/java/org/apache/lens/client/LensMetadataClient.java
index ad79cf2..1e243e8 100644
--- a/lens-client/src/main/java/org/apache/lens/client/LensMetadataClient.java
+++ b/lens-client/src/main/java/org/apache/lens/client/LensMetadataClient.java
@@ -24,6 +24,7 @@ import java.io.InputStream;
 import java.net.URI;
 import java.net.URISyntaxException;
 import java.nio.charset.Charset;
+import java.util.Date;
 import java.util.List;
 
 import javax.ws.rs.client.Client;
@@ -39,6 +40,7 @@ import javax.xml.bind.Unmarshaller;
 
 import org.apache.lens.api.APIResult;
 import org.apache.lens.api.APIResult.Status;
+import org.apache.lens.api.DateTime;
 import org.apache.lens.api.StringList;
 import org.apache.lens.api.metastore.*;
 
@@ -924,4 +926,12 @@ public class LensMetadataClient {
       return new APIResult(Status.FAILED, "File not found");
     }
   }
+
+  public Date getLatestDateOfCube(String cubeName, String timePartition) {
+    return getMetastoreWebTarget().path("cubes").path(cubeName).path("latestdate")
+      .queryParam("timeDimension", timePartition)
+      .queryParam("sessionid", this.connection.getSessionHandle())
+      .request(MediaType.APPLICATION_XML)
+      .get(DateTime.class).getDate();
+  }
 }


[28/50] [abbrv] incubator-lens git commit: Lens-465 : Refactor ml packages. (sharad)

Posted by am...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/0f5ea4c7/lens-ml-lib/src/main/java/org/apache/lens/ml/MLRunner.java
----------------------------------------------------------------------
diff --git a/lens-ml-lib/src/main/java/org/apache/lens/ml/MLRunner.java b/lens-ml-lib/src/main/java/org/apache/lens/ml/MLRunner.java
deleted file mode 100644
index bd50cba..0000000
--- a/lens-ml-lib/src/main/java/org/apache/lens/ml/MLRunner.java
+++ /dev/null
@@ -1,173 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.lens.ml;
-
-import java.io.File;
-import java.io.FileInputStream;
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.Properties;
-
-import org.apache.lens.client.LensClient;
-import org.apache.lens.client.LensClientConfig;
-import org.apache.lens.client.LensMLClient;
-import org.apache.lens.ml.task.MLTask;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.metastore.TableType;
-import org.apache.hadoop.hive.metastore.api.FieldSchema;
-import org.apache.hadoop.hive.ql.metadata.Hive;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
-import org.apache.hadoop.hive.ql.metadata.Table;
-import org.apache.hadoop.hive.ql.plan.AddPartitionDesc;
-import org.apache.hadoop.hive.serde.serdeConstants;
-import org.apache.hadoop.mapred.TextInputFormat;
-
-public class MLRunner {
-
-  private static final Log LOG = LogFactory.getLog(MLRunner.class);
-
-  private LensMLClient mlClient;
-  private String algoName;
-  private String database;
-  private String trainTable;
-  private String trainFile;
-  private String testTable;
-  private String testFile;
-  private String outputTable;
-  private String[] features;
-  private String labelColumn;
-  private HiveConf conf;
-
-  public void init(LensMLClient mlClient, String confDir) throws Exception {
-    File dir = new File(confDir);
-    File propFile = new File(dir, "ml.properties");
-    Properties props = new Properties();
-    props.load(new FileInputStream(propFile));
-    String feat = props.getProperty("features");
-    String trainFile = confDir + File.separator + "train.data";
-    String testFile = confDir + File.separator + "test.data";
-    init(mlClient, props.getProperty("algo"), props.getProperty("database"),
-        props.getProperty("traintable"), trainFile,
-        props.getProperty("testtable"), testFile,
-        props.getProperty("outputtable"), feat.split(","),
-        props.getProperty("labelcolumn"));
-  }
-
-  public void init(LensMLClient mlClient, String algoName,
-      String database, String trainTable, String trainFile,
-      String testTable, String testFile, String outputTable, String[] features,
-      String labelColumn) {
-    this.mlClient = mlClient;
-    this.algoName = algoName;
-    this.database = database;
-    this.trainTable = trainTable;
-    this.trainFile = trainFile;
-    this.testTable = testTable;
-    this.testFile = testFile;
-    this.outputTable = outputTable;
-    this.features = features;
-    this.labelColumn = labelColumn;
-    //hive metastore settings are loaded via lens-site.xml, so loading LensClientConfig
-    //is required
-    this.conf = new HiveConf(new LensClientConfig(), MLRunner.class);
-  }
-
-  public MLTask train() throws Exception {
-    LOG.info("Starting train & eval");
-
-    createTable(trainTable, trainFile);
-    createTable(testTable, testFile);
-    MLTask.Builder taskBuilder = new MLTask.Builder();
-    taskBuilder.algorithm(algoName).hiveConf(conf).labelColumn(labelColumn).outputTable(outputTable)
-        .client(mlClient).trainingTable(trainTable).testTable(testTable);
-
-    // Add features
-    for (String feature : features) {
-      taskBuilder.addFeatureColumn(feature);
-    }
-    MLTask task = taskBuilder.build();
-    LOG.info("Created task " + task.toString());
-    task.run();
-    return task;
-  }
-
-  public void createTable(String tableName, String dataFile) throws HiveException {
-
-    File filedataFile = new File(dataFile);
-    Path dataFilePath = new Path(filedataFile.toURI());
-    Path partDir = dataFilePath.getParent();
-
-    // Create table
-    List<FieldSchema> columns = new ArrayList<FieldSchema>();
-
-    // Label is optional. Not used for unsupervised models.
-    // If present, label will be the first column, followed by features
-    if (labelColumn != null) {
-      columns.add(new FieldSchema(labelColumn, "double", "Labelled Column"));
-    }
-
-    for (String feature : features) {
-      columns.add(new FieldSchema(feature, "double", "Feature " + feature));
-    }
-
-    Table tbl = Hive.get(conf).newTable(database + "." + tableName);
-    tbl.setTableType(TableType.MANAGED_TABLE);
-    tbl.getTTable().getSd().setCols(columns);
-    // tbl.getTTable().getParameters().putAll(new HashMap<String, String>());
-    tbl.setInputFormatClass(TextInputFormat.class);
-    tbl.setSerdeParam(serdeConstants.LINE_DELIM, "\n");
-    tbl.setSerdeParam(serdeConstants.FIELD_DELIM, " ");
-
-    List<FieldSchema> partCols = new ArrayList<FieldSchema>(1);
-    partCols.add(new FieldSchema("dummy_partition_col", "string", ""));
-    tbl.setPartCols(partCols);
-
-    Hive.get(conf).dropTable(database, tableName, false, true);
-    Hive.get(conf).createTable(tbl, true);
-    LOG.info("Created table " + tableName);
-
-    // Add partition for the data file
-    AddPartitionDesc partitionDesc = new AddPartitionDesc(database, tableName,
-        false);
-    Map<String, String> partSpec = new HashMap<String, String>();
-    partSpec.put("dummy_partition_col", "dummy_val");
-    partitionDesc.addPartition(partSpec, partDir.toUri().toString());
-    Hive.get(conf).createPartitions(partitionDesc);
-    LOG.info(tableName + ": Added partition " + partDir.toUri().toString());
-  }
-
-  public static void main(String[] args) throws Exception {
-    if (args.length < 1) {
-      System.out.println("Usage: org.apache.lens.ml.MLRunner <ml-conf-dir>");
-      System.exit(-1);
-    }
-    String confDir = args[0];
-    LensMLClient client = new LensMLClient(new LensClient());
-    MLRunner runner = new MLRunner();
-    runner.init(client, confDir);
-    runner.train();
-    System.out.println("Created the Model successfully. Output Table: " + runner.outputTable);
-  }
-}

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/0f5ea4c7/lens-ml-lib/src/main/java/org/apache/lens/ml/MLTestMetric.java
----------------------------------------------------------------------
diff --git a/lens-ml-lib/src/main/java/org/apache/lens/ml/MLTestMetric.java b/lens-ml-lib/src/main/java/org/apache/lens/ml/MLTestMetric.java
deleted file mode 100644
index 57adecc..0000000
--- a/lens-ml-lib/src/main/java/org/apache/lens/ml/MLTestMetric.java
+++ /dev/null
@@ -1,28 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.lens.ml;
-
-/**
- * The Interface MLTestMetric.
- */
-public interface MLTestMetric {
-  String getName();
-
-  String getDescription();
-}

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/0f5ea4c7/lens-ml-lib/src/main/java/org/apache/lens/ml/MLTestReport.java
----------------------------------------------------------------------
diff --git a/lens-ml-lib/src/main/java/org/apache/lens/ml/MLTestReport.java b/lens-ml-lib/src/main/java/org/apache/lens/ml/MLTestReport.java
deleted file mode 100644
index 909e6df..0000000
--- a/lens-ml-lib/src/main/java/org/apache/lens/ml/MLTestReport.java
+++ /dev/null
@@ -1,95 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.lens.ml;
-
-import java.io.Serializable;
-import java.util.List;
-
-import lombok.Getter;
-import lombok.NoArgsConstructor;
-import lombok.Setter;
-import lombok.ToString;
-
-/**
- * Instantiates a new ML test report.
- */
-@NoArgsConstructor
-@ToString
-public class MLTestReport implements Serializable {
-
-  /** The test table. */
-  @Getter
-  @Setter
-  private String testTable;
-
-  /** The output table. */
-  @Getter
-  @Setter
-  private String outputTable;
-
-  /** The output column. */
-  @Getter
-  @Setter
-  private String outputColumn;
-
-  /** The label column. */
-  @Getter
-  @Setter
-  private String labelColumn;
-
-  /** The feature columns. */
-  @Getter
-  @Setter
-  private List<String> featureColumns;
-
-  /** The algorithm. */
-  @Getter
-  @Setter
-  private String algorithm;
-
-  /** The model id. */
-  @Getter
-  @Setter
-  private String modelID;
-
-  /** The report id. */
-  @Getter
-  @Setter
-  private String reportID;
-
-  /** The query id. */
-  @Getter
-  @Setter
-  private String queryID;
-
-  /** The test output path. */
-  @Getter
-  @Setter
-  private String testOutputPath;
-
-  /** The prediction result column. */
-  @Getter
-  @Setter
-  private String predictionResultColumn;
-
-  /** The lens query id. */
-  @Getter
-  @Setter
-  private String lensQueryID;
-}

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/0f5ea4c7/lens-ml-lib/src/main/java/org/apache/lens/ml/MLUtils.java
----------------------------------------------------------------------
diff --git a/lens-ml-lib/src/main/java/org/apache/lens/ml/MLUtils.java b/lens-ml-lib/src/main/java/org/apache/lens/ml/MLUtils.java
deleted file mode 100644
index 2e240af..0000000
--- a/lens-ml-lib/src/main/java/org/apache/lens/ml/MLUtils.java
+++ /dev/null
@@ -1,60 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.lens.ml;
-
-import org.apache.lens.server.api.LensConfConstants;
-import org.apache.lens.server.api.ServiceProvider;
-import org.apache.lens.server.api.ServiceProviderFactory;
-import org.apache.lens.server.ml.MLService;
-import org.apache.lens.server.ml.MLServiceImpl;
-
-import org.apache.hadoop.hive.conf.HiveConf;
-
-public final class MLUtils {
-  private MLUtils() {
-  }
-
-  private static final HiveConf HIVE_CONF;
-
-  static {
-    HIVE_CONF = new HiveConf();
-    // Add default config so that we know the service provider implementation
-    HIVE_CONF.addResource("lensserver-default.xml");
-    HIVE_CONF.addResource("lens-site.xml");
-  }
-
-  public static String getAlgoName(Class<? extends MLAlgo> algoClass) {
-    Algorithm annotation = algoClass.getAnnotation(Algorithm.class);
-    if (annotation != null) {
-      return annotation.name();
-    }
-    throw new IllegalArgumentException("Algo should be decorated with annotation - " + Algorithm.class.getName());
-  }
-
-  public static MLServiceImpl getMLService() throws Exception {
-    return getServiceProvider().getService(MLService.NAME);
-  }
-
-  public static ServiceProvider getServiceProvider() throws Exception {
-    Class<? extends ServiceProviderFactory> spfClass = HIVE_CONF.getClass(LensConfConstants.SERVICE_PROVIDER_FACTORY,
-      null, ServiceProviderFactory.class);
-    ServiceProviderFactory spf = spfClass.newInstance();
-    return spf.getServiceProvider();
-  }
-}

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/0f5ea4c7/lens-ml-lib/src/main/java/org/apache/lens/ml/ModelLoader.java
----------------------------------------------------------------------
diff --git a/lens-ml-lib/src/main/java/org/apache/lens/ml/ModelLoader.java b/lens-ml-lib/src/main/java/org/apache/lens/ml/ModelLoader.java
deleted file mode 100644
index 429cbf9..0000000
--- a/lens-ml-lib/src/main/java/org/apache/lens/ml/ModelLoader.java
+++ /dev/null
@@ -1,239 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.lens.ml;
-
-import java.io.IOException;
-import java.io.ObjectInputStream;
-import java.io.ObjectOutputStream;
-import java.util.concurrent.Callable;
-import java.util.concurrent.ExecutionException;
-import java.util.concurrent.TimeUnit;
-
-import org.apache.commons.io.IOUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hive.conf.HiveConf;
-
-import com.google.common.cache.Cache;
-import com.google.common.cache.CacheBuilder;
-
-/**
- * Load ML models from a FS location.
- */
-public final class ModelLoader {
-  private ModelLoader() {
-  }
-
-  /** The Constant MODEL_PATH_BASE_DIR. */
-  public static final String MODEL_PATH_BASE_DIR = "lens.ml.model.basedir";
-
-  /** The Constant MODEL_PATH_BASE_DIR_DEFAULT. */
-  public static final String MODEL_PATH_BASE_DIR_DEFAULT = "file:///tmp";
-
-  /** The Constant LOG. */
-  public static final Log LOG = LogFactory.getLog(ModelLoader.class);
-
-  /** The Constant TEST_REPORT_BASE_DIR. */
-  public static final String TEST_REPORT_BASE_DIR = "lens.ml.test.basedir";
-
-  /** The Constant TEST_REPORT_BASE_DIR_DEFAULT. */
-  public static final String TEST_REPORT_BASE_DIR_DEFAULT = "file:///tmp/ml_reports";
-
-  // Model cache settings
-  /** The Constant MODEL_CACHE_SIZE. */
-  public static final long MODEL_CACHE_SIZE = 10;
-
-  /** The Constant MODEL_CACHE_TIMEOUT. */
-  public static final long MODEL_CACHE_TIMEOUT = 3600000L; // one hour
-
-  /** The model cache. */
-  private static Cache<Path, MLModel> modelCache = CacheBuilder.newBuilder().maximumSize(MODEL_CACHE_SIZE)
-    .expireAfterAccess(MODEL_CACHE_TIMEOUT, TimeUnit.MILLISECONDS).build();
-
-  /**
-   * Gets the model location.
-   *
-   * @param conf      the conf
-   * @param algorithm the algorithm
-   * @param modelID   the model id
-   * @return the model location
-   */
-  public static Path getModelLocation(Configuration conf, String algorithm, String modelID) {
-    String modelDataBaseDir = conf.get(MODEL_PATH_BASE_DIR, MODEL_PATH_BASE_DIR_DEFAULT);
-    // Model location format - <modelDataBaseDir>/<algorithm>/modelID
-    return new Path(new Path(new Path(modelDataBaseDir), algorithm), modelID);
-  }
-
-  /**
-   * Load model.
-   *
-   * @param conf      the conf
-   * @param algorithm the algorithm
-   * @param modelID   the model id
-   * @return the ML model
-   * @throws IOException Signals that an I/O exception has occurred.
-   */
-  public static MLModel loadModel(Configuration conf, String algorithm, String modelID) throws IOException {
-    final Path modelPath = getModelLocation(conf, algorithm, modelID);
-    LOG.info("Loading model for algorithm: " + algorithm + " modelID: " + modelID + " At path: "
-      + modelPath.toUri().toString());
-    try {
-      return modelCache.get(modelPath, new Callable<MLModel>() {
-        @Override
-        public MLModel call() throws Exception {
-          FileSystem fs = modelPath.getFileSystem(new HiveConf());
-          if (!fs.exists(modelPath)) {
-            throw new IOException("Model path not found " + modelPath.toString());
-          }
-
-          ObjectInputStream ois = null;
-          try {
-            ois = new ObjectInputStream(fs.open(modelPath));
-            MLModel model = (MLModel) ois.readObject();
-            LOG.info("Loaded model " + model.getId() + " from location " + modelPath);
-            return model;
-          } catch (ClassNotFoundException e) {
-            throw new IOException(e);
-          } finally {
-            IOUtils.closeQuietly(ois);
-          }
-        }
-      });
-    } catch (ExecutionException exc) {
-      throw new IOException(exc);
-    }
-  }
-
-  /**
-   * Clear cache.
-   */
-  public static void clearCache() {
-    modelCache.cleanUp();
-  }
-
-  /**
-   * Gets the test report path.
-   *
-   * @param conf      the conf
-   * @param algorithm the algorithm
-   * @param report    the report
-   * @return the test report path
-   */
-  public static Path getTestReportPath(Configuration conf, String algorithm, String report) {
-    String testReportDir = conf.get(TEST_REPORT_BASE_DIR, TEST_REPORT_BASE_DIR_DEFAULT);
-    return new Path(new Path(testReportDir, algorithm), report);
-  }
-
-  /**
-   * Save test report.
-   *
-   * @param conf   the conf
-   * @param report the report
-   * @throws IOException Signals that an I/O exception has occurred.
-   */
-  public static void saveTestReport(Configuration conf, MLTestReport report) throws IOException {
-    Path reportDir = new Path(conf.get(TEST_REPORT_BASE_DIR, TEST_REPORT_BASE_DIR_DEFAULT));
-    FileSystem fs = reportDir.getFileSystem(conf);
-
-    if (!fs.exists(reportDir)) {
-      LOG.info("Creating test report dir " + reportDir.toUri().toString());
-      fs.mkdirs(reportDir);
-    }
-
-    Path algoDir = new Path(reportDir, report.getAlgorithm());
-
-    if (!fs.exists(algoDir)) {
-      LOG.info("Creating algorithm report dir " + algoDir.toUri().toString());
-      fs.mkdirs(algoDir);
-    }
-
-    ObjectOutputStream reportOutputStream = null;
-    Path reportSaveLocation;
-    try {
-      reportSaveLocation = new Path(algoDir, report.getReportID());
-      reportOutputStream = new ObjectOutputStream(fs.create(reportSaveLocation));
-      reportOutputStream.writeObject(report);
-      reportOutputStream.flush();
-    } catch (IOException ioexc) {
-      LOG.error("Error saving test report " + report.getReportID(), ioexc);
-      throw ioexc;
-    } finally {
-      IOUtils.closeQuietly(reportOutputStream);
-    }
-    LOG.info("Saved report " + report.getReportID() + " at location " + reportSaveLocation.toUri());
-  }
-
-  /**
-   * Load report.
-   *
-   * @param conf      the conf
-   * @param algorithm the algorithm
-   * @param reportID  the report id
-   * @return the ML test report
-   * @throws IOException Signals that an I/O exception has occurred.
-   */
-  public static MLTestReport loadReport(Configuration conf, String algorithm, String reportID) throws IOException {
-    Path reportLocation = getTestReportPath(conf, algorithm, reportID);
-    FileSystem fs = reportLocation.getFileSystem(conf);
-    ObjectInputStream reportStream = null;
-    MLTestReport report = null;
-
-    try {
-      reportStream = new ObjectInputStream(fs.open(reportLocation));
-      report = (MLTestReport) reportStream.readObject();
-    } catch (IOException ioex) {
-      LOG.error("Error reading report " + reportLocation, ioex);
-    } catch (ClassNotFoundException e) {
-      throw new IOException(e);
-    } finally {
-      IOUtils.closeQuietly(reportStream);
-    }
-    return report;
-  }
-
-  /**
-   * Delete model.
-   *
-   * @param conf      the conf
-   * @param algorithm the algorithm
-   * @param modelID   the model id
-   * @throws IOException Signals that an I/O exception has occurred.
-   */
-  public static void deleteModel(HiveConf conf, String algorithm, String modelID) throws IOException {
-    Path modelLocation = getModelLocation(conf, algorithm, modelID);
-    FileSystem fs = modelLocation.getFileSystem(conf);
-    fs.delete(modelLocation, false);
-  }
-
-  /**
-   * Delete test report.
-   *
-   * @param conf      the conf
-   * @param algorithm the algorithm
-   * @param reportID  the report id
-   * @throws IOException Signals that an I/O exception has occurred.
-   */
-  public static void deleteTestReport(HiveConf conf, String algorithm, String reportID) throws IOException {
-    Path reportPath = getTestReportPath(conf, algorithm, reportID);
-    reportPath.getFileSystem(conf).delete(reportPath, false);
-  }
-}

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/0f5ea4c7/lens-ml-lib/src/main/java/org/apache/lens/ml/MultiPrediction.java
----------------------------------------------------------------------
diff --git a/lens-ml-lib/src/main/java/org/apache/lens/ml/MultiPrediction.java b/lens-ml-lib/src/main/java/org/apache/lens/ml/MultiPrediction.java
deleted file mode 100644
index 4794c97..0000000
--- a/lens-ml-lib/src/main/java/org/apache/lens/ml/MultiPrediction.java
+++ /dev/null
@@ -1,28 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.lens.ml;
-
-import java.util.List;
-
-/**
- * The Interface MultiPrediction.
- */
-public interface MultiPrediction {
-  List<LabelledPrediction> getPredictions();
-}

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/0f5ea4c7/lens-ml-lib/src/main/java/org/apache/lens/ml/QueryRunner.java
----------------------------------------------------------------------
diff --git a/lens-ml-lib/src/main/java/org/apache/lens/ml/QueryRunner.java b/lens-ml-lib/src/main/java/org/apache/lens/ml/QueryRunner.java
deleted file mode 100644
index 56f9a88..0000000
--- a/lens-ml-lib/src/main/java/org/apache/lens/ml/QueryRunner.java
+++ /dev/null
@@ -1,56 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.lens.ml;
-
-import org.apache.lens.api.LensException;
-import org.apache.lens.api.LensSessionHandle;
-import org.apache.lens.api.query.QueryHandle;
-
-import lombok.Getter;
-import lombok.Setter;
-
-/**
- * Run a query against a Lens server.
- */
-public abstract class QueryRunner {
-
-  /** The session handle. */
-  protected final LensSessionHandle sessionHandle;
-
-  @Getter @Setter
-  protected String queryName;
-
-  /**
-   * Instantiates a new query runner.
-   *
-   * @param sessionHandle the session handle
-   */
-  public QueryRunner(LensSessionHandle sessionHandle) {
-    this.sessionHandle = sessionHandle;
-  }
-
-  /**
-   * Run query.
-   *
-   * @param query the query
-   * @return the query handle
-   * @throws LensException the lens exception
-   */
-  public abstract QueryHandle runQuery(String query) throws LensException;
-}

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/0f5ea4c7/lens-ml-lib/src/main/java/org/apache/lens/ml/TableTestingSpec.java
----------------------------------------------------------------------
diff --git a/lens-ml-lib/src/main/java/org/apache/lens/ml/TableTestingSpec.java b/lens-ml-lib/src/main/java/org/apache/lens/ml/TableTestingSpec.java
deleted file mode 100644
index f7fb1f8..0000000
--- a/lens-ml-lib/src/main/java/org/apache/lens/ml/TableTestingSpec.java
+++ /dev/null
@@ -1,325 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.lens.ml;
-
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.List;
-
-import org.apache.commons.lang3.StringUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.metastore.api.FieldSchema;
-import org.apache.hadoop.hive.ql.metadata.Hive;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
-import org.apache.hadoop.hive.ql.metadata.Table;
-
-import lombok.Getter;
-
-/**
- * Table specification for running test on a table.
- */
-public class TableTestingSpec {
-
-  /** The Constant LOG. */
-  public static final Log LOG = LogFactory.getLog(TableTestingSpec.class);
-
-  /** The db. */
-  private String db;
-
-  /** The table containing input data. */
-  private String inputTable;
-
-  // TODO use partition condition
-  /** The partition filter. */
-  private String partitionFilter;
-
-  /** The feature columns. */
-  private List<String> featureColumns;
-
-  /** The label column. */
-  private String labelColumn;
-
-  /** The output column. */
-  private String outputColumn;
-
-  /** The output table. */
-  private String outputTable;
-
-  /** The conf. */
-  private transient HiveConf conf;
-
-  /** The algorithm. */
-  private String algorithm;
-
-  /** The model id. */
-  private String modelID;
-
-  @Getter
-  private boolean outputTableExists;
-
-  @Getter
-  private String testID;
-
-  private HashMap<String, FieldSchema> columnNameToFieldSchema;
-
-  /**
-   * The Class TableTestingSpecBuilder.
-   */
-  public static class TableTestingSpecBuilder {
-
-    /** The spec. */
-    private final TableTestingSpec spec;
-
-    /**
-     * Instantiates a new table testing spec builder.
-     */
-    public TableTestingSpecBuilder() {
-      spec = new TableTestingSpec();
-    }
-
-    /**
-     * Database.
-     *
-     * @param database the database
-     * @return the table testing spec builder
-     */
-    public TableTestingSpecBuilder database(String database) {
-      spec.db = database;
-      return this;
-    }
-
-    /**
-     * Set the input table
-     *
-     * @param table the table
-     * @return the table testing spec builder
-     */
-    public TableTestingSpecBuilder inputTable(String table) {
-      spec.inputTable = table;
-      return this;
-    }
-
-    /**
-     * Partition filter for input table
-     *
-     * @param partFilter the part filter
-     * @return the table testing spec builder
-     */
-    public TableTestingSpecBuilder partitionFilter(String partFilter) {
-      spec.partitionFilter = partFilter;
-      return this;
-    }
-
-    /**
-     * Feature columns.
-     *
-     * @param featureColumns the feature columns
-     * @return the table testing spec builder
-     */
-    public TableTestingSpecBuilder featureColumns(List<String> featureColumns) {
-      spec.featureColumns = featureColumns;
-      return this;
-    }
-
-    /**
-     * Labe column.
-     *
-     * @param labelColumn the label column
-     * @return the table testing spec builder
-     */
-    public TableTestingSpecBuilder lableColumn(String labelColumn) {
-      spec.labelColumn = labelColumn;
-      return this;
-    }
-
-    /**
-     * Output column.
-     *
-     * @param outputColumn the output column
-     * @return the table testing spec builder
-     */
-    public TableTestingSpecBuilder outputColumn(String outputColumn) {
-      spec.outputColumn = outputColumn;
-      return this;
-    }
-
-    /**
-     * Output table.
-     *
-     * @param table the table
-     * @return the table testing spec builder
-     */
-    public TableTestingSpecBuilder outputTable(String table) {
-      spec.outputTable = table;
-      return this;
-    }
-
-    /**
-     * Hive conf.
-     *
-     * @param conf the conf
-     * @return the table testing spec builder
-     */
-    public TableTestingSpecBuilder hiveConf(HiveConf conf) {
-      spec.conf = conf;
-      return this;
-    }
-
-    /**
-     * Algorithm.
-     *
-     * @param algorithm the algorithm
-     * @return the table testing spec builder
-     */
-    public TableTestingSpecBuilder algorithm(String algorithm) {
-      spec.algorithm = algorithm;
-      return this;
-    }
-
-    /**
-     * Model id.
-     *
-     * @param modelID the model id
-     * @return the table testing spec builder
-     */
-    public TableTestingSpecBuilder modelID(String modelID) {
-      spec.modelID = modelID;
-      return this;
-    }
-
-    /**
-     * Builds the.
-     *
-     * @return the table testing spec
-     */
-    public TableTestingSpec build() {
-      return spec;
-    }
-
-    /**
-     * Set the unique test id
-     *
-     * @param testID
-     * @return
-     */
-    public TableTestingSpecBuilder testID(String testID) {
-      spec.testID = testID;
-      return this;
-    }
-  }
-
-  /**
-   * New builder.
-   *
-   * @return the table testing spec builder
-   */
-  public static TableTestingSpecBuilder newBuilder() {
-    return new TableTestingSpecBuilder();
-  }
-
-  /**
-   * Validate.
-   *
-   * @return true, if successful
-   */
-  public boolean validate() {
-    List<FieldSchema> columns;
-    try {
-      Hive metastoreClient = Hive.get(conf);
-      Table tbl = (db == null) ? metastoreClient.getTable(inputTable) : metastoreClient.getTable(db, inputTable);
-      columns = tbl.getAllCols();
-      columnNameToFieldSchema = new HashMap<String, FieldSchema>();
-
-      for (FieldSchema fieldSchema : columns) {
-        columnNameToFieldSchema.put(fieldSchema.getName(), fieldSchema);
-      }
-
-      // Check if output table exists
-      Table outTbl = metastoreClient.getTable(db == null ? "default" : db, outputTable, false);
-      outputTableExists = (outTbl != null);
-    } catch (HiveException exc) {
-      LOG.error("Error getting table info " + toString(), exc);
-      return false;
-    }
-
-    // Check if labeled column and feature columns are contained in the table
-    List<String> testTableColumns = new ArrayList<String>(columns.size());
-    for (FieldSchema column : columns) {
-      testTableColumns.add(column.getName());
-    }
-
-    if (!testTableColumns.containsAll(featureColumns)) {
-      LOG.info("Invalid feature columns: " + featureColumns + ". Actual columns in table:" + testTableColumns);
-      return false;
-    }
-
-    if (!testTableColumns.contains(labelColumn)) {
-      LOG.info("Invalid label column: " + labelColumn + ". Actual columns in table:" + testTableColumns);
-      return false;
-    }
-
-    if (StringUtils.isBlank(outputColumn)) {
-      LOG.info("Output column is required");
-      return false;
-    }
-
-    if (StringUtils.isBlank(outputTable)) {
-      LOG.info("Output table is required");
-      return false;
-    }
-    return true;
-  }
-
-  public String getTestQuery() {
-    if (!validate()) {
-      return null;
-    }
-
-    // We always insert a dynamic partition
-    StringBuilder q = new StringBuilder("INSERT OVERWRITE TABLE " + outputTable + " PARTITION (part_testid='" + testID
-      + "')  SELECT ");
-    String featureCols = StringUtils.join(featureColumns, ",");
-    q.append(featureCols).append(",").append(labelColumn).append(", ").append("predict(").append("'").append(algorithm)
-      .append("', ").append("'").append(modelID).append("', ").append(featureCols).append(") ").append(outputColumn)
-      .append(" FROM ").append(inputTable);
-
-    return q.toString();
-  }
-
-  public String getCreateOutputTableQuery() {
-    StringBuilder createTableQuery = new StringBuilder("CREATE TABLE IF NOT EXISTS ").append(outputTable).append("(");
-    // Output table contains feature columns, label column, output column
-    List<String> outputTableColumns = new ArrayList<String>();
-    for (String featureCol : featureColumns) {
-      outputTableColumns.add(featureCol + " " + columnNameToFieldSchema.get(featureCol).getType());
-    }
-
-    outputTableColumns.add(labelColumn + " " + columnNameToFieldSchema.get(labelColumn).getType());
-    outputTableColumns.add(outputColumn + " string");
-
-    createTableQuery.append(StringUtils.join(outputTableColumns, ", "));
-
-    // Append partition column
-    createTableQuery.append(") PARTITIONED BY (part_testid string)");
-
-    return createTableQuery.toString();
-  }
-}

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/0f5ea4c7/lens-ml-lib/src/main/java/org/apache/lens/ml/algo/api/AlgoParam.java
----------------------------------------------------------------------
diff --git a/lens-ml-lib/src/main/java/org/apache/lens/ml/algo/api/AlgoParam.java b/lens-ml-lib/src/main/java/org/apache/lens/ml/algo/api/AlgoParam.java
new file mode 100644
index 0000000..e0d13c0
--- /dev/null
+++ b/lens-ml-lib/src/main/java/org/apache/lens/ml/algo/api/AlgoParam.java
@@ -0,0 +1,53 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.lens.ml.algo.api;
+
+import java.lang.annotation.ElementType;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.annotation.Target;
+
+/**
+ * The Interface AlgoParam.
+ */
+@Retention(RetentionPolicy.RUNTIME)
+@Target(ElementType.FIELD)
+public @interface AlgoParam {
+
+  /**
+   * Name.
+   *
+   * @return the string
+   */
+  String name();
+
+  /**
+   * Help.
+   *
+   * @return the string
+   */
+  String help();
+
+  /**
+   * Default value.
+   *
+   * @return the string
+   */
+  String defaultValue() default "None";
+}

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/0f5ea4c7/lens-ml-lib/src/main/java/org/apache/lens/ml/algo/api/Algorithm.java
----------------------------------------------------------------------
diff --git a/lens-ml-lib/src/main/java/org/apache/lens/ml/algo/api/Algorithm.java b/lens-ml-lib/src/main/java/org/apache/lens/ml/algo/api/Algorithm.java
new file mode 100644
index 0000000..29bde29
--- /dev/null
+++ b/lens-ml-lib/src/main/java/org/apache/lens/ml/algo/api/Algorithm.java
@@ -0,0 +1,46 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.lens.ml.algo.api;
+
+import java.lang.annotation.ElementType;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.annotation.Target;
+
+/**
+ * The Interface Algorithm.
+ */
+@Retention(RetentionPolicy.RUNTIME)
+@Target(ElementType.TYPE)
+public @interface Algorithm {
+
+  /**
+   * Name.
+   *
+   * @return the string
+   */
+  String name();
+
+  /**
+   * Description.
+   *
+   * @return the string
+   */
+  String description();
+}

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/0f5ea4c7/lens-ml-lib/src/main/java/org/apache/lens/ml/algo/api/MLAlgo.java
----------------------------------------------------------------------
diff --git a/lens-ml-lib/src/main/java/org/apache/lens/ml/algo/api/MLAlgo.java b/lens-ml-lib/src/main/java/org/apache/lens/ml/algo/api/MLAlgo.java
new file mode 100644
index 0000000..44b0043
--- /dev/null
+++ b/lens-ml-lib/src/main/java/org/apache/lens/ml/algo/api/MLAlgo.java
@@ -0,0 +1,53 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.lens.ml.algo.api;
+
+import org.apache.lens.api.LensConf;
+import org.apache.lens.api.LensException;
+
+/**
+ * The Interface MLAlgo.
+ */
+public interface MLAlgo {
+  String getName();
+
+  String getDescription();
+
+  /**
+   * Configure.
+   *
+   * @param configuration the configuration
+   */
+  void configure(LensConf configuration);
+
+  LensConf getConf();
+
+  /**
+   * Train.
+   *
+   * @param conf    the conf
+   * @param db      the db
+   * @param table   the table
+   * @param modelId the model id
+   * @param params  the params
+   * @return the ML model
+   * @throws LensException the lens exception
+   */
+  MLModel train(LensConf conf, String db, String table, String modelId, String... params) throws LensException;
+}

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/0f5ea4c7/lens-ml-lib/src/main/java/org/apache/lens/ml/algo/api/MLDriver.java
----------------------------------------------------------------------
diff --git a/lens-ml-lib/src/main/java/org/apache/lens/ml/algo/api/MLDriver.java b/lens-ml-lib/src/main/java/org/apache/lens/ml/algo/api/MLDriver.java
new file mode 100644
index 0000000..1aa699d
--- /dev/null
+++ b/lens-ml-lib/src/main/java/org/apache/lens/ml/algo/api/MLDriver.java
@@ -0,0 +1,71 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.lens.ml.algo.api;
+
+import java.util.List;
+
+import org.apache.lens.api.LensConf;
+import org.apache.lens.api.LensException;
+
+/**
+ * The Interface MLDriver.
+ */
+public interface MLDriver {
+
+  /**
+   * Checks if is algo supported.
+   *
+   * @param algo the algo
+   * @return true, if is algo supported
+   */
+  boolean isAlgoSupported(String algo);
+
+  /**
+   * Gets the algo instance.
+   *
+   * @param algo the algo
+   * @return the algo instance
+   * @throws LensException the lens exception
+   */
+  MLAlgo getAlgoInstance(String algo) throws LensException;
+
+  /**
+   * Inits the.
+   *
+   * @param conf the conf
+   * @throws LensException the lens exception
+   */
+  void init(LensConf conf) throws LensException;
+
+  /**
+   * Start.
+   *
+   * @throws LensException the lens exception
+   */
+  void start() throws LensException;
+
+  /**
+   * Stop.
+   *
+   * @throws LensException the lens exception
+   */
+  void stop() throws LensException;
+
+  List<String> getAlgoNames();
+}

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/0f5ea4c7/lens-ml-lib/src/main/java/org/apache/lens/ml/algo/api/MLModel.java
----------------------------------------------------------------------
diff --git a/lens-ml-lib/src/main/java/org/apache/lens/ml/algo/api/MLModel.java b/lens-ml-lib/src/main/java/org/apache/lens/ml/algo/api/MLModel.java
new file mode 100644
index 0000000..73717ac
--- /dev/null
+++ b/lens-ml-lib/src/main/java/org/apache/lens/ml/algo/api/MLModel.java
@@ -0,0 +1,79 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.lens.ml.algo.api;
+
+import java.io.Serializable;
+import java.util.Date;
+import java.util.List;
+
+import lombok.Getter;
+import lombok.NoArgsConstructor;
+import lombok.Setter;
+import lombok.ToString;
+
+/**
+ * Instantiates a new ML model.
+ */
+@NoArgsConstructor
+@ToString
+public abstract class MLModel<PREDICTION> implements Serializable {
+
+  /** The id. */
+  @Getter
+  @Setter
+  private String id;
+
+  /** The created at. */
+  @Getter
+  @Setter
+  private Date createdAt;
+
+  /** The algo name. */
+  @Getter
+  @Setter
+  private String algoName;
+
+  /** The table. */
+  @Getter
+  @Setter
+  private String table;
+
+  /** The params. */
+  @Getter
+  @Setter
+  private List<String> params;
+
+  /** The label column. */
+  @Getter
+  @Setter
+  private String labelColumn;
+
+  /** The feature columns. */
+  @Getter
+  @Setter
+  private List<String> featureColumns;
+
+  /**
+   * Predict.
+   *
+   * @param args the args
+   * @return the prediction
+   */
+  public abstract PREDICTION predict(Object... args);
+}

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/0f5ea4c7/lens-ml-lib/src/main/java/org/apache/lens/ml/algo/lib/AlgoArgParser.java
----------------------------------------------------------------------
diff --git a/lens-ml-lib/src/main/java/org/apache/lens/ml/algo/lib/AlgoArgParser.java b/lens-ml-lib/src/main/java/org/apache/lens/ml/algo/lib/AlgoArgParser.java
new file mode 100644
index 0000000..51979d8
--- /dev/null
+++ b/lens-ml-lib/src/main/java/org/apache/lens/ml/algo/lib/AlgoArgParser.java
@@ -0,0 +1,117 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.lens.ml.algo.lib;
+
+import java.lang.reflect.Field;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import org.apache.lens.ml.algo.api.AlgoParam;
+import org.apache.lens.ml.algo.api.MLAlgo;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+
+/**
+ * The Class AlgoArgParser.
+ */
+public final class AlgoArgParser {
+  private AlgoArgParser() {
+  }
+
+  /**
+   * The Class CustomArgParser.
+   *
+   * @param <E> the element type
+   */
+  public abstract static class CustomArgParser<E> {
+
+    /**
+     * Parses the.
+     *
+     * @param value the value
+     * @return the e
+     */
+    public abstract E parse(String value);
+  }
+
+  /** The Constant LOG. */
+  public static final Log LOG = LogFactory.getLog(AlgoArgParser.class);
+
+  /**
+   * Extracts feature names. If the algo has any parameters associated with @AlgoParam annotation, those are set
+   * as well.
+   *
+   * @param algo the algo
+   * @param args    the args
+   * @return List of feature column names.
+   */
+  public static List<String> parseArgs(MLAlgo algo, String[] args) {
+    List<String> featureColumns = new ArrayList<String>();
+    Class<? extends MLAlgo> algoClass = algo.getClass();
+    // Get param fields
+    Map<String, Field> fieldMap = new HashMap<String, Field>();
+
+    for (Field fld : algoClass.getDeclaredFields()) {
+      fld.setAccessible(true);
+      AlgoParam paramAnnotation = fld.getAnnotation(AlgoParam.class);
+      if (paramAnnotation != null) {
+        fieldMap.put(paramAnnotation.name(), fld);
+      }
+    }
+
+    for (int i = 0; i < args.length; i += 2) {
+      String key = args[i].trim();
+      String value = args[i + 1].trim();
+
+      try {
+        if ("feature".equalsIgnoreCase(key)) {
+          featureColumns.add(value);
+        } else if (fieldMap.containsKey(key)) {
+          Field f = fieldMap.get(key);
+          if (String.class.equals(f.getType())) {
+            f.set(algo, value);
+          } else if (Integer.TYPE.equals(f.getType())) {
+            f.setInt(algo, Integer.parseInt(value));
+          } else if (Double.TYPE.equals(f.getType())) {
+            f.setDouble(algo, Double.parseDouble(value));
+          } else if (Long.TYPE.equals(f.getType())) {
+            f.setLong(algo, Long.parseLong(value));
+          } else {
+            // check if the algo provides a deserializer for this param
+            String customParserClass = algo.getConf().getProperties().get("lens.ml.args." + key);
+            if (customParserClass != null) {
+              Class<? extends CustomArgParser<?>> clz = (Class<? extends CustomArgParser<?>>) Class
+                .forName(customParserClass);
+              CustomArgParser<?> parser = clz.newInstance();
+              f.set(algo, parser.parse(value));
+            } else {
+              LOG.warn("Ignored param " + key + "=" + value + " as no parser found");
+            }
+          }
+        }
+      } catch (Exception exc) {
+        LOG.error("Error while setting param " + key + " to " + value + " for algo " + algo);
+      }
+    }
+    return featureColumns;
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/0f5ea4c7/lens-ml-lib/src/main/java/org/apache/lens/ml/algo/lib/Algorithms.java
----------------------------------------------------------------------
diff --git a/lens-ml-lib/src/main/java/org/apache/lens/ml/algo/lib/Algorithms.java b/lens-ml-lib/src/main/java/org/apache/lens/ml/algo/lib/Algorithms.java
new file mode 100644
index 0000000..a2fd94b
--- /dev/null
+++ b/lens-ml-lib/src/main/java/org/apache/lens/ml/algo/lib/Algorithms.java
@@ -0,0 +1,89 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.lens.ml.algo.lib;
+
+import java.lang.reflect.Constructor;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import org.apache.lens.api.LensException;
+import org.apache.lens.ml.algo.api.Algorithm;
+import org.apache.lens.ml.algo.api.MLAlgo;
+
+/**
+ * The Class Algorithms.
+ */
+public class Algorithms {
+
+  /** The algorithm classes. */
+  private final Map<String, Class<? extends MLAlgo>> algorithmClasses
+    = new HashMap<String, Class<? extends MLAlgo>>();
+
+  /**
+   * Register.
+   *
+   * @param algoClass the algo class
+   */
+  public void register(Class<? extends MLAlgo> algoClass) {
+    if (algoClass != null && algoClass.getAnnotation(Algorithm.class) != null) {
+      algorithmClasses.put(algoClass.getAnnotation(Algorithm.class).name(), algoClass);
+    } else {
+      throw new IllegalArgumentException("Not a valid algorithm class: " + algoClass);
+    }
+  }
+
+  /**
+   * Gets the algo for name.
+   *
+   * @param name the name
+   * @return the algo for name
+   * @throws LensException the lens exception
+   */
+  public MLAlgo getAlgoForName(String name) throws LensException {
+    Class<? extends MLAlgo> algoClass = algorithmClasses.get(name);
+    if (algoClass == null) {
+      return null;
+    }
+    Algorithm algoAnnotation = algoClass.getAnnotation(Algorithm.class);
+    String description = algoAnnotation.description();
+    try {
+      Constructor<? extends MLAlgo> algoConstructor = algoClass.getConstructor(String.class, String.class);
+      return algoConstructor.newInstance(name, description);
+    } catch (Exception exc) {
+      throw new LensException("Unable to get algo: " + name, exc);
+    }
+  }
+
+  /**
+   * Checks if is algo supported.
+   *
+   * @param name the name
+   * @return true, if is algo supported
+   */
+  public boolean isAlgoSupported(String name) {
+    return algorithmClasses.containsKey(name);
+  }
+
+  public List<String> getAlgorithmNames() {
+    return new ArrayList<String>(algorithmClasses.keySet());
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/0f5ea4c7/lens-ml-lib/src/main/java/org/apache/lens/ml/algo/lib/ClassifierBaseModel.java
----------------------------------------------------------------------
diff --git a/lens-ml-lib/src/main/java/org/apache/lens/ml/algo/lib/ClassifierBaseModel.java b/lens-ml-lib/src/main/java/org/apache/lens/ml/algo/lib/ClassifierBaseModel.java
new file mode 100644
index 0000000..a960a4a
--- /dev/null
+++ b/lens-ml-lib/src/main/java/org/apache/lens/ml/algo/lib/ClassifierBaseModel.java
@@ -0,0 +1,48 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.lens.ml.algo.lib;
+
+import org.apache.lens.ml.algo.api.MLModel;
+
+/**
+ * Return a single double value as a prediction. This is useful in classifiers where the classifier returns a single
+ * class label as a prediction.
+ */
+public abstract class ClassifierBaseModel extends MLModel<Double> {
+
+  /**
+   * Gets the feature vector.
+   *
+   * @param args the args
+   * @return the feature vector
+   */
+  public final double[] getFeatureVector(Object[] args) {
+    double[] features = new double[args.length];
+    for (int i = 0; i < args.length; i++) {
+      if (args[i] instanceof Double) {
+        features[i] = (Double) args[i];
+      } else if (args[i] instanceof String) {
+        features[i] = Double.parseDouble((String) args[i]);
+      } else {
+        features[i] = 0.0;
+      }
+    }
+    return features;
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/0f5ea4c7/lens-ml-lib/src/main/java/org/apache/lens/ml/algo/lib/ForecastingModel.java
----------------------------------------------------------------------
diff --git a/lens-ml-lib/src/main/java/org/apache/lens/ml/algo/lib/ForecastingModel.java b/lens-ml-lib/src/main/java/org/apache/lens/ml/algo/lib/ForecastingModel.java
new file mode 100644
index 0000000..16a6180
--- /dev/null
+++ b/lens-ml-lib/src/main/java/org/apache/lens/ml/algo/lib/ForecastingModel.java
@@ -0,0 +1,95 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.lens.ml.algo.lib;
+
+import java.util.List;
+
+import org.apache.lens.ml.algo.api.MLModel;
+
+/**
+ * The Class ForecastingModel.
+ */
+public class ForecastingModel extends MLModel<MultiPrediction> {
+
+  /*
+   * (non-Javadoc)
+   *
+   * @see org.apache.lens.ml.MLModel#predict(java.lang.Object[])
+   */
+  @Override
+  public MultiPrediction predict(Object... args) {
+    return new ForecastingPredictions(null);
+  }
+
+  /**
+   * The Class ForecastingPredictions.
+   */
+  public static class ForecastingPredictions implements MultiPrediction {
+
+    /** The values. */
+    private final List<LabelledPrediction> values;
+
+    /**
+     * Instantiates a new forecasting predictions.
+     *
+     * @param values the values
+     */
+    public ForecastingPredictions(List<LabelledPrediction> values) {
+      this.values = values;
+    }
+
+    @Override
+    public List<LabelledPrediction> getPredictions() {
+      return values;
+    }
+  }
+
+  /**
+   * The Class ForecastingLabel.
+   */
+  public static class ForecastingLabel implements LabelledPrediction<Long, Double> {
+
+    /** The timestamp. */
+    private final Long timestamp;
+
+    /** The value. */
+    private final double value;
+
+    /**
+     * Instantiates a new forecasting label.
+     *
+     * @param timestamp the timestamp
+     * @param value     the value
+     */
+    public ForecastingLabel(long timestamp, double value) {
+      this.timestamp = timestamp;
+      this.value = value;
+    }
+
+    @Override
+    public Long getLabel() {
+      return timestamp;
+    }
+
+    @Override
+    public Double getPrediction() {
+      return value;
+    }
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/0f5ea4c7/lens-ml-lib/src/main/java/org/apache/lens/ml/algo/lib/LabelledPrediction.java
----------------------------------------------------------------------
diff --git a/lens-ml-lib/src/main/java/org/apache/lens/ml/algo/lib/LabelledPrediction.java b/lens-ml-lib/src/main/java/org/apache/lens/ml/algo/lib/LabelledPrediction.java
new file mode 100644
index 0000000..9c7737e
--- /dev/null
+++ b/lens-ml-lib/src/main/java/org/apache/lens/ml/algo/lib/LabelledPrediction.java
@@ -0,0 +1,32 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.lens.ml.algo.lib;
+
+/**
+ * Prediction type used when the model prediction is of complex types. For example, in forecasting the predictions are a
+ * series of timestamp, and value pairs.
+ *
+ * @param <LABELTYPE>      the generic type
+ * @param <PREDICTIONTYPE> the generic type
+ */
+public interface LabelledPrediction<LABELTYPE, PREDICTIONTYPE> {
+  LABELTYPE getLabel();
+
+  PREDICTIONTYPE getPrediction();
+}

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/0f5ea4c7/lens-ml-lib/src/main/java/org/apache/lens/ml/algo/lib/MultiPrediction.java
----------------------------------------------------------------------
diff --git a/lens-ml-lib/src/main/java/org/apache/lens/ml/algo/lib/MultiPrediction.java b/lens-ml-lib/src/main/java/org/apache/lens/ml/algo/lib/MultiPrediction.java
new file mode 100644
index 0000000..e910a92
--- /dev/null
+++ b/lens-ml-lib/src/main/java/org/apache/lens/ml/algo/lib/MultiPrediction.java
@@ -0,0 +1,28 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.lens.ml.algo.lib;
+
+import java.util.List;
+
+/**
+ * The Interface MultiPrediction.
+ */
+public interface MultiPrediction {
+  List<LabelledPrediction> getPredictions();
+}

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/0f5ea4c7/lens-ml-lib/src/main/java/org/apache/lens/ml/algo/spark/BaseSparkAlgo.java
----------------------------------------------------------------------
diff --git a/lens-ml-lib/src/main/java/org/apache/lens/ml/algo/spark/BaseSparkAlgo.java b/lens-ml-lib/src/main/java/org/apache/lens/ml/algo/spark/BaseSparkAlgo.java
new file mode 100644
index 0000000..4012085
--- /dev/null
+++ b/lens-ml-lib/src/main/java/org/apache/lens/ml/algo/spark/BaseSparkAlgo.java
@@ -0,0 +1,287 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.lens.ml.algo.spark;
+
+import java.lang.reflect.Field;
+import java.util.*;
+
+import org.apache.lens.api.LensConf;
+import org.apache.lens.api.LensException;
+import org.apache.lens.ml.algo.api.AlgoParam;
+import org.apache.lens.ml.algo.api.Algorithm;
+import org.apache.lens.ml.algo.api.MLAlgo;
+import org.apache.lens.ml.algo.api.MLModel;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.spark.api.java.JavaSparkContext;
+import org.apache.spark.mllib.regression.LabeledPoint;
+import org.apache.spark.rdd.RDD;
+
+/**
+ * The Class BaseSparkAlgo.
+ */
+public abstract class BaseSparkAlgo implements MLAlgo {
+
+  /** The Constant LOG. */
+  public static final Log LOG = LogFactory.getLog(BaseSparkAlgo.class);
+
+  /** The name. */
+  private final String name;
+
+  /** The description. */
+  private final String description;
+
+  /** The spark context. */
+  protected JavaSparkContext sparkContext;
+
+  /** The params. */
+  protected Map<String, String> params;
+
+  /** The conf. */
+  protected transient LensConf conf;
+
+  /** The training fraction. */
+  @AlgoParam(name = "trainingFraction", help = "% of dataset to be used for training", defaultValue = "0")
+  protected double trainingFraction;
+
+  /** The use training fraction. */
+  private boolean useTrainingFraction;
+
+  /** The label. */
+  @AlgoParam(name = "label", help = "Name of column which is used as a training label for supervised learning")
+  protected String label;
+
+  /** The partition filter. */
+  @AlgoParam(name = "partition", help = "Partition filter used to create HCatInputFormats")
+  protected String partitionFilter;
+
+  /** The features. */
+  @AlgoParam(name = "feature", help = "Column name(s) which are to be used as sample features")
+  protected List<String> features;
+
+  /**
+   * Instantiates a new base spark algo.
+   *
+   * @param name        the name
+   * @param description the description
+   */
+  public BaseSparkAlgo(String name, String description) {
+    this.name = name;
+    this.description = description;
+  }
+
+  public void setSparkContext(JavaSparkContext sparkContext) {
+    this.sparkContext = sparkContext;
+  }
+
+  @Override
+  public LensConf getConf() {
+    return conf;
+  }
+
+  /*
+   * (non-Javadoc)
+   *
+   * @see org.apache.lens.ml.MLAlgo#configure(org.apache.lens.api.LensConf)
+   */
+  @Override
+  public void configure(LensConf configuration) {
+    this.conf = configuration;
+  }
+
+  /*
+   * (non-Javadoc)
+   *
+   * @see org.apache.lens.ml.MLAlgo#train(org.apache.lens.api.LensConf, java.lang.String, java.lang.String,
+   * java.lang.String, java.lang.String[])
+   */
+  @Override
+  public MLModel<?> train(LensConf conf, String db, String table, String modelId, String... params)
+    throws LensException {
+    parseParams(params);
+
+    TableTrainingSpec.TableTrainingSpecBuilder builder = TableTrainingSpec.newBuilder().hiveConf(toHiveConf(conf))
+      .database(db).table(table).partitionFilter(partitionFilter).featureColumns(features).labelColumn(label);
+
+    if (useTrainingFraction) {
+      builder.trainingFraction(trainingFraction);
+    }
+
+    TableTrainingSpec spec = builder.build();
+    LOG.info("Training " + " with " + features.size() + " features");
+
+    spec.createRDDs(sparkContext);
+
+    RDD<LabeledPoint> trainingRDD = spec.getTrainingRDD();
+    BaseSparkClassificationModel<?> model = trainInternal(modelId, trainingRDD);
+    model.setTable(table);
+    model.setParams(Arrays.asList(params));
+    model.setLabelColumn(label);
+    model.setFeatureColumns(features);
+    return model;
+  }
+
+  /**
+   * To hive conf.
+   *
+   * @param conf the conf
+   * @return the hive conf
+   */
+  protected HiveConf toHiveConf(LensConf conf) {
+    HiveConf hiveConf = new HiveConf();
+    for (String key : conf.getProperties().keySet()) {
+      hiveConf.set(key, conf.getProperties().get(key));
+    }
+    return hiveConf;
+  }
+
+  /**
+   * Parses the params.
+   *
+   * @param args the args
+   */
+  public void parseParams(String[] args) {
+    if (args.length % 2 != 0) {
+      throw new IllegalArgumentException("Invalid number of params " + args.length);
+    }
+
+    params = new LinkedHashMap<String, String>();
+
+    for (int i = 0; i < args.length; i += 2) {
+      if ("f".equalsIgnoreCase(args[i]) || "feature".equalsIgnoreCase(args[i])) {
+        if (features == null) {
+          features = new ArrayList<String>();
+        }
+        features.add(args[i + 1]);
+      } else if ("l".equalsIgnoreCase(args[i]) || "label".equalsIgnoreCase(args[i])) {
+        label = args[i + 1];
+      } else {
+        params.put(args[i].replaceAll("\\-+", ""), args[i + 1]);
+      }
+    }
+
+    if (params.containsKey("trainingFraction")) {
+      // Get training Fraction
+      String trainingFractionStr = params.get("trainingFraction");
+      try {
+        trainingFraction = Double.parseDouble(trainingFractionStr);
+        useTrainingFraction = true;
+      } catch (NumberFormatException nfe) {
+        throw new IllegalArgumentException("Invalid training fraction", nfe);
+      }
+    }
+
+    if (params.containsKey("partition") || params.containsKey("p")) {
+      partitionFilter = params.containsKey("partition") ? params.get("partition") : params.get("p");
+    }
+
+    parseAlgoParams(params);
+  }
+
+  /**
+   * Gets the param value.
+   *
+   * @param param      the param
+   * @param defaultVal the default val
+   * @return the param value
+   */
+  public double getParamValue(String param, double defaultVal) {
+    if (params.containsKey(param)) {
+      try {
+        return Double.parseDouble(params.get(param));
+      } catch (NumberFormatException nfe) {
+        LOG.warn("Couldn't parse param value: " + param + " as double.");
+      }
+    }
+    return defaultVal;
+  }
+
+  /**
+   * Gets the param value.
+   *
+   * @param param      the param
+   * @param defaultVal the default val
+   * @return the param value
+   */
+  public int getParamValue(String param, int defaultVal) {
+    if (params.containsKey(param)) {
+      try {
+        return Integer.parseInt(params.get(param));
+      } catch (NumberFormatException nfe) {
+        LOG.warn("Couldn't parse param value: " + param + " as integer.");
+      }
+    }
+    return defaultVal;
+  }
+
+  public String getName() {
+    return name;
+  }
+
+  public String getDescription() {
+    return description;
+  }
+
+  public Map<String, String> getArgUsage() {
+    Map<String, String> usage = new LinkedHashMap<String, String>();
+    Class<?> clz = this.getClass();
+    // Put class name and description as well as part of the usage
+    Algorithm algorithm = clz.getAnnotation(Algorithm.class);
+    if (algorithm != null) {
+      usage.put("Algorithm Name", algorithm.name());
+      usage.put("Algorithm Description", algorithm.description());
+    }
+
+    // Get all algo params including base algo params
+    while (clz != null) {
+      for (Field field : clz.getDeclaredFields()) {
+        AlgoParam param = field.getAnnotation(AlgoParam.class);
+        if (param != null) {
+          usage.put("[param] " + param.name(), param.help() + " Default Value = " + param.defaultValue());
+        }
+      }
+
+      if (clz.equals(BaseSparkAlgo.class)) {
+        break;
+      }
+      clz = clz.getSuperclass();
+    }
+    return usage;
+  }
+
+  /**
+   * Parses the algo params.
+   *
+   * @param params the params
+   */
+  public abstract void parseAlgoParams(Map<String, String> params);
+
+  /**
+   * Train internal.
+   *
+   * @param modelId     the model id
+   * @param trainingRDD the training rdd
+   * @return the base spark classification model
+   * @throws LensException the lens exception
+   */
+  protected abstract BaseSparkClassificationModel trainInternal(String modelId, RDD<LabeledPoint> trainingRDD)
+    throws LensException;
+}

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/0f5ea4c7/lens-ml-lib/src/main/java/org/apache/lens/ml/algo/spark/BaseSparkClassificationModel.java
----------------------------------------------------------------------
diff --git a/lens-ml-lib/src/main/java/org/apache/lens/ml/algo/spark/BaseSparkClassificationModel.java b/lens-ml-lib/src/main/java/org/apache/lens/ml/algo/spark/BaseSparkClassificationModel.java
new file mode 100644
index 0000000..806dc1f
--- /dev/null
+++ b/lens-ml-lib/src/main/java/org/apache/lens/ml/algo/spark/BaseSparkClassificationModel.java
@@ -0,0 +1,65 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.lens.ml.algo.spark;
+
+import org.apache.lens.ml.algo.lib.ClassifierBaseModel;
+
+import org.apache.spark.mllib.classification.ClassificationModel;
+import org.apache.spark.mllib.linalg.Vectors;
+
+/**
+ * The Class BaseSparkClassificationModel.
+ *
+ * @param <MODEL> the generic type
+ */
+public class BaseSparkClassificationModel<MODEL extends ClassificationModel> extends ClassifierBaseModel {
+
+  /** The model id. */
+  private final String modelId;
+
+  /** The spark model. */
+  private final MODEL sparkModel;
+
+  /**
+   * Instantiates a new base spark classification model.
+   *
+   * @param modelId the model id
+   * @param model   the model
+   */
+  public BaseSparkClassificationModel(String modelId, MODEL model) {
+    this.modelId = modelId;
+    this.sparkModel = model;
+  }
+
+  /*
+   * (non-Javadoc)
+   *
+   * @see org.apache.lens.ml.MLModel#predict(java.lang.Object[])
+   */
+  @Override
+  public Double predict(Object... args) {
+    return sparkModel.predict(Vectors.dense(getFeatureVector(args)));
+  }
+
+  @Override
+  public String getId() {
+    return modelId;
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/0f5ea4c7/lens-ml-lib/src/main/java/org/apache/lens/ml/algo/spark/ColumnFeatureFunction.java
----------------------------------------------------------------------
diff --git a/lens-ml-lib/src/main/java/org/apache/lens/ml/algo/spark/ColumnFeatureFunction.java b/lens-ml-lib/src/main/java/org/apache/lens/ml/algo/spark/ColumnFeatureFunction.java
new file mode 100644
index 0000000..d75efc0
--- /dev/null
+++ b/lens-ml-lib/src/main/java/org/apache/lens/ml/algo/spark/ColumnFeatureFunction.java
@@ -0,0 +1,102 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.lens.ml.algo.spark;
+
+import org.apache.hadoop.io.WritableComparable;
+import org.apache.hive.hcatalog.data.HCatRecord;
+import org.apache.log4j.Logger;
+import org.apache.spark.mllib.linalg.Vectors;
+import org.apache.spark.mllib.regression.LabeledPoint;
+
+import com.google.common.base.Preconditions;
+import scala.Tuple2;
+
+/**
+ * A feature function that directly maps an HCatRecord to a feature vector. Each column becomes a feature in the vector,
+ * with the value of the feature obtained using the value mapper for that column
+ */
+public class ColumnFeatureFunction extends FeatureFunction {
+
+  /** The Constant LOG. */
+  public static final Logger LOG = Logger.getLogger(ColumnFeatureFunction.class);
+
+  /** The feature value mappers. */
+  private final FeatureValueMapper[] featureValueMappers;
+
+  /** The feature positions. */
+  private final int[] featurePositions;
+
+  /** The label column pos. */
+  private final int labelColumnPos;
+
+  /** The num features. */
+  private final int numFeatures;
+
+  /** The default labeled point. */
+  private final LabeledPoint defaultLabeledPoint;
+
+  /**
+   * Feature positions and value mappers are parallel arrays. featurePositions[i] gives the position of ith feature in
+   * the HCatRecord, and valueMappers[i] gives the value mapper used to map that feature to a Double value
+   *
+   * @param featurePositions position number of feature column in the HCatRecord
+   * @param valueMappers     mapper for each column position
+   * @param labelColumnPos   position of the label column
+   * @param numFeatures      number of features in the feature vector
+   * @param defaultLabel     default label to be used for null records
+   */
+  public ColumnFeatureFunction(int[] featurePositions, FeatureValueMapper[] valueMappers, int labelColumnPos,
+    int numFeatures, double defaultLabel) {
+    Preconditions.checkNotNull(valueMappers, "Value mappers argument is required");
+    Preconditions.checkNotNull(featurePositions, "Feature positions are required");
+    Preconditions.checkArgument(valueMappers.length == featurePositions.length,
+      "Mismatch between number of value mappers and feature positions");
+
+    this.featurePositions = featurePositions;
+    this.featureValueMappers = valueMappers;
+    this.labelColumnPos = labelColumnPos;
+    this.numFeatures = numFeatures;
+    defaultLabeledPoint = new LabeledPoint(defaultLabel, Vectors.dense(new double[numFeatures]));
+  }
+
+  /*
+   * (non-Javadoc)
+   *
+   * @see org.apache.lens.ml.spark.FeatureFunction#call(scala.Tuple2)
+   */
+  @Override
+  public LabeledPoint call(Tuple2<WritableComparable, HCatRecord> tuple) throws Exception {
+    HCatRecord record = tuple._2();
+
+    if (record == null) {
+      LOG.info("@@@ Null record");
+      return defaultLabeledPoint;
+    }
+
+    double[] features = new double[numFeatures];
+
+    for (int i = 0; i < numFeatures; i++) {
+      int featurePos = featurePositions[i];
+      features[i] = featureValueMappers[i].call(record.get(featurePos));
+    }
+
+    double label = featureValueMappers[labelColumnPos].call(record.get(labelColumnPos));
+    return new LabeledPoint(label, Vectors.dense(features));
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/0f5ea4c7/lens-ml-lib/src/main/java/org/apache/lens/ml/algo/spark/DoubleValueMapper.java
----------------------------------------------------------------------
diff --git a/lens-ml-lib/src/main/java/org/apache/lens/ml/algo/spark/DoubleValueMapper.java b/lens-ml-lib/src/main/java/org/apache/lens/ml/algo/spark/DoubleValueMapper.java
new file mode 100644
index 0000000..15ba9ea
--- /dev/null
+++ b/lens-ml-lib/src/main/java/org/apache/lens/ml/algo/spark/DoubleValueMapper.java
@@ -0,0 +1,39 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.lens.ml.algo.spark;
+
+/**
+ * Directly return input when it is known to be double.
+ */
+public class DoubleValueMapper extends FeatureValueMapper {
+
+  /*
+   * (non-Javadoc)
+   *
+   * @see org.apache.lens.ml.spark.FeatureValueMapper#call(java.lang.Object)
+   */
+  @Override
+  public final Double call(Object input) {
+    if (input instanceof Double || input == null) {
+      return input == null ? Double.valueOf(0d) : (Double) input;
+    }
+
+    throw new IllegalArgumentException("Invalid input expecting only doubles, but got " + input);
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/0f5ea4c7/lens-ml-lib/src/main/java/org/apache/lens/ml/algo/spark/FeatureFunction.java
----------------------------------------------------------------------
diff --git a/lens-ml-lib/src/main/java/org/apache/lens/ml/algo/spark/FeatureFunction.java b/lens-ml-lib/src/main/java/org/apache/lens/ml/algo/spark/FeatureFunction.java
new file mode 100644
index 0000000..5e2ab49
--- /dev/null
+++ b/lens-ml-lib/src/main/java/org/apache/lens/ml/algo/spark/FeatureFunction.java
@@ -0,0 +1,40 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.lens.ml.algo.spark;
+
+import org.apache.hadoop.io.WritableComparable;
+import org.apache.hive.hcatalog.data.HCatRecord;
+import org.apache.spark.api.java.function.Function;
+import org.apache.spark.mllib.regression.LabeledPoint;
+
+import scala.Tuple2;
+
+/**
+ * Function to map an HCatRecord to a feature vector usable by MLLib.
+ */
+public abstract class FeatureFunction implements Function<Tuple2<WritableComparable, HCatRecord>, LabeledPoint> {
+
+  /*
+   * (non-Javadoc)
+   *
+   * @see org.apache.spark.api.java.function.Function#call(java.lang.Object)
+   */
+  @Override
+  public abstract LabeledPoint call(Tuple2<WritableComparable, HCatRecord> tuple) throws Exception;
+}

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/0f5ea4c7/lens-ml-lib/src/main/java/org/apache/lens/ml/algo/spark/FeatureValueMapper.java
----------------------------------------------------------------------
diff --git a/lens-ml-lib/src/main/java/org/apache/lens/ml/algo/spark/FeatureValueMapper.java b/lens-ml-lib/src/main/java/org/apache/lens/ml/algo/spark/FeatureValueMapper.java
new file mode 100644
index 0000000..28c8787
--- /dev/null
+++ b/lens-ml-lib/src/main/java/org/apache/lens/ml/algo/spark/FeatureValueMapper.java
@@ -0,0 +1,36 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.lens.ml.algo.spark;
+
+import java.io.Serializable;
+
+import org.apache.spark.api.java.function.Function;
+
+/**
+ * Map a feature value to a Double value usable by MLLib.
+ */
+public abstract class FeatureValueMapper implements Function<Object, Double>, Serializable {
+
+  /*
+   * (non-Javadoc)
+   *
+   * @see org.apache.spark.api.java.function.Function#call(java.lang.Object)
+   */
+  public abstract Double call(Object input);
+}

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/0f5ea4c7/lens-ml-lib/src/main/java/org/apache/lens/ml/algo/spark/HiveTableRDD.java
----------------------------------------------------------------------
diff --git a/lens-ml-lib/src/main/java/org/apache/lens/ml/algo/spark/HiveTableRDD.java b/lens-ml-lib/src/main/java/org/apache/lens/ml/algo/spark/HiveTableRDD.java
new file mode 100644
index 0000000..4960e1e
--- /dev/null
+++ b/lens-ml-lib/src/main/java/org/apache/lens/ml/algo/spark/HiveTableRDD.java
@@ -0,0 +1,63 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.lens.ml.algo.spark;
+
+import java.io.IOException;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.io.WritableComparable;
+import org.apache.hive.hcatalog.data.HCatRecord;
+import org.apache.hive.hcatalog.mapreduce.HCatInputFormat;
+import org.apache.spark.api.java.JavaPairRDD;
+import org.apache.spark.api.java.JavaSparkContext;
+
+/**
+ * Create a JavaRDD based on a Hive table using HCatInputFormat.
+ */
+public final class HiveTableRDD {
+  private HiveTableRDD() {
+  }
+
+  public static final Log LOG = LogFactory.getLog(HiveTableRDD.class);
+
+  /**
+   * Creates the hive table rdd.
+   *
+   * @param javaSparkContext the java spark context
+   * @param conf             the conf
+   * @param db               the db
+   * @param table            the table
+   * @param partitionFilter  the partition filter
+   * @return the java pair rdd
+   * @throws IOException Signals that an I/O exception has occurred.
+   */
+  public static JavaPairRDD<WritableComparable, HCatRecord> createHiveTableRDD(JavaSparkContext javaSparkContext,
+    Configuration conf, String db, String table, String partitionFilter) throws IOException {
+
+    HCatInputFormat.setInput(conf, db, table, partitionFilter);
+
+    JavaPairRDD<WritableComparable, HCatRecord> rdd = javaSparkContext.newAPIHadoopRDD(conf,
+      HCatInputFormat.class, // Input
+      WritableComparable.class, // input key class
+      HCatRecord.class); // input value class
+    return rdd;
+  }
+}


[14/50] [abbrv] incubator-lens git commit: LENS-468 : Connection leak in JDBCDriver.estimate path

Posted by am...@apache.org.
LENS-468 : Connection leak in JDBCDriver.estimate path


Project: http://git-wip-us.apache.org/repos/asf/incubator-lens/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-lens/commit/56c58130
Tree: http://git-wip-us.apache.org/repos/asf/incubator-lens/tree/56c58130
Diff: http://git-wip-us.apache.org/repos/asf/incubator-lens/diff/56c58130

Branch: refs/heads/current-release-line
Commit: 56c5813024759dfa4c143f1a940442390360f55b
Parents: adf47a6
Author: jdhok <ja...@inmobi.com>
Authored: Mon Mar 30 16:21:51 2015 +0530
Committer: jdhok <ja...@inmobi.com>
Committed: Mon Mar 30 16:21:51 2015 +0530

----------------------------------------------------------------------
 .../org/apache/lens/driver/jdbc/JDBCDriver.java | 30 +++++++++++++-------
 .../apache/lens/driver/jdbc/TestJdbcDriver.java | 19 +++++++++++++
 2 files changed, 39 insertions(+), 10 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/56c58130/lens-driver-jdbc/src/main/java/org/apache/lens/driver/jdbc/JDBCDriver.java
----------------------------------------------------------------------
diff --git a/lens-driver-jdbc/src/main/java/org/apache/lens/driver/jdbc/JDBCDriver.java b/lens-driver-jdbc/src/main/java/org/apache/lens/driver/jdbc/JDBCDriver.java
index 121b56b..92f7b96 100644
--- a/lens-driver-jdbc/src/main/java/org/apache/lens/driver/jdbc/JDBCDriver.java
+++ b/lens-driver-jdbc/src/main/java/org/apache/lens/driver/jdbc/JDBCDriver.java
@@ -636,13 +636,9 @@ public class JDBCDriver implements LensDriver {
         DEFAULT_JDBC_VALIDATE_THROUGH_PREPARE);
     if (validateThroughPrepare) {
       PreparedStatement stmt = null;
-      try {
-        // Estimate queries need to get connection from estimate pool to make sure
-        // we are not blocked by data queries.
-        stmt = prepareInternal(pContext, getEstimateConnection(), true, "validate-");
-      } catch (SQLException e) {
-        throw new LensException(e);
-      }
+      // Estimate queries need to get connection from estimate pool to make sure
+      // we are not blocked by data queries.
+      stmt = prepareInternal(pContext, true, true, "validate-");
       if (stmt != null) {
         try {
           stmt.close();
@@ -728,12 +724,23 @@ public class JDBCDriver implements LensDriver {
       throw new NullPointerException("Null driver query for " + pContext.getUserQuery());
     }
     checkConfigured();
-    return prepareInternal(pContext, getConnection(), false, "prepare-");
+    return prepareInternal(pContext, false, false, "prepare-");
   }
 
 
-  private PreparedStatement prepareInternal(AbstractQueryContext pContext, final Connection conn,
-                                            boolean checkConfigured, String metricCallStack) throws LensException {
+  /**
+   * Prepare statment on the database server
+   * @param pContext query context
+   * @param calledForEstimate set this to true if this call will use the estimate connection pool
+   * @param checkConfigured set this to true if this call needs to check whether JDBC driver is configured
+   * @param metricCallStack stack for metrics API
+   * @return prepared statement
+   * @throws LensException
+   */
+  private PreparedStatement prepareInternal(AbstractQueryContext pContext,
+                                            boolean calledForEstimate,
+                                            boolean checkConfigured,
+                                            String metricCallStack) throws LensException {
     // Caller might have already verified configured status and driver query, so we don't have
     // to do this check twice. Caller must set checkConfigured to false in that case.
     if (checkConfigured) {
@@ -750,8 +757,11 @@ public class JDBCDriver implements LensDriver {
     sqlRewriteGauge.markSuccess();
     MethodMetricsContext jdbcPrepareGauge = MethodMetricsFactory.createMethodGauge(pContext.getDriverConf(this), true,
       metricCallStack + JDBC_PREPARE_GAUGE);
+
     PreparedStatement stmt = null;
+    Connection conn = null;
     try {
+      conn = calledForEstimate ? getEstimateConnection() : getConnection();
       stmt = conn.prepareStatement(rewrittenQuery);
       if (stmt.getWarnings() != null) {
         throw new LensException(stmt.getWarnings());

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/56c58130/lens-driver-jdbc/src/test/java/org/apache/lens/driver/jdbc/TestJdbcDriver.java
----------------------------------------------------------------------
diff --git a/lens-driver-jdbc/src/test/java/org/apache/lens/driver/jdbc/TestJdbcDriver.java b/lens-driver-jdbc/src/test/java/org/apache/lens/driver/jdbc/TestJdbcDriver.java
index 550a468..09bc6a5 100644
--- a/lens-driver-jdbc/src/test/java/org/apache/lens/driver/jdbc/TestJdbcDriver.java
+++ b/lens-driver-jdbc/src/test/java/org/apache/lens/driver/jdbc/TestJdbcDriver.java
@@ -265,6 +265,25 @@ public class TestJdbcDriver {
     Assert.assertEquals(cost.getEstimatedExecTimeMillis(), 0);
     Assert.assertEquals(cost.getEstimatedResourceUsage(), 0.0);
     Assert.assertNotNull(ctx.getFinalDriverQuery(driver));
+
+    // Test connection leak for estimate
+    final int maxEstimateConnections =
+      driver.getEstimateConnectionConf().getInt(JDBCDriverConfConstants.JDBC_POOL_MAX_SIZE, 50);
+    for (int i = 0; i < maxEstimateConnections + 10; i++) {
+      try {
+        LOG.info("Iteration#" + (i + 1));
+        String query = i > maxEstimateConnections ? "SELECT * FROM estimate_test" : "CREATE TABLE FOO(ID INT)";
+        ExplainQueryContext context = createExplainContext(query, baseConf);
+        cost = driver.estimate(context);
+      } catch (LensException exc) {
+        Throwable th = exc.getCause();
+        while (th != null) {
+          assertFalse(th instanceof SQLException);
+          th = th.getCause();
+        }
+      }
+    }
+
   }
 
   /**


[05/50] [abbrv] incubator-lens git commit: LENS-334 : Use different port in tests than 10000 (Raju Bairishetti via amareshwari)

Posted by am...@apache.org.
LENS-334 : Use different port in tests than 10000 (Raju Bairishetti via amareshwari)


Project: http://git-wip-us.apache.org/repos/asf/incubator-lens/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-lens/commit/46cf8153
Tree: http://git-wip-us.apache.org/repos/asf/incubator-lens/tree/46cf8153
Diff: http://git-wip-us.apache.org/repos/asf/incubator-lens/diff/46cf8153

Branch: refs/heads/current-release-line
Commit: 46cf8153dc30cefd4addc2c7f302b5ad53a57bea
Parents: 057872b
Author: Raju Bairishetti <ra...@gmail.com>
Authored: Wed Mar 25 12:52:50 2015 +0530
Committer: Amareshwari Sriramadasu <am...@apache.org>
Committed: Wed Mar 25 12:52:50 2015 +0530

----------------------------------------------------------------------
 .../src/test/java/org/apache/lens/cli/LensCliApplicationTest.java  | 2 +-
 lens-cli/src/test/resources/lens-client-site.xml                   | 2 +-
 .../src/test/java/org/apache/lens/client/TestLensClient.java       | 2 +-
 lens-ml-lib/src/test/java/org/apache/lens/ml/TestMLRunner.java     | 2 +-
 4 files changed, 4 insertions(+), 4 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/46cf8153/lens-cli/src/test/java/org/apache/lens/cli/LensCliApplicationTest.java
----------------------------------------------------------------------
diff --git a/lens-cli/src/test/java/org/apache/lens/cli/LensCliApplicationTest.java b/lens-cli/src/test/java/org/apache/lens/cli/LensCliApplicationTest.java
index dca19d4..2de0cad 100644
--- a/lens-cli/src/test/java/org/apache/lens/cli/LensCliApplicationTest.java
+++ b/lens-cli/src/test/java/org/apache/lens/cli/LensCliApplicationTest.java
@@ -34,7 +34,7 @@ public class LensCliApplicationTest extends LensAllApplicationJerseyTest {
 
   @Override
   protected int getTestPort() {
-    return 10001;
+    return 10057;
   }
 
   @Override

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/46cf8153/lens-cli/src/test/resources/lens-client-site.xml
----------------------------------------------------------------------
diff --git a/lens-cli/src/test/resources/lens-client-site.xml b/lens-cli/src/test/resources/lens-client-site.xml
index 2f13700..e500bda 100644
--- a/lens-cli/src/test/resources/lens-client-site.xml
+++ b/lens-cli/src/test/resources/lens-client-site.xml
@@ -25,7 +25,7 @@
 
   <property>
     <name>lens.server.base.url</name>
-    <value>http://localhost:10001/lensapi</value>
+    <value>http://localhost:10057/lensapi</value>
     <description>The base url for the lens server</description>
   </property>
 

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/46cf8153/lens-client/src/test/java/org/apache/lens/client/TestLensClient.java
----------------------------------------------------------------------
diff --git a/lens-client/src/test/java/org/apache/lens/client/TestLensClient.java b/lens-client/src/test/java/org/apache/lens/client/TestLensClient.java
index fd29466..81a536e 100644
--- a/lens-client/src/test/java/org/apache/lens/client/TestLensClient.java
+++ b/lens-client/src/test/java/org/apache/lens/client/TestLensClient.java
@@ -41,7 +41,7 @@ public class TestLensClient extends LensAllApplicationJerseyTest {
 
   @Override
   protected int getTestPort() {
-    return 10000;
+    return 10056;
   }
 
   @Override

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/46cf8153/lens-ml-lib/src/test/java/org/apache/lens/ml/TestMLRunner.java
----------------------------------------------------------------------
diff --git a/lens-ml-lib/src/test/java/org/apache/lens/ml/TestMLRunner.java b/lens-ml-lib/src/test/java/org/apache/lens/ml/TestMLRunner.java
index d928ff1..d7f2f8f 100644
--- a/lens-ml-lib/src/test/java/org/apache/lens/ml/TestMLRunner.java
+++ b/lens-ml-lib/src/test/java/org/apache/lens/ml/TestMLRunner.java
@@ -58,7 +58,7 @@ public class TestMLRunner extends LensJerseyTest {
 
   @Override
   protected int getTestPort() {
-    return 10000;
+    return 10058;
   }
 
   @Override


[50/50] [abbrv] incubator-lens git commit: Merge branch 'current-release-line' of https://git-wip-us.apache.org/repos/asf/incubator-lens

Posted by am...@apache.org.
Merge branch 'current-release-line' of https://git-wip-us.apache.org/repos/asf/incubator-lens

Conflicts:
	checkstyle/pom.xml
	lens-api/pom.xml
	lens-cli/pom.xml
	lens-client/pom.xml
	lens-cube/pom.xml
	lens-dist/pom.xml
	lens-dist/src/main/assembly/bin-dist.xml
	lens-docker/lens-test/Dockerfile
	lens-driver-hive/pom.xml
	lens-driver-jdbc/pom.xml
	lens-examples/pom.xml
	lens-ml-dist/LICENSE.txt
	lens-ml-dist/src/main/assembly/ml-dist.xml
	lens-ml-dist/src/site/apt/index.apt
	lens-ml-lib/pom.xml
	lens-ml-lib/src/main/java/org/apache/lens/client/LensMLClient.java
	lens-ml-lib/src/main/java/org/apache/lens/ml/AlgoArgParser.java
	lens-ml-lib/src/main/java/org/apache/lens/ml/AlgoParam.java
	lens-ml-lib/src/main/java/org/apache/lens/ml/MLAlgo.java
	lens-ml-lib/src/main/java/org/apache/lens/ml/MLTrainer.java
	lens-ml-lib/src/main/java/org/apache/lens/ml/TrainerArgParser.java
	lens-ml-lib/src/main/java/org/apache/lens/ml/TrainerParam.java
	lens-ml-lib/src/main/java/org/apache/lens/ml/algo/api/AlgoParam.java
	lens-ml-lib/src/main/java/org/apache/lens/ml/algo/api/MLAlgo.java
	lens-ml-lib/src/main/java/org/apache/lens/ml/algo/lib/AlgoArgParser.java
	lens-ml-lib/src/main/java/org/apache/lens/ml/algo/spark/BaseSparkAlgo.java
	lens-ml-lib/src/main/java/org/apache/lens/ml/algo/spark/SparkMLDriver.java
	lens-ml-lib/src/main/java/org/apache/lens/ml/algo/spark/dt/DecisionTreeAlgo.java
	lens-ml-lib/src/main/java/org/apache/lens/ml/algo/spark/kmeans/KMeansAlgo.java
	lens-ml-lib/src/main/java/org/apache/lens/ml/algo/spark/lr/LogisticRegressionAlgo.java
	lens-ml-lib/src/main/java/org/apache/lens/ml/algo/spark/nb/NaiveBayesAlgo.java
	lens-ml-lib/src/main/java/org/apache/lens/ml/algo/spark/svm/SVMAlgo.java
	lens-ml-lib/src/main/java/org/apache/lens/ml/impl/LensMLImpl.java
	lens-ml-lib/src/main/java/org/apache/lens/ml/spark/algos/BaseSparkAlgo.java
	lens-ml-lib/src/main/java/org/apache/lens/ml/spark/algos/DecisionTreeAlgo.java
	lens-ml-lib/src/main/java/org/apache/lens/ml/spark/algos/KMeansAlgo.java
	lens-ml-lib/src/main/java/org/apache/lens/ml/spark/algos/LogisticRegressionAlgo.java
	lens-ml-lib/src/main/java/org/apache/lens/ml/spark/algos/NaiveBayesAlgo.java
	lens-ml-lib/src/main/java/org/apache/lens/ml/spark/algos/SVMAlgo.java
	lens-ml-lib/src/main/java/org/apache/lens/ml/spark/trainers/BaseSparkTrainer.java
	lens-ml-lib/src/main/java/org/apache/lens/ml/spark/trainers/DecisionTreeTrainer.java
	lens-ml-lib/src/main/java/org/apache/lens/ml/spark/trainers/KMeansTrainer.java
	lens-ml-lib/src/main/java/org/apache/lens/ml/spark/trainers/LogisticRegressionTrainer.java
	lens-ml-lib/src/main/java/org/apache/lens/ml/spark/trainers/NaiveBayesTrainer.java
	lens-ml-lib/src/main/java/org/apache/lens/ml/spark/trainers/SVMTrainer.java
	lens-ml-lib/src/test/java/org/apache/lens/ml/TestMLResource.java
	lens-query-lib/pom.xml
	lens-regression/pom.xml
	lens-server-api/pom.xml
	lens-server-api/src/main/java/org/apache/lens/server/api/LensConfConstants.java
	lens-server/pom.xml
	lens-server/src/main/java/org/apache/lens/server/LensServices.java
	lens-server/src/main/java/org/apache/lens/server/metastore/JAXBUtils.java
	lens-server/src/test/java/org/apache/lens/server/metastore/TestMetastoreService.java
	lens-storage-db/pom.xml
	pom.xml
	src/site/apt/admin/config.apt
	src/site/apt/admin/monitoring.apt
	tools/conf-pseudo-distr/server/lens-site.xml


Project: http://git-wip-us.apache.org/repos/asf/incubator-lens/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-lens/commit/b10d15be
Tree: http://git-wip-us.apache.org/repos/asf/incubator-lens/tree/b10d15be
Diff: http://git-wip-us.apache.org/repos/asf/incubator-lens/diff/b10d15be

Branch: refs/heads/current-release-line
Commit: b10d15be5340edd01ab4227fffd7ed6646bbb9da
Parents: 28dacf6 f90a94d
Author: Amareshwari Sriramadasu <am...@apache.org>
Authored: Wed Apr 15 14:46:42 2015 -0500
Committer: Amareshwari Sriramadasu <am...@apache.org>
Committed: Wed Apr 15 14:46:42 2015 -0500

----------------------------------------------------------------------

----------------------------------------------------------------------



[49/50] [abbrv] incubator-lens git commit: LENS-441 : Fix checkstyle issues in tests (Rajat Khandelwal via amareshwari)

Posted by am...@apache.org.
LENS-441 : Fix checkstyle issues in tests (Rajat Khandelwal via amareshwari)


Project: http://git-wip-us.apache.org/repos/asf/incubator-lens/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-lens/commit/28dacf6b
Tree: http://git-wip-us.apache.org/repos/asf/incubator-lens/tree/28dacf6b
Diff: http://git-wip-us.apache.org/repos/asf/incubator-lens/diff/28dacf6b

Branch: refs/heads/current-release-line
Commit: 28dacf6b840fd1e3bedf936c5aa96c7091f4f6f7
Parents: 309f62c
Author: Rajat Khandelwal <pr...@apache.org>
Authored: Wed Apr 15 07:10:22 2015 -0500
Committer: Amareshwari Sriramadasu <am...@apache.org>
Committed: Wed Apr 15 07:10:22 2015 -0500

----------------------------------------------------------------------
 .../java/org/apache/lens/cli/TestLensDimensionTableCommands.java   | 2 ++
 .../src/test/java/org/apache/lens/cli/TestLensFactCommands.java    | 2 ++
 2 files changed, 4 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/28dacf6b/lens-cli/src/test/java/org/apache/lens/cli/TestLensDimensionTableCommands.java
----------------------------------------------------------------------
diff --git a/lens-cli/src/test/java/org/apache/lens/cli/TestLensDimensionTableCommands.java b/lens-cli/src/test/java/org/apache/lens/cli/TestLensDimensionTableCommands.java
index 5d8d453..3cbeed1 100644
--- a/lens-cli/src/test/java/org/apache/lens/cli/TestLensDimensionTableCommands.java
+++ b/lens-cli/src/test/java/org/apache/lens/cli/TestLensDimensionTableCommands.java
@@ -125,11 +125,13 @@ public class TestLensDimensionTableCommands extends LensCliApplicationTest {
       Assert.assertEquals(command.showDimensionTables("blah"), dimList);
       Assert.fail();
     } catch (NotFoundException e) {
+      LOG.info("blah is not a table", e);
     }
     try {
       Assert.assertEquals(command.showDimensionTables("dim_table2"), dimList);
       Assert.fail();
     } catch (NotFoundException e) {
+      LOG.info("dim_table2 is a table, but not a dimension", e);
     }
     Assert.assertTrue(dimList.contains(tableName), "dim_table table should be found");
   }

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/28dacf6b/lens-cli/src/test/java/org/apache/lens/cli/TestLensFactCommands.java
----------------------------------------------------------------------
diff --git a/lens-cli/src/test/java/org/apache/lens/cli/TestLensFactCommands.java b/lens-cli/src/test/java/org/apache/lens/cli/TestLensFactCommands.java
index b906f18..02f5ac7 100644
--- a/lens-cli/src/test/java/org/apache/lens/cli/TestLensFactCommands.java
+++ b/lens-cli/src/test/java/org/apache/lens/cli/TestLensFactCommands.java
@@ -117,11 +117,13 @@ public class TestLensFactCommands extends LensCliApplicationTest {
       Assert.assertEquals(command.showFacts("blah"), factList);
       Assert.fail();
     } catch (NotFoundException e) {
+      LOG.info("blah is not a table", e);
     }
     try {
       Assert.assertEquals(command.showFacts("fact1"), factList);
       Assert.fail();
     } catch (NotFoundException e) {
+      LOG.info("fact1 is a table, but not a cube table", e);
     }
     Assert.assertEquals("fact1", factList, "Fact1 table should be found");
   }


[13/50] [abbrv] incubator-lens git commit: LENS-349 : Hive driver jars should get refreshed with db switches

Posted by am...@apache.org.
LENS-349 : Hive driver jars should get refreshed with db switches


Project: http://git-wip-us.apache.org/repos/asf/incubator-lens/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-lens/commit/adf47a64
Tree: http://git-wip-us.apache.org/repos/asf/incubator-lens/tree/adf47a64
Diff: http://git-wip-us.apache.org/repos/asf/incubator-lens/diff/adf47a64

Branch: refs/heads/current-release-line
Commit: adf47a647d80f1b5861aa95141e631beed461f79
Parents: 42ffb4e
Author: jdhok <ja...@inmobi.com>
Authored: Mon Mar 30 16:12:18 2015 +0530
Committer: jdhok <ja...@inmobi.com>
Committed: Mon Mar 30 16:12:18 2015 +0530

----------------------------------------------------------------------
 .../org/apache/lens/client/TestLensClient.java  |   7 +-
 .../org/apache/lens/driver/hive/HiveDriver.java | 117 +++++++++------
 .../apache/lens/driver/hive/TestHiveDriver.java |   6 +-
 lens-driver-jdbc/testdata/DatabaseJarSerde.java |  42 +-----
 .../server/api/query/AbstractQueryContext.java  |  19 +++
 .../server/query/QueryExecutionServiceImpl.java | 150 ++++++++++---------
 .../lens/server/session/LensSessionImpl.java    |  75 +++++++++-
 .../org/apache/lens/server/LensJerseyTest.java  |   3 +-
 .../org/apache/lens/server/LensTestUtil.java    |   2 +-
 .../apache/lens/server/TestServerRestart.java   |   4 +-
 .../lens/server/query/TestQueryService.java     |  52 +++++--
 lens-server/testdata/DatabaseJarSerde.java      |  42 +-----
 lens-server/testdata/serde.jar                  | Bin 1369 -> 1033 bytes
 lens-server/testdata/test.jar                   | Bin 697 -> 726 bytes
 14 files changed, 305 insertions(+), 214 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/adf47a64/lens-client/src/test/java/org/apache/lens/client/TestLensClient.java
----------------------------------------------------------------------
diff --git a/lens-client/src/test/java/org/apache/lens/client/TestLensClient.java b/lens-client/src/test/java/org/apache/lens/client/TestLensClient.java
index 81a536e..24f3473 100644
--- a/lens-client/src/test/java/org/apache/lens/client/TestLensClient.java
+++ b/lens-client/src/test/java/org/apache/lens/client/TestLensClient.java
@@ -19,7 +19,6 @@
 package org.apache.lens.client;
 
 import java.net.URI;
-import java.util.List;
 
 import javax.ws.rs.core.UriBuilder;
 
@@ -77,11 +76,9 @@ public class TestLensClient extends LensAllApplicationJerseyTest {
     LensClient client = new LensClient(lensClientConfig);
     Assert.assertEquals(client.getCurrentDatabae(), TEST_DB,
       "current database");
-    List<String> dbs = client.getAllDatabases();
-    Assert.assertEquals(dbs.size(), 3, "no of databases");
     client.createDatabase("testclientdb", true);
-    Assert.assertEquals(client.getAllDatabases().size(), 4, " no of databases");
+    Assert.assertTrue(client.getAllDatabases().contains("testclientdb"));
     client.dropDatabase("testclientdb");
-    Assert.assertEquals(client.getAllDatabases().size(), 3, " no of databases");
+    Assert.assertFalse(client.getAllDatabases().contains("testclientdb"));
   }
 }

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/adf47a64/lens-driver-hive/src/main/java/org/apache/lens/driver/hive/HiveDriver.java
----------------------------------------------------------------------
diff --git a/lens-driver-hive/src/main/java/org/apache/lens/driver/hive/HiveDriver.java b/lens-driver-hive/src/main/java/org/apache/lens/driver/hive/HiveDriver.java
index 218dc53..11ab47a 100644
--- a/lens-driver-hive/src/main/java/org/apache/lens/driver/hive/HiveDriver.java
+++ b/lens-driver-hive/src/main/java/org/apache/lens/driver/hive/HiveDriver.java
@@ -95,6 +95,7 @@ public class HiveDriver implements LensDriver {
   public static final float MONTHLY_PARTITION_WEIGHT_DEFAULT = 0.5f;
   public static final float DAILY_PARTITION_WEIGHT_DEFAULT = 0.75f;
   public static final float HOURLY_PARTITION_WEIGHT_DEFAULT = 1.0f;
+  public static final String SESSION_KEY_DELIMITER = ".";
 
   /** The driver conf- which will merged with query conf */
   private Configuration driverConf;
@@ -138,24 +139,33 @@ public class HiveDriver implements LensDriver {
   // package-local. Test case can change.
   boolean whetherCalculatePriority;
 
+
+  private String sessionDbKey(String sessionHandle, String database) {
+    return sessionHandle + SESSION_KEY_DELIMITER + database;
+  }
+
   /**
    * Return true if resources have been added to this Hive session
-   * @param sessionHandle
-   * @return
+   * @param sessionHandle lens session identifier
+   * @param database lens database
+   * @return true if resources have been already added to this session + db pair
    */
-  public boolean areRsourcesAddedForSession(String sessionHandle) {
-    SessionHandle hiveSession = lensToHiveSession.get(sessionHandle);
+  public boolean areDBResourcesAddedForSession(String sessionHandle, String database) {
+    String key = sessionDbKey(sessionHandle, database);
+    SessionHandle hiveSession = lensToHiveSession.get(key);
     return hiveSession != null
       && resourcesAddedForSession.containsKey(hiveSession)
       && resourcesAddedForSession.get(hiveSession);
   }
 
   /**
-   * Tell Hive driver that resources have been added for this session
-   * @param sessionHandle
+   * Tell Hive driver that resources have been added for this session and for the given database
+   * @param sessionHandle lens session identifier
+   * @param database lens database
    */
-  public void setResourcesAddedForSession(String sessionHandle) {
-    resourcesAddedForSession.put(lensToHiveSession.get(sessionHandle), Boolean.TRUE);
+  public void setResourcesAddedForSession(String sessionHandle, String database) {
+    SessionHandle hiveSession = lensToHiveSession.get(sessionDbKey(sessionHandle, database));
+    resourcesAddedForSession.put(hiveSession, Boolean.TRUE);
   }
 
   /**
@@ -383,6 +393,7 @@ public class HiveDriver implements LensDriver {
 
     QueryContext explainQueryCtx = QueryContext.createContextWithSingleDriver(explainQuery,
       explainCtx.getSubmittedUser(), new LensConf(), explainConf, this, explainCtx.getLensSessionIdentifier(), false);
+
     // Get result set of explain
     HiveInMemoryResultSet inMemoryResultSet = (HiveInMemoryResultSet) execute(explainQueryCtx);
     List<String> explainOutput = new ArrayList<String>();
@@ -709,14 +720,13 @@ public class HiveDriver implements LensDriver {
     // Close this driver and release all resources
     sessionLock.lock();
     try {
-      for (String lensSession : lensToHiveSession.keySet()) {
+      for (String lensSessionDbKey : lensToHiveSession.keySet()) {
         try {
-          getClient().closeSession(lensToHiveSession.get(lensSession));
+          getClient().closeSession(lensToHiveSession.get(lensSessionDbKey));
         } catch (Exception e) {
           checkInvalidSession(e);
-          LOG.warn(
-            "Error closing session for lens session: " + lensSession + ", hive session: "
-              + lensToHiveSession.get(lensSession), e);
+          LOG.warn("Error closing session for lens session: " + lensSessionDbKey + ", hive session: "
+              + lensToHiveSession.get(lensSessionDbKey), e);
         }
       }
       lensToHiveSession.clear();
@@ -859,6 +869,7 @@ public class HiveDriver implements LensDriver {
     sessionLock.lock();
     try {
       String lensSession = ctx.getLensSessionIdentifier();
+      String sessionDbKey = sessionDbKey(lensSession, ctx.getDatabase());
       if (lensSession == null && SessionState.get() != null) {
         lensSession = SessionState.get().getSessionId();
       }
@@ -868,12 +879,12 @@ public class HiveDriver implements LensDriver {
       }
 
       SessionHandle hiveSession;
-      if (!lensToHiveSession.containsKey(lensSession)) {
+      if (!lensToHiveSession.containsKey(sessionDbKey)) {
         try {
           hiveSession = getClient().openSession(ctx.getClusterUser(), "");
-          lensToHiveSession.put(lensSession, hiveSession);
-          LOG.info("New hive session for user: " + ctx.getClusterUser() + ", lens session: " + lensSession
-            + " session handle: " + hiveSession.getHandleIdentifier());
+          lensToHiveSession.put(sessionDbKey, hiveSession);
+          LOG.info("New hive session for user: " + ctx.getClusterUser() + ", lens session: " + sessionDbKey
+            + " hive session handle: " + hiveSession.getHandleIdentifier());
           for (LensEventListener<DriverEvent> eventListener : driverListeners) {
             try {
               eventListener.onEvent(new DriverSessionStarted(System.currentTimeMillis(), this, lensSession, hiveSession
@@ -886,7 +897,7 @@ public class HiveDriver implements LensDriver {
           throw new LensException(e);
         }
       } else {
-        hiveSession = lensToHiveSession.get(lensSession);
+        hiveSession = lensToHiveSession.get(sessionDbKey);
       }
       return hiveSession;
     } finally {
@@ -1097,24 +1108,39 @@ public class HiveDriver implements LensDriver {
       lensSession = SessionState.get().getSessionId();
     }
 
-    SessionHandle session = lensToHiveSession.get(lensSession);
-
-    if (session == null || lensSession == null) {
+    if (lensSession == null) {
       return;
     }
 
-    if (isSessionInvalid(exc, session)) {
-      // We have to expire previous session
-      LOG.info("Hive server session " + session + " for lens session " + lensSession + " has become invalid");
-      sessionLock.lock();
-      try {
-        // We should close all connections and clear the session map since
-        // most likely all sessions are gone
-        closeAllConnections();
-        lensToHiveSession.clear();
-        LOG.info("Cleared all sessions");
-      } finally {
-        sessionLock.unlock();
+    // Get all hive sessions corresponding to the lens session and check if
+    // any of those sessions have become invalid
+    List<String> sessionKeys = new ArrayList<String>(lensToHiveSession.keySet());
+    List<SessionHandle> hiveSessionsToCheck = new ArrayList<SessionHandle>();
+    sessionLock.lock();
+    try {
+      for (String key : sessionKeys) {
+        if (key.startsWith(lensSession)) {
+          hiveSessionsToCheck.add(lensToHiveSession.get(key));
+        }
+      }
+    } finally {
+      sessionLock.unlock();
+    }
+
+    for (SessionHandle session : hiveSessionsToCheck) {
+      if (isSessionInvalid(exc, session)) {
+        // We have to expire previous session
+        LOG.info("Hive server session " + session + " for lens session " + lensSession + " has become invalid");
+        sessionLock.lock();
+        try {
+          // We should close all connections and clear the session map since
+          // most likely all sessions are gone
+          closeAllConnections();
+          lensToHiveSession.clear();
+          LOG.info("Cleared all sessions");
+        } finally {
+          sessionLock.unlock();
+        }
       }
     }
   }
@@ -1166,19 +1192,24 @@ public class HiveDriver implements LensDriver {
    * @param sessionHandle the session handle
    */
   public void closeSession(LensSessionHandle sessionHandle) {
+    String sessionIdentifier = sessionHandle.getPublicId().toString();
     sessionLock.lock();
     try {
-      SessionHandle hiveSession = lensToHiveSession.remove(sessionHandle.getPublicId().toString());
-      if (hiveSession != null) {
-        try {
-          getClient().closeSession(hiveSession);
-          LOG.info("Closed Hive session " + hiveSession.getHandleIdentifier() + " for lens session "
-            + sessionHandle.getPublicId());
-        } catch (Exception e) {
-          LOG.error("Error closing hive session " + hiveSession.getHandleIdentifier() + " for lens session "
-            + sessionHandle.getPublicId(), e);
+      for (String sessionDbKey : new ArrayList<String>(lensToHiveSession.keySet())) {
+        if (sessionDbKey.startsWith(sessionIdentifier)) {
+          SessionHandle hiveSession = lensToHiveSession.remove(sessionDbKey);
+          if (hiveSession != null) {
+            try {
+              getClient().closeSession(hiveSession);
+              LOG.info("Closed Hive session " + hiveSession.getHandleIdentifier() + " for lens session "
+                + sessionDbKey);
+            } catch (Exception e) {
+              LOG.error("Error closing hive session " + hiveSession.getHandleIdentifier()
+                + " for lens session " + sessionDbKey, e);
+            }
+            resourcesAddedForSession.remove(hiveSession);
+          }
         }
-        resourcesAddedForSession.remove(hiveSession);
       }
     } finally {
       sessionLock.unlock();

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/adf47a64/lens-driver-hive/src/test/java/org/apache/lens/driver/hive/TestHiveDriver.java
----------------------------------------------------------------------
diff --git a/lens-driver-hive/src/test/java/org/apache/lens/driver/hive/TestHiveDriver.java b/lens-driver-hive/src/test/java/org/apache/lens/driver/hive/TestHiveDriver.java
index b16c346..089c496 100644
--- a/lens-driver-hive/src/test/java/org/apache/lens/driver/hive/TestHiveDriver.java
+++ b/lens-driver-hive/src/test/java/org/apache/lens/driver/hive/TestHiveDriver.java
@@ -661,8 +661,10 @@ public class TestHiveDriver {
    */
   @Test
   public void testExplain() throws Exception {
-    createTestTable("test_explain");
     SessionState.setCurrentSessionState(ss);
+    SessionState.get().setCurrentDatabase(dataBase);
+    createTestTable("test_explain");
+
     DriverQueryPlan plan = driver.explain(createExplainContext("SELECT ID FROM test_explain", conf));
     assertTrue(plan instanceof HiveQueryPlan);
     assertEquals(plan.getTableWeight(dataBase + ".test_explain"), 500.0);
@@ -671,6 +673,7 @@ public class TestHiveDriver {
     // test execute prepare
     PreparedQueryContext pctx = new PreparedQueryContext("SELECT ID FROM test_explain", null, conf, drivers);
     pctx.setSelectedDriver(driver);
+    pctx.setLensSessionIdentifier(sessionid);
 
     SessionState.setCurrentSessionState(ss);
     HiveConf inConf = new HiveConf(conf);
@@ -774,6 +777,7 @@ public class TestHiveDriver {
     String query2 = "SELECT DISTINCT ID FROM explain_test_1";
     PreparedQueryContext pctx = new PreparedQueryContext(query2, null, conf, drivers);
     pctx.setSelectedDriver(driver);
+    pctx.setLensSessionIdentifier(sessionid);
     DriverQueryPlan plan2 = driver.explainAndPrepare(pctx);
     // assertNotNull(plan2.getResultDestination());
     Assert.assertEquals(0, driver.getHiveHandleSize());

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/adf47a64/lens-driver-jdbc/testdata/DatabaseJarSerde.java
----------------------------------------------------------------------
diff --git a/lens-driver-jdbc/testdata/DatabaseJarSerde.java b/lens-driver-jdbc/testdata/DatabaseJarSerde.java
index 03caff3..4fd98c9 100644
--- a/lens-driver-jdbc/testdata/DatabaseJarSerde.java
+++ b/lens-driver-jdbc/testdata/DatabaseJarSerde.java
@@ -17,52 +17,20 @@
  * under the License.
  */
 
-import java.util.Properties;
-
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hive.serde2.AbstractSerDe;
 import org.apache.hadoop.hive.serde2.SerDeException;
-import org.apache.hadoop.hive.serde2.SerDeStats;
-import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
-import org.apache.hadoop.io.Writable;
+import org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe;
 
 /**
  * Simple serde used during test of database jar
  */
-public class DatabaseJarSerde extends AbstractSerDe {
+public class DatabaseJarSerde extends LazySimpleSerDe {
   // This should load class from test.jar
   public static final ClassLoaderTestClass testClassInstance = new ClassLoaderTestClass();
   static {
     System.out.println("@@@@ SUCCESSFULLY_LOADED CLASS " + DatabaseJarSerde.class);
   }
 
-  @Override
-  public void initialize(Configuration configuration, Properties properties) throws SerDeException {
-
-  }
-
-  @Override
-  public Class<? extends Writable> getSerializedClass() {
-    return null;
-  }
-
-  @Override
-  public Writable serialize(Object o, ObjectInspector objectInspector) throws SerDeException {
-    return null;
-  }
-
-  @Override
-  public SerDeStats getSerDeStats() {
-    return null;
-  }
-
-  @Override
-  public Object deserialize(Writable writable) throws SerDeException {
-    return null;
-  }
-
-  @Override
-  public ObjectInspector getObjectInspector() throws SerDeException {
-    return null;
+  public DatabaseJarSerde() throws SerDeException {
+    super();
   }
-}
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/adf47a64/lens-server-api/src/main/java/org/apache/lens/server/api/query/AbstractQueryContext.java
----------------------------------------------------------------------
diff --git a/lens-server-api/src/main/java/org/apache/lens/server/api/query/AbstractQueryContext.java b/lens-server-api/src/main/java/org/apache/lens/server/api/query/AbstractQueryContext.java
index 6799e0c..225eb56 100644
--- a/lens-server-api/src/main/java/org/apache/lens/server/api/query/AbstractQueryContext.java
+++ b/lens-server-api/src/main/java/org/apache/lens/server/api/query/AbstractQueryContext.java
@@ -38,6 +38,7 @@ import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.ql.session.SessionState;
 
 import lombok.Getter;
 import lombok.Setter;
@@ -112,6 +113,8 @@ public abstract class AbstractQueryContext implements Serializable {
   @Setter
   private boolean olapQuery = false;
 
+  private final String database;
+
   /** Lock used to synchronize HiveConf access */
   private transient Lock hiveConfLock = new ReentrantLock();
 
@@ -133,6 +136,14 @@ public abstract class AbstractQueryContext implements Serializable {
       this.selectedDriverQuery = query;
       setSelectedDriver(drivers.iterator().next());
     }
+
+    // If this is created under an 'acquire' current db would be set
+    if (SessionState.get() != null) {
+      String currDb = SessionState.get().getCurrentDatabase();
+      database = currDb == null ? "default" : currDb;
+    } else {
+      database = "default";
+    }
   }
 
   // called after the object is constructed from serialized object
@@ -419,6 +430,14 @@ public abstract class AbstractQueryContext implements Serializable {
     return this.getUserQuery();
   }
 
+  /**
+   * Returns database set while launching query
+   * @return
+   */
+  public String getDatabase() {
+    return database == null ? "default" : database;
+  }
+
   public void clearTransientStateAfterLaunch() {
     driverContext.clearTransientStateAfterLaunch();
   }

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/adf47a64/lens-server/src/main/java/org/apache/lens/server/query/QueryExecutionServiceImpl.java
----------------------------------------------------------------------
diff --git a/lens-server/src/main/java/org/apache/lens/server/query/QueryExecutionServiceImpl.java b/lens-server/src/main/java/org/apache/lens/server/query/QueryExecutionServiceImpl.java
index ea2da14..3e0e0db 100644
--- a/lens-server/src/main/java/org/apache/lens/server/query/QueryExecutionServiceImpl.java
+++ b/lens-server/src/main/java/org/apache/lens/server/query/QueryExecutionServiceImpl.java
@@ -18,6 +18,8 @@
  */
 package org.apache.lens.server.query;
 
+import static org.apache.lens.server.session.LensSessionImpl.ResourceEntry;
+
 import java.io.*;
 import java.net.URI;
 import java.net.URISyntaxException;
@@ -52,6 +54,7 @@ import org.apache.lens.server.session.LensSessionImpl;
 import org.apache.lens.server.stats.StatisticsService;
 import org.apache.lens.server.util.UtilityMethods;
 
+import org.apache.commons.collections.CollectionUtils;
 import org.apache.commons.lang.StringUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
@@ -473,7 +476,7 @@ public class QueryExecutionServiceImpl extends LensService implements QueryExecu
                   LOG.info("Submitting to already selected driver");
                 }
                 // Check if we need to pass session's effective resources to selected driver
-                maybeAddSessionResourcesToDriver(ctx);
+                addSessionResourcesToDriver(ctx);
                 ctx.getSelectedDriver().executeAsync(ctx);
               } catch (Exception e) {
                 LOG.error("Error launching query " + ctx.getQueryHandle(), e);
@@ -1322,7 +1325,7 @@ public class QueryExecutionServiceImpl extends LensService implements QueryExecu
       acquire(sessionHandle);
       prepared = prepareQuery(sessionHandle, query, lensConf, SubmitOp.EXPLAIN_AND_PREPARE);
       prepared.setQueryName(queryName);
-      maybeAddSessionResourcesToDriver(prepared);
+      addSessionResourcesToDriver(prepared);
       QueryPlan plan = prepared.getSelectedDriver().explainAndPrepare(prepared).toQueryPlan();
       plan.setPrepareHandle(prepared.getPrepareHandle());
       return plan;
@@ -2016,7 +2019,7 @@ public class QueryExecutionServiceImpl extends LensService implements QueryExecu
       explainQueryContext.setLensSessionIdentifier(sessionHandle.getPublicId().toString());
       accept(query, qconf, SubmitOp.EXPLAIN);
       rewriteAndSelect(explainQueryContext);
-      maybeAddSessionResourcesToDriver(explainQueryContext);
+      addSessionResourcesToDriver(explainQueryContext);
       return explainQueryContext.getSelectedDriver().explain(explainQueryContext).toQueryPlan();
     } catch (LensException e) {
       LOG.error("Error during explain :", e);
@@ -2034,26 +2037,6 @@ public class QueryExecutionServiceImpl extends LensService implements QueryExecu
     }
   }
 
-  /*
-   * (non-Javadoc)
-   *
-   * @see org.apache.lens.server.LensService#addResource(org.apache.lens.api.LensSessionHandle, java.lang.String,
-   * java.lang.String)
-   */
-  public void addResource(LensSessionHandle sessionHandle, String type, String path) throws LensException {
-    try {
-      acquire(sessionHandle);
-      String command = "add " + type.toLowerCase() + " " + path;
-      for (LensDriver driver : drivers.values()) {
-        if (driver instanceof HiveDriver) {
-          driver.execute(createResourceQuery(command, sessionHandle, driver));
-        }
-      }
-    } finally {
-      release(sessionHandle);
-    }
-  }
-
   /**
    * Creates the add/delete resource query.
    *
@@ -2342,9 +2325,9 @@ public class QueryExecutionServiceImpl extends LensService implements QueryExecu
       LensSessionImpl session = getSession(sessionHandle);
       acquire(sessionHandle);
       // Add resources for this session
-      List<LensSessionImpl.ResourceEntry> resources = session.getLensSessionPersistInfo().getResources();
+      List<ResourceEntry> resources = session.getLensSessionPersistInfo().getResources();
       if (resources != null && !resources.isEmpty()) {
-        for (LensSessionImpl.ResourceEntry resource : resources) {
+        for (ResourceEntry resource : resources) {
           LOG.info("Restoring resource " + resource + " for session " + lensSession);
           String command = "add " + resource.getType().toLowerCase() + " " + resource.getLocation();
           try {
@@ -2370,37 +2353,14 @@ public class QueryExecutionServiceImpl extends LensService implements QueryExecu
 
   /**
    * Add session's resources to selected driver if needed
-   * @param ctx QueryContext for executinf queries
+   * @param ctx the query context
    * @throws LensException
    */
-  protected void maybeAddSessionResourcesToDriver(final QueryContext ctx) throws LensException {
-    maybeAddSessionResourcesToDriver(ctx.getLensSessionIdentifier(), ctx.getSelectedDriver(),
-      ctx.getQueryHandle().toString());
-  }
+  protected void addSessionResourcesToDriver(final AbstractQueryContext ctx) {
+    LensDriver driver = ctx.getSelectedDriver();
+    String sessionIdentifier = ctx.getLensSessionIdentifier();
 
-  /**
-   * Add session's resources to selected driver if needed.
-   * @param ctx ExplainQueryContext for explain queries
-   * @throws LensException
-   */
-  protected void maybeAddSessionResourcesToDriver(final ExplainQueryContext ctx) throws LensException {
-    maybeAddSessionResourcesToDriver(ctx.getLensSessionIdentifier(), ctx.getSelectedDriver(),
-      ctx.getSelectedDriverQuery());
-  }
-
-  /**
-   * Add session's resources to selected driver if needed.
-   * @param ctx PreparedQueryContext for explainAndPrepare(Async) queries
-   * @throws LensException
-   */
-  protected void maybeAddSessionResourcesToDriver(final PreparedQueryContext ctx) throws LensException {
-    maybeAddSessionResourcesToDriver(ctx.getLensSessionIdentifier(), ctx.getSelectedDriver(),
-      ctx.getPrepareHandle().toString());
-  }
-
-  private void maybeAddSessionResourcesToDriver(String sessionIdentifier, LensDriver driver, String queryHandle)
-    throws LensException {
-    if (!(driver instanceof HiveDriver) || sessionIdentifier == null || sessionIdentifier.isEmpty()) {
+    if (!(driver instanceof HiveDriver) || StringUtils.isBlank(sessionIdentifier)) {
       // Adding resources only required for Hive driver
       return;
     }
@@ -2413,36 +2373,78 @@ public class QueryExecutionServiceImpl extends LensService implements QueryExecu
 
     // Add resources if either they haven't been marked as added on the session, or if Hive driver says they need
     // to be added to the corresponding hive driver
-    if (!hiveDriver.areRsourcesAddedForSession(sessionIdentifier)) {
-      Collection<LensSessionImpl.ResourceEntry> dbResources = session.getCurrentDBResources();
+    if (!hiveDriver.areDBResourcesAddedForSession(sessionIdentifier, ctx.getDatabase())) {
+      Collection<ResourceEntry> dbResources = session.getDBResources(ctx.getDatabase());
 
-      if (dbResources != null && !dbResources.isEmpty()) {
+      if (CollectionUtils.isNotEmpty(dbResources)) {
         LOG.info("Proceeding to add resources for DB "
-          + session.getCurrentDatabase() + " for query " + queryHandle + " resources: " + dbResources);
-
-        for (LensSessionImpl.ResourceEntry res : dbResources) {
-          String uri = res.getLocation();
-          try {
-            // Hive doesn't and URIs starting with file:/ correctly, so we have to change it to file:///
-            // See: org.apache.hadoop.hive.ql.exec.Utilities.addToClassPath
-            if (uri.startsWith("file:") && !uri.startsWith("file://")) {
-              uri = "file://" + uri.substring("file:".length());
-            }
-            String command = "add " + res.getType().toLowerCase() + " " + uri;
-            hiveDriver.execute(createResourceQuery(command, sessionHandle, driver));
-            LOG.info("Added resource to hive driver for session "
-              + sessionIdentifier + " cmd: " + command);
-          } catch (LensException exc) {
-            LOG.error("Error adding resources for session "
-              + sessionIdentifier + " resources: " + uri, exc.getCause());
+          + session.getCurrentDatabase() + " for query " + ctx.getLogHandle() + " resources: " + dbResources);
+
+        List<ResourceEntry> failedDBResources = addResources(dbResources, sessionHandle, hiveDriver);
+        Iterator<ResourceEntry> itr = dbResources.iterator();
+        while (itr.hasNext()) {
+          ResourceEntry res = itr.next();
+          if (!failedDBResources.contains(res)) {
+            itr.remove();
           }
         }
       } else {
         LOG.info("No need to add DB resources for session: " + sessionIdentifier
           + " db= " + session.getCurrentDatabase());
       }
+      hiveDriver.setResourcesAddedForSession(sessionIdentifier, ctx.getDatabase());
+    }
+
+    // Get pending session resources which needed to be added for this database
+    Collection<ResourceEntry> pendingResources =
+      session.getPendingSessionResourcesForDatabase(ctx.getDatabase());
+    LOG.info("Adding pending " + pendingResources.size() + " session resources for session " + sessionIdentifier
+      + " for database " + ctx.getDatabase());
+    List<ResourceEntry> failedResources = addResources(pendingResources, sessionHandle, hiveDriver);
+    // Mark added resources so that we don't add them again. If any of the resources failed
+    // to be added, then they will be added again
+    for (ResourceEntry res : pendingResources) {
+      if (!failedResources.contains(res)) {
+        res.addToDatabase(ctx.getDatabase());
+      }
+    }
+  }
+
+  /**
+   * Add resources to hive driver, returning resources which failed to be added
+   * @param resources collection of resources intented to be added to hive driver
+   * @param sessionHandle
+   * @param hiveDriver
+   * @return resources which could not be added to hive driver
+   */
+  private List<ResourceEntry> addResources(Collection<ResourceEntry> resources,
+                                                           LensSessionHandle sessionHandle,
+                                                           HiveDriver hiveDriver) {
+    List<ResourceEntry> failedResources = new ArrayList<ResourceEntry>();
+    for (ResourceEntry res : resources) {
+      try{
+        addSingleResourceToHive(hiveDriver, res, sessionHandle);
+      } catch (LensException exc) {
+        failedResources.add(res);
+        LOG.error("Error adding resources for session "
+          + sessionHandle.getPublicId().toString() + " resources: " + res.getLocation(), exc.getCause());
+      }
+    }
+    return failedResources;
+  }
 
-      hiveDriver.setResourcesAddedForSession(sessionIdentifier);
+  private void addSingleResourceToHive(HiveDriver driver, ResourceEntry res,
+                                       LensSessionHandle sessionHandle) throws LensException {
+    String sessionIdentifier = sessionHandle.getPublicId().toString();
+    String uri = res.getLocation();
+    // Hive doesn't and URIs starting with file:/ correctly, so we have to change it to file:///
+    // See: org.apache.hadoop.hive.ql.exec.Utilities.addToClassPath
+    if (uri.startsWith("file:") && !uri.startsWith("file://")) {
+      uri = "file://" + uri.substring("file:".length());
     }
+    String command = "add " + res.getType().toLowerCase() + " " + uri;
+    driver.execute(createResourceQuery(command, sessionHandle, driver));
+    LOG.info("Added resource to hive driver for session "
+      + sessionIdentifier + " cmd: " + command);
   }
 }

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/adf47a64/lens-server/src/main/java/org/apache/lens/server/session/LensSessionImpl.java
----------------------------------------------------------------------
diff --git a/lens-server/src/main/java/org/apache/lens/server/session/LensSessionImpl.java b/lens-server/src/main/java/org/apache/lens/server/session/LensSessionImpl.java
index 8c97082..6ff45ad 100644
--- a/lens-server/src/main/java/org/apache/lens/server/session/LensSessionImpl.java
+++ b/lens-server/src/main/java/org/apache/lens/server/session/LensSessionImpl.java
@@ -37,6 +37,7 @@ import org.apache.lens.server.util.UtilityMethods;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.common.JavaUtils;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hive.service.cli.HiveSQLException;
@@ -69,6 +70,12 @@ public class LensSessionImpl extends HiveSessionImpl {
   /** The conf. */
   private Configuration conf = createDefaultConf();
 
+  /**
+   * Keep track of DB static resources which failed to be added to this session
+   */
+  private final Map<String, List<ResourceEntry>> failedDBResources = new HashMap<String, List<ResourceEntry>>();
+
+
 
   /**
    * Cache of database specific class loaders for this session
@@ -164,6 +171,27 @@ public class LensSessionImpl extends HiveSessionImpl {
       LensConfConstants.SESSION_TIMEOUT_SECONDS_DEFAULT);
   }
 
+  @Override
+  public void close() throws HiveSQLException {
+    super.close();
+
+    // Release class loader resources
+    synchronized (sessionDbClassLoaders) {
+      for (Map.Entry<String, ClassLoader> entry : sessionDbClassLoaders.entrySet()) {
+        try {
+          // Close the class loader only if its not a class loader maintained by the DB service
+          if (entry.getValue() != getDbResService().getClassLoader(entry.getKey())) {
+            // This is a utility in hive-common
+            JavaUtils.closeClassLoader(entry.getValue());
+          }
+        } catch (Exception e) {
+          LOG.error("Error closing session classloader for session: " + getSessionHandle().getSessionId(), e);
+        }
+      }
+      sessionDbClassLoaders.clear();
+    }
+  }
+
   public CubeMetastoreClient getCubeMetastoreClient() throws LensException {
     try {
       CubeMetastoreClient cubeClient = CubeMetastoreClient.getInstance(getHiveConf());
@@ -243,7 +271,8 @@ public class LensSessionImpl extends HiveSessionImpl {
    * @param path the path
    */
   public void addResource(String type, String path) {
-    persistInfo.getResources().add(new ResourceEntry(type, path));
+    ResourceEntry resource = new ResourceEntry(type, path);
+    persistInfo.getResources().add(resource);
     synchronized (sessionDbClassLoaders) {
       // Update all DB class loaders
       updateSessionDbClassLoader(getSessionState().getCurrentDatabase());
@@ -295,7 +324,9 @@ public class LensSessionImpl extends HiveSessionImpl {
         try {
           ClassLoader classLoader = getDbResService().getClassLoader(database);
           if (classLoader == null) {
-            LOG.warn("DB resource service gave null class loader for " + database);
+            if (LOG.isDebugEnabled()) {
+              LOG.debug("DB resource service gave null class loader for " + database);
+            }
           } else {
             if (areResourcesAdded()) {
               // We need to update DB specific classloader with added resources
@@ -349,11 +380,32 @@ public class LensSessionImpl extends HiveSessionImpl {
   }
 
   /**
-   * Return resources which are added statically to the current database
+   * Return resources which are added statically to the database
    * @return
    */
-  public Collection<ResourceEntry> getCurrentDBResources() {
-    return getDbResService().getResourcesForDatabase(getCurrentDatabase());
+  public Collection<ResourceEntry> getDBResources(String database) {
+    synchronized (failedDBResources) {
+      List<ResourceEntry> failed = failedDBResources.get(database);
+      if (failed == null && getDbResService().getResourcesForDatabase(database) != null) {
+        failed = new ArrayList<ResourceEntry>(getDbResService().getResourcesForDatabase(database));
+        failedDBResources.put(database, failed);
+      }
+      return failed;
+    }
+  }
+
+
+  /**
+   * Get session's resources which have to be added for the given database
+   */
+  public Collection<ResourceEntry> getPendingSessionResourcesForDatabase(String database) {
+    List<ResourceEntry> pendingResources = new ArrayList<ResourceEntry>();
+    for (ResourceEntry res : persistInfo.getResources()) {
+      if (!res.isAddedToDatabase(database)) {
+        pendingResources.add(res);
+      }
+    }
+    return pendingResources;
   }
 
   /**
@@ -381,6 +433,10 @@ public class LensSessionImpl extends HiveSessionImpl {
     @Getter
     transient int restoreCount;
 
+    /** Set of databases for which this resource has been added */
+    final transient Set<String> databases = new HashSet<String>();
+
+
     /**
      * Instantiates a new resource entry.
      *
@@ -394,6 +450,15 @@ public class LensSessionImpl extends HiveSessionImpl {
       this.type = type;
       this.location = location;
     }
+
+    public boolean isAddedToDatabase(String database) {
+      return databases.contains(database);
+    }
+
+    public void addToDatabase(String database) {
+      databases.add(database);
+    }
+
     /**
      * Restored resource.
      */

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/adf47a64/lens-server/src/test/java/org/apache/lens/server/LensJerseyTest.java
----------------------------------------------------------------------
diff --git a/lens-server/src/test/java/org/apache/lens/server/LensJerseyTest.java b/lens-server/src/test/java/org/apache/lens/server/LensJerseyTest.java
index 20856a0..1f3fe68 100644
--- a/lens-server/src/test/java/org/apache/lens/server/LensJerseyTest.java
+++ b/lens-server/src/test/java/org/apache/lens/server/LensJerseyTest.java
@@ -116,7 +116,8 @@ public abstract class LensJerseyTest extends JerseyTest {
     hiveConf.setIntVar(HiveConf.ConfVars.HIVE_SERVER2_THRIFT_CLIENT_CONNECTION_RETRY_LIMIT, 3);
     hiveConf.setIntVar(HiveConf.ConfVars.HIVE_SERVER2_THRIFT_CLIENT_RETRY_LIMIT, 3);
 
-    LensTestUtil.createTestDatabaseResources(new String[]{LensTestUtil.DB_WITH_JARS}, hiveConf);
+    LensTestUtil.createTestDatabaseResources(new String[]{LensTestUtil.DB_WITH_JARS, LensTestUtil.DB_WITH_JARS_2},
+      hiveConf);
 
     LensServices.get().init(LensServerConf.get());
     LensServices.get().start();

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/adf47a64/lens-server/src/test/java/org/apache/lens/server/LensTestUtil.java
----------------------------------------------------------------------
diff --git a/lens-server/src/test/java/org/apache/lens/server/LensTestUtil.java b/lens-server/src/test/java/org/apache/lens/server/LensTestUtil.java
index e448163..30f1cb0 100644
--- a/lens-server/src/test/java/org/apache/lens/server/LensTestUtil.java
+++ b/lens-server/src/test/java/org/apache/lens/server/LensTestUtil.java
@@ -55,7 +55,7 @@ import org.testng.Assert;
 public final class LensTestUtil {
 
   public static final String DB_WITH_JARS = "test_db_static_jars";
-
+  public static final String DB_WITH_JARS_2 = "test_db_static_jars_2";
   private LensTestUtil() {
 
   }

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/adf47a64/lens-server/src/test/java/org/apache/lens/server/TestServerRestart.java
----------------------------------------------------------------------
diff --git a/lens-server/src/test/java/org/apache/lens/server/TestServerRestart.java b/lens-server/src/test/java/org/apache/lens/server/TestServerRestart.java
index 6306b51..a6927d2 100644
--- a/lens-server/src/test/java/org/apache/lens/server/TestServerRestart.java
+++ b/lens-server/src/test/java/org/apache/lens/server/TestServerRestart.java
@@ -38,6 +38,7 @@ import org.apache.lens.api.query.PersistentQueryResult;
 import org.apache.lens.api.query.QueryHandle;
 import org.apache.lens.api.query.QueryStatus;
 import org.apache.lens.driver.hive.TestRemoteHiveDriver;
+import org.apache.lens.server.api.session.SessionService;
 import org.apache.lens.server.query.QueryExecutionServiceImpl;
 import org.apache.lens.server.query.TestQueryService;
 import org.apache.lens.server.session.HiveSessionService;
@@ -224,7 +225,8 @@ public class TestServerRestart extends LensAllApplicationJerseyTest {
     createRestartTestDataFile();
 
     // Add a resource to check if its added after server restart.
-    queryService.addResource(lensSessionId, "FILE", dataFile.toURI().toString());
+    HiveSessionService sessionService = (HiveSessionService) LensServices.get().getService(SessionService.NAME);
+    sessionService.addResource(lensSessionId, "FILE", dataFile.toURI().toString());
     queryService.getSession(lensSessionId).addResource("FILE", dataFile.toURI().toString());
     LOG.info("@@ Added resource " + dataFile.toURI());
 

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/adf47a64/lens-server/src/test/java/org/apache/lens/server/query/TestQueryService.java
----------------------------------------------------------------------
diff --git a/lens-server/src/test/java/org/apache/lens/server/query/TestQueryService.java b/lens-server/src/test/java/org/apache/lens/server/query/TestQueryService.java
index e3e3d4b..a8df41d 100644
--- a/lens-server/src/test/java/org/apache/lens/server/query/TestQueryService.java
+++ b/lens-server/src/test/java/org/apache/lens/server/query/TestQueryService.java
@@ -51,6 +51,7 @@ import org.apache.lens.server.api.query.AbstractQueryContext;
 import org.apache.lens.server.api.query.QueryContext;
 import org.apache.lens.server.api.session.SessionService;
 import org.apache.lens.server.session.HiveSessionService;
+import org.apache.lens.server.session.LensSessionImpl;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
@@ -1368,29 +1369,62 @@ public class TestQueryService extends LensJerseyTest {
     LensSessionHandle sessionHandle =
       sessionService.openSession("foo@localhost", "bar", LensTestUtil.DB_WITH_JARS, new HashMap<String, String>());
 
+    // Add a jar in the session
+    File testJarFile = new File("testdata/test2.jar");
+    sessionService.addResourceToAllServices(sessionHandle, "jar", "file://" + testJarFile.getAbsolutePath());
+
     LOG.info("@@@ Opened session " + sessionHandle.getPublicId() + " with database " + LensTestUtil.DB_WITH_JARS);
+    LensSessionImpl session = sessionService.getSession(sessionHandle);
+
+    // Jars should be pending until query is run
+    Assert.assertEquals(session.getPendingSessionResourcesForDatabase(LensTestUtil.DB_WITH_JARS).size(), 1);
+    Assert.assertEquals(session.getPendingSessionResourcesForDatabase(LensTestUtil.DB_WITH_JARS_2).size(), 1);
 
     final String tableInDBWithJars = "testHiveDriverGetsDBJars";
     try {
       // First execute query on the session with db should load jars from DB
-      try {
-        LensTestUtil.createTable(tableInDBWithJars, target(), sessionHandle, "(ID INT, IDSTR STRING) "
-          + "ROW FORMAT SERDE \"DatabaseJarSerde\"");
-      } catch (Throwable exc) {
-        // Above fails because our serde is returning all nulls. We only want to test that serde gets loaded
-        exc.printStackTrace();
-      }
+      LensTestUtil.createTable(tableInDBWithJars, target(), sessionHandle, "(ID INT, IDSTR STRING) "
+        + "ROW FORMAT SERDE \"DatabaseJarSerde\"");
 
       boolean addedToHiveDriver = false;
 
       for (LensDriver driver : queryService.getDrivers()) {
         if (driver instanceof HiveDriver) {
-          addedToHiveDriver = ((HiveDriver) driver).areRsourcesAddedForSession(sessionHandle.getPublicId().toString());
+          addedToHiveDriver =
+            ((HiveDriver) driver).areDBResourcesAddedForSession(sessionHandle.getPublicId().toString(),
+              LensTestUtil.DB_WITH_JARS);
         }
       }
+      Assert.assertTrue(addedToHiveDriver);
+
+      // Switch database
+      LOG.info("@@@# database switch test");
+      session.setCurrentDatabase(LensTestUtil.DB_WITH_JARS_2);
+      LensTestUtil.createTable(tableInDBWithJars + "_2", target(), sessionHandle, "(ID INT, IDSTR STRING) "
+        + "ROW FORMAT SERDE \"DatabaseJarSerde\"");
+
+      // All db jars should have been added
+      Assert.assertTrue(session.getDBResources(LensTestUtil.DB_WITH_JARS_2).isEmpty());
+      Assert.assertTrue(session.getDBResources(LensTestUtil.DB_WITH_JARS).isEmpty());
+
+      // All session resources must have been added to both DBs
+      Assert.assertFalse(session.getLensSessionPersistInfo().getResources().isEmpty());
+      for (LensSessionImpl.ResourceEntry resource : session.getLensSessionPersistInfo().getResources()) {
+        Assert.assertTrue(resource.isAddedToDatabase(LensTestUtil.DB_WITH_JARS_2));
+        Assert.assertTrue(resource.isAddedToDatabase(LensTestUtil.DB_WITH_JARS));
+      }
+
+      Assert.assertTrue(session.getPendingSessionResourcesForDatabase(LensTestUtil.DB_WITH_JARS).isEmpty());
+      Assert.assertTrue(session.getPendingSessionResourcesForDatabase(LensTestUtil.DB_WITH_JARS_2).isEmpty());
+
     } finally {
       LOG.info("@@@ TEST_OVER");
-      LensTestUtil.dropTable(tableInDBWithJars, target(), sessionHandle);
+      try {
+        LensTestUtil.dropTable(tableInDBWithJars, target(), sessionHandle);
+        LensTestUtil.dropTable(tableInDBWithJars + "_2", target(), sessionHandle);
+      } catch (Throwable th) {
+        th.printStackTrace();
+      }
       sessionService.closeSession(sessionHandle);
     }
   }

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/adf47a64/lens-server/testdata/DatabaseJarSerde.java
----------------------------------------------------------------------
diff --git a/lens-server/testdata/DatabaseJarSerde.java b/lens-server/testdata/DatabaseJarSerde.java
index 03caff3..4fd98c9 100644
--- a/lens-server/testdata/DatabaseJarSerde.java
+++ b/lens-server/testdata/DatabaseJarSerde.java
@@ -17,52 +17,20 @@
  * under the License.
  */
 
-import java.util.Properties;
-
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hive.serde2.AbstractSerDe;
 import org.apache.hadoop.hive.serde2.SerDeException;
-import org.apache.hadoop.hive.serde2.SerDeStats;
-import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
-import org.apache.hadoop.io.Writable;
+import org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe;
 
 /**
  * Simple serde used during test of database jar
  */
-public class DatabaseJarSerde extends AbstractSerDe {
+public class DatabaseJarSerde extends LazySimpleSerDe {
   // This should load class from test.jar
   public static final ClassLoaderTestClass testClassInstance = new ClassLoaderTestClass();
   static {
     System.out.println("@@@@ SUCCESSFULLY_LOADED CLASS " + DatabaseJarSerde.class);
   }
 
-  @Override
-  public void initialize(Configuration configuration, Properties properties) throws SerDeException {
-
-  }
-
-  @Override
-  public Class<? extends Writable> getSerializedClass() {
-    return null;
-  }
-
-  @Override
-  public Writable serialize(Object o, ObjectInspector objectInspector) throws SerDeException {
-    return null;
-  }
-
-  @Override
-  public SerDeStats getSerDeStats() {
-    return null;
-  }
-
-  @Override
-  public Object deserialize(Writable writable) throws SerDeException {
-    return null;
-  }
-
-  @Override
-  public ObjectInspector getObjectInspector() throws SerDeException {
-    return null;
+  public DatabaseJarSerde() throws SerDeException {
+    super();
   }
-}
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/adf47a64/lens-server/testdata/serde.jar
----------------------------------------------------------------------
diff --git a/lens-server/testdata/serde.jar b/lens-server/testdata/serde.jar
index ec86e49..01e6d7c 100644
Binary files a/lens-server/testdata/serde.jar and b/lens-server/testdata/serde.jar differ

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/adf47a64/lens-server/testdata/test.jar
----------------------------------------------------------------------
diff --git a/lens-server/testdata/test.jar b/lens-server/testdata/test.jar
index 1644d8c..a5baa57 100644
Binary files a/lens-server/testdata/test.jar and b/lens-server/testdata/test.jar differ


[35/50] [abbrv] incubator-lens git commit: LENS-398 : Move enunciate to profile activated by jdk version (Raju Bairishetti via amareshwari)

Posted by am...@apache.org.
LENS-398 : Move enunciate to profile activated by jdk version (Raju Bairishetti via amareshwari)


Project: http://git-wip-us.apache.org/repos/asf/incubator-lens/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-lens/commit/44f4664c
Tree: http://git-wip-us.apache.org/repos/asf/incubator-lens/tree/44f4664c
Diff: http://git-wip-us.apache.org/repos/asf/incubator-lens/diff/44f4664c

Branch: refs/heads/current-release-line
Commit: 44f4664c73f7ed6d8e2c7953ec59e0089aeb2c74
Parents: e90dc33
Author: Raju Bairishetti <ra...@gmail.com>
Authored: Thu Apr 9 06:44:35 2015 +0530
Committer: Amareshwari Sriramadasu <am...@apache.org>
Committed: Thu Apr 9 06:44:35 2015 +0530

----------------------------------------------------------------------
 lens-server/pom.xml                   | 41 +++++++++++++++++++-----------
 pom.xml                               | 15 +++++++++++
 tools/scripts/generate-site-public.sh |  4 +--
 3 files changed, 42 insertions(+), 18 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/44f4664c/lens-server/pom.xml
----------------------------------------------------------------------
diff --git a/lens-server/pom.xml b/lens-server/pom.xml
index efac8b5..d7752d7 100644
--- a/lens-server/pom.xml
+++ b/lens-server/pom.xml
@@ -310,22 +310,33 @@
           </execution>
         </executions>
       </plugin>
-
-      <plugin>
-        <groupId>org.codehaus.enunciate</groupId>
-        <artifactId>maven-enunciate-plugin</artifactId>
-        <executions>
-          <execution>
-            <goals>
-              <goal>docs</goal>
-            </goals>
-          </execution>
-        </executions>
-        <configuration>
-          <configFile>enunciate.xml</configFile>
-        </configuration>
-      </plugin>
     </plugins>
   </build>
 
+  <profiles>
+    <profile>
+      <id>enunciate</id>
+      <activation>
+        <jdk>[1.6,1.7,1.8)</jdk>
+      </activation>
+      <build>
+        <plugins>
+          <plugin>
+            <groupId>org.codehaus.enunciate</groupId>
+            <artifactId>maven-enunciate-plugin</artifactId>
+            <executions>
+              <execution>
+                <goals>
+                  <goal>docs</goal>
+                </goals>
+              </execution>
+            </executions>
+            <configuration>
+              <configFile>enunciate.xml</configFile>
+            </configuration>
+          </plugin>
+        </plugins>
+      </build>
+    </profile>
+  </profiles>
 </project>

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/44f4664c/pom.xml
----------------------------------------------------------------------
diff --git a/pom.xml b/pom.xml
index f306ee0..00bca47 100644
--- a/pom.xml
+++ b/pom.xml
@@ -1239,5 +1239,20 @@
         </plugins>
       </build>
     </profile>
+    <profile>
+      <id>enunciate</id>
+      <activation>
+        <jdk>[1.6,1.7,1.8)</jdk>
+      </activation>
+      <build>
+        <plugins>
+          <plugin>
+            <groupId>org.codehaus.enunciate</groupId>
+            <artifactId>maven-enunciate-plugin</artifactId>
+            <version>${enunciate.plugin.version}</version>
+          </plugin>
+        </plugins>
+      </build>
+    </profile>
   </profiles>
 </project>

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/44f4664c/tools/scripts/generate-site-public.sh
----------------------------------------------------------------------
diff --git a/tools/scripts/generate-site-public.sh b/tools/scripts/generate-site-public.sh
index 55307ee..8c67952 100755
--- a/tools/scripts/generate-site-public.sh
+++ b/tools/scripts/generate-site-public.sh
@@ -45,9 +45,7 @@ echo "Running site in current lens branch" $CURR_BRANCH
 mvn clean test -Dtest=TestGenerateConfigDoc || die "Unable to generate config docs"
 mvn install -DskipTests
 mvn site site:stage -Ddependency.locations.enabled=false -Ddependency.details.enabled=false || die "unable to generate site"
-cd lens-server
-mvn enunciate:docs
-cd ..
+
 echo "Site gen complete"
 
 rm -rf $TMP || die "unable to clear $TMP"


[26/50] [abbrv] incubator-lens git commit: Lens-465 : Refactor ml packages. (sharad)

Posted by am...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/0f5ea4c7/lens-ml-lib/src/main/java/org/apache/lens/ml/impl/LensMLImpl.java
----------------------------------------------------------------------
diff --git a/lens-ml-lib/src/main/java/org/apache/lens/ml/impl/LensMLImpl.java b/lens-ml-lib/src/main/java/org/apache/lens/ml/impl/LensMLImpl.java
new file mode 100644
index 0000000..f0c6e04
--- /dev/null
+++ b/lens-ml-lib/src/main/java/org/apache/lens/ml/impl/LensMLImpl.java
@@ -0,0 +1,744 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.lens.ml.impl;
+
+import java.io.IOException;
+import java.io.ObjectOutputStream;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Date;
+import java.util.List;
+import java.util.Map;
+import java.util.UUID;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.Executors;
+import java.util.concurrent.ScheduledExecutorService;
+import java.util.concurrent.TimeUnit;
+
+import javax.ws.rs.client.Client;
+import javax.ws.rs.client.ClientBuilder;
+import javax.ws.rs.client.Entity;
+import javax.ws.rs.client.WebTarget;
+import javax.ws.rs.core.MediaType;
+
+import org.apache.lens.api.LensConf;
+import org.apache.lens.api.LensException;
+import org.apache.lens.api.LensSessionHandle;
+import org.apache.lens.api.query.LensQuery;
+import org.apache.lens.api.query.QueryHandle;
+import org.apache.lens.api.query.QueryStatus;
+import org.apache.lens.ml.algo.api.MLAlgo;
+import org.apache.lens.ml.algo.api.MLDriver;
+import org.apache.lens.ml.algo.api.MLModel;
+import org.apache.lens.ml.algo.spark.BaseSparkAlgo;
+import org.apache.lens.ml.algo.spark.SparkMLDriver;
+import org.apache.lens.ml.api.LensML;
+import org.apache.lens.ml.api.MLTestReport;
+import org.apache.lens.server.api.LensConfConstants;
+import org.apache.lens.server.api.session.SessionService;
+
+import org.apache.commons.io.IOUtils;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.fs.FileStatus;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.ql.session.SessionState;
+import org.apache.spark.api.java.JavaSparkContext;
+
+import org.glassfish.jersey.media.multipart.FormDataBodyPart;
+import org.glassfish.jersey.media.multipart.FormDataContentDisposition;
+import org.glassfish.jersey.media.multipart.FormDataMultiPart;
+import org.glassfish.jersey.media.multipart.MultiPartFeature;
+
+/**
+ * The Class LensMLImpl.
+ */
+public class LensMLImpl implements LensML {
+
+  /** The Constant LOG. */
+  public static final Log LOG = LogFactory.getLog(LensMLImpl.class);
+
+  /** The drivers. */
+  protected List<MLDriver> drivers;
+
+  /** The conf. */
+  private HiveConf conf;
+
+  /** The spark context. */
+  private JavaSparkContext sparkContext;
+
+  /** Check if the predict UDF has been registered for a user */
+  private final Map<LensSessionHandle, Boolean> predictUdfStatus;
+  /** Background thread to periodically check if we need to clear expire status for a session */
+  private ScheduledExecutorService udfStatusExpirySvc;
+
+  /**
+   * Instantiates a new lens ml impl.
+   *
+   * @param conf the conf
+   */
+  public LensMLImpl(HiveConf conf) {
+    this.conf = conf;
+    this.predictUdfStatus = new ConcurrentHashMap<LensSessionHandle, Boolean>();
+  }
+
+  public HiveConf getConf() {
+    return conf;
+  }
+
+  /**
+   * Use an existing Spark context. Useful in case of
+   *
+   * @param jsc JavaSparkContext instance
+   */
+  public void setSparkContext(JavaSparkContext jsc) {
+    this.sparkContext = jsc;
+  }
+
+  public List<String> getAlgorithms() {
+    List<String> algos = new ArrayList<String>();
+    for (MLDriver driver : drivers) {
+      algos.addAll(driver.getAlgoNames());
+    }
+    return algos;
+  }
+
+  /*
+   * (non-Javadoc)
+   *
+   * @see org.apache.lens.ml.LensML#getAlgoForName(java.lang.String)
+   */
+  public MLAlgo getAlgoForName(String algorithm) throws LensException {
+    for (MLDriver driver : drivers) {
+      if (driver.isAlgoSupported(algorithm)) {
+        return driver.getAlgoInstance(algorithm);
+      }
+    }
+    throw new LensException("Algo not supported " + algorithm);
+  }
+
+  /*
+   * (non-Javadoc)
+   *
+   * @see org.apache.lens.ml.LensML#train(java.lang.String, java.lang.String, java.lang.String[])
+   */
+  public String train(String table, String algorithm, String[] args) throws LensException {
+    MLAlgo algo = getAlgoForName(algorithm);
+
+    String modelId = UUID.randomUUID().toString();
+
+    LOG.info("Begin training model " + modelId + ", algo=" + algorithm + ", table=" + table + ", params="
+      + Arrays.toString(args));
+
+    String database = null;
+    if (SessionState.get() != null) {
+      database = SessionState.get().getCurrentDatabase();
+    } else {
+      database = "default";
+    }
+
+    MLModel model = algo.train(toLensConf(conf), database, table, modelId, args);
+
+    LOG.info("Done training model: " + modelId);
+
+    model.setCreatedAt(new Date());
+    model.setAlgoName(algorithm);
+
+    Path modelLocation = null;
+    try {
+      modelLocation = persistModel(model);
+      LOG.info("Model saved: " + modelId + ", algo: " + algorithm + ", path: " + modelLocation);
+      return model.getId();
+    } catch (IOException e) {
+      throw new LensException("Error saving model " + modelId + " for algo " + algorithm, e);
+    }
+  }
+
+  /**
+   * Gets the algo dir.
+   *
+   * @param algoName the algo name
+   * @return the algo dir
+   * @throws IOException Signals that an I/O exception has occurred.
+   */
+  private Path getAlgoDir(String algoName) throws IOException {
+    String modelSaveBaseDir = conf.get(ModelLoader.MODEL_PATH_BASE_DIR, ModelLoader.MODEL_PATH_BASE_DIR_DEFAULT);
+    return new Path(new Path(modelSaveBaseDir), algoName);
+  }
+
+  /**
+   * Persist model.
+   *
+   * @param model the model
+   * @return the path
+   * @throws IOException Signals that an I/O exception has occurred.
+   */
+  private Path persistModel(MLModel model) throws IOException {
+    // Get model save path
+    Path algoDir = getAlgoDir(model.getAlgoName());
+    FileSystem fs = algoDir.getFileSystem(conf);
+
+    if (!fs.exists(algoDir)) {
+      fs.mkdirs(algoDir);
+    }
+
+    Path modelSavePath = new Path(algoDir, model.getId());
+    ObjectOutputStream outputStream = null;
+
+    try {
+      outputStream = new ObjectOutputStream(fs.create(modelSavePath, false));
+      outputStream.writeObject(model);
+      outputStream.flush();
+    } catch (IOException io) {
+      LOG.error("Error saving model " + model.getId() + " reason: " + io.getMessage());
+      throw io;
+    } finally {
+      IOUtils.closeQuietly(outputStream);
+    }
+    return modelSavePath;
+  }
+
+  /*
+   * (non-Javadoc)
+   *
+   * @see org.apache.lens.ml.LensML#getModels(java.lang.String)
+   */
+  public List<String> getModels(String algorithm) throws LensException {
+    try {
+      Path algoDir = getAlgoDir(algorithm);
+      FileSystem fs = algoDir.getFileSystem(conf);
+      if (!fs.exists(algoDir)) {
+        return null;
+      }
+
+      List<String> models = new ArrayList<String>();
+
+      for (FileStatus stat : fs.listStatus(algoDir)) {
+        models.add(stat.getPath().getName());
+      }
+
+      if (models.isEmpty()) {
+        return null;
+      }
+
+      return models;
+    } catch (IOException ioex) {
+      throw new LensException(ioex);
+    }
+  }
+
+  /*
+   * (non-Javadoc)
+   *
+   * @see org.apache.lens.ml.LensML#getModel(java.lang.String, java.lang.String)
+   */
+  public MLModel getModel(String algorithm, String modelId) throws LensException {
+    try {
+      return ModelLoader.loadModel(conf, algorithm, modelId);
+    } catch (IOException e) {
+      throw new LensException(e);
+    }
+  }
+
+  /**
+   * Inits the.
+   *
+   * @param hiveConf the hive conf
+   */
+  public synchronized void init(HiveConf hiveConf) {
+    this.conf = hiveConf;
+
+    // Get all the drivers
+    String[] driverClasses = hiveConf.getStrings("lens.ml.drivers");
+
+    if (driverClasses == null || driverClasses.length == 0) {
+      throw new RuntimeException("No ML Drivers specified in conf");
+    }
+
+    LOG.info("Loading drivers " + Arrays.toString(driverClasses));
+    drivers = new ArrayList<MLDriver>(driverClasses.length);
+
+    for (String driverClass : driverClasses) {
+      Class<?> cls;
+      try {
+        cls = Class.forName(driverClass);
+      } catch (ClassNotFoundException e) {
+        LOG.error("Driver class not found " + driverClass);
+        continue;
+      }
+
+      if (!MLDriver.class.isAssignableFrom(cls)) {
+        LOG.warn("Not a driver class " + driverClass);
+        continue;
+      }
+
+      try {
+        Class<? extends MLDriver> mlDriverClass = (Class<? extends MLDriver>) cls;
+        MLDriver driver = mlDriverClass.newInstance();
+        driver.init(toLensConf(conf));
+        drivers.add(driver);
+        LOG.info("Added driver " + driverClass);
+      } catch (Exception e) {
+        LOG.error("Failed to create driver " + driverClass + " reason: " + e.getMessage(), e);
+      }
+    }
+    if (drivers.isEmpty()) {
+      throw new RuntimeException("No ML drivers loaded");
+    }
+
+    LOG.info("Inited ML service");
+  }
+
+  /**
+   * Start.
+   */
+  public synchronized void start() {
+    for (MLDriver driver : drivers) {
+      try {
+        if (driver instanceof SparkMLDriver && sparkContext != null) {
+          ((SparkMLDriver) driver).useSparkContext(sparkContext);
+        }
+        driver.start();
+      } catch (LensException e) {
+        LOG.error("Failed to start driver " + driver, e);
+      }
+    }
+
+    udfStatusExpirySvc = Executors.newSingleThreadScheduledExecutor();
+    udfStatusExpirySvc.scheduleAtFixedRate(new UDFStatusExpiryRunnable(), 60, 60, TimeUnit.SECONDS);
+
+    LOG.info("Started ML service");
+  }
+
+  /**
+   * Stop.
+   */
+  public synchronized void stop() {
+    for (MLDriver driver : drivers) {
+      try {
+        driver.stop();
+      } catch (LensException e) {
+        LOG.error("Failed to stop driver " + driver, e);
+      }
+    }
+    drivers.clear();
+    udfStatusExpirySvc.shutdownNow();
+    LOG.info("Stopped ML service");
+  }
+
+  public synchronized HiveConf getHiveConf() {
+    return conf;
+  }
+
+  /**
+   * Clear models.
+   */
+  public void clearModels() {
+    ModelLoader.clearCache();
+  }
+
+  /*
+   * (non-Javadoc)
+   *
+   * @see org.apache.lens.ml.LensML#getModelPath(java.lang.String, java.lang.String)
+   */
+  public String getModelPath(String algorithm, String modelID) {
+    return ModelLoader.getModelLocation(conf, algorithm, modelID).toString();
+  }
+
+  /*
+   * (non-Javadoc)
+   *
+   * @see org.apache.lens.ml.LensML#testModel(org.apache.lens.api.LensSessionHandle, java.lang.String, java.lang.String,
+   * java.lang.String)
+   */
+  @Override
+  public MLTestReport testModel(LensSessionHandle session, String table, String algorithm, String modelID,
+    String outputTable) throws LensException {
+    return null;
+  }
+
+  /**
+   * Test a model in embedded mode.
+   *
+   * @param sessionHandle the session handle
+   * @param table         the table
+   * @param algorithm     the algorithm
+   * @param modelID       the model id
+   * @param queryApiUrl   the query api url
+   * @return the ML test report
+   * @throws LensException the lens exception
+   */
+  public MLTestReport testModelRemote(LensSessionHandle sessionHandle, String table, String algorithm, String modelID,
+    String queryApiUrl, String outputTable) throws LensException {
+    return testModel(sessionHandle, table, algorithm, modelID, new RemoteQueryRunner(sessionHandle, queryApiUrl),
+      outputTable);
+  }
+
+  /**
+   * Evaluate a model. Evaluation is done on data selected table from an input table. The model is run as a UDF and its
+   * output is inserted into a table with a partition. Each evaluation is given a unique ID. The partition label is
+   * associated with this unique ID.
+   * <p/>
+   * <p>
+   * This call also required a query runner. Query runner is responsible for executing the evaluation query against Lens
+   * server.
+   * </p>
+   *
+   * @param sessionHandle the session handle
+   * @param table         the table
+   * @param algorithm     the algorithm
+   * @param modelID       the model id
+   * @param queryRunner   the query runner
+   * @param outputTable   table where test output will be written
+   * @return the ML test report
+   * @throws LensException the lens exception
+   */
+  public MLTestReport testModel(final LensSessionHandle sessionHandle, String table, String algorithm, String modelID,
+    QueryRunner queryRunner, String outputTable) throws LensException {
+    if (sessionHandle == null) {
+      throw new NullPointerException("Null session not allowed");
+    }
+    // check if algorithm exists
+    if (!getAlgorithms().contains(algorithm)) {
+      throw new LensException("No such algorithm " + algorithm);
+    }
+
+    MLModel<?> model;
+    try {
+      model = ModelLoader.loadModel(conf, algorithm, modelID);
+    } catch (IOException e) {
+      throw new LensException(e);
+    }
+
+    if (model == null) {
+      throw new LensException("Model not found: " + modelID + " algorithm=" + algorithm);
+    }
+
+    String database = null;
+
+    if (SessionState.get() != null) {
+      database = SessionState.get().getCurrentDatabase();
+    }
+
+    String testID = UUID.randomUUID().toString().replace("-", "_");
+    final String testTable = outputTable;
+    final String testResultColumn = "prediction_result";
+
+    // TODO support error metric UDAFs
+    TableTestingSpec spec = TableTestingSpec.newBuilder().hiveConf(conf)
+      .database(database == null ? "default" : database).inputTable(table).featureColumns(model.getFeatureColumns())
+      .outputColumn(testResultColumn).lableColumn(model.getLabelColumn()).algorithm(algorithm).modelID(modelID)
+      .outputTable(testTable).testID(testID).build();
+
+    String testQuery = spec.getTestQuery();
+    if (testQuery == null) {
+      throw new LensException("Invalid test spec. " + "table=" + table + " algorithm=" + algorithm + " modelID="
+        + modelID);
+    }
+
+    if (!spec.isOutputTableExists()) {
+      LOG.info("Output table '" + testTable + "' does not exist for test algorithm = " + algorithm + " modelid="
+        + modelID + ", Creating table using query: " + spec.getCreateOutputTableQuery());
+      // create the output table
+      String createOutputTableQuery = spec.getCreateOutputTableQuery();
+      queryRunner.runQuery(createOutputTableQuery);
+      LOG.info("Table created " + testTable);
+    }
+
+    // Check if ML UDF is registered in this session
+    registerPredictUdf(sessionHandle, queryRunner);
+
+    LOG.info("Running evaluation query " + testQuery);
+    queryRunner.setQueryName("model_test_" + modelID);
+    QueryHandle testQueryHandle = queryRunner.runQuery(testQuery);
+
+    MLTestReport testReport = new MLTestReport();
+    testReport.setReportID(testID);
+    testReport.setAlgorithm(algorithm);
+    testReport.setFeatureColumns(model.getFeatureColumns());
+    testReport.setLabelColumn(model.getLabelColumn());
+    testReport.setModelID(model.getId());
+    testReport.setOutputColumn(testResultColumn);
+    testReport.setOutputTable(testTable);
+    testReport.setTestTable(table);
+    testReport.setQueryID(testQueryHandle.toString());
+
+    // Save test report
+    persistTestReport(testReport);
+    LOG.info("Saved test report " + testReport.getReportID());
+    return testReport;
+  }
+
+  /**
+   * Persist test report.
+   *
+   * @param testReport the test report
+   * @throws LensException the lens exception
+   */
+  private void persistTestReport(MLTestReport testReport) throws LensException {
+    LOG.info("saving test report " + testReport.getReportID());
+    try {
+      ModelLoader.saveTestReport(conf, testReport);
+      LOG.info("Saved report " + testReport.getReportID());
+    } catch (IOException e) {
+      LOG.error("Error saving report " + testReport.getReportID() + " reason: " + e.getMessage());
+    }
+  }
+
+  /*
+   * (non-Javadoc)
+   *
+   * @see org.apache.lens.ml.LensML#getTestReports(java.lang.String)
+   */
+  public List<String> getTestReports(String algorithm) throws LensException {
+    Path reportBaseDir = new Path(conf.get(ModelLoader.TEST_REPORT_BASE_DIR, ModelLoader.TEST_REPORT_BASE_DIR_DEFAULT));
+    FileSystem fs = null;
+
+    try {
+      fs = reportBaseDir.getFileSystem(conf);
+      if (!fs.exists(reportBaseDir)) {
+        return null;
+      }
+
+      Path algoDir = new Path(reportBaseDir, algorithm);
+      if (!fs.exists(algoDir)) {
+        return null;
+      }
+
+      List<String> reports = new ArrayList<String>();
+      for (FileStatus stat : fs.listStatus(algoDir)) {
+        reports.add(stat.getPath().getName());
+      }
+      return reports;
+    } catch (IOException e) {
+      LOG.error("Error reading report list for " + algorithm, e);
+      return null;
+    }
+  }
+
+  /*
+   * (non-Javadoc)
+   *
+   * @see org.apache.lens.ml.LensML#getTestReport(java.lang.String, java.lang.String)
+   */
+  public MLTestReport getTestReport(String algorithm, String reportID) throws LensException {
+    try {
+      return ModelLoader.loadReport(conf, algorithm, reportID);
+    } catch (IOException e) {
+      throw new LensException(e);
+    }
+  }
+
+  /*
+   * (non-Javadoc)
+   *
+   * @see org.apache.lens.ml.LensML#predict(java.lang.String, java.lang.String, java.lang.Object[])
+   */
+  public Object predict(String algorithm, String modelID, Object[] features) throws LensException {
+    // Load the model instance
+    MLModel<?> model = getModel(algorithm, modelID);
+    return model.predict(features);
+  }
+
+  /*
+   * (non-Javadoc)
+   *
+   * @see org.apache.lens.ml.LensML#deleteModel(java.lang.String, java.lang.String)
+   */
+  public void deleteModel(String algorithm, String modelID) throws LensException {
+    try {
+      ModelLoader.deleteModel(conf, algorithm, modelID);
+      LOG.info("DELETED model " + modelID + " algorithm=" + algorithm);
+    } catch (IOException e) {
+      LOG.error(
+        "Error deleting model file. algorithm=" + algorithm + " model=" + modelID + " reason: " + e.getMessage(), e);
+      throw new LensException("Unable to delete model " + modelID + " for algorithm " + algorithm, e);
+    }
+  }
+
+  /*
+   * (non-Javadoc)
+   *
+   * @see org.apache.lens.ml.LensML#deleteTestReport(java.lang.String, java.lang.String)
+   */
+  public void deleteTestReport(String algorithm, String reportID) throws LensException {
+    try {
+      ModelLoader.deleteTestReport(conf, algorithm, reportID);
+      LOG.info("DELETED report=" + reportID + " algorithm=" + algorithm);
+    } catch (IOException e) {
+      LOG.error("Error deleting report " + reportID + " algorithm=" + algorithm + " reason: " + e.getMessage(), e);
+      throw new LensException("Unable to delete report " + reportID + " for algorithm " + algorithm, e);
+    }
+  }
+
+  /*
+   * (non-Javadoc)
+   *
+   * @see org.apache.lens.ml.LensML#getAlgoParamDescription(java.lang.String)
+   */
+  public Map<String, String> getAlgoParamDescription(String algorithm) {
+    MLAlgo algo = null;
+    try {
+      algo = getAlgoForName(algorithm);
+    } catch (LensException e) {
+      LOG.error("Error getting algo description : " + algorithm, e);
+      return null;
+    }
+    if (algo instanceof BaseSparkAlgo) {
+      return ((BaseSparkAlgo) algo).getArgUsage();
+    }
+    return null;
+  }
+
+  /**
+   * Submit model test query to a remote Lens server.
+   */
+  class RemoteQueryRunner extends QueryRunner {
+
+    /** The query api url. */
+    final String queryApiUrl;
+
+    /**
+     * Instantiates a new remote query runner.
+     *
+     * @param sessionHandle the session handle
+     * @param queryApiUrl   the query api url
+     */
+    public RemoteQueryRunner(LensSessionHandle sessionHandle, String queryApiUrl) {
+      super(sessionHandle);
+      this.queryApiUrl = queryApiUrl;
+    }
+
+    /*
+     * (non-Javadoc)
+     *
+     * @see org.apache.lens.ml.TestQueryRunner#runQuery(java.lang.String)
+     */
+    @Override
+    public QueryHandle runQuery(String query) throws LensException {
+      // Create jersey client for query endpoint
+      Client client = ClientBuilder.newBuilder().register(MultiPartFeature.class).build();
+      WebTarget target = client.target(queryApiUrl);
+      final FormDataMultiPart mp = new FormDataMultiPart();
+      mp.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("sessionid").build(), sessionHandle,
+        MediaType.APPLICATION_XML_TYPE));
+      mp.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("query").build(), query));
+      mp.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("operation").build(), "execute"));
+
+      LensConf lensConf = new LensConf();
+      lensConf.addProperty(LensConfConstants.QUERY_PERSISTENT_RESULT_SET, false + "");
+      lensConf.addProperty(LensConfConstants.QUERY_PERSISTENT_RESULT_INDRIVER, false + "");
+      mp.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("conf").fileName("conf").build(), lensConf,
+        MediaType.APPLICATION_XML_TYPE));
+
+      final QueryHandle handle = target.request().post(Entity.entity(mp, MediaType.MULTIPART_FORM_DATA_TYPE),
+        QueryHandle.class);
+
+      LensQuery ctx = target.path(handle.toString()).queryParam("sessionid", sessionHandle).request()
+        .get(LensQuery.class);
+
+      QueryStatus stat = ctx.getStatus();
+      while (!stat.isFinished()) {
+        ctx = target.path(handle.toString()).queryParam("sessionid", sessionHandle).request().get(LensQuery.class);
+        stat = ctx.getStatus();
+        try {
+          Thread.sleep(500);
+        } catch (InterruptedException e) {
+          throw new LensException(e);
+        }
+      }
+
+      if (stat.getStatus() != QueryStatus.Status.SUCCESSFUL) {
+        throw new LensException("Query failed " + ctx.getQueryHandle().getHandleId() + " reason:"
+          + stat.getErrorMessage());
+      }
+
+      return ctx.getQueryHandle();
+    }
+  }
+
+  /**
+   * To lens conf.
+   *
+   * @param conf the conf
+   * @return the lens conf
+   */
+  private LensConf toLensConf(HiveConf conf) {
+    LensConf lensConf = new LensConf();
+    lensConf.getProperties().putAll(conf.getValByRegex(".*"));
+    return lensConf;
+  }
+
+  protected void registerPredictUdf(LensSessionHandle sessionHandle, QueryRunner queryRunner) throws LensException {
+    if (isUdfRegisterd(sessionHandle)) {
+      // Already registered, nothing to do
+      return;
+    }
+
+    LOG.info("Registering UDF for session " + sessionHandle.getPublicId().toString());
+    // We have to add UDF jars to the session
+    try {
+      SessionService sessionService = (SessionService) MLUtils.getServiceProvider().getService(SessionService.NAME);
+      String[] udfJars = conf.getStrings("lens.server.ml.predict.udf.jars");
+      if (udfJars != null) {
+        for (String jar : udfJars) {
+          sessionService.addResource(sessionHandle, "jar", jar);
+          LOG.info(jar + " added UDF session " + sessionHandle.getPublicId().toString());
+        }
+      }
+    } catch (Exception e) {
+      throw new LensException(e);
+    }
+
+    String regUdfQuery = "CREATE TEMPORARY FUNCTION " + HiveMLUDF.UDF_NAME + " AS '" + HiveMLUDF.class
+      .getCanonicalName() + "'";
+    queryRunner.setQueryName("register_predict_udf_" + sessionHandle.getPublicId().toString());
+    QueryHandle udfQuery = queryRunner.runQuery(regUdfQuery);
+    predictUdfStatus.put(sessionHandle, true);
+    LOG.info("Predict UDF registered for session " + sessionHandle.getPublicId().toString());
+  }
+
+  protected boolean isUdfRegisterd(LensSessionHandle sessionHandle) {
+    return predictUdfStatus.containsKey(sessionHandle);
+  }
+
+  /**
+   * Periodically check if sessions have been closed, and clear UDF registered status.
+   */
+  private class UDFStatusExpiryRunnable implements Runnable {
+    public void run() {
+      try {
+        SessionService sessionService = (SessionService) MLUtils.getServiceProvider().getService(SessionService.NAME);
+        // Clear status of sessions which are closed.
+        List<LensSessionHandle> sessions = new ArrayList<LensSessionHandle>(predictUdfStatus.keySet());
+        for (LensSessionHandle sessionHandle : sessions) {
+          if (!sessionService.isOpen(sessionHandle)) {
+            LOG.info("Session closed, removing UDF status: " + sessionHandle);
+            predictUdfStatus.remove(sessionHandle);
+          }
+        }
+      } catch (Exception exc) {
+        LOG.warn("Error clearing UDF statuses", exc);
+      }
+    }
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/0f5ea4c7/lens-ml-lib/src/main/java/org/apache/lens/ml/impl/MLRunner.java
----------------------------------------------------------------------
diff --git a/lens-ml-lib/src/main/java/org/apache/lens/ml/impl/MLRunner.java b/lens-ml-lib/src/main/java/org/apache/lens/ml/impl/MLRunner.java
new file mode 100644
index 0000000..625d020
--- /dev/null
+++ b/lens-ml-lib/src/main/java/org/apache/lens/ml/impl/MLRunner.java
@@ -0,0 +1,172 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.lens.ml.impl;
+
+import java.io.File;
+import java.io.FileInputStream;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Properties;
+
+import org.apache.lens.client.LensClient;
+import org.apache.lens.client.LensClientConfig;
+import org.apache.lens.client.LensMLClient;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.metastore.TableType;
+import org.apache.hadoop.hive.metastore.api.FieldSchema;
+import org.apache.hadoop.hive.ql.metadata.Hive;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.metadata.Table;
+import org.apache.hadoop.hive.ql.plan.AddPartitionDesc;
+import org.apache.hadoop.hive.serde.serdeConstants;
+import org.apache.hadoop.mapred.TextInputFormat;
+
+public class MLRunner {
+
+  private static final Log LOG = LogFactory.getLog(MLRunner.class);
+
+  private LensMLClient mlClient;
+  private String algoName;
+  private String database;
+  private String trainTable;
+  private String trainFile;
+  private String testTable;
+  private String testFile;
+  private String outputTable;
+  private String[] features;
+  private String labelColumn;
+  private HiveConf conf;
+
+  public void init(LensMLClient mlClient, String confDir) throws Exception {
+    File dir = new File(confDir);
+    File propFile = new File(dir, "ml.properties");
+    Properties props = new Properties();
+    props.load(new FileInputStream(propFile));
+    String feat = props.getProperty("features");
+    String trainFile = confDir + File.separator + "train.data";
+    String testFile = confDir + File.separator + "test.data";
+    init(mlClient, props.getProperty("algo"), props.getProperty("database"),
+        props.getProperty("traintable"), trainFile,
+        props.getProperty("testtable"), testFile,
+        props.getProperty("outputtable"), feat.split(","),
+        props.getProperty("labelcolumn"));
+  }
+
+  public void init(LensMLClient mlClient, String algoName,
+      String database, String trainTable, String trainFile,
+      String testTable, String testFile, String outputTable, String[] features,
+      String labelColumn) {
+    this.mlClient = mlClient;
+    this.algoName = algoName;
+    this.database = database;
+    this.trainTable = trainTable;
+    this.trainFile = trainFile;
+    this.testTable = testTable;
+    this.testFile = testFile;
+    this.outputTable = outputTable;
+    this.features = features;
+    this.labelColumn = labelColumn;
+    //hive metastore settings are loaded via lens-site.xml, so loading LensClientConfig
+    //is required
+    this.conf = new HiveConf(new LensClientConfig(), MLRunner.class);
+  }
+
+  public MLTask train() throws Exception {
+    LOG.info("Starting train & eval");
+
+    createTable(trainTable, trainFile);
+    createTable(testTable, testFile);
+    MLTask.Builder taskBuilder = new MLTask.Builder();
+    taskBuilder.algorithm(algoName).hiveConf(conf).labelColumn(labelColumn).outputTable(outputTable)
+        .client(mlClient).trainingTable(trainTable).testTable(testTable);
+
+    // Add features
+    for (String feature : features) {
+      taskBuilder.addFeatureColumn(feature);
+    }
+    MLTask task = taskBuilder.build();
+    LOG.info("Created task " + task.toString());
+    task.run();
+    return task;
+  }
+
+  public void createTable(String tableName, String dataFile) throws HiveException {
+
+    File filedataFile = new File(dataFile);
+    Path dataFilePath = new Path(filedataFile.toURI());
+    Path partDir = dataFilePath.getParent();
+
+    // Create table
+    List<FieldSchema> columns = new ArrayList<FieldSchema>();
+
+    // Label is optional. Not used for unsupervised models.
+    // If present, label will be the first column, followed by features
+    if (labelColumn != null) {
+      columns.add(new FieldSchema(labelColumn, "double", "Labelled Column"));
+    }
+
+    for (String feature : features) {
+      columns.add(new FieldSchema(feature, "double", "Feature " + feature));
+    }
+
+    Table tbl = Hive.get(conf).newTable(database + "." + tableName);
+    tbl.setTableType(TableType.MANAGED_TABLE);
+    tbl.getTTable().getSd().setCols(columns);
+    // tbl.getTTable().getParameters().putAll(new HashMap<String, String>());
+    tbl.setInputFormatClass(TextInputFormat.class);
+    tbl.setSerdeParam(serdeConstants.LINE_DELIM, "\n");
+    tbl.setSerdeParam(serdeConstants.FIELD_DELIM, " ");
+
+    List<FieldSchema> partCols = new ArrayList<FieldSchema>(1);
+    partCols.add(new FieldSchema("dummy_partition_col", "string", ""));
+    tbl.setPartCols(partCols);
+
+    Hive.get(conf).dropTable(database, tableName, false, true);
+    Hive.get(conf).createTable(tbl, true);
+    LOG.info("Created table " + tableName);
+
+    // Add partition for the data file
+    AddPartitionDesc partitionDesc = new AddPartitionDesc(database, tableName,
+        false);
+    Map<String, String> partSpec = new HashMap<String, String>();
+    partSpec.put("dummy_partition_col", "dummy_val");
+    partitionDesc.addPartition(partSpec, partDir.toUri().toString());
+    Hive.get(conf).createPartitions(partitionDesc);
+    LOG.info(tableName + ": Added partition " + partDir.toUri().toString());
+  }
+
+  public static void main(String[] args) throws Exception {
+    if (args.length < 1) {
+      System.out.println("Usage: " + MLRunner.class.getName() + " <ml-conf-dir>");
+      System.exit(-1);
+    }
+    String confDir = args[0];
+    LensMLClient client = new LensMLClient(new LensClient());
+    MLRunner runner = new MLRunner();
+    runner.init(client, confDir);
+    runner.train();
+    System.out.println("Created the Model successfully. Output Table: " + runner.outputTable);
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/0f5ea4c7/lens-ml-lib/src/main/java/org/apache/lens/ml/impl/MLTask.java
----------------------------------------------------------------------
diff --git a/lens-ml-lib/src/main/java/org/apache/lens/ml/impl/MLTask.java b/lens-ml-lib/src/main/java/org/apache/lens/ml/impl/MLTask.java
new file mode 100644
index 0000000..2867b90
--- /dev/null
+++ b/lens-ml-lib/src/main/java/org/apache/lens/ml/impl/MLTask.java
@@ -0,0 +1,285 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.lens.ml.impl;
+
+import java.util.*;
+
+import org.apache.lens.client.LensMLClient;
+import org.apache.lens.ml.api.LensML;
+import org.apache.lens.ml.api.MLTestReport;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hive.conf.HiveConf;
+
+import lombok.Getter;
+import lombok.ToString;
+
+/**
+ * Run a complete cycle of train and test (evaluation) for an ML algorithm
+ */
+@ToString
+public class MLTask implements Runnable {
+  private static final Log LOG = LogFactory.getLog(MLTask.class);
+
+  public enum State {
+    RUNNING, SUCCESSFUL, FAILED
+  }
+
+  @Getter
+  private State taskState;
+
+  /**
+   * Name of the algo/algorithm.
+   */
+  @Getter
+  private String algorithm;
+
+  /**
+   * Name of the table containing training data.
+   */
+  @Getter
+  private String trainingTable;
+
+  /**
+   * Name of the table containing test data. Optional, if not provided trainingTable itself is
+   * used for testing
+   */
+  @Getter
+  private String testTable;
+
+  /**
+   * Training table partition spec
+   */
+  @Getter
+  private String partitionSpec;
+
+  /**
+   * Name of the column which is a label for supervised algorithms.
+   */
+  @Getter
+  private String labelColumn;
+
+  /**
+   * Names of columns which are features in the training data.
+   */
+  @Getter
+  private List<String> featureColumns;
+
+  /**
+   * Configuration for the example.
+   */
+  @Getter
+  private HiveConf configuration;
+
+  private LensML ml;
+  private String taskID;
+
+  /**
+   * ml client
+   */
+  @Getter
+  private LensMLClient mlClient;
+
+  /**
+   * Output table name
+   */
+  @Getter
+  private String outputTable;
+
+  /**
+   * Extra params passed to the training algorithm
+   */
+  @Getter
+  private Map<String, String> extraParams;
+
+  @Getter
+  private String modelID;
+
+  @Getter
+  private String reportID;
+
+  /**
+   * Use ExampleTask.Builder to create an instance
+   */
+  private MLTask() {
+    // Use builder to construct the example
+    extraParams = new HashMap<String, String>();
+    taskID = UUID.randomUUID().toString();
+  }
+
+  /**
+   * Builder to create an example task
+   */
+  public static class Builder {
+    private MLTask task;
+
+    public Builder() {
+      task = new MLTask();
+    }
+
+    public Builder trainingTable(String trainingTable) {
+      task.trainingTable = trainingTable;
+      return this;
+    }
+
+    public Builder testTable(String testTable) {
+      task.testTable = testTable;
+      return this;
+    }
+
+    public Builder algorithm(String algorithm) {
+      task.algorithm = algorithm;
+      return this;
+    }
+
+    public Builder labelColumn(String labelColumn) {
+      task.labelColumn = labelColumn;
+      return this;
+    }
+
+    public Builder client(LensMLClient client) {
+      task.mlClient = client;
+      return this;
+    }
+
+    public Builder addFeatureColumn(String featureColumn) {
+      if (task.featureColumns == null) {
+        task.featureColumns = new ArrayList<String>();
+      }
+      task.featureColumns.add(featureColumn);
+      return this;
+    }
+
+    public Builder hiveConf(HiveConf hiveConf) {
+      task.configuration = hiveConf;
+      return this;
+    }
+
+
+
+    public Builder extraParam(String param, String value) {
+      task.extraParams.put(param, value);
+      return this;
+    }
+
+    public Builder partitionSpec(String partitionSpec) {
+      task.partitionSpec = partitionSpec;
+      return this;
+    }
+
+    public Builder outputTable(String outputTable) {
+      task.outputTable = outputTable;
+      return this;
+    }
+
+    public MLTask build() {
+      MLTask builtTask = task;
+      task = null;
+      return builtTask;
+    }
+
+  }
+
+  @Override
+  public void run() {
+    taskState = State.RUNNING;
+    LOG.info("Starting " + taskID);
+    try {
+      runTask();
+      taskState = State.SUCCESSFUL;
+      LOG.info("Complete " + taskID);
+    } catch (Exception e) {
+      taskState = State.FAILED;
+      LOG.info("Error running task " + taskID, e);
+    }
+  }
+
+  /**
+   * Train an ML model, with specified algorithm and input data. Do model evaluation using the evaluation data and print
+   * evaluation result
+   *
+   * @throws Exception
+   */
+  private void runTask() throws Exception {
+    if (mlClient != null) {
+      // Connect to a remote Lens server
+      ml = mlClient;
+      LOG.info("Working in client mode. Lens session handle " + mlClient.getSessionHandle().getPublicId());
+    } else {
+      // In server mode session handle has to be passed by the user as a request parameter
+      ml = MLUtils.getMLService();
+      LOG.info("Working in Lens server");
+    }
+
+    String[] algoArgs = buildTrainingArgs();
+    LOG.info("Starting task " + taskID + " algo args: " + Arrays.toString(algoArgs));
+
+    modelID = ml.train(trainingTable, algorithm, algoArgs);
+    printModelMetadata(taskID, modelID);
+
+    LOG.info("Starting test " + taskID);
+    testTable = (testTable != null) ? testTable : trainingTable;
+    MLTestReport testReport = ml.testModel(mlClient.getSessionHandle(), testTable, algorithm, modelID, outputTable);
+    reportID = testReport.getReportID();
+    printTestReport(taskID, testReport);
+    saveTask();
+  }
+
+  // Save task metadata to DB
+  private void saveTask() {
+    LOG.info("Saving task details to DB");
+  }
+
+  private void printTestReport(String exampleID, MLTestReport testReport) {
+    StringBuilder builder = new StringBuilder("Example: ").append(exampleID);
+    builder.append("\n\t");
+    builder.append("EvaluationReport: ").append(testReport.toString());
+    System.out.println(builder.toString());
+  }
+
+  private String[] buildTrainingArgs() {
+    List<String> argList = new ArrayList<String>();
+    argList.add("label");
+    argList.add(labelColumn);
+
+    // Add all the features
+    for (String featureCol : featureColumns) {
+      argList.add("feature");
+      argList.add(featureCol);
+    }
+
+    // Add extra params
+    for (String param : extraParams.keySet()) {
+      argList.add(param);
+      argList.add(extraParams.get(param));
+    }
+
+    return argList.toArray(new String[argList.size()]);
+  }
+
+  // Get the model instance and print its metadat to stdout
+  private void printModelMetadata(String exampleID, String modelID) throws Exception {
+    StringBuilder builder = new StringBuilder("Example: ").append(exampleID);
+    builder.append("\n\t");
+    builder.append("Model: ");
+    builder.append(ml.getModel(algorithm, modelID).toString());
+    System.out.println(builder.toString());
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/0f5ea4c7/lens-ml-lib/src/main/java/org/apache/lens/ml/impl/MLUtils.java
----------------------------------------------------------------------
diff --git a/lens-ml-lib/src/main/java/org/apache/lens/ml/impl/MLUtils.java b/lens-ml-lib/src/main/java/org/apache/lens/ml/impl/MLUtils.java
new file mode 100644
index 0000000..9c96d9b
--- /dev/null
+++ b/lens-ml-lib/src/main/java/org/apache/lens/ml/impl/MLUtils.java
@@ -0,0 +1,62 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.lens.ml.impl;
+
+import org.apache.lens.ml.algo.api.Algorithm;
+import org.apache.lens.ml.algo.api.MLAlgo;
+import org.apache.lens.ml.server.MLService;
+import org.apache.lens.ml.server.MLServiceImpl;
+import org.apache.lens.server.api.LensConfConstants;
+import org.apache.lens.server.api.ServiceProvider;
+import org.apache.lens.server.api.ServiceProviderFactory;
+
+import org.apache.hadoop.hive.conf.HiveConf;
+
+public final class MLUtils {
+  private MLUtils() {
+  }
+
+  private static final HiveConf HIVE_CONF;
+
+  static {
+    HIVE_CONF = new HiveConf();
+    // Add default config so that we know the service provider implementation
+    HIVE_CONF.addResource("lensserver-default.xml");
+    HIVE_CONF.addResource("lens-site.xml");
+  }
+
+  public static String getAlgoName(Class<? extends MLAlgo> algoClass) {
+    Algorithm annotation = algoClass.getAnnotation(Algorithm.class);
+    if (annotation != null) {
+      return annotation.name();
+    }
+    throw new IllegalArgumentException("Algo should be decorated with annotation - " + Algorithm.class.getName());
+  }
+
+  public static MLServiceImpl getMLService() throws Exception {
+    return getServiceProvider().getService(MLService.NAME);
+  }
+
+  public static ServiceProvider getServiceProvider() throws Exception {
+    Class<? extends ServiceProviderFactory> spfClass = HIVE_CONF.getClass(LensConfConstants.SERVICE_PROVIDER_FACTORY,
+      null, ServiceProviderFactory.class);
+    ServiceProviderFactory spf = spfClass.newInstance();
+    return spf.getServiceProvider();
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/0f5ea4c7/lens-ml-lib/src/main/java/org/apache/lens/ml/impl/ModelLoader.java
----------------------------------------------------------------------
diff --git a/lens-ml-lib/src/main/java/org/apache/lens/ml/impl/ModelLoader.java b/lens-ml-lib/src/main/java/org/apache/lens/ml/impl/ModelLoader.java
new file mode 100644
index 0000000..c0e7953
--- /dev/null
+++ b/lens-ml-lib/src/main/java/org/apache/lens/ml/impl/ModelLoader.java
@@ -0,0 +1,242 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.lens.ml.impl;
+
+import java.io.IOException;
+import java.io.ObjectInputStream;
+import java.io.ObjectOutputStream;
+import java.util.concurrent.Callable;
+import java.util.concurrent.ExecutionException;
+import java.util.concurrent.TimeUnit;
+
+import org.apache.lens.ml.algo.api.MLModel;
+import org.apache.lens.ml.api.MLTestReport;
+
+import org.apache.commons.io.IOUtils;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hive.conf.HiveConf;
+
+import com.google.common.cache.Cache;
+import com.google.common.cache.CacheBuilder;
+
/**
 * Load ML models from a FS location.
 * Models are cached in-memory (bounded size, time-based expiry); test reports
 * are serialized/deserialized with plain Java object streams.
 */
public final class ModelLoader {
  private ModelLoader() {
  }

  /** The Constant MODEL_PATH_BASE_DIR. Config key for the model storage root. */
  public static final String MODEL_PATH_BASE_DIR = "lens.ml.model.basedir";

  /** The Constant MODEL_PATH_BASE_DIR_DEFAULT. */
  public static final String MODEL_PATH_BASE_DIR_DEFAULT = "file:///tmp";

  /** The Constant LOG. */
  public static final Log LOG = LogFactory.getLog(ModelLoader.class);

  /** The Constant TEST_REPORT_BASE_DIR. Config key for the test-report root. */
  public static final String TEST_REPORT_BASE_DIR = "lens.ml.test.basedir";

  /** The Constant TEST_REPORT_BASE_DIR_DEFAULT. */
  public static final String TEST_REPORT_BASE_DIR_DEFAULT = "file:///tmp/ml_reports";

  // Model cache settings
  /** The Constant MODEL_CACHE_SIZE. Maximum number of models kept in memory. */
  public static final long MODEL_CACHE_SIZE = 10;

  /** The Constant MODEL_CACHE_TIMEOUT. */
  public static final long MODEL_CACHE_TIMEOUT = 3600000L; // one hour

  /** The model cache. Keyed by the model's FS path; entries expire after access. */
  private static Cache<Path, MLModel> modelCache = CacheBuilder.newBuilder().maximumSize(MODEL_CACHE_SIZE)
    .expireAfterAccess(MODEL_CACHE_TIMEOUT, TimeUnit.MILLISECONDS).build();

  /**
   * Gets the model location.
   *
   * @param conf      the conf
   * @param algorithm the algorithm
   * @param modelID   the model id
   * @return the model location
   */
  public static Path getModelLocation(Configuration conf, String algorithm, String modelID) {
    String modelDataBaseDir = conf.get(MODEL_PATH_BASE_DIR, MODEL_PATH_BASE_DIR_DEFAULT);
    // Model location format - <modelDataBaseDir>/<algorithm>/modelID
    return new Path(new Path(new Path(modelDataBaseDir), algorithm), modelID);
  }

  /**
   * Load model.
   *
   * @param conf      the conf
   * @param algorithm the algorithm
   * @param modelID   the model id
   * @return the ML model
   * @throws IOException Signals that an I/O exception has occurred.
   */
  public static MLModel loadModel(Configuration conf, String algorithm, String modelID) throws IOException {
    final Path modelPath = getModelLocation(conf, algorithm, modelID);
    LOG.info("Loading model for algorithm: " + algorithm + " modelID: " + modelID + " At path: "
      + modelPath.toUri().toString());
    try {
      // Callable runs only on cache miss; subsequent lookups hit the cache.
      return modelCache.get(modelPath, new Callable<MLModel>() {
        @Override
        public MLModel call() throws Exception {
          // NOTE(review): resolves the FileSystem against a fresh HiveConf rather
          // than the caller-supplied conf (which is not final and so cannot be
          // captured here) — confirm the caller's FS settings are not needed.
          FileSystem fs = modelPath.getFileSystem(new HiveConf());
          if (!fs.exists(modelPath)) {
            throw new IOException("Model path not found " + modelPath.toString());
          }

          ObjectInputStream ois = null;
          try {
            ois = new ObjectInputStream(fs.open(modelPath));
            MLModel model = (MLModel) ois.readObject();
            LOG.info("Loaded model " + model.getId() + " from location " + modelPath);
            return model;
          } catch (ClassNotFoundException e) {
            // Deserialization failure surfaces as IOException to callers.
            throw new IOException(e);
          } finally {
            IOUtils.closeQuietly(ois);
          }
        }
      });
    } catch (ExecutionException exc) {
      // Unwrap Guava's wrapper: callers only deal in IOException.
      throw new IOException(exc);
    }
  }

  /**
   * Clear cache.
   * NOTE(review): Guava's Cache.cleanUp performs pending maintenance (expired
   * entries); it does not force-evict live entries — confirm this is the intent.
   */
  public static void clearCache() {
    modelCache.cleanUp();
  }

  /**
   * Gets the test report path.
   *
   * @param conf      the conf
   * @param algorithm the algorithm
   * @param report    the report
   * @return the test report path: <testReportDir>/<algorithm>/<report>
   */
  public static Path getTestReportPath(Configuration conf, String algorithm, String report) {
    String testReportDir = conf.get(TEST_REPORT_BASE_DIR, TEST_REPORT_BASE_DIR_DEFAULT);
    return new Path(new Path(testReportDir, algorithm), report);
  }

  /**
   * Save test report.
   * Creates the report and per-algorithm directories if missing, then writes the
   * report via Java serialization to <base>/<algorithm>/<reportID>.
   *
   * @param conf   the conf
   * @param report the report
   * @throws IOException Signals that an I/O exception has occurred.
   */
  public static void saveTestReport(Configuration conf, MLTestReport report) throws IOException {
    Path reportDir = new Path(conf.get(TEST_REPORT_BASE_DIR, TEST_REPORT_BASE_DIR_DEFAULT));
    FileSystem fs = reportDir.getFileSystem(conf);

    if (!fs.exists(reportDir)) {
      LOG.info("Creating test report dir " + reportDir.toUri().toString());
      fs.mkdirs(reportDir);
    }

    Path algoDir = new Path(reportDir, report.getAlgorithm());

    if (!fs.exists(algoDir)) {
      LOG.info("Creating algorithm report dir " + algoDir.toUri().toString());
      fs.mkdirs(algoDir);
    }

    ObjectOutputStream reportOutputStream = null;
    Path reportSaveLocation;
    try {
      reportSaveLocation = new Path(algoDir, report.getReportID());
      reportOutputStream = new ObjectOutputStream(fs.create(reportSaveLocation));
      reportOutputStream.writeObject(report);
      reportOutputStream.flush();
    } catch (IOException ioexc) {
      LOG.error("Error saving test report " + report.getReportID(), ioexc);
      throw ioexc;
    } finally {
      IOUtils.closeQuietly(reportOutputStream);
    }
    LOG.info("Saved report " + report.getReportID() + " at location " + reportSaveLocation.toUri());
  }

  /**
   * Load report.
   *
   * @param conf      the conf
   * @param algorithm the algorithm
   * @param reportID  the report id
   * @return the ML test report, or null when the report could not be read
   * @throws IOException Signals that an I/O exception has occurred.
   */
  public static MLTestReport loadReport(Configuration conf, String algorithm, String reportID) throws IOException {
    Path reportLocation = getTestReportPath(conf, algorithm, reportID);
    FileSystem fs = reportLocation.getFileSystem(conf);
    ObjectInputStream reportStream = null;
    MLTestReport report = null;

    try {
      reportStream = new ObjectInputStream(fs.open(reportLocation));
      report = (MLTestReport) reportStream.readObject();
    } catch (IOException ioex) {
      // NOTE(review): IO errors are logged and swallowed here, so callers get
      // null rather than an exception — confirm best-effort is intentional.
      LOG.error("Error reading report " + reportLocation, ioex);
    } catch (ClassNotFoundException e) {
      throw new IOException(e);
    } finally {
      IOUtils.closeQuietly(reportStream);
    }
    return report;
  }

  /**
   * Delete model.
   * Non-recursive delete of the single model file.
   *
   * @param conf      the conf
   * @param algorithm the algorithm
   * @param modelID   the model id
   * @throws IOException Signals that an I/O exception has occurred.
   */
  public static void deleteModel(HiveConf conf, String algorithm, String modelID) throws IOException {
    Path modelLocation = getModelLocation(conf, algorithm, modelID);
    FileSystem fs = modelLocation.getFileSystem(conf);
    fs.delete(modelLocation, false);
  }

  /**
   * Delete test report.
   * Non-recursive delete of the single report file.
   *
   * @param conf      the conf
   * @param algorithm the algorithm
   * @param reportID  the report id
   * @throws IOException Signals that an I/O exception has occurred.
   */
  public static void deleteTestReport(HiveConf conf, String algorithm, String reportID) throws IOException {
    Path reportPath = getTestReportPath(conf, algorithm, reportID);
    reportPath.getFileSystem(conf).delete(reportPath, false);
  }
}

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/0f5ea4c7/lens-ml-lib/src/main/java/org/apache/lens/ml/impl/QueryRunner.java
----------------------------------------------------------------------
diff --git a/lens-ml-lib/src/main/java/org/apache/lens/ml/impl/QueryRunner.java b/lens-ml-lib/src/main/java/org/apache/lens/ml/impl/QueryRunner.java
new file mode 100644
index 0000000..2f2e017
--- /dev/null
+++ b/lens-ml-lib/src/main/java/org/apache/lens/ml/impl/QueryRunner.java
@@ -0,0 +1,56 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.lens.ml.impl;
+
+import org.apache.lens.api.LensException;
+import org.apache.lens.api.LensSessionHandle;
+import org.apache.lens.api.query.QueryHandle;
+
+import lombok.Getter;
+import lombok.Setter;
+
/**
 * Run a query against a Lens server.
 * Each instance is bound to a single Lens session at construction time;
 * concrete subclasses decide how the query is actually submitted.
 */
public abstract class QueryRunner {

  /** The session handle. All queries issued by this runner use this session. */
  protected final LensSessionHandle sessionHandle;

  // Optional display name attached to submitted queries (e.g. for server-side tracking).
  @Getter @Setter
  protected String queryName;

  /**
   * Instantiates a new query runner.
   *
   * @param sessionHandle the session handle
   */
  public QueryRunner(LensSessionHandle sessionHandle) {
    this.sessionHandle = sessionHandle;
  }

  /**
   * Run query.
   *
   * @param query the query
   * @return the query handle
   * @throws LensException the lens exception
   */
  public abstract QueryHandle runQuery(String query) throws LensException;
}

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/0f5ea4c7/lens-ml-lib/src/main/java/org/apache/lens/ml/impl/TableTestingSpec.java
----------------------------------------------------------------------
diff --git a/lens-ml-lib/src/main/java/org/apache/lens/ml/impl/TableTestingSpec.java b/lens-ml-lib/src/main/java/org/apache/lens/ml/impl/TableTestingSpec.java
new file mode 100644
index 0000000..34b2a3f
--- /dev/null
+++ b/lens-ml-lib/src/main/java/org/apache/lens/ml/impl/TableTestingSpec.java
@@ -0,0 +1,325 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.lens.ml.impl;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+
+import org.apache.commons.lang3.StringUtils;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.metastore.api.FieldSchema;
+import org.apache.hadoop.hive.ql.metadata.Hive;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.metadata.Table;
+
+import lombok.Getter;
+
+/**
+ * Table specification for running test on a table.
+ */
+public class TableTestingSpec {
+
+  /** The Constant LOG. */
+  public static final Log LOG = LogFactory.getLog(TableTestingSpec.class);
+
+  /** The db. */
+  private String db;
+
+  /** The table containing input data. */
+  private String inputTable;
+
+  // TODO use partition condition
+  /** The partition filter. */
+  private String partitionFilter;
+
+  /** The feature columns. */
+  private List<String> featureColumns;
+
+  /** The label column. */
+  private String labelColumn;
+
+  /** The output column. */
+  private String outputColumn;
+
+  /** The output table. */
+  private String outputTable;
+
+  /** The conf. */
+  private transient HiveConf conf;
+
+  /** The algorithm. */
+  private String algorithm;
+
+  /** The model id. */
+  private String modelID;
+
+  @Getter
+  private boolean outputTableExists;
+
+  @Getter
+  private String testID;
+
+  private HashMap<String, FieldSchema> columnNameToFieldSchema;
+
+  /**
+   * The Class TableTestingSpecBuilder.
+   */
+  public static class TableTestingSpecBuilder {
+
+    /** The spec. */
+    private final TableTestingSpec spec;
+
+    /**
+     * Instantiates a new table testing spec builder.
+     */
+    public TableTestingSpecBuilder() {
+      spec = new TableTestingSpec();
+    }
+
+    /**
+     * Database.
+     *
+     * @param database the database
+     * @return the table testing spec builder
+     */
+    public TableTestingSpecBuilder database(String database) {
+      spec.db = database;
+      return this;
+    }
+
+    /**
+     * Set the input table
+     *
+     * @param table the table
+     * @return the table testing spec builder
+     */
+    public TableTestingSpecBuilder inputTable(String table) {
+      spec.inputTable = table;
+      return this;
+    }
+
+    /**
+     * Partition filter for input table
+     *
+     * @param partFilter the part filter
+     * @return the table testing spec builder
+     */
+    public TableTestingSpecBuilder partitionFilter(String partFilter) {
+      spec.partitionFilter = partFilter;
+      return this;
+    }
+
+    /**
+     * Feature columns.
+     *
+     * @param featureColumns the feature columns
+     * @return the table testing spec builder
+     */
+    public TableTestingSpecBuilder featureColumns(List<String> featureColumns) {
+      spec.featureColumns = featureColumns;
+      return this;
+    }
+
+    /**
+     * Labe column.
+     *
+     * @param labelColumn the label column
+     * @return the table testing spec builder
+     */
+    public TableTestingSpecBuilder lableColumn(String labelColumn) {
+      spec.labelColumn = labelColumn;
+      return this;
+    }
+
+    /**
+     * Output column.
+     *
+     * @param outputColumn the output column
+     * @return the table testing spec builder
+     */
+    public TableTestingSpecBuilder outputColumn(String outputColumn) {
+      spec.outputColumn = outputColumn;
+      return this;
+    }
+
+    /**
+     * Output table.
+     *
+     * @param table the table
+     * @return the table testing spec builder
+     */
+    public TableTestingSpecBuilder outputTable(String table) {
+      spec.outputTable = table;
+      return this;
+    }
+
+    /**
+     * Hive conf.
+     *
+     * @param conf the conf
+     * @return the table testing spec builder
+     */
+    public TableTestingSpecBuilder hiveConf(HiveConf conf) {
+      spec.conf = conf;
+      return this;
+    }
+
+    /**
+     * Algorithm.
+     *
+     * @param algorithm the algorithm
+     * @return the table testing spec builder
+     */
+    public TableTestingSpecBuilder algorithm(String algorithm) {
+      spec.algorithm = algorithm;
+      return this;
+    }
+
+    /**
+     * Model id.
+     *
+     * @param modelID the model id
+     * @return the table testing spec builder
+     */
+    public TableTestingSpecBuilder modelID(String modelID) {
+      spec.modelID = modelID;
+      return this;
+    }
+
+    /**
+     * Builds the.
+     *
+     * @return the table testing spec
+     */
+    public TableTestingSpec build() {
+      return spec;
+    }
+
+    /**
+     * Set the unique test id
+     *
+     * @param testID
+     * @return
+     */
+    public TableTestingSpecBuilder testID(String testID) {
+      spec.testID = testID;
+      return this;
+    }
+  }
+
+  /**
+   * New builder.
+   *
+   * @return the table testing spec builder
+   */
+  public static TableTestingSpecBuilder newBuilder() {
+    return new TableTestingSpecBuilder();
+  }
+
+  /**
+   * Validate.
+   *
+   * @return true, if successful
+   */
+  public boolean validate() {
+    List<FieldSchema> columns;
+    try {
+      Hive metastoreClient = Hive.get(conf);
+      Table tbl = (db == null) ? metastoreClient.getTable(inputTable) : metastoreClient.getTable(db, inputTable);
+      columns = tbl.getAllCols();
+      columnNameToFieldSchema = new HashMap<String, FieldSchema>();
+
+      for (FieldSchema fieldSchema : columns) {
+        columnNameToFieldSchema.put(fieldSchema.getName(), fieldSchema);
+      }
+
+      // Check if output table exists
+      Table outTbl = metastoreClient.getTable(db == null ? "default" : db, outputTable, false);
+      outputTableExists = (outTbl != null);
+    } catch (HiveException exc) {
+      LOG.error("Error getting table info " + toString(), exc);
+      return false;
+    }
+
+    // Check if labeled column and feature columns are contained in the table
+    List<String> testTableColumns = new ArrayList<String>(columns.size());
+    for (FieldSchema column : columns) {
+      testTableColumns.add(column.getName());
+    }
+
+    if (!testTableColumns.containsAll(featureColumns)) {
+      LOG.info("Invalid feature columns: " + featureColumns + ". Actual columns in table:" + testTableColumns);
+      return false;
+    }
+
+    if (!testTableColumns.contains(labelColumn)) {
+      LOG.info("Invalid label column: " + labelColumn + ". Actual columns in table:" + testTableColumns);
+      return false;
+    }
+
+    if (StringUtils.isBlank(outputColumn)) {
+      LOG.info("Output column is required");
+      return false;
+    }
+
+    if (StringUtils.isBlank(outputTable)) {
+      LOG.info("Output table is required");
+      return false;
+    }
+    return true;
+  }
+
+  public String getTestQuery() {
+    if (!validate()) {
+      return null;
+    }
+
+    // We always insert a dynamic partition
+    StringBuilder q = new StringBuilder("INSERT OVERWRITE TABLE " + outputTable + " PARTITION (part_testid='" + testID
+      + "')  SELECT ");
+    String featureCols = StringUtils.join(featureColumns, ",");
+    q.append(featureCols).append(",").append(labelColumn).append(", ").append("predict(").append("'").append(algorithm)
+      .append("', ").append("'").append(modelID).append("', ").append(featureCols).append(") ").append(outputColumn)
+      .append(" FROM ").append(inputTable);
+
+    return q.toString();
+  }
+
+  public String getCreateOutputTableQuery() {
+    StringBuilder createTableQuery = new StringBuilder("CREATE TABLE IF NOT EXISTS ").append(outputTable).append("(");
+    // Output table contains feature columns, label column, output column
+    List<String> outputTableColumns = new ArrayList<String>();
+    for (String featureCol : featureColumns) {
+      outputTableColumns.add(featureCol + " " + columnNameToFieldSchema.get(featureCol).getType());
+    }
+
+    outputTableColumns.add(labelColumn + " " + columnNameToFieldSchema.get(labelColumn).getType());
+    outputTableColumns.add(outputColumn + " string");
+
+    createTableQuery.append(StringUtils.join(outputTableColumns, ", "));
+
+    // Append partition column
+    createTableQuery.append(") PARTITIONED BY (part_testid string)");
+
+    return createTableQuery.toString();
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/0f5ea4c7/lens-ml-lib/src/main/java/org/apache/lens/ml/server/MLApp.java
----------------------------------------------------------------------
diff --git a/lens-ml-lib/src/main/java/org/apache/lens/ml/server/MLApp.java b/lens-ml-lib/src/main/java/org/apache/lens/ml/server/MLApp.java
new file mode 100644
index 0000000..e6e3c02
--- /dev/null
+++ b/lens-ml-lib/src/main/java/org/apache/lens/ml/server/MLApp.java
@@ -0,0 +1,60 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.lens.ml.server;
+
+import java.util.HashSet;
+import java.util.Set;
+
+import javax.ws.rs.ApplicationPath;
+import javax.ws.rs.core.Application;
+
+import org.glassfish.jersey.filter.LoggingFilter;
+import org.glassfish.jersey.media.multipart.MultiPartFeature;
+
+@ApplicationPath("/ml")
+public class MLApp extends Application {
+
+  private final Set<Class<?>> classes;
+
+  /**
+   * Pass additional classes when running in test mode
+   *
+   * @param additionalClasses
+   */
+  public MLApp(Class<?>... additionalClasses) {
+    classes = new HashSet<Class<?>>();
+
+    // register root resource
+    classes.add(MLServiceResource.class);
+    classes.add(MultiPartFeature.class);
+    classes.add(LoggingFilter.class);
+    for (Class<?> cls : additionalClasses) {
+      classes.add(cls);
+    }
+
+  }
+
+  /**
+   * Get classes for this resource
+   */
+  @Override
+  public Set<Class<?>> getClasses() {
+    return classes;
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/0f5ea4c7/lens-ml-lib/src/main/java/org/apache/lens/ml/server/MLService.java
----------------------------------------------------------------------
diff --git a/lens-ml-lib/src/main/java/org/apache/lens/ml/server/MLService.java b/lens-ml-lib/src/main/java/org/apache/lens/ml/server/MLService.java
new file mode 100644
index 0000000..f8b7cd1
--- /dev/null
+++ b/lens-ml-lib/src/main/java/org/apache/lens/ml/server/MLService.java
@@ -0,0 +1,27 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.lens.ml.server;
+
+import org.apache.lens.ml.api.LensML;
+
+/**
+ * The Interface MLService.
+ */
+public interface MLService extends LensML {
+  // Marker interface: adds no methods of its own; it lets the Lens server
+  // register and look up the ML implementation as a distinct service while
+  // reusing the LensML contract.
+}

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/0f5ea4c7/lens-ml-lib/src/main/java/org/apache/lens/ml/server/MLServiceImpl.java
----------------------------------------------------------------------
diff --git a/lens-ml-lib/src/main/java/org/apache/lens/ml/server/MLServiceImpl.java b/lens-ml-lib/src/main/java/org/apache/lens/ml/server/MLServiceImpl.java
new file mode 100644
index 0000000..f3e8ec1
--- /dev/null
+++ b/lens-ml-lib/src/main/java/org/apache/lens/ml/server/MLServiceImpl.java
@@ -0,0 +1,329 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.lens.ml.server;
+
+import java.util.List;
+import java.util.Map;
+
+import org.apache.lens.api.LensConf;
+import org.apache.lens.api.LensException;
+import org.apache.lens.api.LensSessionHandle;
+import org.apache.lens.api.query.LensQuery;
+import org.apache.lens.api.query.QueryHandle;
+import org.apache.lens.api.query.QueryStatus;
+import org.apache.lens.ml.algo.api.*;
+import org.apache.lens.ml.api.MLTestReport;
+import org.apache.lens.ml.impl.HiveMLUDF;
+import org.apache.lens.ml.impl.LensMLImpl;
+import org.apache.lens.ml.impl.ModelLoader;
+import org.apache.lens.ml.impl.QueryRunner;
+import org.apache.lens.server.api.LensConfConstants;
+import org.apache.lens.server.api.ServiceProvider;
+import org.apache.lens.server.api.ServiceProviderFactory;
+import org.apache.lens.server.api.query.QueryExecutionService;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.ql.exec.FunctionRegistry;
+import org.apache.hive.service.CompositeService;
+
+/**
+ * The Class MLServiceImpl.
+ */
+public class MLServiceImpl extends CompositeService implements MLService {
+
+  /** The Constant LOG. */
+  public static final Log LOG = LogFactory.getLog(LensMLImpl.class);
+
+  /** The ml. */
+  private LensMLImpl ml;
+
+  /** The service provider. */
+  private ServiceProvider serviceProvider;
+
+  /** The service provider factory. */
+  private ServiceProviderFactory serviceProviderFactory;
+
+  /**
+   * Instantiates a new ML service impl.
+   */
+  public MLServiceImpl() {
+    this(NAME);
+  }
+
+  /**
+   * Instantiates a new ML service impl.
+   *
+   * @param name the name
+   */
+  public MLServiceImpl(String name) {
+    super(name);
+  }
+
+  @Override
+  public List<String> getAlgorithms() {
+    return ml.getAlgorithms();
+  }
+
+  /*
+   * (non-Javadoc)
+   *
+   * @see org.apache.lens.ml.LensML#getAlgoForName(java.lang.String)
+   */
+  @Override
+  public MLAlgo getAlgoForName(String algorithm) throws LensException {
+    return ml.getAlgoForName(algorithm);
+  }
+
+  /*
+   * (non-Javadoc)
+   *
+   * @see org.apache.lens.ml.LensML#train(java.lang.String, java.lang.String, java.lang.String[])
+   */
+  @Override
+  public String train(String table, String algorithm, String[] args) throws LensException {
+    return ml.train(table, algorithm, args);
+  }
+
+  /*
+   * (non-Javadoc)
+   *
+   * @see org.apache.lens.ml.LensML#getModels(java.lang.String)
+   */
+  @Override
+  public List<String> getModels(String algorithm) throws LensException {
+    return ml.getModels(algorithm);
+  }
+
+  /*
+   * (non-Javadoc)
+   *
+   * @see org.apache.lens.ml.LensML#getModel(java.lang.String, java.lang.String)
+   */
+  @Override
+  public MLModel getModel(String algorithm, String modelId) throws LensException {
+    return ml.getModel(algorithm, modelId);
+  }
+
+  private ServiceProvider getServiceProvider() {
+    if (serviceProvider == null) {
+      serviceProvider = serviceProviderFactory.getServiceProvider();
+    }
+    return serviceProvider;
+  }
+
+  /**
+   * Gets the service provider factory.
+   *
+   * @param conf the conf
+   * @return the service provider factory
+   */
+  private ServiceProviderFactory getServiceProviderFactory(HiveConf conf) {
+    Class<?> spfClass = conf.getClass(LensConfConstants.SERVICE_PROVIDER_FACTORY, ServiceProviderFactory.class);
+    try {
+      return (ServiceProviderFactory) spfClass.newInstance();
+    } catch (InstantiationException e) {
+      throw new RuntimeException(e);
+    } catch (IllegalAccessException e) {
+      throw new RuntimeException(e);
+    }
+  }
+
+  /*
+   * (non-Javadoc)
+   *
+   * @see org.apache.hive.service.CompositeService#init(org.apache.hadoop.hive.conf.HiveConf)
+   */
+  @Override
+  public synchronized void init(HiveConf hiveConf) {
+    ml = new LensMLImpl(hiveConf);
+    ml.init(hiveConf);
+    super.init(hiveConf);
+    serviceProviderFactory = getServiceProviderFactory(hiveConf);
+    LOG.info("Inited ML service");
+  }
+
+  /*
+   * (non-Javadoc)
+   *
+   * @see org.apache.hive.service.CompositeService#start()
+   */
+  @Override
+  public synchronized void start() {
+    ml.start();
+    super.start();
+    LOG.info("Started ML service");
+  }
+
+  /*
+   * (non-Javadoc)
+   *
+   * @see org.apache.hive.service.CompositeService#stop()
+   */
+  @Override
+  public synchronized void stop() {
+    ml.stop();
+    super.stop();
+    LOG.info("Stopped ML service");
+  }
+
+  /**
+   * Clear models.
+   */
+  public void clearModels() {
+    ModelLoader.clearCache();
+  }
+
+  /*
+   * (non-Javadoc)
+   *
+   * @see org.apache.lens.ml.LensML#getModelPath(java.lang.String, java.lang.String)
+   */
+  @Override
+  public String getModelPath(String algorithm, String modelID) {
+    return ml.getModelPath(algorithm, modelID);
+  }
+
+  /*
+   * (non-Javadoc)
+   *
+   * @see org.apache.lens.ml.LensML#testModel(org.apache.lens.api.LensSessionHandle, java.lang.String, java.lang.String,
+   * java.lang.String)
+   */
+  @Override
+  public MLTestReport testModel(LensSessionHandle sessionHandle, String table, String algorithm, String modelID,
+    String outputTable) throws LensException {
+    return ml.testModel(sessionHandle, table, algorithm, modelID, new DirectQueryRunner(sessionHandle), outputTable);
+  }
+
+  /*
+   * (non-Javadoc)
+   *
+   * @see org.apache.lens.ml.LensML#getTestReports(java.lang.String)
+   */
+  @Override
+  public List<String> getTestReports(String algorithm) throws LensException {
+    return ml.getTestReports(algorithm);
+  }
+
+  /*
+   * (non-Javadoc)
+   *
+   * @see org.apache.lens.ml.LensML#getTestReport(java.lang.String, java.lang.String)
+   */
+  @Override
+  public MLTestReport getTestReport(String algorithm, String reportID) throws LensException {
+    return ml.getTestReport(algorithm, reportID);
+  }
+
+  /*
+   * (non-Javadoc)
+   *
+   * @see org.apache.lens.ml.LensML#predict(java.lang.String, java.lang.String, java.lang.Object[])
+   */
+  @Override
+  public Object predict(String algorithm, String modelID, Object[] features) throws LensException {
+    return ml.predict(algorithm, modelID, features);
+  }
+
+  /*
+   * (non-Javadoc)
+   *
+   * @see org.apache.lens.ml.LensML#deleteModel(java.lang.String, java.lang.String)
+   */
+  @Override
+  public void deleteModel(String algorithm, String modelID) throws LensException {
+    ml.deleteModel(algorithm, modelID);
+  }
+
+  /*
+   * (non-Javadoc)
+   *
+   * @see org.apache.lens.ml.LensML#deleteTestReport(java.lang.String, java.lang.String)
+   */
+  @Override
+  public void deleteTestReport(String algorithm, String reportID) throws LensException {
+    ml.deleteTestReport(algorithm, reportID);
+  }
+
+  /**
+   * Run the test model query directly in the current lens server process.
+   */
+  private class DirectQueryRunner extends QueryRunner {
+
+    /**
+     * Instantiates a new direct query runner.
+     *
+     * @param sessionHandle the session handle
+     */
+    public DirectQueryRunner(LensSessionHandle sessionHandle) {
+      super(sessionHandle);
+    }
+
+    /*
+     * (non-Javadoc)
+     *
+     * @see org.apache.lens.ml.TestQueryRunner#runQuery(java.lang.String)
+     */
+    @Override
+    public QueryHandle runQuery(String testQuery) throws LensException {
+      FunctionRegistry.registerTemporaryFunction("predict", HiveMLUDF.class);
+      LOG.info("Registered predict UDF");
+      // Run the query in query executions service
+      QueryExecutionService queryService = (QueryExecutionService) getServiceProvider().getService("query");
+
+      LensConf queryConf = new LensConf();
+      queryConf.addProperty(LensConfConstants.QUERY_PERSISTENT_RESULT_SET, false + "");
+      queryConf.addProperty(LensConfConstants.QUERY_PERSISTENT_RESULT_INDRIVER, false + "");
+
+      QueryHandle testQueryHandle = queryService.executeAsync(sessionHandle, testQuery, queryConf, queryName);
+
+      // Wait for test query to complete
+      LensQuery query = queryService.getQuery(sessionHandle, testQueryHandle);
+      LOG.info("Submitted query " + testQueryHandle.getHandleId());
+      while (!query.getStatus().isFinished()) {
+        try {
+          Thread.sleep(500);
+        } catch (InterruptedException e) {
+          throw new LensException(e);
+        }
+
+        query = queryService.getQuery(sessionHandle, testQueryHandle);
+      }
+
+      if (query.getStatus().getStatus() != QueryStatus.Status.SUCCESSFUL) {
+        throw new LensException("Failed to run test query: " + testQueryHandle.getHandleId() + " reason= "
+          + query.getStatus().getErrorMessage());
+      }
+
+      return testQueryHandle;
+    }
+  }
+
+  /*
+   * (non-Javadoc)
+   *
+   * @see org.apache.lens.ml.LensML#getAlgoParamDescription(java.lang.String)
+   */
+  @Override
+  public Map<String, String> getAlgoParamDescription(String algorithm) {
+    return ml.getAlgoParamDescription(algorithm);
+  }
+}


[42/50] [abbrv] incubator-lens git commit: LENS-68: Add number of possible distinct values for dim-attributes

Posted by am...@apache.org.
LENS-68: Add number of possible distinct values for dim-attributes


Project: http://git-wip-us.apache.org/repos/asf/incubator-lens/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-lens/commit/3aa5fa31
Tree: http://git-wip-us.apache.org/repos/asf/incubator-lens/tree/3aa5fa31
Diff: http://git-wip-us.apache.org/repos/asf/incubator-lens/diff/3aa5fa31

Branch: refs/heads/current-release-line
Commit: 3aa5fa31ff113ad0bffd9fb19addcdd6ed003717
Parents: 20949e0
Author: Raju Bairishetti <ra...@inmobi.com>
Authored: Fri Apr 10 20:06:10 2015 +0530
Committer: Rajat Khandelwal <ra...@gmail.com>
Committed: Fri Apr 10 20:06:10 2015 +0530

----------------------------------------------------------------------
 lens-api/src/main/resources/cube-0.1.xsd        | 16 ++++++
 .../lens/cube/metadata/BaseDimAttribute.java    | 51 +++++++++++++++-----
 .../lens/cube/metadata/InlineDimAttribute.java  |  2 +-
 .../lens/cube/metadata/MetastoreConstants.java  |  1 +
 .../lens/cube/metadata/MetastoreUtil.java       |  6 ++-
 .../cube/metadata/ReferencedDimAtrribute.java   | 35 ++++++++++----
 .../cube/metadata/TestCubeMetastoreClient.java  |  6 ++-
 .../apache/lens/server/metastore/JAXBUtils.java | 22 +++++++--
 .../server/metastore/TestMetastoreService.java  | 13 +++++
 9 files changed, 123 insertions(+), 29 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/3aa5fa31/lens-api/src/main/resources/cube-0.1.xsd
----------------------------------------------------------------------
diff --git a/lens-api/src/main/resources/cube-0.1.xsd b/lens-api/src/main/resources/cube-0.1.xsd
index 18ee682..1de4258 100644
--- a/lens-api/src/main/resources/cube-0.1.xsd
+++ b/lens-api/src/main/resources/cube-0.1.xsd
@@ -352,6 +352,22 @@
         </xs:documentation>
       </xs:annotation>
     </xs:attribute>
+    <xs:attribute type="xs:long" name="num_distinct_values" use="optional">
+      <xs:annotation>
+        <xs:documentation>
+          Specifies an indicative value of how many distinct values the dim attribute can take.
+          This would give an idea of how big the grouping will be when an attribute is chosen for groupby expressions.
+          This is just an approximate value.
+        </xs:documentation>
+      </xs:annotation>
+    </xs:attribute>
+    <xs:attribute type="xs:boolean" name="join_key" default="false">
+      <xs:annotation>
+        <xs:documentation>
+          This flag will tell whether the attribute can be used as a join key or not
+        </xs:documentation>
+      </xs:annotation>
+    </xs:attribute>
   </xs:complexType>
 
   <xs:complexType name="x_dim_attributes">

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/3aa5fa31/lens-cube/src/main/java/org/apache/lens/cube/metadata/BaseDimAttribute.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/metadata/BaseDimAttribute.java b/lens-cube/src/main/java/org/apache/lens/cube/metadata/BaseDimAttribute.java
index c4efe52..385a8d2 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/metadata/BaseDimAttribute.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/metadata/BaseDimAttribute.java
@@ -23,27 +23,47 @@ import java.util.Map;
 
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
 
+import static com.google.common.base.Preconditions.*;
+
+import com.google.common.base.Optional;
+
+import lombok.Getter;
+import lombok.ToString;
+import lombok.extern.apachecommons.CommonsLog;
+
+@CommonsLog @ToString(callSuper=true, includeFieldNames=true)
 public class BaseDimAttribute extends CubeDimAttribute {
-  private final String type;
+  @Getter private final String type;
+  @Getter private Optional<Long> numOfDistinctValues = Optional.absent();
 
   public BaseDimAttribute(FieldSchema column) {
     this(column, null, null, null, null);
   }
 
   public BaseDimAttribute(FieldSchema column, String displayString, Date startTime, Date endTime, Double cost) {
-    super(column.getName(), column.getComment(), displayString, startTime, endTime, cost);
-    this.type = column.getType();
-    assert (type != null);
+    this(column, displayString, startTime, endTime, cost, null);
   }
 
-  public String getType() {
-    return type;
+  public BaseDimAttribute(FieldSchema column, String displayString, Date startTime, Date endTime, Double cost,
+      Long numOfDistinctValues) {
+    super(column.getName(), column.getComment(), displayString, startTime, endTime, cost);
+    this.type = column.getType();
+    checkNotNull(type);
+    Optional<Long> optionalNumOfDistnctValues = Optional.fromNullable(numOfDistinctValues);
+    if (optionalNumOfDistnctValues.isPresent()) {
+      this.numOfDistinctValues = optionalNumOfDistnctValues;
+      checkArgument(this.numOfDistinctValues.get() > 0);
+    }
   }
 
   @Override
   public void addProperties(Map<String, String> props) {
     super.addProperties(props);
     props.put(MetastoreUtil.getDimTypePropertyKey(getName()), type);
+    if (numOfDistinctValues.isPresent()) {
+      props.put(MetastoreUtil.getDimNumOfDistinctValuesPropertyKey(getName()),
+          String.valueOf(numOfDistinctValues.get()));
+    }
   }
 
   /**
@@ -55,12 +75,25 @@ public class BaseDimAttribute extends CubeDimAttribute {
   public BaseDimAttribute(String name, Map<String, String> props) {
     super(name, props);
     this.type = getDimType(name, props);
+    this.numOfDistinctValues = getDimNumOfDistinctValues(name, props);
   }
 
   public static String getDimType(String name, Map<String, String> props) {
     return props.get(MetastoreUtil.getDimTypePropertyKey(name));
   }
 
+  public static Optional<Long> getDimNumOfDistinctValues(String name, Map<String, String> props) {
+    if (props.containsKey(MetastoreUtil.getDimNumOfDistinctValuesPropertyKey(name))) {
+      try {
+        return Optional.of(Long.parseLong((props.get(MetastoreUtil.getDimNumOfDistinctValuesPropertyKey(name)))));
+      } catch (NumberFormatException ne) {
+        log.error("NumberFormat exception while parsing the num of distinct vlaues "
+            + props.get(MetastoreUtil.getDimNumOfDistinctValuesPropertyKey(name)));
+      }
+    }
+    return Optional.absent();
+  }
+
   @Override
   public int hashCode() {
     final int prime = 31;
@@ -84,10 +117,4 @@ public class BaseDimAttribute extends CubeDimAttribute {
     }
     return true;
   }
-
-  @Override
-  public String toString() {
-    String str = super.toString() + ":" + getType();
-    return str;
-  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/3aa5fa31/lens-cube/src/main/java/org/apache/lens/cube/metadata/InlineDimAttribute.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/metadata/InlineDimAttribute.java b/lens-cube/src/main/java/org/apache/lens/cube/metadata/InlineDimAttribute.java
index e23bd8c..e8d2fae 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/metadata/InlineDimAttribute.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/metadata/InlineDimAttribute.java
@@ -36,7 +36,7 @@ public class InlineDimAttribute extends BaseDimAttribute {
 
   public InlineDimAttribute(FieldSchema column, String displayString, Date startTime, Date endTime, Double cost,
     List<String> values) {
-    super(column, displayString, startTime, endTime, cost);
+    super(column, displayString, startTime, endTime, cost, Long.valueOf(values.size()));
     this.values = values;
   }
 

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/3aa5fa31/lens-cube/src/main/java/org/apache/lens/cube/metadata/MetastoreConstants.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/metadata/MetastoreConstants.java b/lens-cube/src/main/java/org/apache/lens/cube/metadata/MetastoreConstants.java
index 7e6360e..e25fc81 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/metadata/MetastoreConstants.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/metadata/MetastoreConstants.java
@@ -66,6 +66,7 @@ public final class MetastoreConstants {
   public static final String COST_SFX = ".cost";
   public static final String DESC_SFX = ".description";
   public static final String DISPLAY_SFX = ".displaystring";
+  public static final String NUM_DISTINCT_VALUES = ".num.distinct.values";
 
   // measure constants
   public static final String MEASURE_KEY_PFX = "cube.measure.";

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/3aa5fa31/lens-cube/src/main/java/org/apache/lens/cube/metadata/MetastoreUtil.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/metadata/MetastoreUtil.java b/lens-cube/src/main/java/org/apache/lens/cube/metadata/MetastoreUtil.java
index 203ff58..30253d3 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/metadata/MetastoreUtil.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/metadata/MetastoreUtil.java
@@ -89,6 +89,10 @@ public class MetastoreUtil {
     return getDimensionKeyPrefix(dimName) + TYPE_SFX;
   }
 
+  public static final String getDimNumOfDistinctValuesPropertyKey(String dimName) {
+    return getDimensionKeyPrefix(dimName) + NUM_DISTINCT_VALUES;
+  }
+
   public static String getHierachyElementKeyPFX(String dimName) {
     return getDimensionKeyPrefix(dimName) + HIERARCHY_SFX;
   }
@@ -393,7 +397,7 @@ public class MetastoreUtil {
   }
 
   static <E extends Named> void addNameStrings(Map<String, String> props, String key,
-    Collection<E> set, int maxLength) {
+      Collection<E> set, int maxLength) {
     List<String> namedStrings = getNamedStrs(set, maxLength);
     props.put(key + ".size", String.valueOf(namedStrings.size()));
     for (int i = 0; i < namedStrings.size(); i++) {

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/3aa5fa31/lens-cube/src/main/java/org/apache/lens/cube/metadata/ReferencedDimAtrribute.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/metadata/ReferencedDimAtrribute.java b/lens-cube/src/main/java/org/apache/lens/cube/metadata/ReferencedDimAtrribute.java
index a8ece2d..742c6a0 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/metadata/ReferencedDimAtrribute.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/metadata/ReferencedDimAtrribute.java
@@ -34,7 +34,7 @@ public class ReferencedDimAtrribute extends BaseDimAttribute {
   private final List<TableReference> references = new ArrayList<TableReference>();
   // boolean whether to say the key is only a denormalized variable kept or can
   // be used in join resolution as well
-  private Boolean isJoinKey = true;
+  @Getter private Boolean isJoinKey = true;
   @Getter
   private String chainName = null;
   @Getter
@@ -45,13 +45,18 @@ public class ReferencedDimAtrribute extends BaseDimAttribute {
   }
 
   public ReferencedDimAtrribute(FieldSchema column, String displayString, TableReference reference, Date startTime,
-    Date endTime, Double cost) {
+      Date endTime, Double cost) {
     this(column, displayString, reference, startTime, endTime, cost, true);
   }
 
   public ReferencedDimAtrribute(FieldSchema column, String displayString, TableReference reference, Date startTime,
-    Date endTime, Double cost, boolean isJoinKey) {
-    super(column, displayString, startTime, endTime, cost);
+      Date endTime, Double cost, boolean isJoinKey) {
+    this(column, displayString, reference, startTime, endTime, cost, isJoinKey, null);
+  }
+
+  public ReferencedDimAtrribute(FieldSchema column, String displayString, TableReference reference, Date startTime,
+      Date endTime, Double cost, boolean isJoinKey, Long numOfDistinctValues) {
+    super(column, displayString, startTime, endTime, cost, numOfDistinctValues);
     this.references.add(reference);
     this.isJoinKey = isJoinKey;
   }
@@ -61,20 +66,30 @@ public class ReferencedDimAtrribute extends BaseDimAttribute {
   }
 
   public ReferencedDimAtrribute(FieldSchema column, String displayString, Collection<TableReference> references,
-    Date startTime, Date endTime, Double cost) {
+      Date startTime, Date endTime, Double cost) {
     this(column, displayString, references, startTime, endTime, cost, true);
   }
 
   public ReferencedDimAtrribute(FieldSchema column, String displayString, Collection<TableReference> references,
-    Date startTime, Date endTime, Double cost, boolean isJoinKey) {
-    super(column, displayString, startTime, endTime, cost);
+      Date startTime, Date endTime, Double cost, boolean isJoinKey) {
+    this(column, displayString, references, startTime, endTime, cost, isJoinKey, null);
+  }
+
+  public ReferencedDimAtrribute(FieldSchema column, String displayString, Collection<TableReference> references,
+      Date startTime, Date endTime, Double cost, boolean isJoinKey, Long numOfDistinctValues) {
+    super(column, displayString, startTime, endTime, cost, numOfDistinctValues);
     this.references.addAll(references);
     this.isJoinKey = isJoinKey;
   }
 
   public ReferencedDimAtrribute(FieldSchema column, String displayString, String chainName, String refColumn,
-    Date startTime, Date endTime, Double cost) {
-    super(column, displayString, startTime, endTime, cost);
+      Date startTime, Date endTime, Double cost) {
+    this(column, displayString, chainName, refColumn, startTime, endTime, cost, null);
+  }
+
+  public ReferencedDimAtrribute(FieldSchema column, String displayString, String chainName, String refColumn,
+      Date startTime, Date endTime, Double cost, Long numOfDistinctValues) {
+    super(column, displayString, startTime, endTime, cost, numOfDistinctValues);
     this.chainName = chainName.toLowerCase();
     this.refColumn = refColumn.toLowerCase();
     this.isJoinKey = false;
@@ -100,7 +115,7 @@ public class ReferencedDimAtrribute extends BaseDimAttribute {
       props.put(MetastoreUtil.getDimRefChainColumnKey(getName()), refColumn);
     } else {
       props.put(MetastoreUtil.getDimensionSrcReferenceKey(getName()),
-        MetastoreUtil.getReferencesString(references));
+          MetastoreUtil.getReferencesString(references));
       props.put(MetastoreUtil.getDimUseAsJoinKey(getName()), isJoinKey.toString());
     }
   }

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/3aa5fa31/lens-cube/src/test/java/org/apache/lens/cube/metadata/TestCubeMetastoreClient.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/cube/metadata/TestCubeMetastoreClient.java b/lens-cube/src/test/java/org/apache/lens/cube/metadata/TestCubeMetastoreClient.java
index 7337ee9..fe29d25 100644
--- a/lens-cube/src/test/java/org/apache/lens/cube/metadata/TestCubeMetastoreClient.java
+++ b/lens-cube/src/test/java/org/apache/lens/cube/metadata/TestCubeMetastoreClient.java
@@ -198,7 +198,7 @@ public class TestCubeMetastoreClient {
 
     List<CubeDimAttribute> locationHierarchyWithStartTime = new ArrayList<CubeDimAttribute>();
     locationHierarchyWithStartTime.add(new ReferencedDimAtrribute(new FieldSchema("zipcode2", "int", "zip"),
-      "Zip refer2", new TableReference("zipdim", "zipcode"), now, now, 100.0));
+      "Zip refer2", new TableReference("zipdim", "zipcode"), now, now, 100.0, true, 1000L));
     locationHierarchyWithStartTime.add(new ReferencedDimAtrribute(new FieldSchema("cityid2", "int", "city"),
       "City refer2", new TableReference("citydim", "id"), now, null, null));
     locationHierarchyWithStartTime.add(new ReferencedDimAtrribute(new FieldSchema("stateid2", "int", "state"),
@@ -413,7 +413,8 @@ public class TestCubeMetastoreClient {
     // alter dimension
     Table tbl = client.getHiveTable(zipDim.getName());
     Dimension toAlter = new Dimension(tbl);
-    toAlter.alterAttribute(new BaseDimAttribute(new FieldSchema("newZipDim", "int", "new dim added")));
+    toAlter.alterAttribute(new BaseDimAttribute(new FieldSchema("newZipDim", "int", "new dim added"), null, null, null,
+      null, 1000L));
     toAlter.alterAttribute(new ReferencedDimAtrribute(new FieldSchema("newRefDim", "int", "new ref-dim added"),
       "New city ref", new TableReference("citydim", "id")));
     toAlter.alterAttribute(new BaseDimAttribute(new FieldSchema("f2", "varchar", "modified field")));
@@ -444,6 +445,7 @@ public class TestCubeMetastoreClient {
     CubeDimAttribute newzipdim = altered.getAttributeByName("newZipDim");
     Assert.assertTrue(newzipdim instanceof BaseDimAttribute);
     Assert.assertEquals(((BaseDimAttribute) newzipdim).getType(), "int");
+    Assert.assertEquals((((BaseDimAttribute) newzipdim).getNumOfDistinctValues().get()), Long.valueOf(1000));
 
     CubeDimAttribute newrefdim = altered.getAttributeByName("newRefDim");
     Assert.assertTrue(newrefdim instanceof ReferencedDimAtrribute);

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/3aa5fa31/lens-server/src/main/java/org/apache/lens/server/metastore/JAXBUtils.java
----------------------------------------------------------------------
diff --git a/lens-server/src/main/java/org/apache/lens/server/metastore/JAXBUtils.java b/lens-server/src/main/java/org/apache/lens/server/metastore/JAXBUtils.java
index b76ddc0..3cf7011 100644
--- a/lens-server/src/main/java/org/apache/lens/server/metastore/JAXBUtils.java
+++ b/lens-server/src/main/java/org/apache/lens/server/metastore/JAXBUtils.java
@@ -40,6 +40,8 @@ import org.apache.hadoop.hive.serde.serdeConstants;
 import org.apache.log4j.LogManager;
 import org.apache.log4j.Logger;
 
+import com.google.common.base.Optional;
+
 /**
  * Utilities for converting to and from JAXB types to hive.ql.metadata.cube types
  */
@@ -173,7 +175,9 @@ public final class JAXBUtils {
         dimRefs,
         startDate,
         endDate,
-        null
+        null,
+        xd.isJoinKey(),
+        xd.getNumDistinctValues()
       );
     } else if (xd.getRefSpec() != null && xd.getRefSpec().getChainRefColumn() != null) {
       hiveDim = new ReferencedDimAtrribute(new FieldSchema(xd.getName(), xd.getType().toLowerCase(),
@@ -183,7 +187,8 @@ public final class JAXBUtils {
         xd.getRefSpec().getChainRefColumn().getRefCol(),
         startDate,
         endDate,
-        null
+        null,
+        xd.getNumDistinctValues()
       );
     } else {
       hiveDim = new BaseDimAttribute(new FieldSchema(xd.getName(), xd.getType().toLowerCase(),
@@ -191,7 +196,8 @@ public final class JAXBUtils {
         xd.getDisplayString(),
         startDate,
         endDate,
-        null
+        null,
+        xd.getNumDistinctValues()
       );
     }
 
@@ -282,15 +288,25 @@ public final class JAXBUtils {
           xcc.setDestTable(baseTable.getChainByName(rd.getChainName()).getDestTable());
         }
         refspec.setChainRefColumn(xcc);
+        xd.setJoinKey(Boolean.valueOf(false));
       } else {
         refspec.setTableReferences(new XTableReferences());
         refspec.getTableReferences().getTableReference().addAll(xTabReferencesFromHiveTabReferences(dimRefs));
+        xd.setJoinKey(rd.useAsJoinKey());
       }
       xd.setRefSpec(refspec);
       xd.setType(rd.getType());
+      Optional<Long> numOfDistinctValues = rd.getNumOfDistinctValues();
+      if (numOfDistinctValues.isPresent()) {
+        xd.setNumDistinctValues(numOfDistinctValues.get());
+      }
     } else if (cd instanceof BaseDimAttribute) {
       BaseDimAttribute bd = (BaseDimAttribute) cd;
       xd.setType(bd.getType());
+      Optional<Long> numOfDistinctValues = bd.getNumOfDistinctValues();
+      if (numOfDistinctValues.isPresent()) {
+        xd.setNumDistinctValues(numOfDistinctValues.get());
+      }
     }
     return xd;
   }

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/3aa5fa31/lens-server/src/test/java/org/apache/lens/server/metastore/TestMetastoreService.java
----------------------------------------------------------------------
diff --git a/lens-server/src/test/java/org/apache/lens/server/metastore/TestMetastoreService.java b/lens-server/src/test/java/org/apache/lens/server/metastore/TestMetastoreService.java
index 7795c73..a35a8ff 100644
--- a/lens-server/src/test/java/org/apache/lens/server/metastore/TestMetastoreService.java
+++ b/lens-server/src/test/java/org/apache/lens/server/metastore/TestMetastoreService.java
@@ -281,6 +281,7 @@ public class TestMetastoreService extends LensJerseyTest {
     xd1.setDisplayString("Dimension1");
     // Don't set endtime on this dim to validate null handling on server side
     xd1.setStartTime(startDate);
+    xd1.setNumDistinctValues(2000L);
 
     XDimAttribute xd2 = cubeObjectFactory.createXDimAttribute();
     xd2.setName("dim2");
@@ -300,6 +301,7 @@ public class TestMetastoreService extends LensJerseyTest {
     xcc.setRefCol("col2");
     xd3.setRefSpec(cubeObjectFactory.createXDimAttributeRefSpec());
     xd3.getRefSpec().setChainRefColumn(xcc);
+    xd3.setNumDistinctValues(1000L);
 
     // add attribute with complex type
     XDimAttribute xd4 = cubeObjectFactory.createXDimAttribute();
@@ -623,6 +625,9 @@ public class TestMetastoreService extends LensJerseyTest {
       Cube hcube = (Cube) JAXBUtils.hiveCubeFromXCube(actual, null);
       assertEquals(hcube.getDimAttributeByName("dim1").getDescription(), "first dimension");
       assertEquals(hcube.getDimAttributeByName("dim1").getDisplayString(), "Dimension1");
+      assertEquals((((BaseDimAttribute) hcube.getDimAttributeByName("dim1")).getNumOfDistinctValues().get()),
+          Long.valueOf(2000));
+
       assertNotNull(hcube.getDimAttributeByName("testdim2col2"));
       assertEquals(hcube.getDimAttributeByName("testdim2col2").getDisplayString(), "Chained Dimension");
       assertEquals(hcube.getDimAttributeByName("testdim2col2").getDescription(), "ref chained dimension");
@@ -631,6 +636,14 @@ public class TestMetastoreService extends LensJerseyTest {
       assertEquals(((ReferencedDimAtrribute) hcube.getDimAttributeByName("testdim2col2")).getType(), "string");
       assertEquals(((ReferencedDimAtrribute) hcube.getDimAttributeByName("testdim2col2")).getChainName(), "chain1");
       assertEquals(((ReferencedDimAtrribute) hcube.getDimAttributeByName("testdim2col2")).getRefColumn(), "col2");
+      assertEquals((((ReferencedDimAtrribute) hcube.getDimAttributeByName("testdim2col2"))
+          .getNumOfDistinctValues().get()), Long.valueOf(1000));
+      assertEquals((((ReferencedDimAtrribute) hcube.getDimAttributeByName("testdim2col2"))
+          .getNumOfDistinctValues().get()), Long.valueOf(1000));
+
+      assertEquals(((BaseDimAttribute) hcube.getDimAttributeByName("dim2")).getNumOfDistinctValues().isPresent(),
+          false);
+
       assertNotNull(hcube.getMeasureByName("msr1"));
       assertEquals(hcube.getMeasureByName("msr1").getDescription(), "first measure");
       assertEquals(hcube.getMeasureByName("msr1").getDisplayString(), "Measure1");


[27/50] [abbrv] incubator-lens git commit: Lens-465 : Refactor ml packages. (sharad)

Posted by am...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/0f5ea4c7/lens-ml-lib/src/main/java/org/apache/lens/ml/algo/spark/SparkMLDriver.java
----------------------------------------------------------------------
diff --git a/lens-ml-lib/src/main/java/org/apache/lens/ml/algo/spark/SparkMLDriver.java b/lens-ml-lib/src/main/java/org/apache/lens/ml/algo/spark/SparkMLDriver.java
new file mode 100644
index 0000000..c955268
--- /dev/null
+++ b/lens-ml-lib/src/main/java/org/apache/lens/ml/algo/spark/SparkMLDriver.java
@@ -0,0 +1,278 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.lens.ml.algo.spark;
+
+import java.io.File;
+import java.io.FilenameFilter;
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.lens.api.LensConf;
+import org.apache.lens.api.LensException;
+import org.apache.lens.ml.algo.api.MLAlgo;
+import org.apache.lens.ml.algo.api.MLDriver;
+import org.apache.lens.ml.algo.lib.Algorithms;
+import org.apache.lens.ml.algo.spark.dt.DecisionTreeAlgo;
+import org.apache.lens.ml.algo.spark.lr.LogisticRegressionAlgo;
+import org.apache.lens.ml.algo.spark.nb.NaiveBayesAlgo;
+import org.apache.lens.ml.algo.spark.svm.SVMAlgo;
+
+import org.apache.commons.lang.StringUtils;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.spark.SparkConf;
+import org.apache.spark.api.java.JavaSparkContext;
+
+/**
+ * The Class SparkMLDriver.
+ */
+public class SparkMLDriver implements MLDriver {
+
+  /** The Constant LOG. */
+  public static final Log LOG = LogFactory.getLog(SparkMLDriver.class);
+
+  /** The owns spark context. */
+  private boolean ownsSparkContext = true;
+
+  /**
+   * The Enum SparkMasterMode.
+   */
+  private enum SparkMasterMode {
+    // Embedded mode used in tests
+    /** The embedded. */
+    EMBEDDED,
+    // Yarn client and Yarn cluster modes are used when deploying the app to Yarn cluster
+    /** The yarn client. */
+    YARN_CLIENT,
+
+    /** The yarn cluster. */
+    YARN_CLUSTER
+  }
+
+  /** The algorithms. */
+  private final Algorithms algorithms = new Algorithms();
+
+  /** The client mode. */
+  private SparkMasterMode clientMode = SparkMasterMode.EMBEDDED;
+
+  /** The is started. */
+  private boolean isStarted;
+
+  /** The spark conf. */
+  private SparkConf sparkConf;
+
+  /** The spark context. */
+  private JavaSparkContext sparkContext;
+
+  /**
+   * Use spark context.
+   *
+   * @param jsc the jsc
+   */
+  public void useSparkContext(JavaSparkContext jsc) {
+    ownsSparkContext = false;
+    this.sparkContext = jsc;
+  }
+
+  /*
+   * (non-Javadoc)
+   *
+   * @see org.apache.lens.ml.MLDriver#isAlgoSupported(java.lang.String)
+   */
+  @Override
+  public boolean isAlgoSupported(String name) {
+    return algorithms.isAlgoSupported(name);
+  }
+
+  /*
+   * (non-Javadoc)
+   *
+   * @see org.apache.lens.ml.MLDriver#getAlgoInstance(java.lang.String)
+   */
+  @Override
+  public MLAlgo getAlgoInstance(String name) throws LensException {
+    checkStarted();
+
+    if (!isAlgoSupported(name)) {
+      return null;
+    }
+
+    MLAlgo algo = null;
+    try {
+      algo = algorithms.getAlgoForName(name);
+      if (algo instanceof BaseSparkAlgo) {
+        ((BaseSparkAlgo) algo).setSparkContext(sparkContext);
+      }
+    } catch (LensException exc) {
+      LOG.error("Error creating algo object", exc);
+    }
+    return algo;
+  }
+
+  /**
+   * Register algos.
+   */
+  private void registerAlgos() {
+    algorithms.register(NaiveBayesAlgo.class);
+    algorithms.register(SVMAlgo.class);
+    algorithms.register(LogisticRegressionAlgo.class);
+    algorithms.register(DecisionTreeAlgo.class);
+  }
+
+  /*
+   * (non-Javadoc)
+   *
+   * @see org.apache.lens.ml.MLDriver#init(org.apache.lens.api.LensConf)
+   */
+  @Override
+  public void init(LensConf conf) throws LensException {
+    sparkConf = new SparkConf();
+    registerAlgos();
+    for (String key : conf.getProperties().keySet()) {
+      if (key.startsWith("lens.ml.sparkdriver.")) {
+        sparkConf.set(key.substring("lens.ml.sparkdriver.".length()), conf.getProperties().get(key));
+      }
+    }
+
+    String sparkAppMaster = sparkConf.get("spark.master");
+    if ("yarn-client".equalsIgnoreCase(sparkAppMaster)) {
+      clientMode = SparkMasterMode.YARN_CLIENT;
+    } else if ("yarn-cluster".equalsIgnoreCase(sparkAppMaster)) {
+      clientMode = SparkMasterMode.YARN_CLUSTER;
+    } else if ("local".equalsIgnoreCase(sparkAppMaster) || StringUtils.isBlank(sparkAppMaster)) {
+      clientMode = SparkMasterMode.EMBEDDED;
+    } else {
+      throw new IllegalArgumentException("Invalid master mode " + sparkAppMaster);
+    }
+
+    if (clientMode == SparkMasterMode.YARN_CLIENT || clientMode == SparkMasterMode.YARN_CLUSTER) {
+      String sparkHome = System.getenv("SPARK_HOME");
+      if (StringUtils.isNotBlank(sparkHome)) {
+        sparkConf.setSparkHome(sparkHome);
+      }
+
+      // If SPARK_HOME is not set, SparkConf can read from the Lens-site.xml or System properties.
+      if (StringUtils.isBlank(sparkConf.get("spark.home"))) {
+        throw new IllegalArgumentException("Spark home is not set");
+      }
+
+      LOG.info("Spark home is set to " + sparkConf.get("spark.home"));
+    }
+
+    sparkConf.setAppName("lens-ml");
+  }
+
+  /*
+   * (non-Javadoc)
+   *
+   * @see org.apache.lens.ml.MLDriver#start()
+   */
+  @Override
+  public void start() throws LensException {
+    if (sparkContext == null) {
+      sparkContext = new JavaSparkContext(sparkConf);
+    }
+
+    // Adding jars to spark context is only required when running in yarn-client mode
+    if (clientMode != SparkMasterMode.EMBEDDED) {
+      // TODO Figure out only necessary set of JARs to be added for HCatalog
+      // Add hcatalog and hive jars
+      String hiveLocation = System.getenv("HIVE_HOME");
+
+      if (StringUtils.isBlank(hiveLocation)) {
+        throw new LensException("HIVE_HOME is not set");
+      }
+
+      LOG.info("HIVE_HOME at " + hiveLocation);
+
+      File hiveLibDir = new File(hiveLocation, "lib");
+      FilenameFilter jarFileFilter = new FilenameFilter() {
+        @Override
+        public boolean accept(File file, String s) {
+          return s.endsWith(".jar");
+        }
+      };
+
+      List<String> jarFiles = new ArrayList<String>();
+      // Add hive jars
+      for (File jarFile : hiveLibDir.listFiles(jarFileFilter)) {
+        jarFiles.add(jarFile.getAbsolutePath());
+        LOG.info("Adding HIVE jar " + jarFile.getAbsolutePath());
+        sparkContext.addJar(jarFile.getAbsolutePath());
+      }
+
+      // Add hcatalog jars
+      File hcatalogDir = new File(hiveLocation + "/hcatalog/share/hcatalog");
+      for (File jarFile : hcatalogDir.listFiles(jarFileFilter)) {
+        jarFiles.add(jarFile.getAbsolutePath());
+        LOG.info("Adding HCATALOG jar " + jarFile.getAbsolutePath());
+        sparkContext.addJar(jarFile.getAbsolutePath());
+      }
+
+      // Add the current jar
+      String[] lensSparkLibJars = JavaSparkContext.jarOfClass(SparkMLDriver.class);
+      for (String lensSparkJar : lensSparkLibJars) {
+        LOG.info("Adding Lens JAR " + lensSparkJar);
+        sparkContext.addJar(lensSparkJar);
+      }
+    }
+
+    isStarted = true;
+    LOG.info("Created Spark context for app: '" + sparkContext.appName() + "', Spark master: " + sparkContext.master());
+  }
+
+  /*
+   * (non-Javadoc)
+   *
+   * @see org.apache.lens.ml.MLDriver#stop()
+   */
+  @Override
+  public void stop() throws LensException {
+    if (!isStarted) {
+      LOG.warn("Spark driver was not started");
+      return;
+    }
+    isStarted = false;
+    if (ownsSparkContext) {
+      sparkContext.stop();
+    }
+    LOG.info("Stopped spark context " + this);
+  }
+
+  @Override
+  public List<String> getAlgoNames() {
+    return algorithms.getAlgorithmNames();
+  }
+
+  /**
+   * Check started.
+   *
+   * @throws LensException the lens exception
+   */
+  public void checkStarted() throws LensException {
+    if (!isStarted) {
+      throw new LensException("Spark driver is not started yet");
+    }
+  }
+
+  public JavaSparkContext getSparkContext() {
+    return sparkContext;
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/0f5ea4c7/lens-ml-lib/src/main/java/org/apache/lens/ml/algo/spark/TableTrainingSpec.java
----------------------------------------------------------------------
diff --git a/lens-ml-lib/src/main/java/org/apache/lens/ml/algo/spark/TableTrainingSpec.java b/lens-ml-lib/src/main/java/org/apache/lens/ml/algo/spark/TableTrainingSpec.java
new file mode 100644
index 0000000..33fd801
--- /dev/null
+++ b/lens-ml-lib/src/main/java/org/apache/lens/ml/algo/spark/TableTrainingSpec.java
@@ -0,0 +1,433 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.lens.ml.algo.spark;
+
+import java.io.IOException;
+import java.io.Serializable;
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.lens.api.LensException;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.io.WritableComparable;
+import org.apache.hive.hcatalog.data.HCatRecord;
+import org.apache.hive.hcatalog.data.schema.HCatFieldSchema;
+import org.apache.hive.hcatalog.data.schema.HCatSchema;
+import org.apache.hive.hcatalog.mapreduce.HCatInputFormat;
+import org.apache.spark.api.java.JavaPairRDD;
+import org.apache.spark.api.java.JavaRDD;
+import org.apache.spark.api.java.JavaSparkContext;
+import org.apache.spark.api.java.function.Function;
+import org.apache.spark.mllib.regression.LabeledPoint;
+import org.apache.spark.rdd.RDD;
+
+import com.google.common.base.Preconditions;
+import lombok.Getter;
+import lombok.ToString;
+
+/**
+ * The Class TableTrainingSpec.
+ */
+@ToString
+public class TableTrainingSpec implements Serializable {
+
+  /** The Constant LOG. */
+  public static final Log LOG = LogFactory.getLog(TableTrainingSpec.class);
+
+  /** The training rdd. */
+  @Getter
+  private transient RDD<LabeledPoint> trainingRDD;
+
+  /** The testing rdd. */
+  @Getter
+  private transient RDD<LabeledPoint> testingRDD;
+
+  /** The database. */
+  @Getter
+  private String database;
+
+  /** The table. */
+  @Getter
+  private String table;
+
+  /** The partition filter. */
+  @Getter
+  private String partitionFilter;
+
+  /** The feature columns. */
+  @Getter
+  private List<String> featureColumns;
+
+  /** The label column. */
+  @Getter
+  private String labelColumn;
+
+  /** The conf. */
+  @Getter
+  private transient HiveConf conf;
+
+  // By default all samples are considered for training
+  /** The split training. */
+  private boolean splitTraining;
+
+  /** The training fraction. */
+  private double trainingFraction = 1.0;
+
+  /** The label pos. */
+  int labelPos;
+
+  /** The feature positions. */
+  int[] featurePositions;
+
+  /** The num features. */
+  int numFeatures;
+
+  /** The labeled rdd. */
+  transient JavaRDD<LabeledPoint> labeledRDD;
+
+  /**
+   * New builder.
+   *
+   * @return the table training spec builder
+   */
+  public static TableTrainingSpecBuilder newBuilder() {
+    return new TableTrainingSpecBuilder();
+  }
+
+  /**
+   * The Class TableTrainingSpecBuilder.
+   */
+  public static class TableTrainingSpecBuilder {
+
+    /** The spec. */
+    final TableTrainingSpec spec;
+
+    /**
+     * Instantiates a new table training spec builder.
+     */
+    public TableTrainingSpecBuilder() {
+      spec = new TableTrainingSpec();
+    }
+
+    /**
+     * Hive conf.
+     *
+     * @param conf the conf
+     * @return the table training spec builder
+     */
+    public TableTrainingSpecBuilder hiveConf(HiveConf conf) {
+      spec.conf = conf;
+      return this;
+    }
+
+    /**
+     * Database.
+     *
+     * @param db the db
+     * @return the table training spec builder
+     */
+    public TableTrainingSpecBuilder database(String db) {
+      spec.database = db;
+      return this;
+    }
+
+    /**
+     * Table.
+     *
+     * @param table the table
+     * @return the table training spec builder
+     */
+    public TableTrainingSpecBuilder table(String table) {
+      spec.table = table;
+      return this;
+    }
+
+    /**
+     * Partition filter.
+     *
+     * @param partFilter the part filter
+     * @return the table training spec builder
+     */
+    public TableTrainingSpecBuilder partitionFilter(String partFilter) {
+      spec.partitionFilter = partFilter;
+      return this;
+    }
+
+    /**
+     * Label column.
+     *
+     * @param labelColumn the label column
+     * @return the table training spec builder
+     */
+    public TableTrainingSpecBuilder labelColumn(String labelColumn) {
+      spec.labelColumn = labelColumn;
+      return this;
+    }
+
+    /**
+     * Feature columns.
+     *
+     * @param featureColumns the feature columns
+     * @return the table training spec builder
+     */
+    public TableTrainingSpecBuilder featureColumns(List<String> featureColumns) {
+      spec.featureColumns = featureColumns;
+      return this;
+    }
+
+    /**
+     * Builds the.
+     *
+     * @return the table training spec
+     */
+    public TableTrainingSpec build() {
+      return spec;
+    }
+
+    /**
+     * Training fraction.
+     *
+     * @param trainingFraction the training fraction
+     * @return the table training spec builder
+     */
+    public TableTrainingSpecBuilder trainingFraction(double trainingFraction) {
+      Preconditions.checkArgument(trainingFraction >= 0 && trainingFraction <= 1.0,
+        "Training fraction shoule be between 0 and 1");
+      spec.trainingFraction = trainingFraction;
+      spec.splitTraining = true;
+      return this;
+    }
+  }
+
+  /**
+   * The Class DataSample.
+   */
+  public static class DataSample implements Serializable {
+
+    /** The labeled point. */
+    private final LabeledPoint labeledPoint;
+
+    /** The sample. */
+    private final double sample;
+
+    /**
+     * Instantiates a new data sample.
+     *
+     * @param labeledPoint the labeled point
+     */
+    public DataSample(LabeledPoint labeledPoint) {
+      sample = Math.random();
+      this.labeledPoint = labeledPoint;
+    }
+  }
+
+  /**
+   * The Class TrainingFilter.
+   */
+  public static class TrainingFilter implements Function<DataSample, Boolean> {
+
+    /** The training fraction. */
+    private double trainingFraction;
+
+    /**
+     * Instantiates a new training filter.
+     *
+     * @param fraction the fraction
+     */
+    public TrainingFilter(double fraction) {
+      trainingFraction = fraction;
+    }
+
+    /*
+     * (non-Javadoc)
+     *
+     * @see org.apache.spark.api.java.function.Function#call(java.lang.Object)
+     */
+    @Override
+    public Boolean call(DataSample v1) throws Exception {
+      return v1.sample <= trainingFraction;
+    }
+  }
+
+  /**
+   * The Class TestingFilter.
+   */
+  public static class TestingFilter implements Function<DataSample, Boolean> {
+
+    /** The training fraction. */
+    private double trainingFraction;
+
+    /**
+     * Instantiates a new testing filter.
+     *
+     * @param fraction the fraction
+     */
+    public TestingFilter(double fraction) {
+      trainingFraction = fraction;
+    }
+
+    /*
+     * (non-Javadoc)
+     *
+     * @see org.apache.spark.api.java.function.Function#call(java.lang.Object)
+     */
+    @Override
+    public Boolean call(DataSample v1) throws Exception {
+      return v1.sample > trainingFraction;
+    }
+  }
+
+  /**
+   * The Class GetLabeledPoint.
+   */
+  public static class GetLabeledPoint implements Function<DataSample, LabeledPoint> {
+
+    /*
+     * (non-Javadoc)
+     *
+     * @see org.apache.spark.api.java.function.Function#call(java.lang.Object)
+     */
+    @Override
+    public LabeledPoint call(DataSample v1) throws Exception {
+      return v1.labeledPoint;
+    }
+  }
+
+  /**
+   * Validate.
+   *
+   * @return true, if successful
+   */
+  boolean validate() {
+    List<HCatFieldSchema> columns;
+    try {
+      HCatInputFormat.setInput(conf, database == null ? "default" : database, table, partitionFilter);
+      HCatSchema tableSchema = HCatInputFormat.getTableSchema(conf);
+      columns = tableSchema.getFields();
+    } catch (IOException exc) {
+      LOG.error("Error getting table info " + toString(), exc);
+      return false;
+    }
+
+    LOG.info(table + " columns " + columns.toString());
+
+    boolean valid = false;
+    if (columns != null && !columns.isEmpty()) {
+      // Check labeled column
+      List<String> columnNames = new ArrayList<String>();
+      for (HCatFieldSchema col : columns) {
+        columnNames.add(col.getName());
+      }
+
+      // Need at least one feature column and one label column
+      valid = columnNames.contains(labelColumn) && columnNames.size() > 1;
+
+      if (valid) {
+        labelPos = columnNames.indexOf(labelColumn);
+
+        // Check feature columns
+        if (featureColumns == null || featureColumns.isEmpty()) {
+          // feature columns are not provided, so all columns except label column are feature columns
+          featurePositions = new int[columnNames.size() - 1];
+          int p = 0;
+          for (int i = 0; i < columnNames.size(); i++) {
+            if (i == labelPos) {
+              continue;
+            }
+            featurePositions[p++] = i;
+          }
+
+          columnNames.remove(labelPos);
+          featureColumns = columnNames;
+        } else {
+          // Feature columns were provided, verify all feature columns are present in the table
+          valid = columnNames.containsAll(featureColumns);
+          if (valid) {
+            // Get feature positions
+            featurePositions = new int[featureColumns.size()];
+            for (int i = 0; i < featureColumns.size(); i++) {
+              featurePositions[i] = columnNames.indexOf(featureColumns.get(i));
+            }
+          }
+        }
+        numFeatures = featureColumns.size();
+      }
+    }
+
+    return valid;
+  }
+
+  /**
+   * Creates the RDDs.
+   *
+   * @param sparkContext the spark context
+   * @throws LensException the lens exception
+   */
+  public void createRDDs(JavaSparkContext sparkContext) throws LensException {
+    // Validate the spec
+    if (!validate()) {
+      throw new LensException("Table spec not valid: " + toString());
+    }
+
+    LOG.info("Creating RDDs with spec " + toString());
+
+    // Get the RDD for table
+    JavaPairRDD<WritableComparable, HCatRecord> tableRDD;
+    try {
+      tableRDD = HiveTableRDD.createHiveTableRDD(sparkContext, conf, database, table, partitionFilter);
+    } catch (IOException e) {
+      throw new LensException(e);
+    }
+
+    // Map into trainable RDD
+    // TODO: Figure out a way to use custom value mappers
+    FeatureValueMapper[] valueMappers = new FeatureValueMapper[numFeatures];
+    final DoubleValueMapper doubleMapper = new DoubleValueMapper();
+    for (int i = 0; i < numFeatures; i++) {
+      valueMappers[i] = doubleMapper;
+    }
+
+    ColumnFeatureFunction trainPrepFunction = new ColumnFeatureFunction(featurePositions, valueMappers, labelPos,
+      numFeatures, 0);
+    labeledRDD = tableRDD.map(trainPrepFunction);
+
+    if (splitTraining) {
+      // We have to split the RDD between a training RDD and a testing RDD
+      LOG.info("Splitting RDD for table " + database + "." + table + " with split fraction " + trainingFraction);
+      JavaRDD<DataSample> sampledRDD = labeledRDD.map(new Function<LabeledPoint, DataSample>() {
+        @Override
+        public DataSample call(LabeledPoint v1) throws Exception {
+          return new DataSample(v1);
+        }
+      });
+
+      trainingRDD = sampledRDD.filter(new TrainingFilter(trainingFraction)).map(new GetLabeledPoint()).rdd();
+      testingRDD = sampledRDD.filter(new TestingFilter(trainingFraction)).map(new GetLabeledPoint()).rdd();
+    } else {
+      LOG.info("Using same RDD for train and test");
+      trainingRDD = labeledRDD.rdd();
+      testingRDD = trainingRDD;
+    }
+    LOG.info("Generated RDDs");
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/0f5ea4c7/lens-ml-lib/src/main/java/org/apache/lens/ml/algo/spark/dt/DecisionTreeAlgo.java
----------------------------------------------------------------------
diff --git a/lens-ml-lib/src/main/java/org/apache/lens/ml/algo/spark/dt/DecisionTreeAlgo.java b/lens-ml-lib/src/main/java/org/apache/lens/ml/algo/spark/dt/DecisionTreeAlgo.java
new file mode 100644
index 0000000..6c7619a
--- /dev/null
+++ b/lens-ml-lib/src/main/java/org/apache/lens/ml/algo/spark/dt/DecisionTreeAlgo.java
@@ -0,0 +1,108 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.lens.ml.algo.spark.dt;
+
+import java.util.Map;
+
+import org.apache.lens.api.LensException;
+import org.apache.lens.ml.algo.api.AlgoParam;
+import org.apache.lens.ml.algo.api.Algorithm;
+import org.apache.lens.ml.algo.spark.BaseSparkAlgo;
+import org.apache.lens.ml.algo.spark.BaseSparkClassificationModel;
+
+import org.apache.spark.mllib.regression.LabeledPoint;
+import org.apache.spark.mllib.tree.DecisionTree$;
+import org.apache.spark.mllib.tree.configuration.Algo$;
+import org.apache.spark.mllib.tree.impurity.Entropy$;
+import org.apache.spark.mllib.tree.impurity.Gini$;
+import org.apache.spark.mllib.tree.impurity.Impurity;
+import org.apache.spark.mllib.tree.impurity.Variance$;
+import org.apache.spark.mllib.tree.model.DecisionTreeModel;
+import org.apache.spark.rdd.RDD;
+
+import scala.Enumeration;
+
+/**
+ * The Class DecisionTreeAlgo.
+ */
+@Algorithm(name = "spark_decision_tree", description = "Spark Decision Tree classifier algo")
+public class DecisionTreeAlgo extends BaseSparkAlgo {
+
+  /** The algo. */
+  @AlgoParam(name = "algo", help = "Decision tree algorithm. Allowed values are 'classification' and 'regression'")
+  private Enumeration.Value algo;
+
+  /** The decision tree impurity. */
+  @AlgoParam(name = "impurity", help = "Impurity measure used by the decision tree. "
+    + "Allowed values are 'gini', 'entropy' and 'variance'")
+  private Impurity decisionTreeImpurity;
+
+  /** The max depth. */
+  @AlgoParam(name = "maxDepth", help = "Max depth of the decision tree. Integer values expected.",
+    defaultValue = "100")
+  private int maxDepth;
+
+  /**
+   * Instantiates a new decision tree algo.
+   *
+   * @param name        the name
+   * @param description the description
+   */
+  public DecisionTreeAlgo(String name, String description) {
+    super(name, description);
+  }
+
+  /*
+   * (non-Javadoc)
+   *
+   * @see org.apache.lens.ml.spark.algos.BaseSparkAlgo#parseAlgoParams(java.util.Map)
+   */
+  @Override
+  public void parseAlgoParams(Map<String, String> params) {
+    String dtreeAlgoName = params.get("algo");
+    if ("classification".equalsIgnoreCase(dtreeAlgoName)) {
+      algo = Algo$.MODULE$.Classification();
+    } else if ("regression".equalsIgnoreCase(dtreeAlgoName)) {
+      algo = Algo$.MODULE$.Regression();
+    }
+
+    String impurity = params.get("impurity");
+    if ("gini".equals(impurity)) {
+      decisionTreeImpurity = Gini$.MODULE$;
+    } else if ("entropy".equals(impurity)) {
+      decisionTreeImpurity = Entropy$.MODULE$;
+    } else if ("variance".equals(impurity)) {
+      decisionTreeImpurity = Variance$.MODULE$;
+    }
+
+    maxDepth = getParamValue("maxDepth", 100);
+  }
+
+  /*
+   * (non-Javadoc)
+   *
+   * @see org.apache.lens.ml.spark.algos.BaseSparkAlgo#trainInternal(java.lang.String, org.apache.spark.rdd.RDD)
+   */
+  @Override
+  protected BaseSparkClassificationModel trainInternal(String modelId, RDD<LabeledPoint> trainingRDD)
+    throws LensException {
+    DecisionTreeModel model = DecisionTree$.MODULE$.train(trainingRDD, algo, decisionTreeImpurity, maxDepth);
+    return new DecisionTreeClassificationModel(modelId, new SparkDecisionTreeModel(model));
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/0f5ea4c7/lens-ml-lib/src/main/java/org/apache/lens/ml/algo/spark/dt/DecisionTreeClassificationModel.java
----------------------------------------------------------------------
diff --git a/lens-ml-lib/src/main/java/org/apache/lens/ml/algo/spark/dt/DecisionTreeClassificationModel.java b/lens-ml-lib/src/main/java/org/apache/lens/ml/algo/spark/dt/DecisionTreeClassificationModel.java
new file mode 100644
index 0000000..27c32f4
--- /dev/null
+++ b/lens-ml-lib/src/main/java/org/apache/lens/ml/algo/spark/dt/DecisionTreeClassificationModel.java
@@ -0,0 +1,37 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.lens.ml.algo.spark.dt;
+
+import org.apache.lens.ml.algo.spark.BaseSparkClassificationModel;
+
+/**
+ * The Class DecisionTreeClassificationModel.
+ */
+public class DecisionTreeClassificationModel extends BaseSparkClassificationModel<SparkDecisionTreeModel> {
+
+  /**
+   * Instantiates a new decision tree classification model.
+   *
+   * @param modelId the model id
+   * @param model   the model
+   */
+  public DecisionTreeClassificationModel(String modelId, SparkDecisionTreeModel model) {
+    super(modelId, model);
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/0f5ea4c7/lens-ml-lib/src/main/java/org/apache/lens/ml/algo/spark/dt/SparkDecisionTreeModel.java
----------------------------------------------------------------------
diff --git a/lens-ml-lib/src/main/java/org/apache/lens/ml/algo/spark/dt/SparkDecisionTreeModel.java b/lens-ml-lib/src/main/java/org/apache/lens/ml/algo/spark/dt/SparkDecisionTreeModel.java
new file mode 100644
index 0000000..e561a8d
--- /dev/null
+++ b/lens-ml-lib/src/main/java/org/apache/lens/ml/algo/spark/dt/SparkDecisionTreeModel.java
@@ -0,0 +1,75 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.lens.ml.algo.spark.dt;
+
+import org.apache.lens.ml.algo.spark.DoubleValueMapper;
+
+import org.apache.spark.api.java.JavaRDD;
+import org.apache.spark.mllib.classification.ClassificationModel;
+import org.apache.spark.mllib.linalg.Vector;
+import org.apache.spark.mllib.tree.model.DecisionTreeModel;
+import org.apache.spark.rdd.RDD;
+
+/**
+ * This class is created because the Spark decision tree model doesn't extend ClassificationModel.
+ */
+public class SparkDecisionTreeModel implements ClassificationModel {
+
+  /** The model. */
+  private final DecisionTreeModel model;
+
+  /**
+   * Instantiates a new spark decision tree model.
+   *
+   * @param model the model
+   */
+  public SparkDecisionTreeModel(DecisionTreeModel model) {
+    this.model = model;
+  }
+
+  /*
+   * (non-Javadoc)
+   *
+   * @see org.apache.spark.mllib.classification.ClassificationModel#predict(org.apache.spark.rdd.RDD)
+   */
+  @Override
+  public RDD<Object> predict(RDD<Vector> testData) {
+    return model.predict(testData);
+  }
+
+  /*
+   * (non-Javadoc)
+   *
+   * @see org.apache.spark.mllib.classification.ClassificationModel#predict(org.apache.spark.mllib.linalg.Vector)
+   */
+  @Override
+  public double predict(Vector testData) {
+    return model.predict(testData);
+  }
+
+  /*
+   * (non-Javadoc)
+   *
+   * @see org.apache.spark.mllib.classification.ClassificationModel#predict(org.apache.spark.api.java.JavaRDD)
+   */
+  @Override
+  public JavaRDD<Double> predict(JavaRDD<Vector> testData) {
+    return model.predict(testData.rdd()).toJavaRDD().map(new DoubleValueMapper());
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/0f5ea4c7/lens-ml-lib/src/main/java/org/apache/lens/ml/algo/spark/kmeans/KMeansAlgo.java
----------------------------------------------------------------------
diff --git a/lens-ml-lib/src/main/java/org/apache/lens/ml/algo/spark/kmeans/KMeansAlgo.java b/lens-ml-lib/src/main/java/org/apache/lens/ml/algo/spark/kmeans/KMeansAlgo.java
new file mode 100644
index 0000000..6450f70
--- /dev/null
+++ b/lens-ml-lib/src/main/java/org/apache/lens/ml/algo/spark/kmeans/KMeansAlgo.java
@@ -0,0 +1,163 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.lens.ml.algo.spark.kmeans;
+
+import java.util.List;
+
+import org.apache.lens.api.LensConf;
+import org.apache.lens.api.LensException;
+import org.apache.lens.ml.algo.api.*;
+import org.apache.lens.ml.algo.lib.AlgoArgParser;
+import org.apache.lens.ml.algo.spark.HiveTableRDD;
+
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.metastore.api.FieldSchema;
+import org.apache.hadoop.hive.ql.metadata.Hive;
+import org.apache.hadoop.hive.ql.metadata.Table;
+import org.apache.hadoop.io.WritableComparable;
+import org.apache.hive.hcatalog.data.HCatRecord;
+import org.apache.spark.api.java.JavaPairRDD;
+import org.apache.spark.api.java.JavaRDD;
+import org.apache.spark.api.java.JavaSparkContext;
+import org.apache.spark.api.java.function.Function;
+import org.apache.spark.mllib.clustering.KMeans;
+import org.apache.spark.mllib.clustering.KMeansModel;
+import org.apache.spark.mllib.linalg.Vector;
+import org.apache.spark.mllib.linalg.Vectors;
+
+import scala.Tuple2;
+
+/**
+ * The Class KMeansAlgo.
+ */
+@Algorithm(name = "spark_kmeans_algo", description = "Spark MLLib KMeans algo")
+public class KMeansAlgo implements MLAlgo {
+
+  /** The conf. */
+  private transient LensConf conf;
+
+  /** The spark context. */
+  private JavaSparkContext sparkContext;
+
+  /** The part filter. */
+  @AlgoParam(name = "partition", help = "Partition filter to be used while constructing table RDD")
+  private String partFilter = null;
+
+  /** The k. */
+  @AlgoParam(name = "k", help = "Number of clusters")
+  private int k;
+
+  /** The max iterations. */
+  @AlgoParam(name = "maxIterations", help = "Maximum number of iterations", defaultValue = "100")
+  private int maxIterations = 100;
+
+  /** The runs. */
+  @AlgoParam(name = "runs", help = "Number of parallel runs", defaultValue = "1")
+  private int runs = 1;
+
+  /** The initialization mode. */
+  @AlgoParam(name = "initializationMode",
+    help = "initialization mode, either \"random\" or \"k-means||\" (default).", defaultValue = "k-means||")
+  private String initializationMode = "k-means||";
+
+  @Override
+  public String getName() {
+    return getClass().getAnnotation(Algorithm.class).name();
+  }
+
+  @Override
+  public String getDescription() {
+    return getClass().getAnnotation(Algorithm.class).description();
+  }
+
+  /*
+   * (non-Javadoc)
+   *
+   * @see org.apache.lens.ml.MLAlgo#configure(org.apache.lens.api.LensConf)
+   */
+  @Override
+  public void configure(LensConf configuration) {
+    this.conf = configuration;
+  }
+
+  @Override
+  public LensConf getConf() {
+    return conf;
+  }
+
+  /*
+   * (non-Javadoc)
+   *
+   * @see org.apache.lens.ml.MLAlgo#train(org.apache.lens.api.LensConf, java.lang.String, java.lang.String,
+   * java.lang.String, java.lang.String[])
+   */
+  @Override
+  public MLModel train(LensConf conf, String db, String table, String modelId, String... params) throws LensException {
+    List<String> features = AlgoArgParser.parseArgs(this, params);
+    final int[] featurePositions = new int[features.size()];
+    final int NUM_FEATURES = features.size();
+
+    JavaPairRDD<WritableComparable, HCatRecord> rdd = null;
+    try {
+      // Map feature names to positions
+      Table tbl = Hive.get(toHiveConf(conf)).getTable(db, table);
+      List<FieldSchema> allCols = tbl.getAllCols();
+      int f = 0;
+      for (int i = 0; i < tbl.getAllCols().size(); i++) {
+        String colName = allCols.get(i).getName();
+        if (features.contains(colName)) {
+          featurePositions[f++] = i;
+        }
+      }
+
+      rdd = HiveTableRDD.createHiveTableRDD(sparkContext, toHiveConf(conf), db, table, partFilter);
+      JavaRDD<Vector> trainableRDD = rdd.map(new Function<Tuple2<WritableComparable, HCatRecord>, Vector>() {
+        @Override
+        public Vector call(Tuple2<WritableComparable, HCatRecord> v1) throws Exception {
+          HCatRecord hCatRecord = v1._2();
+          double[] arr = new double[NUM_FEATURES];
+          for (int i = 0; i < NUM_FEATURES; i++) {
+            Object val = hCatRecord.get(featurePositions[i]);
+            arr[i] = val == null ? 0d : (Double) val;
+          }
+          return Vectors.dense(arr);
+        }
+      });
+
+      KMeansModel model = KMeans.train(trainableRDD.rdd(), k, maxIterations, runs, initializationMode);
+      return new KMeansClusteringModel(modelId, model);
+    } catch (Exception e) {
+      throw new LensException("KMeans algo failed for " + db + "." + table, e);
+    }
+  }
+
+  /**
+   * To hive conf.
+   *
+   * @param conf the conf
+   * @return the hive conf
+   */
+  private HiveConf toHiveConf(LensConf conf) {
+    HiveConf hiveConf = new HiveConf();
+    for (String key : conf.getProperties().keySet()) {
+      hiveConf.set(key, conf.getProperties().get(key));
+    }
+    return hiveConf;
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/0f5ea4c7/lens-ml-lib/src/main/java/org/apache/lens/ml/algo/spark/kmeans/KMeansClusteringModel.java
----------------------------------------------------------------------
diff --git a/lens-ml-lib/src/main/java/org/apache/lens/ml/algo/spark/kmeans/KMeansClusteringModel.java b/lens-ml-lib/src/main/java/org/apache/lens/ml/algo/spark/kmeans/KMeansClusteringModel.java
new file mode 100644
index 0000000..62dc536
--- /dev/null
+++ b/lens-ml-lib/src/main/java/org/apache/lens/ml/algo/spark/kmeans/KMeansClusteringModel.java
@@ -0,0 +1,67 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.lens.ml.algo.spark.kmeans;
+
+import org.apache.lens.ml.algo.api.MLModel;
+
+import org.apache.spark.mllib.clustering.KMeansModel;
+import org.apache.spark.mllib.linalg.Vectors;
+
+/**
+ * The Class KMeansClusteringModel.
+ */
+public class KMeansClusteringModel extends MLModel<Integer> {
+
+  /** The model. */
+  private final KMeansModel model;
+
+  /** The model id. */
+  private final String modelId;
+
+  /**
+   * Instantiates a new k means clustering model.
+   *
+   * @param modelId the model id
+   * @param model   the model
+   */
+  public KMeansClusteringModel(String modelId, KMeansModel model) {
+    this.model = model;
+    this.modelId = modelId;
+  }
+
+  /*
+   * (non-Javadoc)
+   *
+   * @see org.apache.lens.ml.MLModel#predict(java.lang.Object[])
+   */
+  @Override
+  public Integer predict(Object... args) {
+    // Convert the params to array of double
+    double[] arr = new double[args.length];
+    for (int i = 0; i < args.length; i++) {
+      if (args[i] != null) {
+        arr[i] = (Double) args[i];
+      } else {
+        arr[i] = 0d;
+      }
+    }
+
+    return model.predict(Vectors.dense(arr));
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/0f5ea4c7/lens-ml-lib/src/main/java/org/apache/lens/ml/algo/spark/lr/LogisticRegressionAlgo.java
----------------------------------------------------------------------
diff --git a/lens-ml-lib/src/main/java/org/apache/lens/ml/algo/spark/lr/LogisticRegressionAlgo.java b/lens-ml-lib/src/main/java/org/apache/lens/ml/algo/spark/lr/LogisticRegressionAlgo.java
new file mode 100644
index 0000000..55caf59
--- /dev/null
+++ b/lens-ml-lib/src/main/java/org/apache/lens/ml/algo/spark/lr/LogisticRegressionAlgo.java
@@ -0,0 +1,86 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.lens.ml.algo.spark.lr;
+
+import java.util.Map;
+
+import org.apache.lens.api.LensException;
+import org.apache.lens.ml.algo.api.AlgoParam;
+import org.apache.lens.ml.algo.api.Algorithm;
+import org.apache.lens.ml.algo.spark.BaseSparkAlgo;
+import org.apache.lens.ml.algo.spark.BaseSparkClassificationModel;
+
+import org.apache.spark.mllib.classification.LogisticRegressionModel;
+import org.apache.spark.mllib.classification.LogisticRegressionWithSGD;
+import org.apache.spark.mllib.regression.LabeledPoint;
+import org.apache.spark.rdd.RDD;
+
+/**
+ * The Class LogisticRegressionAlgo.
+ */
+@Algorithm(name = "spark_logistic_regression", description = "Spark logistic regression algo")
+public class LogisticRegressionAlgo extends BaseSparkAlgo {
+
+  /** The iterations. */
+  @AlgoParam(name = "iterations", help = "Max number of iterations", defaultValue = "100")
+  private int iterations;
+
+  /** The step size. */
+  @AlgoParam(name = "stepSize", help = "Step size", defaultValue = "1.0d")
+  private double stepSize;
+
+  /** The min batch fraction. */
+  @AlgoParam(name = "minBatchFraction", help = "Fraction for batched learning", defaultValue = "1.0d")
+  private double minBatchFraction;
+
+  /**
+   * Instantiates a new logistic regression algo.
+   *
+   * @param name        the name
+   * @param description the description
+   */
+  public LogisticRegressionAlgo(String name, String description) {
+    super(name, description);
+  }
+
+  /*
+   * (non-Javadoc)
+   *
+   * @see org.apache.lens.ml.spark.algos.BaseSparkAlgo#parseAlgoParams(java.util.Map)
+   */
+  @Override
+  public void parseAlgoParams(Map<String, String> params) {
+    iterations = getParamValue("iterations", 100);
+    stepSize = getParamValue("stepSize", 1.0d);
+    minBatchFraction = getParamValue("minBatchFraction", 1.0d);
+  }
+
+  /*
+   * (non-Javadoc)
+   *
+   * @see org.apache.lens.ml.spark.algos.BaseSparkAlgo#trainInternal(java.lang.String, org.apache.spark.rdd.RDD)
+   */
+  @Override
+  protected BaseSparkClassificationModel trainInternal(String modelId, RDD<LabeledPoint> trainingRDD)
+    throws LensException {
+    LogisticRegressionModel lrModel = LogisticRegressionWithSGD.train(trainingRDD, iterations, stepSize,
+      minBatchFraction);
+    return new LogitRegressionClassificationModel(modelId, lrModel);
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/0f5ea4c7/lens-ml-lib/src/main/java/org/apache/lens/ml/algo/spark/lr/LogitRegressionClassificationModel.java
----------------------------------------------------------------------
diff --git a/lens-ml-lib/src/main/java/org/apache/lens/ml/algo/spark/lr/LogitRegressionClassificationModel.java b/lens-ml-lib/src/main/java/org/apache/lens/ml/algo/spark/lr/LogitRegressionClassificationModel.java
new file mode 100644
index 0000000..a4206e5
--- /dev/null
+++ b/lens-ml-lib/src/main/java/org/apache/lens/ml/algo/spark/lr/LogitRegressionClassificationModel.java
@@ -0,0 +1,39 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.lens.ml.algo.spark.lr;
+
+import org.apache.lens.ml.algo.spark.BaseSparkClassificationModel;
+
+import org.apache.spark.mllib.classification.LogisticRegressionModel;
+
+/**
+ * The Class LogitRegressionClassificationModel.
+ */
+public class LogitRegressionClassificationModel extends BaseSparkClassificationModel<LogisticRegressionModel> {
+
+  /**
+   * Instantiates a new logit regression classification model.
+   *
+   * @param modelId the model id
+   * @param model   the model
+   */
+  public LogitRegressionClassificationModel(String modelId, LogisticRegressionModel model) {
+    super(modelId, model);
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/0f5ea4c7/lens-ml-lib/src/main/java/org/apache/lens/ml/algo/spark/nb/NaiveBayesAlgo.java
----------------------------------------------------------------------
diff --git a/lens-ml-lib/src/main/java/org/apache/lens/ml/algo/spark/nb/NaiveBayesAlgo.java b/lens-ml-lib/src/main/java/org/apache/lens/ml/algo/spark/nb/NaiveBayesAlgo.java
new file mode 100644
index 0000000..b4e1e78
--- /dev/null
+++ b/lens-ml-lib/src/main/java/org/apache/lens/ml/algo/spark/nb/NaiveBayesAlgo.java
@@ -0,0 +1,73 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.lens.ml.algo.spark.nb;
+
+import java.util.Map;
+
+import org.apache.lens.api.LensException;
+import org.apache.lens.ml.algo.api.AlgoParam;
+import org.apache.lens.ml.algo.api.Algorithm;
+import org.apache.lens.ml.algo.spark.BaseSparkAlgo;
+import org.apache.lens.ml.algo.spark.BaseSparkClassificationModel;
+
+import org.apache.spark.mllib.classification.NaiveBayes;
+import org.apache.spark.mllib.regression.LabeledPoint;
+import org.apache.spark.rdd.RDD;
+
+/**
+ * The Class NaiveBayesAlgo.
+ */
+@Algorithm(name = "spark_naive_bayes", description = "Spark Naive Bayes classifier algo")
+public class NaiveBayesAlgo extends BaseSparkAlgo {
+
+  /** The lambda. */
+  @AlgoParam(name = "lambda", help = "Lambda parameter for naive bayes learner", defaultValue = "1.0d")
+  private double lambda = 1.0;
+
+  /**
+   * Instantiates a new naive bayes algo.
+   *
+   * @param name        the name
+   * @param description the description
+   */
+  public NaiveBayesAlgo(String name, String description) {
+    super(name, description);
+  }
+
+  /*
+   * (non-Javadoc)
+   *
+   * @see org.apache.lens.ml.spark.algos.BaseSparkAlgo#parseAlgoParams(java.util.Map)
+   */
+  @Override
+  public void parseAlgoParams(Map<String, String> params) {
+    lambda = getParamValue("lambda", 1.0d);
+  }
+
+  /*
+   * (non-Javadoc)
+   *
+   * @see org.apache.lens.ml.spark.algos.BaseSparkAlgo#trainInternal(java.lang.String, org.apache.spark.rdd.RDD)
+   */
+  @Override
+  protected BaseSparkClassificationModel trainInternal(String modelId, RDD<LabeledPoint> trainingRDD)
+    throws LensException {
+    return new NaiveBayesClassificationModel(modelId, NaiveBayes.train(trainingRDD, lambda));
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/0f5ea4c7/lens-ml-lib/src/main/java/org/apache/lens/ml/algo/spark/nb/NaiveBayesClassificationModel.java
----------------------------------------------------------------------
diff --git a/lens-ml-lib/src/main/java/org/apache/lens/ml/algo/spark/nb/NaiveBayesClassificationModel.java b/lens-ml-lib/src/main/java/org/apache/lens/ml/algo/spark/nb/NaiveBayesClassificationModel.java
new file mode 100644
index 0000000..26d39df
--- /dev/null
+++ b/lens-ml-lib/src/main/java/org/apache/lens/ml/algo/spark/nb/NaiveBayesClassificationModel.java
@@ -0,0 +1,39 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.lens.ml.algo.spark.nb;
+
+import org.apache.lens.ml.algo.spark.BaseSparkClassificationModel;
+
+import org.apache.spark.mllib.classification.NaiveBayesModel;
+
+/**
+ * The Class NaiveBayesClassificationModel.
+ */
+public class NaiveBayesClassificationModel extends BaseSparkClassificationModel<NaiveBayesModel> {
+
+  /**
+   * Instantiates a new naive bayes classification model.
+   *
+   * @param modelId the model id
+   * @param model   the model
+   */
+  public NaiveBayesClassificationModel(String modelId, NaiveBayesModel model) {
+    super(modelId, model);
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/0f5ea4c7/lens-ml-lib/src/main/java/org/apache/lens/ml/algo/spark/svm/SVMAlgo.java
----------------------------------------------------------------------
diff --git a/lens-ml-lib/src/main/java/org/apache/lens/ml/algo/spark/svm/SVMAlgo.java b/lens-ml-lib/src/main/java/org/apache/lens/ml/algo/spark/svm/SVMAlgo.java
new file mode 100644
index 0000000..21a036a
--- /dev/null
+++ b/lens-ml-lib/src/main/java/org/apache/lens/ml/algo/spark/svm/SVMAlgo.java
@@ -0,0 +1,90 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.lens.ml.algo.spark.svm;
+
+import java.util.Map;
+
+import org.apache.lens.api.LensException;
+import org.apache.lens.ml.algo.api.AlgoParam;
+import org.apache.lens.ml.algo.api.Algorithm;
+import org.apache.lens.ml.algo.spark.BaseSparkAlgo;
+import org.apache.lens.ml.algo.spark.BaseSparkClassificationModel;
+
+import org.apache.spark.mllib.classification.SVMModel;
+import org.apache.spark.mllib.classification.SVMWithSGD;
+import org.apache.spark.mllib.regression.LabeledPoint;
+import org.apache.spark.rdd.RDD;
+
+/**
+ * The Class SVMAlgo.
+ */
+@Algorithm(name = "spark_svm", description = "Spark SVML classifier algo")
+public class SVMAlgo extends BaseSparkAlgo {
+
+  /** The min batch fraction. */
+  @AlgoParam(name = "minBatchFraction", help = "Fraction for batched learning", defaultValue = "1.0d")
+  private double minBatchFraction;
+
+  /** The reg param. */
+  @AlgoParam(name = "regParam", help = "regularization parameter for gradient descent", defaultValue = "1.0d")
+  private double regParam;
+
+  /** The step size. */
+  @AlgoParam(name = "stepSize", help = "Iteration step size", defaultValue = "1.0d")
+  private double stepSize;
+
+  /** The iterations. */
+  @AlgoParam(name = "iterations", help = "Number of iterations", defaultValue = "100")
+  private int iterations;
+
+  /**
+   * Instantiates a new SVM algo.
+   *
+   * @param name        the name
+   * @param description the description
+   */
+  public SVMAlgo(String name, String description) {
+    super(name, description);
+  }
+
+  /*
+   * (non-Javadoc)
+   *
+   * @see org.apache.lens.ml.spark.algos.BaseSparkAlgo#parseAlgoParams(java.util.Map)
+   */
+  @Override
+  public void parseAlgoParams(Map<String, String> params) {
+    minBatchFraction = getParamValue("minBatchFraction", 1.0);
+    regParam = getParamValue("regParam", 1.0);
+    stepSize = getParamValue("stepSize", 1.0);
+    iterations = getParamValue("iterations", 100);
+  }
+
+  /*
+   * (non-Javadoc)
+   *
+   * @see org.apache.lens.ml.spark.algos.BaseSparkAlgo#trainInternal(java.lang.String, org.apache.spark.rdd.RDD)
+   */
+  @Override
+  protected BaseSparkClassificationModel trainInternal(String modelId, RDD<LabeledPoint> trainingRDD)
+    throws LensException {
+    SVMModel svmModel = SVMWithSGD.train(trainingRDD, iterations, stepSize, regParam, minBatchFraction);
+    return new SVMClassificationModel(modelId, svmModel);
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/0f5ea4c7/lens-ml-lib/src/main/java/org/apache/lens/ml/algo/spark/svm/SVMClassificationModel.java
----------------------------------------------------------------------
diff --git a/lens-ml-lib/src/main/java/org/apache/lens/ml/algo/spark/svm/SVMClassificationModel.java b/lens-ml-lib/src/main/java/org/apache/lens/ml/algo/spark/svm/SVMClassificationModel.java
new file mode 100644
index 0000000..433c0f9
--- /dev/null
+++ b/lens-ml-lib/src/main/java/org/apache/lens/ml/algo/spark/svm/SVMClassificationModel.java
@@ -0,0 +1,39 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.lens.ml.algo.spark.svm;
+
+import org.apache.lens.ml.algo.spark.BaseSparkClassificationModel;
+
+import org.apache.spark.mllib.classification.SVMModel;
+
+/**
+ * The Class SVMClassificationModel.
+ */
+public class SVMClassificationModel extends BaseSparkClassificationModel<SVMModel> {
+
+  /**
+   * Instantiates a new SVM classification model.
+   *
+   * @param modelId the model id
+   * @param model   the model
+   */
+  public SVMClassificationModel(String modelId, SVMModel model) {
+    super(modelId, model);
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/0f5ea4c7/lens-ml-lib/src/main/java/org/apache/lens/ml/api/LensML.java
----------------------------------------------------------------------
diff --git a/lens-ml-lib/src/main/java/org/apache/lens/ml/api/LensML.java b/lens-ml-lib/src/main/java/org/apache/lens/ml/api/LensML.java
new file mode 100644
index 0000000..e124fb0
--- /dev/null
+++ b/lens-ml-lib/src/main/java/org/apache/lens/ml/api/LensML.java
@@ -0,0 +1,161 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.lens.ml.api;
+
+import java.util.List;
+import java.util.Map;
+
+import org.apache.lens.api.LensException;
+import org.apache.lens.api.LensSessionHandle;
+import org.apache.lens.ml.algo.api.MLAlgo;
+import org.apache.lens.ml.algo.api.MLModel;
+
+/**
+ * Lens's machine learning interface used by client code as well as Lens ML service.
+ */
+public interface LensML {
+
+  /** Name of ML service */
+  String NAME = "ml";
+
+  /**
+   * Get list of available machine learning algorithms
+   *
+   * @return
+   */
+  List<String> getAlgorithms();
+
+  /**
+   * Get user friendly information about parameters accepted by the algorithm.
+   *
+   * @param algorithm the algorithm
+   * @return map of param key to its help message
+   */
+  Map<String, String> getAlgoParamDescription(String algorithm);
+
+  /**
+   * Get a algo object instance which could be used to generate a model of the given algorithm.
+   *
+   * @param algorithm the algorithm
+   * @return the algo for name
+   * @throws LensException the lens exception
+   */
+  MLAlgo getAlgoForName(String algorithm) throws LensException;
+
+  /**
+   * Create a model using the given HCatalog table as input. The arguments should contain information needed to
+   * generate the model.
+   *
+   * @param table     the table
+   * @param algorithm the algorithm
+   * @param args      the args
+   * @return Unique ID of the model created after training is complete
+   * @throws LensException the lens exception
+   */
+  String train(String table, String algorithm, String[] args) throws LensException;
+
+  /**
+   * Get model IDs for the given algorithm.
+   *
+   * @param algorithm the algorithm
+   * @return the models
+   * @throws LensException the lens exception
+   */
+  List<String> getModels(String algorithm) throws LensException;
+
+  /**
+   * Get a model instance given the algorithm name and model ID.
+   *
+   * @param algorithm the algorithm
+   * @param modelId   the model id
+   * @return the model
+   * @throws LensException the lens exception
+   */
+  MLModel getModel(String algorithm, String modelId) throws LensException;
+
+  /**
+   * Get the FS location where model instance is saved.
+   *
+   * @param algorithm the algorithm
+   * @param modelID   the model id
+   * @return the model path
+   */
+  String getModelPath(String algorithm, String modelID);
+
+  /**
+   * Evaluate model by running it against test data contained in the given table.
+   *
+   * @param session   the session
+   * @param table     the table
+   * @param algorithm the algorithm
+   * @param modelID   the model id
+   * @return Test report object containing test output table, and various evaluation metrics
+   * @throws LensException the lens exception
+   */
+  MLTestReport testModel(LensSessionHandle session, String table, String algorithm, String modelID,
+    String outputTable) throws LensException;
+
+  /**
+   * Get test reports for an algorithm.
+   *
+   * @param algorithm the algorithm
+   * @return the test reports
+   * @throws LensException the lens exception
+   */
+  List<String> getTestReports(String algorithm) throws LensException;
+
+  /**
+   * Get a test report by ID.
+   *
+   * @param algorithm the algorithm
+   * @param reportID  the report id
+   * @return the test report
+   * @throws LensException the lens exception
+   */
+  MLTestReport getTestReport(String algorithm, String reportID) throws LensException;
+
+  /**
+   * Online predict call given a model ID, algorithm name and sample feature values.
+   *
+   * @param algorithm the algorithm
+   * @param modelID   the model id
+   * @param features  the features
+   * @return prediction result
+   * @throws LensException the lens exception
+   */
+  Object predict(String algorithm, String modelID, Object[] features) throws LensException;
+
+  /**
+   * Permanently delete a model instance.
+   *
+   * @param algorithm the algorithm
+   * @param modelID   the model id
+   * @throws LensException the lens exception
+   */
+  void deleteModel(String algorithm, String modelID) throws LensException;
+
+  /**
+   * Permanently delete a test report instance.
+   *
+   * @param algorithm the algorithm
+   * @param reportID  the report id
+   * @throws LensException the lens exception
+   */
+  void deleteTestReport(String algorithm, String reportID) throws LensException;
+}

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/0f5ea4c7/lens-ml-lib/src/main/java/org/apache/lens/ml/api/MLTestReport.java
----------------------------------------------------------------------
diff --git a/lens-ml-lib/src/main/java/org/apache/lens/ml/api/MLTestReport.java b/lens-ml-lib/src/main/java/org/apache/lens/ml/api/MLTestReport.java
new file mode 100644
index 0000000..965161a
--- /dev/null
+++ b/lens-ml-lib/src/main/java/org/apache/lens/ml/api/MLTestReport.java
@@ -0,0 +1,95 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.lens.ml.api;
+
+import java.io.Serializable;
+import java.util.List;
+
+import lombok.Getter;
+import lombok.NoArgsConstructor;
+import lombok.Setter;
+import lombok.ToString;
+
+/**
+ * Instantiates a new ML test report.
+ */
+@NoArgsConstructor
+@ToString
+public class MLTestReport implements Serializable {
+
+  /** The test table. */
+  @Getter
+  @Setter
+  private String testTable;
+
+  /** The output table. */
+  @Getter
+  @Setter
+  private String outputTable;
+
+  /** The output column. */
+  @Getter
+  @Setter
+  private String outputColumn;
+
+  /** The label column. */
+  @Getter
+  @Setter
+  private String labelColumn;
+
+  /** The feature columns. */
+  @Getter
+  @Setter
+  private List<String> featureColumns;
+
+  /** The algorithm. */
+  @Getter
+  @Setter
+  private String algorithm;
+
+  /** The model id. */
+  @Getter
+  @Setter
+  private String modelID;
+
+  /** The report id. */
+  @Getter
+  @Setter
+  private String reportID;
+
+  /** The query id. */
+  @Getter
+  @Setter
+  private String queryID;
+
+  /** The test output path. */
+  @Getter
+  @Setter
+  private String testOutputPath;
+
+  /** The prediction result column. */
+  @Getter
+  @Setter
+  private String predictionResultColumn;
+
+  /** The lens query id. */
+  @Getter
+  @Setter
+  private String lensQueryID;
+}

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/0f5ea4c7/lens-ml-lib/src/main/java/org/apache/lens/ml/api/ModelMetadata.java
----------------------------------------------------------------------
diff --git a/lens-ml-lib/src/main/java/org/apache/lens/ml/api/ModelMetadata.java b/lens-ml-lib/src/main/java/org/apache/lens/ml/api/ModelMetadata.java
new file mode 100644
index 0000000..3f7dff1
--- /dev/null
+++ b/lens-ml-lib/src/main/java/org/apache/lens/ml/api/ModelMetadata.java
@@ -0,0 +1,118 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.lens.ml.api;
+
+import javax.xml.bind.annotation.XmlElement;
+import javax.xml.bind.annotation.XmlRootElement;
+
+import lombok.AllArgsConstructor;
+import lombok.Getter;
+import lombok.NoArgsConstructor;
+
+/**
+ * The Class ModelMetadata.
+ */
+@XmlRootElement
+/**
+ * Instantiates a new model metadata.
+ *
+ * @param modelID
+ *          the model id
+ * @param table
+ *          the table
+ * @param algorithm
+ *          the algorithm
+ * @param params
+ *          the params
+ * @param createdAt
+ *          the created at
+ * @param modelPath
+ *          the model path
+ * @param labelColumn
+ *          the label column
+ * @param features
+ *          the features
+ */
+@AllArgsConstructor
+/**
+ * Instantiates a new model metadata.
+ */
+@NoArgsConstructor
+public class ModelMetadata {
+
+  /** The model id. */
+  @XmlElement
+  @Getter
+  private String modelID;
+
+  /** The table. */
+  @XmlElement
+  @Getter
+  private String table;
+
+  /** The algorithm. */
+  @XmlElement
+  @Getter
+  private String algorithm;
+
+  /** The params. */
+  @XmlElement
+  @Getter
+  private String params;
+
+  /** The created at. */
+  @XmlElement
+  @Getter
+  private String createdAt;
+
+  /** The model path. */
+  @XmlElement
+  @Getter
+  private String modelPath;
+
+  /** The label column. */
+  @XmlElement
+  @Getter
+  private String labelColumn;
+
+  /** The features. */
+  @XmlElement
+  @Getter
+  private String features;
+
+  /*
+   * (non-Javadoc)
+   *
+   * @see java.lang.Object#toString()
+   */
+  @Override
+  public String toString() {
+    StringBuilder builder = new StringBuilder();
+
+    builder.append("Algorithm: ").append(algorithm).append('\n');
+    builder.append("Model ID: ").append(modelID).append('\n');
+    builder.append("Training table: ").append(table).append('\n');
+    builder.append("Features: ").append(features).append('\n');
+    builder.append("Labelled Column: ").append(labelColumn).append('\n');
+    builder.append("Training params: ").append(params).append('\n');
+    builder.append("Created on: ").append(createdAt).append('\n');
+    builder.append("Model saved at: ").append(modelPath).append('\n');
+    return builder.toString();
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/0f5ea4c7/lens-ml-lib/src/main/java/org/apache/lens/ml/api/TestReport.java
----------------------------------------------------------------------
diff --git a/lens-ml-lib/src/main/java/org/apache/lens/ml/api/TestReport.java b/lens-ml-lib/src/main/java/org/apache/lens/ml/api/TestReport.java
new file mode 100644
index 0000000..294fef3
--- /dev/null
+++ b/lens-ml-lib/src/main/java/org/apache/lens/ml/api/TestReport.java
@@ -0,0 +1,125 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.lens.ml.api;
+
+import javax.xml.bind.annotation.XmlElement;
+import javax.xml.bind.annotation.XmlRootElement;
+
+import lombok.AllArgsConstructor;
+import lombok.Getter;
+import lombok.NoArgsConstructor;
+
+/**
+ * The Class TestReport.
+ */
+@XmlRootElement
+/**
+ * Instantiates a new test report.
+ *
+ * @param testTable
+ *          the test table
+ * @param outputTable
+ *          the output table
+ * @param outputColumn
+ *          the output column
+ * @param labelColumn
+ *          the label column
+ * @param featureColumns
+ *          the feature columns
+ * @param algorithm
+ *          the algorithm
+ * @param modelID
+ *          the model id
+ * @param reportID
+ *          the report id
+ * @param queryID
+ *          the query id
+ */
+@AllArgsConstructor
+/**
+ * Instantiates a new test report.
+ */
+@NoArgsConstructor
+public class TestReport {
+
+  /** The test table. */
+  @XmlElement
+  @Getter
+  private String testTable;
+
+  /** The output table. */
+  @XmlElement
+  @Getter
+  private String outputTable;
+
+  /** The output column. */
+  @XmlElement
+  @Getter
+  private String outputColumn;
+
+  /** The label column. */
+  @XmlElement
+  @Getter
+  private String labelColumn;
+
+  /** The feature columns. */
+  @XmlElement
+  @Getter
+  private String featureColumns;
+
+  /** The algorithm. */
+  @XmlElement
+  @Getter
+  private String algorithm;
+
+  /** The model id. */
+  @XmlElement
+  @Getter
+  private String modelID;
+
+  /** The report id. */
+  @XmlElement
+  @Getter
+  private String reportID;
+
+  /** The query id. */
+  @XmlElement
+  @Getter
+  private String queryID;
+
+  /*
+   * (non-Javadoc)
+   *
+   * @see java.lang.Object#toString()
+   */
+  @Override
+  public String toString() {
+    StringBuilder builder = new StringBuilder();
+    builder.append("Input test table: ").append(testTable).append('\n');
+    builder.append("Algorithm: ").append(algorithm).append('\n');
+    builder.append("Report id: ").append(reportID).append('\n');
+    builder.append("Model id: ").append(modelID).append('\n');
+    builder.append("Lens Query id: ").append(queryID).append('\n');
+    builder.append("Feature columns: ").append(featureColumns).append('\n');
+    builder.append("Labelled column: ").append(labelColumn).append('\n');
+    builder.append("Predicted column: ").append(outputColumn).append('\n');
+    builder.append("Test output table: ").append(outputTable).append('\n');
+    return builder.toString();
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/0f5ea4c7/lens-ml-lib/src/main/java/org/apache/lens/ml/dao/MLDBUtils.java
----------------------------------------------------------------------
diff --git a/lens-ml-lib/src/main/java/org/apache/lens/ml/dao/MLDBUtils.java b/lens-ml-lib/src/main/java/org/apache/lens/ml/dao/MLDBUtils.java
index 5e4d307..d444a32 100644
--- a/lens-ml-lib/src/main/java/org/apache/lens/ml/dao/MLDBUtils.java
+++ b/lens-ml-lib/src/main/java/org/apache/lens/ml/dao/MLDBUtils.java
@@ -18,9 +18,9 @@
  */
 package org.apache.lens.ml.dao;
 
-import org.apache.lens.ml.MLModel;
-import org.apache.lens.ml.MLTestReport;
-import org.apache.lens.ml.task.MLTask;
+import org.apache.lens.ml.algo.api.MLModel;
+import org.apache.lens.ml.api.MLTestReport;
+import org.apache.lens.ml.impl.MLTask;
 
 public class MLDBUtils {
 

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/0f5ea4c7/lens-ml-lib/src/main/java/org/apache/lens/ml/impl/HiveMLUDF.java
----------------------------------------------------------------------
diff --git a/lens-ml-lib/src/main/java/org/apache/lens/ml/impl/HiveMLUDF.java b/lens-ml-lib/src/main/java/org/apache/lens/ml/impl/HiveMLUDF.java
new file mode 100644
index 0000000..60a4008
--- /dev/null
+++ b/lens-ml-lib/src/main/java/org/apache/lens/ml/impl/HiveMLUDF.java
@@ -0,0 +1,138 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.lens.ml.impl;
+
+import java.io.IOException;
+
+import org.apache.lens.ml.algo.api.MLModel;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hive.ql.exec.Description;
+import org.apache.hadoop.hive.ql.exec.MapredContext;
+import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
+import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDF;
+import org.apache.hadoop.hive.serde2.lazy.LazyDouble;
+import org.apache.hadoop.hive.serde2.lazy.objectinspector.primitive.LazyDoubleObjectInspector;
+import org.apache.hadoop.hive.serde2.lazy.objectinspector.primitive.LazyPrimitiveObjectInspectorFactory;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.StringObjectInspector;
+import org.apache.hadoop.mapred.JobConf;
+
+/**
+ * Generic UDF to load ML Models saved in HDFS and apply the model on list of columns passed as argument.
+ */
+@Description(name = "predict",
+  value = "_FUNC_(algorithm, modelID, features...) - Run prediction algorithm with given "
+    + "algorithm name, model ID and input feature columns")
+public final class HiveMLUDF extends GenericUDF {
+  private HiveMLUDF() {
+  }
+
+  /** The Constant UDF_NAME. */
+  public static final String UDF_NAME = "predict";
+
+  /** The Constant LOG. */
+  public static final Log LOG = LogFactory.getLog(HiveMLUDF.class);
+
+  /** The conf. */
+  private JobConf conf;
+
+  /** The soi. */
+  private StringObjectInspector soi;
+
+  /** The doi. */
+  private LazyDoubleObjectInspector doi;
+
+  /** The model. */
+  private MLModel model;
+
+  /**
+   * Currently we only support double as the return value.
+   *
+   * @param objectInspectors the object inspectors
+   * @return the object inspector
+   * @throws UDFArgumentException the UDF argument exception
+   */
+  @Override
+  public ObjectInspector initialize(ObjectInspector[] objectInspectors) throws UDFArgumentException {
+    // We require algo name, model id and at least one feature
+    if (objectInspectors.length < 3) {
+      throw new UDFArgumentLengthException("Algo name, model ID and at least one feature should be passed to "
+        + UDF_NAME);
+    }
+    LOG.info(UDF_NAME + " initialized");
+    return PrimitiveObjectInspectorFactory.javaDoubleObjectInspector;
+  }
+
+  /*
+   * (non-Javadoc)
+   *
+   * @see org.apache.hadoop.hive.ql.udf.generic.GenericUDF#evaluate(org.apache.hadoop.hive.ql.udf.generic.GenericUDF.
+   * DeferredObject[])
+   */
+  @Override
+  public Object evaluate(DeferredObject[] deferredObjects) throws HiveException {
+    String algorithm = soi.getPrimitiveJavaObject(deferredObjects[0].get());
+    String modelId = soi.getPrimitiveJavaObject(deferredObjects[1].get());
+
+    Double[] features = new Double[deferredObjects.length - 2];
+    for (int i = 2; i < deferredObjects.length; i++) {
+      LazyDouble lazyDouble = (LazyDouble) deferredObjects[i].get();
+      features[i - 2] = (lazyDouble == null) ? 0d : doi.get(lazyDouble);
+    }
+
+    try {
+      if (model == null) {
+        model = ModelLoader.loadModel(conf, algorithm, modelId);
+      }
+    } catch (IOException e) {
+      throw new HiveException(e);
+    }
+
+    return model.predict(features);
+  }
+
+  /*
+   * (non-Javadoc)
+   *
+   * @see org.apache.hadoop.hive.ql.udf.generic.GenericUDF#getDisplayString(java.lang.String[])
+   */
+  @Override
+  public String getDisplayString(String[] strings) {
+    return UDF_NAME;
+  }
+
+  /*
+   * (non-Javadoc)
+   *
+   * @see org.apache.hadoop.hive.ql.udf.generic.GenericUDF#configure(org.apache.hadoop.hive.ql.exec.MapredContext)
+   */
+  @Override
+  public void configure(MapredContext context) {
+    super.configure(context);
+    conf = context.getJobConf();
+    soi = PrimitiveObjectInspectorFactory.javaStringObjectInspector;
+    doi = LazyPrimitiveObjectInspectorFactory.LAZY_DOUBLE_OBJECT_INSPECTOR;
+    LOG.info(UDF_NAME + " configured. Model base dir path: " + conf.get(ModelLoader.MODEL_PATH_BASE_DIR));
+  }
+}


[47/50] [abbrv] incubator-lens git commit: LENS-507 : Fix example failures (amareshwari)

Posted by am...@apache.org.
LENS-507 : Fix example failures (amareshwari)


Project: http://git-wip-us.apache.org/repos/asf/incubator-lens/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-lens/commit/e3486e03
Tree: http://git-wip-us.apache.org/repos/asf/incubator-lens/tree/e3486e03
Diff: http://git-wip-us.apache.org/repos/asf/incubator-lens/diff/e3486e03

Branch: refs/heads/current-release-line
Commit: e3486e03a932af0abcc83dbdacce3768a0b9f00d
Parents: 9b97c19
Author: Amareshwari Sriramadasu <am...@apache.org>
Authored: Wed Apr 15 05:49:29 2015 -0500
Committer: Amareshwari Sriramadasu <am...@apache.org>
Committed: Wed Apr 15 05:49:29 2015 -0500

----------------------------------------------------------------------
 lens-examples/src/main/resources/sample-cube.xml              | 2 +-
 lens-examples/src/main/resources/sample-db-only-dimension.xml | 2 +-
 lens-examples/src/main/resources/sample-dimension.xml         | 2 +-
 3 files changed, 3 insertions(+), 3 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/e3486e03/lens-examples/src/main/resources/sample-cube.xml
----------------------------------------------------------------------
diff --git a/lens-examples/src/main/resources/sample-cube.xml b/lens-examples/src/main/resources/sample-cube.xml
index 8e35cc8..f24a8cc 100644
--- a/lens-examples/src/main/resources/sample-cube.xml
+++ b/lens-examples/src/main/resources/sample-cube.xml
@@ -33,7 +33,7 @@
   <dim_attributes>
     <dim_attribute name="dim1" type="INT"/>
     <dim_attribute name="dim2" type="INT" start_time='2013-12-01-00:00'/>
-    <dim_attribute name="dim3" type="INT">
+    <dim_attribute name="dim3" type="INT" join_key="true">
       <ref_spec>
         <table_references>
           <table_reference table="sample_dim" column="id"/>

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/e3486e03/lens-examples/src/main/resources/sample-db-only-dimension.xml
----------------------------------------------------------------------
diff --git a/lens-examples/src/main/resources/sample-db-only-dimension.xml b/lens-examples/src/main/resources/sample-db-only-dimension.xml
index e7a0b37..63a8bf8 100644
--- a/lens-examples/src/main/resources/sample-db-only-dimension.xml
+++ b/lens-examples/src/main/resources/sample-db-only-dimension.xml
@@ -25,7 +25,7 @@
     <dim_attribute name="id" type="INT"/>
     <dim_attribute name="name" type="STRING"/>
     <dim_attribute name="detail" type="STRING" start_time='2013-12-01-00:00'/>
-    <dim_attribute name="d2id" type="INT" start_time='2013-12-01-00:00'>
+    <dim_attribute name="d2id" type="INT" start_time='2013-12-01-00:00' join_key="true">
       <ref_spec>
         <table_references>
           <table_reference table="sample_dim2" column="id"/>

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/e3486e03/lens-examples/src/main/resources/sample-dimension.xml
----------------------------------------------------------------------
diff --git a/lens-examples/src/main/resources/sample-dimension.xml b/lens-examples/src/main/resources/sample-dimension.xml
index a3c7cd4..af4ab21 100644
--- a/lens-examples/src/main/resources/sample-dimension.xml
+++ b/lens-examples/src/main/resources/sample-dimension.xml
@@ -25,7 +25,7 @@
     <dim_attribute name="id" type="INT"/>
     <dim_attribute name="name" type="STRING"/>
     <dim_attribute name="detail" type="STRING" start_time='2013-12-01-00:00'/>
-    <dim_attribute name="d2id" type="INT" start_time='2013-12-01-00:00'>
+    <dim_attribute name="d2id" type="INT" start_time='2013-12-01-00:00' join_key="true">
       <ref_spec>
         <table_references>
           <table_reference table="sample_dim2" column="id"/>


[12/50] [abbrv] incubator-lens git commit: LENS-412 : Fix missing partitions message propagation across storages (Rajat Khandelwal via amareshwari)

Posted by am...@apache.org.
LENS-412 : Fix missing partitions message propagation across storages (Rajat Khandelwal via amareshwari)


Project: http://git-wip-us.apache.org/repos/asf/incubator-lens/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-lens/commit/42ffb4e1
Tree: http://git-wip-us.apache.org/repos/asf/incubator-lens/tree/42ffb4e1
Diff: http://git-wip-us.apache.org/repos/asf/incubator-lens/diff/42ffb4e1

Branch: refs/heads/current-release-line
Commit: 42ffb4e1a837320c0014fae7ce0665fb182d97d6
Parents: c6c593c
Author: Rajat Khandelwal <pr...@apache.org>
Authored: Fri Mar 27 14:19:09 2015 +0530
Committer: Amareshwari Sriramadasu <am...@apache.org>
Committed: Fri Mar 27 14:19:09 2015 +0530

----------------------------------------------------------------------
 .../lens/cube/metadata/CubeMetastoreClient.java |  24 +-
 .../lens/cube/metadata/StorageConstants.java    |  10 +-
 .../lens/cube/metadata/TimePartition.java       |  76 ++++++
 .../timeline/EndsAndHolesPartitionTimeline.java |  22 +-
 .../metadata/timeline/PartitionTimeline.java    |  32 +--
 .../timeline/PartitionTimelineFactory.java      |   6 +-
 .../timeline/RangesPartitionTimeline.java       | 243 +++++++++++++++++++
 .../timeline/StoreAllPartitionTimeline.java     |   8 +-
 .../cube/parse/CandidateTablePruneCause.java    |   7 +-
 .../lens/cube/parse/CubeQueryConfUtil.java      |   2 -
 .../lens/cube/parse/CubeQueryContext.java       |   2 +-
 .../org/apache/lens/cube/parse/PruneCauses.java |  40 +--
 .../lens/cube/parse/StorageTableResolver.java   |  70 +++---
 .../org/apache/lens/cube/parse/StorageUtil.java |   6 +-
 .../timeline/TestPartitionTimelines.java        |  75 +++++-
 .../lens/cube/parse/TestCubeRewriter.java       |  32 +--
 16 files changed, 508 insertions(+), 147 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/42ffb4e1/lens-cube/src/main/java/org/apache/lens/cube/metadata/CubeMetastoreClient.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/metadata/CubeMetastoreClient.java b/lens-cube/src/main/java/org/apache/lens/cube/metadata/CubeMetastoreClient.java
index 42bf98f..1835d2f 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/metadata/CubeMetastoreClient.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/metadata/CubeMetastoreClient.java
@@ -44,7 +44,6 @@ import org.apache.thrift.TException;
 import com.google.common.collect.Lists;
 import com.google.common.collect.Maps;
 import com.google.common.collect.Sets;
-
 import lombok.extern.apachecommons.CommonsLog;
 
 /**
@@ -330,14 +329,19 @@ public class CubeMetastoreClient {
     }
 
     /** update partition timeline cache for deletion of time partition */
-    public void updateForDeletion(String cubeTableName, String storageName, UpdatePeriod updatePeriod,
+    public boolean updateForDeletion(String cubeTableName, String storageName, UpdatePeriod updatePeriod,
       Map<String, Date> timePartSpec) throws HiveException, LensException {
+      boolean updated = false;
       for (Map.Entry<String, Date> entry : timePartSpec.entrySet()) {
-        get(cubeTableName, storageName, updatePeriod, entry.getKey()).drop(TimePartition.of(
-          updatePeriod, entry.getValue()));
+        TimePartition part = TimePartition.of(updatePeriod, entry.getValue());
+        if (!partitionExistsByFilter(cubeTableName, storageName, StorageConstants.getPartFilter(entry.getKey(),
+          part.getDateString()))) {
+          get(cubeTableName, storageName, updatePeriod, entry.getKey()).drop(part);
+          updated = true;
+        }
       }
+      return updated;
     }
-
   }
 
 
@@ -937,8 +941,9 @@ public class CubeMetastoreClient {
     } else {
       // dropping fact partition
       getStorage(storageName).dropPartition(getClient(), storageTableName, partVals, null);
-      partitionTimelineCache.updateForDeletion(cubeTableName, storageName, updatePeriod, timePartSpec);
-      this.alterTablePartitionCache(storageTableName);
+      if (partitionTimelineCache.updateForDeletion(cubeTableName, storageName, updatePeriod, timePartSpec)) {
+        this.alterTablePartitionCache(storageTableName);
+      }
     }
   }
 
@@ -978,6 +983,11 @@ public class CubeMetastoreClient {
     return partitionExists(storageTableName, getPartitionSpec(updatePeriod, partitionTimestamps));
   }
 
+  public boolean partitionExistsByFilter(String cubeTableName, String storageName, String filter) throws HiveException {
+    return partitionExistsByFilter(MetastoreUtil.getStorageTableName(cubeTableName, Storage.getPrefix(storageName)),
+      filter);
+  }
+
   public boolean partitionExistsByFilter(String storageTableName, String filter) throws HiveException {
     int parts;
     Table tbl = null;

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/42ffb4e1/lens-cube/src/main/java/org/apache/lens/cube/metadata/StorageConstants.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/metadata/StorageConstants.java b/lens-cube/src/main/java/org/apache/lens/cube/metadata/StorageConstants.java
index aee9f7b..610d168 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/metadata/StorageConstants.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/metadata/StorageConstants.java
@@ -19,10 +19,10 @@
 
 package org.apache.lens.cube.metadata;
 
-import java.util.ArrayList;
+import java.util.Collections;
 import java.util.HashMap;
-import java.util.List;
 import java.util.Map;
+import java.util.Set;
 
 public final class StorageConstants {
   private StorageConstants() {
@@ -52,10 +52,8 @@ public final class StorageConstants {
    *
    * @return List
    */
-  public static List<String> getPartitionsForLatest() {
-    List<String> parts = new ArrayList<String>();
-    parts.add(LATEST_PARTITION_VALUE);
-    return parts;
+  public static Set<String> getPartitionsForLatest() {
+    return Collections.singleton(LATEST_PARTITION_VALUE);
   }
 
   /**

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/42ffb4e1/lens-cube/src/main/java/org/apache/lens/cube/metadata/TimePartition.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/metadata/TimePartition.java b/lens-cube/src/main/java/org/apache/lens/cube/metadata/TimePartition.java
index 6a5b31d..b948467 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/metadata/TimePartition.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/metadata/TimePartition.java
@@ -21,6 +21,7 @@ package org.apache.lens.cube.metadata;
 import java.text.ParseException;
 import java.util.Calendar;
 import java.util.Date;
+import java.util.Iterator;
 
 import org.apache.lens.api.LensException;
 
@@ -123,4 +124,79 @@ public class TimePartition implements Comparable<TimePartition> {
   protected static String getWrongUpdatePeriodMessage(UpdatePeriod up, String dateString) {
     return String.format(UPDATE_PERIOD_WRONG_ERROR_MESSAGE, up, dateString);
   }
+
+  public TimePartitionRange rangeUpto(TimePartition to) {
+    return new TimePartitionRange(this, to);
+  }
+
+  public TimePartitionRange rangeFrom(TimePartition from) {
+    return new TimePartitionRange(from, this);
+  }
+
+  public TimePartitionRange singletonRange() {
+    return rangeUpto(next());
+  }
+
+  /**
+   * Range of time partition. [begin,end). i.e. inclusive begin and exclusive end.
+   */
+  @Data
+  public static class TimePartitionRange implements Iterable<TimePartition> {
+    private TimePartition begin;
+    private TimePartition end;
+
+    public TimePartitionRange(TimePartition from, TimePartition to) {
+      this.begin = from;
+      this.end = to;
+    }
+
+    @Override
+    public String toString() {
+      return "[" + begin.getDateString() + ", " + end.getDateString() + ")";
+    }
+
+    /**
+     * returns TimePartition objects starting from begin and upto(excluding) end. interval of iteration is the update
+     * period of the partitions. Assumes both partitions have same update period.
+     */
+    @Override
+    public Iterator<TimePartition> iterator() {
+
+      return new Iterator<TimePartition>() {
+        TimePartition current = begin;
+
+        @Override
+        public boolean hasNext() {
+          return current.before(end);
+        }
+
+        @Override
+        public TimePartition next() {
+          TimePartition ret = current;
+          current = current.next();
+          return ret;
+        }
+
+        @Override
+        public void remove() {
+          throw new UnsupportedOperationException("remove not supported");
+        }
+      };
+    }
+
+    /**
+     * @param partition
+     * @return begin <= partition < end
+     */
+    public boolean contains(TimePartition partition) {
+      return !partition.before(begin) && partition.before(end);
+    }
+
+    /**
+     * @return if range is empty range.
+     */
+    public boolean isEmpty() {
+      return begin.equals(end);
+    }
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/42ffb4e1/lens-cube/src/main/java/org/apache/lens/cube/metadata/timeline/EndsAndHolesPartitionTimeline.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/metadata/timeline/EndsAndHolesPartitionTimeline.java b/lens-cube/src/main/java/org/apache/lens/cube/metadata/timeline/EndsAndHolesPartitionTimeline.java
index 3e323e1..79e8a62 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/metadata/timeline/EndsAndHolesPartitionTimeline.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/metadata/timeline/EndsAndHolesPartitionTimeline.java
@@ -22,7 +22,6 @@ package org.apache.lens.cube.metadata.timeline;
 import java.util.*;
 
 import org.apache.lens.api.LensException;
-import org.apache.lens.cube.metadata.CubeMetastoreClient;
 import org.apache.lens.cube.metadata.TimePartition;
 import org.apache.lens.cube.metadata.UpdatePeriod;
 import org.apache.lens.cube.parse.TimeRange;
@@ -48,13 +47,13 @@ public class EndsAndHolesPartitionTimeline extends PartitionTimeline {
   private TreeSet<TimePartition> holes = Sets.newTreeSet();
   private TimePartition latest;
 
-  public EndsAndHolesPartitionTimeline(CubeMetastoreClient client, String storageTableName, UpdatePeriod updatePeriod,
+  public EndsAndHolesPartitionTimeline(String storageTableName, UpdatePeriod updatePeriod,
     String partCol) {
-    super(client, storageTableName, updatePeriod, partCol);
+    super(storageTableName, updatePeriod, partCol);
   }
 
   @Override
-  public boolean add(TimePartition partition) throws LensException {
+  public boolean add(@NonNull TimePartition partition) throws LensException {
     if (isEmpty()) {
       // First partition being added
       first = partition;
@@ -75,20 +74,7 @@ public class EndsAndHolesPartitionTimeline extends PartitionTimeline {
   }
 
   @Override
-  public boolean add(@NonNull Collection<TimePartition> partitions) throws LensException {
-    boolean result = true;
-    for (TimePartition partition : partitions) {
-      result &= add(partition);
-    }
-    // Can also return the failed to add items.
-    return result;
-  }
-
-  @Override
-  public boolean drop(TimePartition toDrop) throws LensException {
-    if (morePartitionsExist(toDrop.getDateString())) {
-      return true;
-    }
+  public boolean drop(@NonNull TimePartition toDrop) throws LensException {
     if (first.equals(latest) && first.equals(toDrop)) {
       this.first = null;
       this.latest = null;

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/42ffb4e1/lens-cube/src/main/java/org/apache/lens/cube/metadata/timeline/PartitionTimeline.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/metadata/timeline/PartitionTimeline.java b/lens-cube/src/main/java/org/apache/lens/cube/metadata/timeline/PartitionTimeline.java
index 7eda58a..237164f 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/metadata/timeline/PartitionTimeline.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/metadata/timeline/PartitionTimeline.java
@@ -22,9 +22,10 @@ package org.apache.lens.cube.metadata.timeline;
 import java.util.*;
 
 import org.apache.lens.api.LensException;
-import org.apache.lens.cube.metadata.*;
+import org.apache.lens.cube.metadata.MetastoreUtil;
+import org.apache.lens.cube.metadata.TimePartition;
+import org.apache.lens.cube.metadata.UpdatePeriod;
 
-import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.metadata.Table;
 
 import com.google.common.collect.Maps;
@@ -45,7 +46,6 @@ import lombok.extern.apachecommons.CommonsLog;
 @ToString(exclude = {"client"})
 @CommonsLog
 public abstract class PartitionTimeline implements Iterable<TimePartition> {
-  private final CubeMetastoreClient client;
   private final String storageTableName;
   private final UpdatePeriod updatePeriod;
   private final String partCol;
@@ -121,23 +121,6 @@ public abstract class PartitionTimeline implements Iterable<TimePartition> {
     return result;
   }
 
-  /**
-   * goes to metastore and queries if more partitions exist associated with (partCol = value) in storage table
-   * #getStorageTableName for update period #getUpdatePeriod. This might be useful for implementations while
-   * implementing drop.
-   *
-   * @param value
-   * @return
-   * @throws LensException
-   */
-  public boolean morePartitionsExist(String value) throws LensException {
-    try {
-      return getClient().partitionExistsByFilter(getStorageTableName(), StorageConstants.getPartFilter(getPartCol(),
-        value));
-    } catch (HiveException e) {
-      throw new LensException(e);
-    }
-  }
 
   /**
    * Add partition to timeline
@@ -155,7 +138,14 @@ public abstract class PartitionTimeline implements Iterable<TimePartition> {
    * @return whether add was successful
    * @throws LensException
    */
-  public abstract boolean add(@NonNull Collection<TimePartition> partitions) throws LensException;
+  public boolean add(@NonNull Collection<TimePartition> partitions) throws LensException {
+    boolean result = true;
+    for (TimePartition partition : partitions) {
+      result &= add(partition);
+    }
+    // Can also return the failed to add items.
+    return result;
+  }
 
   /**
    * drop partition.

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/42ffb4e1/lens-cube/src/main/java/org/apache/lens/cube/metadata/timeline/PartitionTimelineFactory.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/metadata/timeline/PartitionTimelineFactory.java b/lens-cube/src/main/java/org/apache/lens/cube/metadata/timeline/PartitionTimelineFactory.java
index 5626a03..b018a1a 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/metadata/timeline/PartitionTimelineFactory.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/metadata/timeline/PartitionTimelineFactory.java
@@ -49,11 +49,11 @@ public final class PartitionTimelineFactory {
           updatePeriod, partitionColumn));
       Class<? extends PartitionTimeline> clz = (Class<? extends PartitionTimeline>) Class.forName(storageClassName);
       Constructor<? extends PartitionTimeline> constructor = clz.getConstructor(
-        CubeMetastoreClient.class, String.class, UpdatePeriod.class, String.class);
+        String.class, UpdatePeriod.class, String.class);
       return constructor.newInstance(
-        client, storageTable, updatePeriod, partitionColumn);
+        storageTable, updatePeriod, partitionColumn);
     } catch (Exception e) {
-      return new EndsAndHolesPartitionTimeline(client, storageTable, updatePeriod, partitionColumn);
+      return new EndsAndHolesPartitionTimeline(storageTable, updatePeriod, partitionColumn);
     }
   }
 }

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/42ffb4e1/lens-cube/src/main/java/org/apache/lens/cube/metadata/timeline/RangesPartitionTimeline.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/metadata/timeline/RangesPartitionTimeline.java b/lens-cube/src/main/java/org/apache/lens/cube/metadata/timeline/RangesPartitionTimeline.java
new file mode 100644
index 0000000..fb2d0a8
--- /dev/null
+++ b/lens-cube/src/main/java/org/apache/lens/cube/metadata/timeline/RangesPartitionTimeline.java
@@ -0,0 +1,243 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.lens.cube.metadata.timeline;
+
+
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+
+import org.apache.lens.api.LensException;
+import org.apache.lens.cube.metadata.TimePartition;
+import org.apache.lens.cube.metadata.UpdatePeriod;
+
+import com.google.common.base.Strings;
+import com.google.common.collect.Lists;
+import com.google.common.collect.Maps;
+import lombok.Data;
+import lombok.ToString;
+
+/**
+ * One implementation of PartitionTimeline that stores ranges of partition presence, Basically a list of tuples each
+ * tuple represents a range of presence. range is of the form [from, end) i.e. including the first element and excluding
+ * the second element of the tuple
+ */
+@Data
+@ToString(callSuper = true)
+public class RangesPartitionTimeline extends PartitionTimeline {
+  private List<TimePartition.TimePartitionRange> ranges = Lists.newArrayList();
+
+  public RangesPartitionTimeline(String storageTableName, UpdatePeriod updatePeriod,
+    String partCol) {
+    super(storageTableName, updatePeriod, partCol);
+  }
+
+  @Override
+  public boolean add(TimePartition partition) throws LensException {
+    int ind = getStrictlyAfterIndex(partition);
+    int added = 0;
+    if (ind > 0) {
+      if (ranges.get(ind - 1).contains(partition)) {
+        return true;
+      }
+      if (ranges.get(ind - 1).getEnd().equals(partition)) {
+        added++;
+        ranges.get(ind - 1).setEnd(partition.next());
+      }
+    }
+    if (ind < ranges.size()) {
+      if (partition.equals(ranges.get(ind).getBegin().previous())) {
+        added++;
+        ranges.get(ind).setBegin(partition);
+      }
+    }
+    switch (added) {
+    case 0:
+      ranges.add(ind, partition.singletonRange());
+      break;
+    case 2:
+      ranges.get(ind - 1).setEnd(ranges.get(ind).getEnd());
+      ranges.remove(ind);
+      break;
+    case 1:
+      // Nothing needs to be done.
+    default:
+      break;
+
+    }
+    return true;
+  }
+
+  private int getStrictlyAfterIndex(TimePartition part) {
+    int start = 0;
+    int end = getRanges().size();
+    int mid;
+    while (end - start > 0) {
+      mid = (start + end) / 2;
+      if (ranges.get(mid).getBegin().after(part)) {
+        end = mid;
+      } else {
+        start = mid + 1;
+      }
+    }
+    return end;
+  }
+
+  private void mergeRanges() {
+    for (int i = 0; i < ranges.size() - 1; i++) {
+      if (ranges.get(i).getEnd().equals(ranges.get(i + 1).getBegin())) {
+        TimePartition.TimePartitionRange removed = ranges.remove(i + 1);
+        ranges.get(i).setEnd(removed.getEnd());
+        i--; // check again at same index
+      }
+    }
+  }
+
+  @Override
+  public boolean drop(TimePartition toDrop) throws LensException {
+    int ind = getStrictlyAfterIndex(toDrop);
+    if (ind == 0) {
+      return true; // nothing to do
+    }
+    if (ranges.get(ind - 1).getBegin().equals(toDrop)) {
+      ranges.get(ind - 1).setBegin(toDrop.next());
+    } else if (ranges.get(ind - 1).getEnd().previous().equals(toDrop)) {
+      ranges.get(ind - 1).setEnd(toDrop);
+    } else {
+      TimePartition end = ranges.get(ind - 1).getEnd();
+      ranges.get(ind - 1).setEnd(toDrop);
+      ranges.add(ind, toDrop.next().rangeUpto(end));
+    }
+    if (ranges.get(ind - 1).isEmpty()) {
+      ranges.remove(ind - 1);
+    }
+    return true;
+  }
+
+
+  @Override
+  public TimePartition latest() {
+    if (isEmpty()) {
+      return null;
+    }
+    return ranges.get(ranges.size() - 1).getEnd().previous();
+  }
+
+  @Override
+  public Map<String, String> toProperties() {
+    HashMap<String, String> ret = Maps.newHashMap();
+    if (isEmpty()) {
+      return ret;
+    }
+    StringBuilder sb = new StringBuilder();
+    String sep = "";
+    for (TimePartition.TimePartitionRange range : ranges) {
+      sb.append(sep);
+      sep = ",";
+      sb.append(range.getBegin()).append(sep).append(range.getEnd());
+    }
+    ret.put("ranges", sb.toString());
+    return ret;
+  }
+
+  @Override
+  public boolean initFromProperties(Map<String, String> properties) throws LensException {
+    ranges.clear();
+    String rangesStr = properties.get("ranges");
+    if (!Strings.isNullOrEmpty(rangesStr)) {
+      String[] split = rangesStr.split("\\s*,\\s*");
+      if (split.length % 2 == 1) {
+        throw new LensException("Ranges incomplete");
+      }
+      for (int i = 0; i < split.length; i += 2) {
+        ranges.add(TimePartition.of(getUpdatePeriod(), split[i]).rangeUpto(TimePartition.of(getUpdatePeriod(),
+          split[i + 1])));
+      }
+    }
+    return isConsistent();
+  }
+
+
+  public boolean isEmpty() {
+    return ranges.isEmpty();
+  }
+
+  @Override
+  public boolean isConsistent() {
+    if (isEmpty()) {
+      return true;
+    }
+    if (!ranges.get(0).getBegin().before(ranges.get(0).getEnd())) {
+      return false;
+    }
+    for (int i = 0; i < ranges.size() - 1; i++) {
+      if (!ranges.get(i).getEnd().before(ranges.get(i + 1).getBegin())) {
+        return false;
+      }
+      if (!ranges.get(i + 1).getBegin().before(ranges.get(i + 1).getEnd())) {
+        return false;
+      }
+    }
+    return true;
+  }
+
+  @Override
+  public boolean exists(TimePartition toCheck) {
+    if (isEmpty()) {
+      return false;
+    }
+    for (TimePartition.TimePartitionRange range : ranges) {
+      if (range.contains(toCheck)) {
+        return true;
+      }
+    }
+    return false;
+  }
+
+  @Override
+  public Iterator<TimePartition> iterator() {
+
+    return new Iterator<TimePartition>() {
+      Iterator<TimePartition.TimePartitionRange> uber = ranges.iterator();
+      Iterator<TimePartition> cur = null;
+
+      @Override
+      public boolean hasNext() {
+        if (cur == null || !cur.hasNext()) {
+          if (!uber.hasNext()) {
+            return false;
+          }
+          cur = uber.next().iterator();
+        }
+        return cur.hasNext();
+      }
+
+      @Override
+      public TimePartition next() {
+        return cur.next();
+      }
+
+      @Override
+      public void remove() {
+        throw new UnsupportedOperationException();
+      }
+    };
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/42ffb4e1/lens-cube/src/main/java/org/apache/lens/cube/metadata/timeline/StoreAllPartitionTimeline.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/metadata/timeline/StoreAllPartitionTimeline.java b/lens-cube/src/main/java/org/apache/lens/cube/metadata/timeline/StoreAllPartitionTimeline.java
index 8f8b03a..d6ee0a1 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/metadata/timeline/StoreAllPartitionTimeline.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/metadata/timeline/StoreAllPartitionTimeline.java
@@ -21,7 +21,6 @@ package org.apache.lens.cube.metadata.timeline;
 import java.util.*;
 
 import org.apache.lens.api.LensException;
-import org.apache.lens.cube.metadata.CubeMetastoreClient;
 import org.apache.lens.cube.metadata.TimePartition;
 import org.apache.lens.cube.metadata.UpdatePeriod;
 
@@ -42,9 +41,9 @@ import lombok.ToString;
 public class StoreAllPartitionTimeline extends PartitionTimeline {
   TreeSet<TimePartition> allPartitions;
 
-  public StoreAllPartitionTimeline(CubeMetastoreClient client, String storageTableName,
+  public StoreAllPartitionTimeline(String storageTableName,
     UpdatePeriod updatePeriod, String partCol) {
-    super(client, storageTableName, updatePeriod, partCol);
+    super(storageTableName, updatePeriod, partCol);
     allPartitions = Sets.newTreeSet();
   }
 
@@ -60,9 +59,6 @@ public class StoreAllPartitionTimeline extends PartitionTimeline {
 
   @Override
   public boolean drop(@NonNull TimePartition toDrop) throws LensException {
-    if (morePartitionsExist(toDrop.getDateString())) {
-      return true;
-    }
     return allPartitions.remove(toDrop);
   }
 

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/42ffb4e1/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateTablePruneCause.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateTablePruneCause.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateTablePruneCause.java
index bfeff4f..bc9ef93 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateTablePruneCause.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateTablePruneCause.java
@@ -22,6 +22,7 @@ import java.util.*;
 
 import org.codehaus.jackson.annotate.JsonWriteNullProperties;
 
+import com.google.common.collect.Sets;
 import lombok.Data;
 import lombok.NoArgsConstructor;
 
@@ -95,7 +96,7 @@ public class CandidateTablePruneCause {
     // missing partitions for cube table
     MISSING_PARTITIONS("Missing partitions for the cube table: %s") {
       Object[] getFormatPlaceholders(Set<CandidateTablePruneCause> causes) {
-        List<List<String>> missingPartitions = new ArrayList<List<String>>();
+        Set<Set<String>> missingPartitions = Sets.newHashSet();
         for (CandidateTablePruneCause cause : causes) {
           missingPartitions.add(cause.getMissingPartitions());
         }
@@ -183,7 +184,7 @@ public class CandidateTablePruneCause {
   private Map<String, SkipStorageCause> storageCauses;
 
   // populated only incase of missing partitions cause
-  private List<String> missingPartitions;
+  private Set<String> missingPartitions;
   // populated only incase of missing update periods cause
   private List<String> missingUpdatePeriods;
   // populated in case of missing columns
@@ -215,7 +216,7 @@ public class CandidateTablePruneCause {
     return columnNotFound(colList);
   }
 
-  public static CandidateTablePruneCause missingPartitions(List<String> nonExistingParts) {
+  public static CandidateTablePruneCause missingPartitions(Set<String> nonExistingParts) {
     CandidateTablePruneCause cause =
       new CandidateTablePruneCause(CandidateTablePruneCode.MISSING_PARTITIONS);
     cause.setMissingPartitions(nonExistingParts);

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/42ffb4e1/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryConfUtil.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryConfUtil.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryConfUtil.java
index c3142cd..a6374f6 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryConfUtil.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryConfUtil.java
@@ -44,7 +44,6 @@ public final class CubeQueryConfUtil {
   public static final String DRIVER_SUPPORTED_STORAGES = "lens.cube.query.driver." + "supported.storages";
   public static final String FAIL_QUERY_ON_PARTIAL_DATA = "lens.cube.query.fail.if.data.partial";
   public static final String NON_EXISTING_PARTITIONS = "lens.cube.query.nonexisting.partitions";
-  public static final String ADD_NON_EXISTING_PARTITIONS = "lens.cube.query.add.nonexisting.partitions";
   public static final String ENABLE_MULTI_TABLE_SELECT = "lens.cube.query.enable.multi.table.select";
   public static final String QUERY_MAX_INTERVAL = "lens.cube.query.max.interval";
   public static final String PROCESS_TIME_PART_COL = "lens.cube.query.process.time" + ".partition.column";
@@ -59,7 +58,6 @@ public final class CubeQueryConfUtil {
   public static final int DEFAULT_LOOK_AHEAD_PT_PARTS = 1;
   public static final boolean DEFAULT_ENABLE_GROUP_BY_TO_SELECT = false;
   public static final boolean DEFAULT_ENABLE_SELECT_TO_GROUPBY = false;
-  public static final boolean DEFAULT_ADD_NON_EXISTING_PARTITIONS = false;
   public static final boolean DEFAULT_REPLACE_TIMEDIM_WITH_PART_COL = true;
 
   public static String getLookAheadPTPartsKey(UpdatePeriod interval) {

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/42ffb4e1/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryContext.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryContext.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryContext.java
index e06022c..7ea67f4 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryContext.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryContext.java
@@ -641,7 +641,7 @@ public class CubeQueryContext {
     }
   }
 
-  void setNonexistingParts(Map<String, List<String>> nonExistingParts) throws SemanticException {
+  void setNonexistingParts(Map<String, Set<String>> nonExistingParts) throws SemanticException {
     if (!nonExistingParts.isEmpty()) {
       ByteArrayOutputStream out = null;
       String partsStr;

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/42ffb4e1/lens-cube/src/main/java/org/apache/lens/cube/parse/PruneCauses.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/PruneCauses.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/PruneCauses.java
index ae9e013..32ef421 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/PruneCauses.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/PruneCauses.java
@@ -18,10 +18,7 @@
  */
 package org.apache.lens.cube.parse;
 
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
+import java.util.*;
 
 import org.apache.lens.cube.metadata.AbstractCubeTable;
 import org.apache.lens.cube.parse.CandidateTablePruneCause.CandidateTablePruneCode;
@@ -30,6 +27,7 @@ import org.apache.commons.lang.StringUtils;
 
 import org.codehaus.jackson.annotate.JsonWriteNullProperties;
 
+import com.google.common.collect.Maps;
 import lombok.AllArgsConstructor;
 import lombok.Data;
 import lombok.Getter;
@@ -39,6 +37,21 @@ public class PruneCauses<T extends AbstractCubeTable> extends HashMap<T, List<Ca
   @Getter(lazy = true)
   private final HashMap<CandidateTablePruneCause, List<T>> reversed = reverse();
   @Getter(lazy = true)
+  private final HashMap<String, List<CandidateTablePruneCause>> compact = computeCompact();
+
+  private HashMap<String, List<CandidateTablePruneCause>> computeCompact() {
+    HashMap<String, List<CandidateTablePruneCause>> detailedMessage = Maps.newHashMap();
+    for (Map.Entry<CandidateTablePruneCause, List<T>> entry : getReversed().entrySet()) {
+      String key = StringUtils.join(entry.getValue(), ",");
+      if (detailedMessage.get(key) == null) {
+        detailedMessage.put(key, new ArrayList<CandidateTablePruneCause>());
+      }
+      detailedMessage.get(key).add(entry.getKey());
+    }
+    return detailedMessage;
+  }
+
+  @Getter(lazy = true)
   private final BriefAndDetailedError jsonObject = toJsonObject();
 
   public void addPruningMsg(T table, CandidateTablePruneCause msg) {
@@ -62,16 +75,7 @@ public class PruneCauses<T extends AbstractCubeTable> extends HashMap<T, List<Ca
   }
 
   public BriefAndDetailedError toJsonObject() {
-    final HashMap<String, List<CandidateTablePruneCause>> detailedMessage
-      = new HashMap<String, List<CandidateTablePruneCause>>();
-    for (Map.Entry<CandidateTablePruneCause, List<T>> entry : getReversed().entrySet()) {
-      String key = StringUtils.join(entry.getValue(), ",");
-      if (detailedMessage.get(key) == null) {
-        detailedMessage.put(key, new ArrayList<CandidateTablePruneCause>());
-      }
-      detailedMessage.get(key).add(entry.getKey());
-    }
-    return new BriefAndDetailedError(getBriefCause(), detailedMessage);
+    return new BriefAndDetailedError(getBriefCause(), getCompact());
   }
 
   public String getBriefCause() {
@@ -81,10 +85,10 @@ public class PruneCauses<T extends AbstractCubeTable> extends HashMap<T, List<Ca
         maxCause = cause.getCause();
       }
     }
-    Map<CandidateTablePruneCause, List<T>> maxCauseMap = new HashMap<CandidateTablePruneCause, List<T>>();
-    for (Map.Entry<CandidateTablePruneCause, List<T>> entry : getReversed().entrySet()) {
-      if (entry.getKey().getCause().compareTo(maxCause) == 0) {
-        maxCauseMap.put(entry.getKey(), entry.getValue());
+    Map<CandidateTablePruneCause, String> maxCauseMap = Maps.newHashMap();
+    for (Map.Entry<CandidateTablePruneCause, List<T>> entry: getReversed().entrySet()) {
+      if (entry.getKey().getCause().equals(maxCause)) {
+        maxCauseMap.put(entry.getKey(), StringUtils.join(entry.getValue(), ","));
       }
     }
     return maxCause.getBriefError(maxCauseMap.keySet());

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/42ffb4e1/lens-cube/src/main/java/org/apache/lens/cube/parse/StorageTableResolver.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/StorageTableResolver.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/StorageTableResolver.java
index 23fd5a6..6e63483 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/StorageTableResolver.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/StorageTableResolver.java
@@ -25,6 +25,7 @@ import java.util.*;
 
 import org.apache.lens.api.LensException;
 import org.apache.lens.cube.metadata.*;
+import org.apache.lens.cube.metadata.timeline.RangesPartitionTimeline;
 import org.apache.lens.cube.parse.CandidateTablePruneCause.CandidateTablePruneCode;
 import org.apache.lens.cube.parse.CandidateTablePruneCause.SkipStorageCause;
 import org.apache.lens.cube.parse.CandidateTablePruneCause.SkipStorageCode;
@@ -38,6 +39,9 @@ import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.parse.SemanticException;
 import org.apache.hadoop.util.ReflectionUtils;
 
+import com.google.common.collect.Maps;
+import com.google.common.collect.Sets;
+
 /**
  * Resolve storages and partitions of all candidate tables and prunes candidate tables with missing storages or
  * partitions.
@@ -55,8 +59,7 @@ class StorageTableResolver implements ContextRewriter {
     new HashMap<CubeFactTable, Map<UpdatePeriod, Set<String>>>();
   private String processTimePartCol = null;
   private final UpdatePeriod maxInterval;
-  private final boolean populateNonExistingParts;
-  private final Map<String, List<String>> nonExistingPartitions = new HashMap<String, List<String>>();
+  private final Map<String, Set<String>> nonExistingPartitions = new HashMap<String, Set<String>>();
   private TimeRangeWriter rangeWriter;
   private DateFormat partWhereClauseFormat = null;
   private PHASE phase;
@@ -82,13 +85,6 @@ class StorageTableResolver implements ContextRewriter {
     this.supportedStorages = getSupportedStorages(conf);
     this.allStoragesSupported = (supportedStorages == null);
     this.failOnPartialData = conf.getBoolean(CubeQueryConfUtil.FAIL_QUERY_ON_PARTIAL_DATA, false);
-    if (!failOnPartialData) {
-      this.populateNonExistingParts = true;
-    } else {
-      this.populateNonExistingParts =
-        conf.getBoolean(CubeQueryConfUtil.ADD_NON_EXISTING_PARTITIONS,
-          CubeQueryConfUtil.DEFAULT_ADD_NON_EXISTING_PARTITIONS);
-    }
     String str = conf.get(CubeQueryConfUtil.VALID_STORAGE_DIM_TABLES);
     validDimTables = StringUtils.isBlank(str) ? null : Arrays.asList(StringUtils.split(str.toLowerCase(), ","));
     this.processTimePartCol = conf.get(CubeQueryConfUtil.PROCESS_TIME_PART_COL);
@@ -320,10 +316,10 @@ class StorageTableResolver implements ContextRewriter {
       CandidateFact cfact = i.next();
       List<FactPartition> answeringParts = new ArrayList<FactPartition>();
       HashMap<String, SkipStorageCause> skipStorageCauses = new HashMap<String, SkipStorageCause>();
-      List<String> nonExistingParts = new ArrayList<String>();
+      Map<UpdatePeriod, RangesPartitionTimeline> missingPartitionRanges = Maps.newHashMap();
       boolean noPartsForRange = false;
       for (TimeRange range : cubeql.getTimeRanges()) {
-        Set<FactPartition> rangeParts = getPartitions(cfact.fact, range, skipStorageCauses, nonExistingParts);
+        Set<FactPartition> rangeParts = getPartitions(cfact.fact, range, skipStorageCauses, missingPartitionRanges);
         if (rangeParts == null || rangeParts.isEmpty()) {
           LOG.info("No partitions for range:" + range);
           noPartsForRange = true;
@@ -335,6 +331,14 @@ class StorageTableResolver implements ContextRewriter {
         cfact.getRangeToWhereClause().put(range, rangeWriter.getTimeRangeWhereClause(cubeql,
           cubeql.getAliasForTabName(cubeql.getCube().getName()), rangeParts));
       }
+      Set<String> nonExistingParts = Sets.newHashSet();
+      if (!missingPartitionRanges.isEmpty()) {
+        for (UpdatePeriod period : missingPartitionRanges.keySet()) {
+          for (TimePartition.TimePartitionRange range : missingPartitionRanges.get(period).getRanges()) {
+            nonExistingParts.add(range.toString());
+          }
+        }
+      }
       if (!nonExistingParts.isEmpty()) {
         addNonExistingParts(cfact.fact.getName(), nonExistingParts);
       }
@@ -343,24 +347,19 @@ class StorageTableResolver implements ContextRewriter {
           + cubeql.getTimeRanges());
         /*
          * This fact is getting discarded because of any of following reasons:
-         * 1. Storage tables are not partitioned by timedim partition column
-         * 2. Has missing partitions, and CubeQueryConfUtil.ADD_NON_EXISTING_PARTITIONS is true - which can populate
-         *  all missing partitions
-         * 3. Has missing partitions, and CubeQueryConfUtil.ADD_NON_EXISTING_PARTITIONS is false - will populate only
-         *  the first missing partition.
-         * 4. Storage tables do not have the update period for the timerange queried.
+         * 1. Has missing partitions
+         * 2. All Storage tables were skipped for some reasons.
+         * 3. Storage tables do not have the update period for the timerange queried.
          */
-        if (!skipStorageCauses.isEmpty()) {
+        if (!nonExistingParts.isEmpty()) {
+          cubeql.addFactPruningMsgs(cfact.fact, CandidateTablePruneCause.missingPartitions(nonExistingParts));
+        } else if (!skipStorageCauses.isEmpty()) {
           CandidateTablePruneCause cause = CandidateTablePruneCause.noCandidateStorages(skipStorageCauses);
           cubeql.addFactPruningMsgs(cfact.fact, cause);
         } else {
-          if (!nonExistingParts.isEmpty()) {
-            cubeql.addFactPruningMsgs(cfact.fact, CandidateTablePruneCause.missingPartitions(nonExistingParts));
-          } else {
-            CandidateTablePruneCause cause =
-              new CandidateTablePruneCause(CandidateTablePruneCode.NO_FACT_UPDATE_PERIODS_FOR_GIVEN_RANGE);
-            cubeql.addFactPruningMsgs(cfact.fact, cause);
-          }
+          CandidateTablePruneCause cause =
+            new CandidateTablePruneCause(CandidateTablePruneCode.NO_FACT_UPDATE_PERIODS_FOR_GIVEN_RANGE);
+          cubeql.addFactPruningMsgs(cfact.fact, cause);
         }
         i.remove();
         continue;
@@ -385,14 +384,16 @@ class StorageTableResolver implements ContextRewriter {
     }
   }
 
-  void addNonExistingParts(String name, List<String> nonExistingParts) {
+
+  void addNonExistingParts(String name, Set<String> nonExistingParts) {
     nonExistingPartitions.put(name, nonExistingParts);
   }
 
   private Set<FactPartition> getPartitions(CubeFactTable fact, TimeRange range,
-    HashMap<String, SkipStorageCause> skipStorageCauses, List<String> nonExistingParts) throws SemanticException {
+    HashMap<String, SkipStorageCause> skipStorageCauses,
+    Map<UpdatePeriod, RangesPartitionTimeline> nonExistingParts) throws SemanticException {
     try {
-      return getPartitions(fact, range, getValidUpdatePeriods(fact), this.populateNonExistingParts, skipStorageCauses,
+      return getPartitions(fact, range, getValidUpdatePeriods(fact), true, skipStorageCauses,
         nonExistingParts);
     } catch (Exception e) {
       throw new SemanticException(e);
@@ -400,7 +401,8 @@ class StorageTableResolver implements ContextRewriter {
   }
 
   private Set<FactPartition> getPartitions(CubeFactTable fact, TimeRange range, TreeSet<UpdatePeriod> updatePeriods,
-    boolean addNonExistingParts, Map<String, SkipStorageCause> skipStorageCauses, List<String> nonExistingParts)
+    boolean addNonExistingParts, Map<String, SkipStorageCause> skipStorageCauses,
+    Map<UpdatePeriod, RangesPartitionTimeline> nonExistingParts)
     throws Exception {
     Set<FactPartition> partitions = new TreeSet<FactPartition>();
     if (getPartitions(fact, range.getFromDate(), range.getToDate(), range.getPartitionColumn(), partitions,
@@ -413,7 +415,8 @@ class StorageTableResolver implements ContextRewriter {
 
   private boolean getPartitions(CubeFactTable fact, Date fromDate, Date toDate, String partCol,
     Set<FactPartition> partitions, TreeSet<UpdatePeriod> updatePeriods,
-    boolean addNonExistingParts, Map<String, SkipStorageCause> skipStorageCauses, List<String> nonExistingParts)
+    boolean addNonExistingParts, Map<String, SkipStorageCause> skipStorageCauses,
+    Map<UpdatePeriod, RangesPartitionTimeline> nonExistingParts)
     throws Exception {
     LOG.info("getPartitions for " + fact + " from fromDate:" + fromDate + " toDate:" + toDate);
     if (fromDate.equals(toDate) || fromDate.after(toDate)) {
@@ -526,10 +529,13 @@ class StorageTableResolver implements ContextRewriter {
         if (!getPartitions(fact, dt, nextDt, partCol, partitions, newset, false, skipStorageCauses,
           nonExistingParts)) {
 
-          // Add non existing partitions for all cases of whether we populate all non existing or not.
           LOG.info("Adding non existing partition" + part);
-          nonExistingParts.add(part.getPartString());
           if (addNonExistingParts) {
+            // Add non existing partitions for all cases of whether we populate all non existing or not.
+            if (!nonExistingParts.containsKey(part.getPeriod())) {
+              nonExistingParts.put(part.getPeriod(), new RangesPartitionTimeline(null, null, null));
+            }
+            nonExistingParts.get(part.getPeriod()).add(TimePartition.of(part.getPeriod(), dt));
             if (!failOnPartialData) {
               partitions.add(part);
               // add all storage tables as the answering tables

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/42ffb4e1/lens-cube/src/main/java/org/apache/lens/cube/parse/StorageUtil.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/StorageUtil.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/StorageUtil.java
index 24d9340..0704171 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/StorageUtil.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/StorageUtil.java
@@ -33,20 +33,20 @@ public final class StorageUtil {
 
   private static final Log LOG = LogFactory.getLog(StorageUtil.class.getName());
 
-  public static String getWherePartClause(String timeDimName, String tableName, List<String> parts) {
+  public static String getWherePartClause(String timeDimName, String tableName, Collection<String> parts) {
     if (parts.size() == 0) {
       return "";
     }
     StringBuilder partStr = new StringBuilder();
     String sep = "";
-    for (int i = 0; i < parts.size(); i++) {
+    for (String part : parts) {
       partStr.append(sep);
       partStr.append("(");
       partStr.append(tableName != null ? tableName : "%s");
       partStr.append(".");
       partStr.append(timeDimName);
       partStr.append(" = '");
-      partStr.append(parts.get(i));
+      partStr.append(part);
       partStr.append("'");
       partStr.append(")");
       sep = " OR ";

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/42ffb4e1/lens-cube/src/test/java/org/apache/lens/cube/metadata/timeline/TestPartitionTimelines.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/cube/metadata/timeline/TestPartitionTimelines.java b/lens-cube/src/test/java/org/apache/lens/cube/metadata/timeline/TestPartitionTimelines.java
index 0027e64..50b75e3 100644
--- a/lens-cube/src/test/java/org/apache/lens/cube/metadata/timeline/TestPartitionTimelines.java
+++ b/lens-cube/src/test/java/org/apache/lens/cube/metadata/timeline/TestPartitionTimelines.java
@@ -18,8 +18,7 @@
  */
 package org.apache.lens.cube.metadata.timeline;
 
-import java.util.Date;
-import java.util.Map;
+import java.util.*;
 
 import org.apache.lens.api.LensException;
 import org.apache.lens.cube.metadata.CubeMetastoreClient;
@@ -29,23 +28,87 @@ import org.apache.lens.cube.metadata.UpdatePeriod;
 import org.testng.Assert;
 import org.testng.annotations.Test;
 
+import com.beust.jcommander.internal.Lists;
+
 public class TestPartitionTimelines {
   CubeMetastoreClient client = null;
   private static final String TABLE_NAME = "storage_fact";
   private static final UpdatePeriod PERIOD = UpdatePeriod.HOURLY;
   private static final String PART_COL = "pt";
   private static final Date DATE = new Date();
+  private static final List<Class<? extends PartitionTimeline>> TIMELINE_IMPLEMENTATIONS = Arrays.asList(
+    StoreAllPartitionTimeline.class,
+    EndsAndHolesPartitionTimeline.class,
+    RangesPartitionTimeline.class
+  );
 
   @Test
   public void testPropertiesContractsForAllSubclasses() throws LensException {
-    testPropertiesContract(StoreAllPartitionTimeline.class);
-    testPropertiesContract(EndsAndHolesPartitionTimeline.class);
+    for (Class<? extends PartitionTimeline> clazz : TIMELINE_IMPLEMENTATIONS) {
+      testPropertiesContract(clazz);
+    }
+  }
+
+  @Test
+  public void testEquivalence() throws LensException {
+    for (int j = 0; j < 10; j++) {
+      Random randomGenerator = new Random();
+      List<PartitionTimeline> timelines = Lists.newArrayList();
+      for (Class<? extends PartitionTimeline> clazz : TIMELINE_IMPLEMENTATIONS) {
+        timelines.add(getInstance(clazz));
+      }
+      final List<TimePartition> addedPartitions = Lists.newArrayList();
+      for (int i = 0; i < 200; i++) {
+        int randomInt = randomGenerator.nextInt(100) - 50;
+        TimePartition part = TimePartition.of(PERIOD, timeAtHourDiff(randomInt));
+        addedPartitions.add(part);
+        for (PartitionTimeline timeline : timelines) {
+          timeline.add(part);
+        }
+      }
+      Iterator<TimePartition> sourceOfTruth = timelines.get(0).iterator();
+      List<Iterator<TimePartition>> otherIterators = Lists.newArrayList();
+      for (int i = 1; i < TIMELINE_IMPLEMENTATIONS.size() - 1; i++) {
+        otherIterators.add(timelines.get(i).iterator());
+      }
+      while (sourceOfTruth.hasNext()) {
+        TimePartition cur = sourceOfTruth.next();
+        for (Iterator<TimePartition> iterator : otherIterators) {
+          Assert.assertTrue(iterator.hasNext());
+          Assert.assertEquals(iterator.next(), cur);
+        }
+      }
+      for (Iterator<TimePartition> iterator : otherIterators) {
+        Assert.assertFalse(iterator.hasNext());
+      }
+      Collections.shuffle(addedPartitions);
+      Iterator<TimePartition> iter = addedPartitions.iterator();
+      while (iter.hasNext()) {
+        TimePartition part = iter.next();
+        iter.remove();
+        if (!addedPartitions.contains(part)) {
+          for (PartitionTimeline timeline : timelines) {
+            timeline.drop(part);
+          }
+        }
+      }
+      for (PartitionTimeline timeline : timelines) {
+        Assert.assertTrue(timeline.isEmpty());
+      }
+    }
+  }
+
+  private Date timeAtHourDiff(int d) {
+    Calendar cal = Calendar.getInstance();
+    cal.setTime(DATE);
+    cal.add(PERIOD.calendarField(), d);
+    return cal.getTime();
   }
 
   private <T extends PartitionTimeline> T getInstance(Class<T> clz) {
     try {
-      return clz.getConstructor(CubeMetastoreClient.class, String.class, UpdatePeriod.class, String.class)
-        .newInstance(client, TABLE_NAME, PERIOD, PART_COL);
+      return clz.getConstructor(String.class, UpdatePeriod.class, String.class)
+        .newInstance(TABLE_NAME, PERIOD, PART_COL);
     } catch (Exception e) {
       e.printStackTrace();
     }

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/42ffb4e1/lens-cube/src/test/java/org/apache/lens/cube/parse/TestCubeRewriter.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestCubeRewriter.java b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestCubeRewriter.java
index 4278229..f9a3762 100644
--- a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestCubeRewriter.java
+++ b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestCubeRewriter.java
@@ -109,7 +109,7 @@ public class TestCubeRewriter extends TestQueryRewrite {
     compareQueries(expected, hqlQuery);
 
     conf.setBoolean(CubeQueryConfUtil.LIGHTEST_FACT_FIRST, true);
-    conf.setBoolean(CubeQueryConfUtil.ADD_NON_EXISTING_PARTITIONS, true);
+
     SemanticException th = getSemanticExceptionInRewrite(
       "select SUM(msr2) from testCube" + " where " + TWO_DAYS_RANGE, conf);
     Assert.assertEquals(th.getCanonicalErrorMsg().getErrorCode(), ErrorMsg.NO_CANDIDATE_FACT_AVAILABLE.getErrorCode());
@@ -122,24 +122,6 @@ public class TestCubeRewriter extends TestQueryRewrite {
     Assert.assertEquals(pruneCauses.getDetails().get("testfact").size(), 1);
     Assert.assertEquals(pruneCauses.getDetails().get("testfact").iterator().next().getCause(),
       CandidateTablePruneCode.MISSING_PARTITIONS);
-
-    // Error should be no missing partitions with first missing partition populated for each update period
-    conf.setBoolean(CubeQueryConfUtil.ADD_NON_EXISTING_PARTITIONS, false);
-    th = getSemanticExceptionInRewrite(
-      "select SUM(msr2) from testCube" + " where " + TWO_DAYS_RANGE, conf);
-    Assert.assertEquals(th.getCanonicalErrorMsg().getErrorCode(), ErrorMsg.NO_CANDIDATE_FACT_AVAILABLE.getErrorCode());
-    pruneCauses = extractPruneCause(th);
-    Assert.assertEquals(
-      pruneCauses.getBrief().substring(0, CandidateTablePruneCode.MISSING_PARTITIONS.errorFormat.length() - 3),
-      CandidateTablePruneCode.MISSING_PARTITIONS.errorFormat.substring(0,
-        CandidateTablePruneCode.MISSING_PARTITIONS.errorFormat.length() - 3)
-    );
-    Assert.assertEquals(pruneCauses.getDetails().get("testfact").size(), 1);
-    Assert.assertEquals(pruneCauses.getDetails().get("testfact").iterator().next().getCause(),
-      CandidateTablePruneCode.MISSING_PARTITIONS);
-    Assert.assertEquals(pruneCauses.getDetails().get("testfactmonthly").size(), 1);
-    Assert.assertEquals(pruneCauses.getDetails().get("testfactmonthly").iterator().next().getCause(),
-      CandidateTablePruneCode.NO_FACT_UPDATE_PERIODS_FOR_GIVEN_RANGE);
   }
 
   @Test
@@ -858,12 +840,20 @@ public class TestCubeRewriter extends TestQueryRewrite {
       CandidateTablePruneCode.MISSING_PARTITIONS);
     Assert.assertEquals(pruneCauses.getDetails().get("testfactmonthly").iterator().next().getCause(),
       CandidateTablePruneCode.MISSING_PARTITIONS);
-    Assert.assertEquals(pruneCauses.getDetails().get("testfact2_raw,testfact2").iterator().next().getCause(),
+    Assert.assertEquals(pruneCauses.getDetails().get("testfact2").iterator().next().getCause(),
         CandidateTablePruneCode.MISSING_PARTITIONS);
+    Assert.assertEquals(pruneCauses.getDetails().get("testfact2_raw").iterator().next().getCause(),
+      CandidateTablePruneCode.MISSING_PARTITIONS);
     Assert.assertEquals(pruneCauses.getDetails().get("cheapfact").iterator().next().getCause(),
         CandidateTablePruneCode.NO_CANDIDATE_STORAGES);
-    Assert.assertEquals(pruneCauses.getDetails().get("summary1,summary2,summary3,summary4").iterator().next()
+    Assert.assertEquals(pruneCauses.getDetails().get("summary1,summary2,summary3").iterator().next().getCause(),
+      CandidateTablePruneCode.MISSING_PARTITIONS);
+    Assert.assertEquals(pruneCauses.getDetails().get("summary4").iterator().next()
       .getCause(), CandidateTablePruneCode.NO_CANDIDATE_STORAGES);
+    Assert.assertEquals(pruneCauses.getDetails().get("summary4").iterator().next()
+      .getStorageCauses().values().iterator().next().getCause(), SkipStorageCode.PART_COL_DOES_NOT_EXIST);
+    Assert.assertEquals(pruneCauses.getDetails().get("summary4").iterator().next()
+      .getStorageCauses().values().iterator().next().getNonExistantPartCols(), Arrays.asList("dt"));
   }
 
   @Test


[38/50] [abbrv] incubator-lens git commit: LENS-491: Create example metastore schema for sales

Posted by am...@apache.org.
LENS-491: Create example metastore schema for sales


Project: http://git-wip-us.apache.org/repos/asf/incubator-lens/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-lens/commit/f0798b14
Tree: http://git-wip-us.apache.org/repos/asf/incubator-lens/tree/f0798b14
Diff: http://git-wip-us.apache.org/repos/asf/incubator-lens/diff/f0798b14

Branch: refs/heads/current-release-line
Commit: f0798b14afce6b3634e1150a6d6ae7e0c276b5be
Parents: c20120c
Author: Amareshwari Sriramadasu <am...@apache.org>
Authored: Thu Apr 9 14:49:04 2015 +0530
Committer: Rajat Khandelwal <ra...@gmail.com>
Committed: Thu Apr 9 14:49:04 2015 +0530

----------------------------------------------------------------------
 .../lens/cli/commands/BaseLensCommand.java      |   3 +
 .../apache/lens/cube/metadata/JoinChain.java    |  16 ++-
 .../org/apache/lens/examples/DatabaseUtil.java  |   4 +-
 .../lens/examples/PopulateSampleMetastore.java  |   8 --
 .../apache/lens/examples/SampleMetastore.java   | 100 ++++++-------
 lens-examples/src/main/resources/city.xml       |  33 +++++
 .../src/main/resources/city_subset.xml          |  39 +++++
 lens-examples/src/main/resources/city_table.xml |  47 ++++++
 lens-examples/src/main/resources/customer.xml   |  55 +++++++
 .../src/main/resources/customer_table.xml       |  58 ++++++++
 .../src/main/resources/db-storage-schema.sql    |  12 ++
 lens-examples/src/main/resources/product.xml    |  36 +++++
 .../src/main/resources/product_table.xml        |  59 ++++++++
 .../src/main/resources/sales-aggr-fact1.xml     |  83 +++++++++++
 .../src/main/resources/sales-aggr-fact2.xml     |  74 ++++++++++
 lens-examples/src/main/resources/sales-cube.xml | 142 +++++++++++++++++++
 .../src/main/resources/sales-raw-fact.xml       |  65 +++++++++
 .../apache/lens/server/metastore/JAXBUtils.java |   4 +-
 18 files changed, 772 insertions(+), 66 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/f0798b14/lens-cli/src/main/java/org/apache/lens/cli/commands/BaseLensCommand.java
----------------------------------------------------------------------
diff --git a/lens-cli/src/main/java/org/apache/lens/cli/commands/BaseLensCommand.java b/lens-cli/src/main/java/org/apache/lens/cli/commands/BaseLensCommand.java
index cdfdbe8..dbb8b39 100644
--- a/lens-cli/src/main/java/org/apache/lens/cli/commands/BaseLensCommand.java
+++ b/lens-cli/src/main/java/org/apache/lens/cli/commands/BaseLensCommand.java
@@ -30,6 +30,7 @@ import org.codehaus.jackson.JsonGenerationException;
 import org.codehaus.jackson.JsonGenerator;
 import org.codehaus.jackson.impl.Indenter;
 import org.codehaus.jackson.map.ObjectMapper;
+import org.codehaus.jackson.map.annotate.JsonSerialize.Inclusion;
 import org.codehaus.jackson.util.DefaultPrettyPrinter;
 
 /**
@@ -74,6 +75,8 @@ public class BaseLensCommand {
   public BaseLensCommand() {
     getClient();
     mapper = new ObjectMapper();
+    mapper.setSerializationInclusion(Inclusion.NON_NULL);
+    mapper.setSerializationInclusion(Inclusion.NON_DEFAULT);
     pp = new DefaultPrettyPrinter();
     pp.indentObjectsWith(new Indenter() {
       @Override

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/f0798b14/lens-cube/src/main/java/org/apache/lens/cube/metadata/JoinChain.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/metadata/JoinChain.java b/lens-cube/src/main/java/org/apache/lens/cube/metadata/JoinChain.java
index 3c8cb3a..a3d15a7 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/metadata/JoinChain.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/metadata/JoinChain.java
@@ -61,8 +61,12 @@ public class JoinChain implements Named {
       props.put(MetastoreUtil.getCubeJoinChainFullChainKey(getName(), i),
         MetastoreUtil.getReferencesString(paths.get(i).getReferences()));
     }
-    props.put(MetastoreUtil.getCubeJoinChainDisplayKey(getName()), displayString);
-    props.put(MetastoreUtil.getCubeJoinChainDescriptionKey(getName()), description);
+    if (displayString != null) {
+      props.put(MetastoreUtil.getCubeJoinChainDisplayKey(getName()), displayString);
+    }
+    if (description != null) {
+      props.put(MetastoreUtil.getCubeJoinChainDescriptionKey(getName()), description);
+    }
   }
 
   public void addProperties(Dimension dimension) {
@@ -72,8 +76,12 @@ public class JoinChain implements Named {
       props.put(MetastoreUtil.getDimensionJoinChainFullChainKey(getName(), i),
         MetastoreUtil.getReferencesString(paths.get(i).getReferences()));
     }
-    props.put(MetastoreUtil.getDimensionJoinChainDisplayKey(getName()), displayString);
-    props.put(MetastoreUtil.getDimensionJoinChainDescriptionKey(getName()), description);
+    if (displayString != null) {
+      props.put(MetastoreUtil.getDimensionJoinChainDisplayKey(getName()), displayString);
+    }
+    if (description != null) {
+      props.put(MetastoreUtil.getDimensionJoinChainDescriptionKey(getName()), description);
+    }
   }
 
 

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/f0798b14/lens-examples/src/main/java/org/apache/lens/examples/DatabaseUtil.java
----------------------------------------------------------------------
diff --git a/lens-examples/src/main/java/org/apache/lens/examples/DatabaseUtil.java b/lens-examples/src/main/java/org/apache/lens/examples/DatabaseUtil.java
index 9bcbd9a..6191aa5 100644
--- a/lens-examples/src/main/java/org/apache/lens/examples/DatabaseUtil.java
+++ b/lens-examples/src/main/java/org/apache/lens/examples/DatabaseUtil.java
@@ -39,7 +39,7 @@ public final class DatabaseUtil {
    *
    * @throws Exception the exception
    */
-  public static void initalizeDatabaseStorage() throws Exception {
+  public static void initializeDatabaseStorage() throws Exception {
 
     try {
       Class.forName("org.hsqldb.jdbcDriver");
@@ -72,6 +72,6 @@ public final class DatabaseUtil {
    * @throws Exception the exception
    */
   public static void main(String[] args) throws Exception {
-    DatabaseUtil.initalizeDatabaseStorage();
+    DatabaseUtil.initializeDatabaseStorage();
   }
 }

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/f0798b14/lens-examples/src/main/java/org/apache/lens/examples/PopulateSampleMetastore.java
----------------------------------------------------------------------
diff --git a/lens-examples/src/main/java/org/apache/lens/examples/PopulateSampleMetastore.java b/lens-examples/src/main/java/org/apache/lens/examples/PopulateSampleMetastore.java
index 6604553..0b2f37c 100644
--- a/lens-examples/src/main/java/org/apache/lens/examples/PopulateSampleMetastore.java
+++ b/lens-examples/src/main/java/org/apache/lens/examples/PopulateSampleMetastore.java
@@ -104,14 +104,6 @@ public class PopulateSampleMetastore {
     } else {
       System.out.println("Added partition from:dim4-local-part.xml");
     }
-
-    try {
-      DatabaseUtil.initalizeDatabaseStorage();
-      System.out.println("Created DB storages for dim_table3 and dim_table4");
-    } catch (Exception e) {
-      e.printStackTrace();
-      System.out.println("Creating DB storage failed for dim_table3 and dim_table4");
-    }
   }
 
   private void createFactPartition(String fileName, String fact, String storage) throws JAXBException, IOException {

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/f0798b14/lens-examples/src/main/java/org/apache/lens/examples/SampleMetastore.java
----------------------------------------------------------------------
diff --git a/lens-examples/src/main/java/org/apache/lens/examples/SampleMetastore.java b/lens-examples/src/main/java/org/apache/lens/examples/SampleMetastore.java
index 09be3f2..df67ed7 100644
--- a/lens-examples/src/main/java/org/apache/lens/examples/SampleMetastore.java
+++ b/lens-examples/src/main/java/org/apache/lens/examples/SampleMetastore.java
@@ -63,38 +63,39 @@ public class SampleMetastore {
     LensClientSingletonWrapper.instance().getClient().closeConnection();
   }
 
-  public void createCube() throws JAXBException, IOException {
-    result = metaClient.createCube("sample-cube.xml");
+  private void createCube(String cubeSpec) {
+    result = metaClient.createCube(cubeSpec);
     if (result.getStatus().equals(APIResult.Status.FAILED)) {
-      System.out.println("Creating cube from:sample-cube.xml failed, reason:" + result.getMessage());
+      System.err.println("Creating cube from:" + cubeSpec + " failed, reason:" + result.getMessage());
       retCode = 1;
     }
   }
 
-  public void createDimensions() throws JAXBException, IOException {
-    result = metaClient.createDimension("sample-dimension.xml");
-    if (result.getStatus().equals(APIResult.Status.FAILED)) {
-      System.out.println("Creating dimension from:sample-dimension.xml failed, reason:" + result.getMessage());
-      retCode = 1;
-    }
-
-    result = metaClient.createDimension("sample-dimension2.xml");
-    if (result.getStatus().equals(APIResult.Status.FAILED)) {
-      System.out.println("Creating dimension from:sample-dimension2.xml failed, reason:" + result.getMessage());
-      retCode = 1;
-    }
+  public void createCubes() throws JAXBException, IOException {
+    createCube("sample-cube.xml");
+    createCube("sales-cube.xml");
+  }
 
-    result = metaClient.createDimension("sample-db-only-dimension.xml");
+  private void createDimension(String dimensionSpec) {
+    result = metaClient.createDimension(dimensionSpec);
     if (result.getStatus().equals(APIResult.Status.FAILED)) {
-      System.out.println("Creating dimension from:sample-db-only-dimension.xml failed, reason:" + result.getMessage());
+      System.err.println("Creating dimension from:" + dimensionSpec + " failed, reason:" + result.getMessage());
       retCode = 1;
     }
   }
+  public void createDimensions() throws JAXBException, IOException {
+    createDimension("sample-dimension.xml");
+    createDimension("sample-dimension2.xml");
+    createDimension("sample-db-only-dimension.xml");
+    createDimension("city.xml");
+    createDimension("customer.xml");
+    createDimension("product.xml");
+  }
 
   private void createStorage(String fileName) throws JAXBException, IOException {
     result = metaClient.createNewStorage(fileName);
     if (result.getStatus().equals(APIResult.Status.FAILED)) {
-      System.out.println("Creating storage from:" + fileName + " failed, reason:" + result.getMessage());
+      System.err.println("Creating storage from:" + fileName + " failed, reason:" + result.getMessage());
       retCode = 1;
     }
   }
@@ -107,53 +108,52 @@ public class SampleMetastore {
 
   public void createAll() throws JAXBException, IOException {
     createStorages();
-    createCube();
+    createCubes();
     createDimensions();
     createFacts();
     createDimensionTables();
-  }
-
-  private void createDimensionTables() throws JAXBException, IOException {
-    result = metaClient.createDimensionTable("dim_table.xml");
-    if (result.getStatus().equals(APIResult.Status.FAILED)) {
-      System.out.println("Creating dim table from: dim_table.xml failed, reason:" + result.getMessage());
-      retCode = 1;
-    }
-    result = metaClient.createDimensionTable("dim_table2.xml");
-    if (result.getStatus().equals(APIResult.Status.FAILED)) {
-      System.out.println("Creating dim table from: dim_table2.xml failed, reason:" + result.getMessage());
+    try {
+      DatabaseUtil.initializeDatabaseStorage();
+      System.out.println("Created DB storages");
+    } catch (Exception e) {
       retCode = 1;
+      e.printStackTrace();
+      System.err.println("Creating DB storage failed");
     }
+  }
 
-    result = metaClient.createDimensionTable("dim_table3.xml");
+  private void createDimTable(String dimTableSpec) {
+    result = metaClient.createDimensionTable(dimTableSpec);
     if (result.getStatus().equals(APIResult.Status.FAILED)) {
-      System.out.println("Creating dim table from: dim_table3.xmlfailed, reason:" + result.getMessage());
+      System.err.println("Creating dim table from: " + dimTableSpec + " failed, reason:" + result.getMessage());
       retCode = 1;
     }
+  }
+  private void createDimensionTables() throws JAXBException, IOException {
+    createDimTable("dim_table.xml");
+    createDimTable("dim_table2.xml");
+    createDimTable("dim_table3.xml");
+    createDimTable("dim_table4.xml");
+    createDimTable("city_table.xml");
+    createDimTable("city_subset.xml");
+    createDimTable("product_table.xml");
+    createDimTable("customer_table.xml");
+  }
 
-    result = metaClient.createDimensionTable("dim_table4.xml");
+  private void createFact(String factSpec) {
+    result = metaClient.createFactTable(factSpec);
     if (result.getStatus().equals(APIResult.Status.FAILED)) {
-      System.out.println("Creating dim table from: dim_table4.xml failed, reason:" + result.getMessage());
+      System.err.println("Creating fact table from: " + factSpec + " failed, reason:" + result.getMessage());
       retCode = 1;
     }
   }
-
   private void createFacts() throws JAXBException, IOException {
-    result = metaClient.createFactTable("fact1.xml");
-    if (result.getStatus().equals(APIResult.Status.FAILED)) {
-      System.out.println("Creating fact table from: fact1.xml failed, reason:" + result.getMessage());
-      retCode = 1;
-    }
-    result = metaClient.createFactTable("fact2.xml");
-    if (result.getStatus().equals(APIResult.Status.FAILED)) {
-      System.out.println("Creating fact table from: fact2.xml failed, reason:" + result.getMessage());
-      retCode = 1;
-    }
-    result = metaClient.createFactTable("rawfact.xml");
-    if (result.getStatus().equals(APIResult.Status.FAILED)) {
-      System.out.println("Creating fact table from: rawfact.xml failed, reason:" + result.getMessage());
-      retCode = 1;
-    }
+    createFact("fact1.xml");
+    createFact("fact2.xml");
+    createFact("rawfact.xml");
+    createFact("sales-raw-fact.xml");
+    createFact("sales-aggr-fact1.xml");
+    createFact("sales-aggr-fact2.xml");
   }
 
   public static void main(String[] args) throws Exception {

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/f0798b14/lens-examples/src/main/resources/city.xml
----------------------------------------------------------------------
diff --git a/lens-examples/src/main/resources/city.xml b/lens-examples/src/main/resources/city.xml
new file mode 100644
index 0000000..1a89a2a
--- /dev/null
+++ b/lens-examples/src/main/resources/city.xml
@@ -0,0 +1,33 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+
+  Licensed to the Apache Software Foundation (ASF) under one
+  or more contributor license agreements. See the NOTICE file
+  distributed with this work for additional information
+  regarding copyright ownership. The ASF licenses this file
+  to you under the Apache License, Version 2.0 (the
+  "License"); you may not use this file except in compliance
+  with the License. You may obtain a copy of the License at
+
+  http://www.apache.org/licenses/LICENSE-2.0
+
+  Unless required by applicable law or agreed to in writing,
+  software distributed under the License is distributed on an
+  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+  KIND, either express or implied. See the License for the
+  specific language governing permissions and limitations
+  under the License.
+
+-->
+<x_dimension name="city" xmlns="uri:lens:cube:0.1" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+  xsi:schemaLocation="uri:lens:cube:0.1 cube-0.1.xsd ">
+  <attributes>
+    <dim_attribute name="id" type="INT"/>
+    <dim_attribute name="name" type="STRING"/>
+    <dim_attribute name="POI" type="ARRAY&lt;STRING&gt;" description="Point of interests"/>
+    <dim_attribute name="population" type="BIGINT" />
+  </attributes>
+  <properties>
+    <property name="dimension.city.timed.dimension" value="dt"/>
+  </properties>
+</x_dimension>

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/f0798b14/lens-examples/src/main/resources/city_subset.xml
----------------------------------------------------------------------
diff --git a/lens-examples/src/main/resources/city_subset.xml b/lens-examples/src/main/resources/city_subset.xml
new file mode 100644
index 0000000..5059e5e
--- /dev/null
+++ b/lens-examples/src/main/resources/city_subset.xml
@@ -0,0 +1,39 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+
+  Licensed to the Apache Software Foundation (ASF) under one
+  or more contributor license agreements. See the NOTICE file
+  distributed with this work for additional information
+  regarding copyright ownership. The ASF licenses this file
+  to you under the Apache License, Version 2.0 (the
+  "License"); you may not use this file except in compliance
+  with the License. You may obtain a copy of the License at
+
+  http://www.apache.org/licenses/LICENSE-2.0
+
+  Unless required by applicable law or agreed to in writing,
+  software distributed under the License is distributed on an
+  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+  KIND, either express or implied. See the License for the
+  specific language governing permissions and limitations
+  under the License.
+
+-->
+<x_dimension_table dimension_name="city" table_name="city_subset" weight="0.0" xmlns="uri:lens:cube:0.1"
+  xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="uri:lens:cube:0.1 cube-0.1.xsd ">
+  <columns>
+    <column comment="ID" name="id" type="INT"/>
+    <column comment="name" name="name" type="STRING"/>
+  </columns>
+  <storage_tables>
+    <storage_table>
+      <storage_name>mydb</storage_name>
+      <table_desc external="true" field_delimiter="," table_location="/tmp/db-storage.db"
+        storage_handler_name="org.apache.lens.storage.db.DBStorageHandler">
+        <table_parameters>
+          <property name="lens.metastore.native.db.name" value="default"/>
+        </table_parameters>
+      </table_desc>
+    </storage_table>
+  </storage_tables>
+</x_dimension_table>

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/f0798b14/lens-examples/src/main/resources/city_table.xml
----------------------------------------------------------------------
diff --git a/lens-examples/src/main/resources/city_table.xml b/lens-examples/src/main/resources/city_table.xml
new file mode 100644
index 0000000..5ce18b6
--- /dev/null
+++ b/lens-examples/src/main/resources/city_table.xml
@@ -0,0 +1,47 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+
+  Licensed to the Apache Software Foundation (ASF) under one
+  or more contributor license agreements. See the NOTICE file
+  distributed with this work for additional information
+  regarding copyright ownership. The ASF licenses this file
+  to you under the Apache License, Version 2.0 (the
+  "License"); you may not use this file except in compliance
+  with the License. You may obtain a copy of the License at
+
+  http://www.apache.org/licenses/LICENSE-2.0
+
+  Unless required by applicable law or agreed to in writing,
+  software distributed under the License is distributed on an
+  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+  KIND, either express or implied. See the License for the
+  specific language governing permissions and limitations
+  under the License.
+
+-->
+<x_dimension_table dimension_name="city" table_name="city_table" weight="10.0" xmlns="uri:lens:cube:0.1"
+  xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="uri:lens:cube:0.1 cube-0.1.xsd ">
+  <columns>
+    <column comment="ID" name="id" type="INT"/>
+    <column comment="name" name="name" type="STRING"/>
+    <column comment="Point of interests" name="POI" type="ARRAY&lt;STRING&gt;"/>
+    <column comment="city population" name="population" type="BIGINT"/>
+  </columns>
+  <properties>
+    <property name="city.prop" value="d1"/>
+  </properties>
+  <storage_tables>
+    <storage_table>
+      <update_periods>
+        <update_period>HOURLY</update_period>
+      </update_periods>
+      <storage_name>local</storage_name>
+      <table_desc external="true" field_delimiter="," table_location="/tmp/examples/city">
+        <part_cols>
+          <column comment="Time column" name="dt" type="STRING"/>
+        </part_cols>
+        <time_part_cols>dt</time_part_cols>
+      </table_desc>
+    </storage_table>
+  </storage_tables>
+</x_dimension_table>

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/f0798b14/lens-examples/src/main/resources/customer.xml
----------------------------------------------------------------------
diff --git a/lens-examples/src/main/resources/customer.xml b/lens-examples/src/main/resources/customer.xml
new file mode 100644
index 0000000..e78dab2
--- /dev/null
+++ b/lens-examples/src/main/resources/customer.xml
@@ -0,0 +1,55 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+
+  Licensed to the Apache Software Foundation (ASF) under one
+  or more contributor license agreements. See the NOTICE file
+  distributed with this work for additional information
+  regarding copyright ownership. The ASF licenses this file
+  to you under the Apache License, Version 2.0 (the
+  "License"); you may not use this file except in compliance
+  with the License. You may obtain a copy of the License at
+
+  http://www.apache.org/licenses/LICENSE-2.0
+
+  Unless required by applicable law or agreed to in writing,
+  software distributed under the License is distributed on an
+  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+  KIND, either express or implied. See the License for the
+  specific language governing permissions and limitations
+  under the License.
+
+-->
+<x_dimension name="customer" xmlns="uri:lens:cube:0.1" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+  xsi:schemaLocation="uri:lens:cube:0.1 cube-0.1.xsd ">
+  <attributes>
+    <dim_attribute name="id" type="INT"/>
+    <dim_attribute name="name" type="STRING"/>
+    <dim_attribute name="gender" type="STRING" />
+    <dim_attribute name="age" type="INT" />
+    <dim_attribute name="city_id" type="INT" />
+    <dim_attribute name="customer_city_name" type="string" description="City name to which the customer belongs"
+      displayString=" Customer City">
+      <ref_spec>
+        <chain_ref_column chain_name="customer_city" ref_col="name" />
+      </ref_spec>
+    </dim_attribute>
+    <dim_attribute name="customer_credit_status" type="STRING" start_time='2015-03-01-00:00'/>
+  </attributes>
+  <join_chains>
+    <join_chain name="customer_city">
+      <paths>
+        <path>
+          <edges>
+            <edge>
+              <from table="customer" column="city_id" />
+              <to table="city" column="id" />
+            </edge>
+          </edges>
+        </path>
+      </paths>
+    </join_chain>
+  </join_chains>
+  <properties>
+    <property name="dimension.customer.timed.dimension" value="dt"/>
+  </properties>
+</x_dimension>

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/f0798b14/lens-examples/src/main/resources/customer_table.xml
----------------------------------------------------------------------
diff --git a/lens-examples/src/main/resources/customer_table.xml b/lens-examples/src/main/resources/customer_table.xml
new file mode 100644
index 0000000..18c387f
--- /dev/null
+++ b/lens-examples/src/main/resources/customer_table.xml
@@ -0,0 +1,58 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+
+  Licensed to the Apache Software Foundation (ASF) under one
+  or more contributor license agreements. See the NOTICE file
+  distributed with this work for additional information
+  regarding copyright ownership. The ASF licenses this file
+  to you under the Apache License, Version 2.0 (the
+  "License"); you may not use this file except in compliance
+  with the License. You may obtain a copy of the License at
+
+  http://www.apache.org/licenses/LICENSE-2.0
+
+  Unless required by applicable law or agreed to in writing,
+  software distributed under the License is distributed on an
+  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+  KIND, either express or implied. See the License for the
+  specific language governing permissions and limitations
+  under the License.
+
+-->
+<x_dimension_table dimension_name="customer" table_name="customer_table" weight="0.0" xmlns="uri:lens:cube:0.1"
+  xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="uri:lens:cube:0.1 cube-0.1.xsd ">
+  <columns>
+    <column comment="ID" name="id" type="INT"/>
+    <column comment="name" name="name" type="STRING"/>
+    <column comment="" name="gender" type="STRING"/>
+    <column comment="" name="age" type="INT"/>
+    <column comment="" name="city_id" type="INT"/>
+    <column comment="" name="customer_credit_status" type="STRING"/>
+  </columns>
+  <properties>
+    <property name="dim4.prop" value="d1"/>
+  </properties>
+  <storage_tables>
+    <storage_table>
+      <update_periods>
+        <update_period>HOURLY</update_period>
+      </update_periods>
+      <storage_name>local</storage_name>
+      <table_desc external="true" field_delimiter="," table_location="/tmp/examples/customer">
+        <part_cols>
+          <column comment="Time column" name="dt" type="STRING"/>
+        </part_cols>
+        <time_part_cols>dt</time_part_cols>
+      </table_desc>
+    </storage_table>
+    <storage_table>
+      <storage_name>mydb</storage_name>
+      <table_desc external="true" field_delimiter="," table_location="/tmp/db-storage.db"
+        storage_handler_name="org.apache.lens.storage.db.DBStorageHandler">
+        <table_parameters>
+          <property name="lens.metastore.native.db.name" value="default"/>
+        </table_parameters>
+      </table_desc>
+    </storage_table>
+  </storage_tables>
+</x_dimension_table>

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/f0798b14/lens-examples/src/main/resources/db-storage-schema.sql
----------------------------------------------------------------------
diff --git a/lens-examples/src/main/resources/db-storage-schema.sql b/lens-examples/src/main/resources/db-storage-schema.sql
index 160590f..653d2fd 100644
--- a/lens-examples/src/main/resources/db-storage-schema.sql
+++ b/lens-examples/src/main/resources/db-storage-schema.sql
@@ -34,5 +34,17 @@ insert into mydb_dim_table4(id, name, detail, d2id) values (2,'second','this is
 
 insert into mydb_dim_table4(id, name, detail, d2id) values (3,'third','this is three',12)
 
+DROP TABLE IF EXISTS mydb_sales_aggr_fact2
+CREATE TABLE mydb_sales_aggr_fact2 (order_time timestamp, delivery_time timestamp, product_id integer, promotion_id integer, customer_city_id integer, production_city_id integer, delivery_city_id integer, unit_sales double, store_sales double, store_cost double)
 
+DROP TABLE IF EXISTS mydb_sales_aggr_fact1
+CREATE TABLE mydb_sales_aggr_fact1 (order_time timestamp, delivery_time timestamp, customer_id integer, product_id integer, promotion_id integer, customer_city_id integer, production_city_id integer, delivery_city_id integer, unit_sales double, store_sales double, store_cost double, average_line_item_price float, average_line_item_discount float, max_line_item_price float, max_line_item_discount float)
 
+DROP TABLE IF EXISTS mydb_product_table
+CREATE TABLE mydb_product_table (id integer, SKU_number integer, description varchar(255), color varchar(50), category varchar(255), weight float, manufacturer varchar(255))
+
+DROP TABLE IF EXISTS mydb_customer_table
+CREATE TABLE mydb_customer_table (id integer, name varchar(255), description varchar(255), gender varchar(50), age integer, city_id integer, customer_credit_status varchar(255))
+
+DROP TABLE IF EXISTS mydb_city_subset
+CREATE TABLE mydb_city_subset (id integer, name varchar(255))

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/f0798b14/lens-examples/src/main/resources/product.xml
----------------------------------------------------------------------
diff --git a/lens-examples/src/main/resources/product.xml b/lens-examples/src/main/resources/product.xml
new file mode 100644
index 0000000..8093561
--- /dev/null
+++ b/lens-examples/src/main/resources/product.xml
@@ -0,0 +1,36 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+
+  Licensed to the Apache Software Foundation (ASF) under one
+  or more contributor license agreements. See the NOTICE file
+  distributed with this work for additional information
+  regarding copyright ownership. The ASF licenses this file
+  to you under the Apache License, Version 2.0 (the
+  "License"); you may not use this file except in compliance
+  with the License. You may obtain a copy of the License at
+
+  http://www.apache.org/licenses/LICENSE-2.0
+
+  Unless required by applicable law or agreed to in writing,
+  software distributed under the License is distributed on an
+  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+  KIND, either express or implied. See the License for the
+  specific language governing permissions and limitations
+  under the License.
+
+-->
+<x_dimension name="product" xmlns="uri:lens:cube:0.1" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+  xsi:schemaLocation="uri:lens:cube:0.1 cube-0.1.xsd ">
+  <attributes>
+    <dim_attribute name="id" type="INT"/>
+    <dim_attribute name="SKU_number" type="INT" />
+    <dim_attribute name="description" type="STRING" />
+    <dim_attribute name="color" type="STRING" />
+    <dim_attribute name="weight" type="FLOAT" />
+    <dim_attribute name="category" type="STRING" />
+    <dim_attribute name="manufacturer" type="STRING" />
+  </attributes>
+  <properties>
+    <property name="dimension.product.timed.dimension" value="dt"/>
+  </properties>
+</x_dimension>

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/f0798b14/lens-examples/src/main/resources/product_table.xml
----------------------------------------------------------------------
diff --git a/lens-examples/src/main/resources/product_table.xml b/lens-examples/src/main/resources/product_table.xml
new file mode 100644
index 0000000..4babca6
--- /dev/null
+++ b/lens-examples/src/main/resources/product_table.xml
@@ -0,0 +1,59 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+
+  Licensed to the Apache Software Foundation (ASF) under one
+  or more contributor license agreements. See the NOTICE file
+  distributed with this work for additional information
+  regarding copyright ownership. The ASF licenses this file
+  to you under the Apache License, Version 2.0 (the
+  "License"); you may not use this file except in compliance
+  with the License. You may obtain a copy of the License at
+
+  http://www.apache.org/licenses/LICENSE-2.0
+
+  Unless required by applicable law or agreed to in writing,
+  software distributed under the License is distributed on an
+  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+  KIND, either express or implied. See the License for the
+  specific language governing permissions and limitations
+  under the License.
+
+-->
+<x_dimension_table dimension_name="product" table_name="product_table" weight="0.0" xmlns="uri:lens:cube:0.1"
+  xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="uri:lens:cube:0.1 cube-0.1.xsd ">
+  <columns>
+    <column comment="ID" name="id" type="INT"/>
+    <column comment="SKU_number" name="SKU_number" type="INT"/>
+    <column comment="" name="description" type="STRING"/>
+    <column comment="" name="color" type="STRING"/>
+    <column comment="" name="category" type="STRING"/>
+    <column comment="" name="weight" type="FLOAT"/>
+    <column comment="" name="manufacturer" type="STRING"/>
+  </columns>
+  <properties>
+    <property name="dim4.prop" value="d1"/>
+  </properties>
+  <storage_tables>
+    <storage_table>
+      <update_periods>
+        <update_period>HOURLY</update_period>
+      </update_periods>
+      <storage_name>local</storage_name>
+      <table_desc external="true" field_delimiter="," table_location="/tmp/examples/product">
+        <part_cols>
+          <column comment="Time column" name="dt" type="STRING"/>
+        </part_cols>
+        <time_part_cols>dt</time_part_cols>
+      </table_desc>
+    </storage_table>
+    <storage_table>
+      <storage_name>mydb</storage_name>
+      <table_desc external="true" field_delimiter="," table_location="/tmp/db-storage.db"
+        storage_handler_name="org.apache.lens.storage.db.DBStorageHandler">
+        <table_parameters>
+          <property name="lens.metastore.native.db.name" value="default"/>
+        </table_parameters>
+      </table_desc>
+    </storage_table>
+  </storage_tables>
+</x_dimension_table>

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/f0798b14/lens-examples/src/main/resources/sales-aggr-fact1.xml
----------------------------------------------------------------------
diff --git a/lens-examples/src/main/resources/sales-aggr-fact1.xml b/lens-examples/src/main/resources/sales-aggr-fact1.xml
new file mode 100644
index 0000000..8d50554
--- /dev/null
+++ b/lens-examples/src/main/resources/sales-aggr-fact1.xml
@@ -0,0 +1,83 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+
+  Licensed to the Apache Software Foundation (ASF) under one
+  or more contributor license agreements. See the NOTICE file
+  distributed with this work for additional information
+  regarding copyright ownership. The ASF licenses this file
+  to you under the Apache License, Version 2.0 (the
+  "License"); you may not use this file except in compliance
+  with the License. You may obtain a copy of the License at
+
+  http://www.apache.org/licenses/LICENSE-2.0
+
+  Unless required by applicable law or agreed to in writing,
+  software distributed under the License is distributed on an
+  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+  KIND, either express or implied. See the License for the
+  specific language governing permissions and limitations
+  under the License.
+
+-->
+<x_fact_table cube_name="sales" name="sales_aggr_fact1" weight="300.0" xmlns="uri:lens:cube:0.1"
+  xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="uri:lens:cube:0.1 cube-0.1.xsd ">
+  <columns>
+    <column comment="" name="order_time" type="TIMESTAMP"/>
+    <column comment="" name="delivery_time" type="TIMESTAMP"/>
+    <column comment="" name="customer_id" type="INT"/>
+    <column comment="" name="product_id" type="INT"/>
+    <column comment="" name="promotion_id" type="INT"/>
+    <column comment="" name="customer_city_id" type="INT"/>
+    <column comment="" name="production_city_id" type="INT"/>
+    <column comment="" name="delivery_city_id" type="INT"/>
+    <column comment="" name="unit_sales" type="BIGINT"/>
+    <column comment="" name="store_sales" type="DOUBLE"/>
+    <column comment="" name="store_cost" type="DOUBLE"/>
+    <column comment="" name="average_line_item_price" type="FLOAT"/>
+    <column comment="" name="average_line_item_discount" type="FLOAT"/>
+    <column comment="" name="max_line_item_price" type="FLOAT"/>
+    <column comment="" name="max_line_item_discount" type="FLOAT"/>
+  </columns>
+  <properties>
+    <property name="cube.fact.is.aggregated" value="true"/>
+  </properties>
+  <storage_tables>
+    <storage_table>
+      <update_periods>
+        <update_period>HOURLY</update_period>
+        <update_period>DAILY</update_period>
+      </update_periods>
+      <storage_name>local</storage_name>
+      <table_desc external="true" field_delimiter="," table_location="/tmp/examples/aggrfact1">
+        <part_cols>
+          <column comment="Process time partition" name="pt" type="STRING"/>
+          <column comment="Order time partition" name="ot" type="STRING"/>
+          <column comment="Delivery time partition" name="dt" type="STRING"/>
+        </part_cols>
+        <time_part_cols>pt</time_part_cols>
+        <time_part_cols>ot</time_part_cols>
+        <time_part_cols>dt</time_part_cols>
+      </table_desc>
+    </storage_table>
+    <storage_table>
+      <update_periods>
+        <update_period>DAILY</update_period>
+      </update_periods>
+      <storage_name>mydb</storage_name>
+      <table_desc external="true" field_delimiter="," table_location="/tmp/db-storage.db"
+        storage_handler_name="org.apache.lens.storage.db.DBStorageHandler">
+        <table_parameters>
+          <property name="lens.metastore.native.db.name" value="default"/>
+        </table_parameters>
+        <part_cols>
+          <column comment="Process time partition" name="pt" type="STRING"/>
+          <column comment="Order time partition" name="ot" type="STRING"/>
+          <column comment="Delivery time partition" name="dt" type="STRING"/>
+        </part_cols>
+        <time_part_cols>pt</time_part_cols>
+        <time_part_cols>ot</time_part_cols>
+        <time_part_cols>dt</time_part_cols>
+      </table_desc>
+    </storage_table>
+  </storage_tables>
+</x_fact_table>

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/f0798b14/lens-examples/src/main/resources/sales-aggr-fact2.xml
----------------------------------------------------------------------
diff --git a/lens-examples/src/main/resources/sales-aggr-fact2.xml b/lens-examples/src/main/resources/sales-aggr-fact2.xml
new file mode 100644
index 0000000..b535a87
--- /dev/null
+++ b/lens-examples/src/main/resources/sales-aggr-fact2.xml
@@ -0,0 +1,74 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+
+  Licensed to the Apache Software Foundation (ASF) under one
+  or more contributor license agreements. See the NOTICE file
+  distributed with this work for additional information
+  regarding copyright ownership. The ASF licenses this file
+  to you under the Apache License, Version 2.0 (the
+  "License"); you may not use this file except in compliance
+  with the License. You may obtain a copy of the License at
+
+  http://www.apache.org/licenses/LICENSE-2.0
+
+  Unless required by applicable law or agreed to in writing,
+  software distributed under the License is distributed on an
+  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+  KIND, either express or implied. See the License for the
+  specific language governing permissions and limitations
+  under the License.
+
+-->
+<x_fact_table cube_name="sales" name="sales_aggr_fact2" weight="100.0" xmlns="uri:lens:cube:0.1"
+  xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="uri:lens:cube:0.1 cube-0.1.xsd ">
+  <columns>
+    <column comment="" name="order_time" type="TIMESTAMP"/>
+    <column comment="" name="delivery_time" type="TIMESTAMP"/>
+    <column comment="" name="product_id" type="INT"/>
+    <column comment="" name="promotion_id" type="INT"/>
+    <column comment="" name="customer_city_id" type="INT"/>
+    <column comment="" name="production_city_id" type="INT"/>
+    <column comment="" name="delivery_city_id" type="INT"/>
+    <column comment="" name="unit_sales" type="BIGINT"/>
+    <column comment="" name="store_sales" type="DOUBLE"/>
+    <column comment="" name="store_cost" type="DOUBLE"/>
+  </columns>
+  <properties>
+    <property name="cube.fact.is.aggregated" value="true"/>
+  </properties>
+  <storage_tables>
+    <storage_table>
+      <update_periods>
+        <update_period>HOURLY</update_period>
+        <update_period>DAILY</update_period>
+      </update_periods>
+      <storage_name>local</storage_name>
+      <table_desc external="true" field_delimiter="," table_location="/tmp/examples/aggrfact2">
+        <part_cols>
+          <column comment="Delivery time partition" name="dt" type="STRING"/>
+        </part_cols>
+        <time_part_cols>dt</time_part_cols>
+      </table_desc>
+    </storage_table>
+    <storage_table>
+      <update_periods>
+        <update_period>DAILY</update_period>
+      </update_periods>
+      <storage_name>mydb</storage_name>
+      <table_desc external="true" field_delimiter="," table_location="/tmp/db-storage.db"
+        storage_handler_name="org.apache.lens.storage.db.DBStorageHandler">
+        <table_parameters>
+          <property name="lens.metastore.native.db.name" value="default"/>
+        </table_parameters>
+        <part_cols>
+          <column comment="Process time partition" name="pt" type="STRING"/>
+          <column comment="Order time partition" name="ot" type="STRING"/>
+          <column comment="Delivery time partition" name="dt" type="STRING"/>
+        </part_cols>
+        <time_part_cols>pt</time_part_cols>
+        <time_part_cols>ot</time_part_cols>
+        <time_part_cols>dt</time_part_cols>
+      </table_desc>
+    </storage_table>
+  </storage_tables>
+</x_fact_table>

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/f0798b14/lens-examples/src/main/resources/sales-cube.xml
----------------------------------------------------------------------
diff --git a/lens-examples/src/main/resources/sales-cube.xml b/lens-examples/src/main/resources/sales-cube.xml
new file mode 100644
index 0000000..d4768cc
--- /dev/null
+++ b/lens-examples/src/main/resources/sales-cube.xml
@@ -0,0 +1,142 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+
+  Licensed to the Apache Software Foundation (ASF) under one
+  or more contributor license agreements. See the NOTICE file
+  distributed with this work for additional information
+  regarding copyright ownership. The ASF licenses this file
+  to you under the Apache License, Version 2.0 (the
+  "License"); you may not use this file except in compliance
+  with the License. You may obtain a copy of the License at
+
+  http://www.apache.org/licenses/LICENSE-2.0
+
+  Unless required by applicable law or agreed to in writing,
+  software distributed under the License is distributed on an
+  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+  KIND, either express or implied. See the License for the
+  specific language governing permissions and limitations
+  under the License.
+
+-->
+<x_base_cube name="sales" xmlns="uri:lens:cube:0.1" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+  xsi:schemaLocation="uri:lens:cube:0.1 cube-0.1.xsd ">
+  <measures>
+    <measure name="unit_sales" type="BIGINT" default_aggr="SUM" display_string="Unit Sales" format_string="#,###"/>
+    <measure name="store_sales" type="DOUBLE" default_aggr="SUM" display_string="Store Sales" format_string="#,###.##"/>
+    <measure name="store_cost" type="DOUBLE" default_aggr="SUM" display_string="Store Cost" format_string="#,###.00"
+     start_time='2015-03-01-00:00' />
+    <measure name="line_item_quantity" type="INT" default_aggr="SUM" display_string="Line item quantity"/>
+    <measure name="line_item_product_price" type="FLOAT" default_aggr="SUM" display_string="Line item product price"/>
+    <measure name="line_item_discount_amount" type="FLOAT" default_aggr="SUM" display_string="Line item discount"/>
+    <measure name="line_item_tax" type="FLOAT" default_aggr="SUM" display_string="Line item tax"/>
+    <measure name="average_line_item_price" type="FLOAT" default_aggr="AVG" display_string="Average Line item price"/>
+    <measure name="average_line_item_discount" type="FLOAT" default_aggr="AVG"
+     display_string="Average Line item discount"/>
+    <measure name="max_line_item_price" type="FLOAT" default_aggr="MAX" display_string="Maximum Line item price"/>
+    <measure name="max_line_item_discount" type="FLOAT" default_aggr="MAX"
+     display_string="Maximum Line item discount"/>
+  </measures>
+  <expressions>
+    <expression name="profit" type="DOUBLE" expr="store_sales - store_cost" display_string="Profit"/>
+    <expression name="promotion_sales" type="DOUBLE" expr="sum(case when promotion_id = 0 then 0 else store_sales end)"
+     display_string="Promotion sales"/>
+  </expressions>
+  <dim_attributes>
+    <dim_attribute name="customer_id" type="INT" />
+    <dim_attribute name="product_id" type="INT" />
+    <dim_attribute name="promotion_id" type="INT" />
+    <dim_attribute name="order_id" type="INT" />
+    <dim_attribute name="order_line_number" type="INT" />
+    <dim_attribute name="order_time" type="TIMESTAMP" />
+    <dim_attribute name="delivery_time" type="TIMESTAMP" />
+    <dim_attribute name="customer_city_id" type="INT" start_time='2015-03-01-00:00' />
+    <dim_attribute name="production_city_id" type="INT" />
+    <dim_attribute name="delivery_city_id" type="INT" />
+    <dim_attribute name="customer_city_name" type="string" description="City name to which the customer belongs"
+      displayString=" Customer City">
+      <ref_spec>
+        <chain_ref_column chain_name="customer_city" ref_col="name" />
+      </ref_spec>
+    </dim_attribute>
+    <dim_attribute name="production_city_name" type="STRING" description="City name in which the product was produced"
+      displayString=" Production City">
+      <ref_spec>
+        <chain_ref_column chain_name="production_city" ref_col="name" />
+      </ref_spec>
+    </dim_attribute>
+    <dim_attribute name="delivery_city_name" type="STRING" description="City name to which the product was delivered"
+      displayString=" Delivery City">
+      <ref_spec>
+        <chain_ref_column chain_name="delivery_city" ref_col="name" />
+      </ref_spec>
+    </dim_attribute>
+  </dim_attributes>
+  <join_chains>
+    <join_chain name="customer_details">
+      <paths>
+        <path>
+          <edges>
+            <edge>
+              <from table="sales" column="customer_id" />
+              <to table="customer" column="id" />
+            </edge>
+          </edges>
+        </path>
+      </paths>
+    </join_chain>
+    <join_chain name="customer_city">
+      <paths>
+        <path>
+          <edges>
+            <edge>
+              <from table="sales" column="customer_city_id" />
+              <to table="city" column="id" />
+            </edge>
+          </edges>
+        </path>
+        <path>
+          <edges>
+            <edge>
+              <from table="sales" column="customer_id" />
+              <to table="customer" column="id" />
+            </edge>
+            <edge>
+              <from table="customer" column="city_id" />
+              <to table="city" column="id" />
+            </edge>
+          </edges>
+        </path>
+      </paths>
+    </join_chain>
+    <join_chain name="delivery_city">
+      <paths>
+        <path>
+          <edges>
+            <edge>
+              <from table="sales" column="delivery_city_id" />
+              <to table="city" column="id" />
+            </edge>
+          </edges>
+        </path>
+      </paths>
+    </join_chain>
+    <join_chain name="production_city">
+      <paths>
+        <path>
+          <edges>
+            <edge>
+              <from table="sales" column="production_city_id" />
+              <to table="city" column="id" />
+            </edge>
+          </edges>
+        </path>
+      </paths>
+    </join_chain>
+  </join_chains>
+  <properties>
+    <property name="cube.sales.timed.dimensions.list" value="order_time,delivery_time" />
+    <property name="cube.timedim.partition.order_time" value="ot" />
+    <property name="cube.timedim.partition.delivery_time" value="dt" />
+  </properties>
+</x_base_cube>

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/f0798b14/lens-examples/src/main/resources/sales-raw-fact.xml
----------------------------------------------------------------------
diff --git a/lens-examples/src/main/resources/sales-raw-fact.xml b/lens-examples/src/main/resources/sales-raw-fact.xml
new file mode 100644
index 0000000..5b4fa18
--- /dev/null
+++ b/lens-examples/src/main/resources/sales-raw-fact.xml
@@ -0,0 +1,65 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+
+  Licensed to the Apache Software Foundation (ASF) under one
+  or more contributor license agreements. See the NOTICE file
+  distributed with this work for additional information
+  regarding copyright ownership. The ASF licenses this file
+  to you under the Apache License, Version 2.0 (the
+  "License"); you may not use this file except in compliance
+  with the License. You may obtain a copy of the License at
+
+  http://www.apache.org/licenses/LICENSE-2.0
+
+  Unless required by applicable law or agreed to in writing,
+  software distributed under the License is distributed on an
+  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+  KIND, either express or implied. See the License for the
+  specific language governing permissions and limitations
+  under the License.
+
+-->
+<x_fact_table cube_name="sales" name="sales_raw_fact" weight="500.0" xmlns="uri:lens:cube:0.1"
+  xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="uri:lens:cube:0.1 cube-0.1.xsd ">
+  <columns>
+    <column comment="" name="order_time" type="TIMESTAMP"/>
+    <column comment="" name="delivery_time" type="TIMESTAMP"/>
+    <column comment="" name="customer_id" type="INT"/>
+    <column comment="" name="product_id" type="INT"/>
+    <column comment="" name="order_id" type="INT"/>
+    <column comment="" name="promotion_id" type="INT"/>
+    <column comment="" name="order_line_number" type="INT"/>
+    <column comment="" name="customer_city_id" type="INT"/>
+    <column comment="" name="production_city_id" type="INT"/>
+    <column comment="" name="delivery_city_id" type="INT"/>
+    <column comment="" name="unit_sales" type="BIGINT"/>
+    <column comment="" name="store_sales" type="DOUBLE"/>
+    <column comment="" name="store_cost" type="DOUBLE"/>
+    <column comment="" name="line_item_quantity" type="INT"/>
+    <column comment="" name="line_item_product_price" type="INT"/>
+    <column comment="" name="line_item_discount_amount" type="INT"/>
+    <column comment="" name="line_item_tax" type="INT"/>
+  </columns>
+  <properties>
+    <property name="cube.fact.is.aggregated" value="false"/>
+  </properties>
+  <storage_tables>
+    <storage_table>
+      <update_periods>
+        <update_period>HOURLY</update_period>
+        <update_period>DAILY</update_period>
+      </update_periods>
+      <storage_name>local</storage_name>
+      <table_desc external="true" field_delimiter="," table_location="/tmp/examples/rawfact">
+        <part_cols>
+          <column comment="Process time partition" name="pt" type="STRING"/>
+          <column comment="Order time partition" name="ot" type="STRING"/>
+          <column comment="Delivery time partition" name="dt" type="STRING"/>
+        </part_cols>
+        <time_part_cols>pt</time_part_cols>
+        <time_part_cols>ot</time_part_cols>
+        <time_part_cols>dt</time_part_cols>
+      </table_desc>
+    </storage_table>
+  </storage_tables>
+</x_fact_table>

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/f0798b14/lens-server/src/main/java/org/apache/lens/server/metastore/JAXBUtils.java
----------------------------------------------------------------------
diff --git a/lens-server/src/main/java/org/apache/lens/server/metastore/JAXBUtils.java b/lens-server/src/main/java/org/apache/lens/server/metastore/JAXBUtils.java
index 138797e..b76ddc0 100644
--- a/lens-server/src/main/java/org/apache/lens/server/metastore/JAXBUtils.java
+++ b/lens-server/src/main/java/org/apache/lens/server/metastore/JAXBUtils.java
@@ -456,11 +456,11 @@ public final class JAXBUtils {
   }
 
   public static List<XColumn> columnsFromFieldSchemaList(List<FieldSchema> fslist) {
+    List<XColumn> cols = new ArrayList<XColumn>();
     if (fslist == null || fslist.isEmpty()) {
-      return null;
+      return cols;
     }
 
-    List<XColumn> cols = new ArrayList<XColumn>();
     for (FieldSchema fs : fslist) {
       cols.add(columnFromFieldSchema(fs));
     }


[40/50] [abbrv] incubator-lens git commit: LENS-492: Fix all priority High warnings

Posted by am...@apache.org.
LENS-492: Fix all priority High warnings


Project: http://git-wip-us.apache.org/repos/asf/incubator-lens/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-lens/commit/8f716a7d
Tree: http://git-wip-us.apache.org/repos/asf/incubator-lens/tree/8f716a7d
Diff: http://git-wip-us.apache.org/repos/asf/incubator-lens/diff/8f716a7d

Branch: refs/heads/current-release-line
Commit: 8f716a7db9fd080fa92a4f6c5cccdd2e95e8582c
Parents: 127c081
Author: Amareshwari Sriramadasu <am...@apache.org>
Authored: Fri Apr 10 19:55:37 2015 +0530
Committer: Rajat Khandelwal <ra...@gmail.com>
Committed: Fri Apr 10 19:55:37 2015 +0530

----------------------------------------------------------------------
 .../src/main/resources/findbugs-exclude.xml     |  8 ++++++++
 .../lens/client/jdbc/LensJdbcResultSet.java     |  4 ++--
 .../lens/cube/metadata/FactPartition.java       |  3 ++-
 .../lens/cube/parse/CubeQueryContext.java       | 15 +++------------
 .../apache/lens/cube/parse/JoinResolver.java    | 20 +++++++++++---------
 .../lens/cube/parse/TestJoinResolver.java       | 18 ++++++------------
 .../lens/cube/parse/TestQueryRewrite.java       |  1 +
 .../org/apache/lens/examples/DatabaseUtil.java  |  2 +-
 .../org/apache/lens/examples/SampleQueries.java |  2 +-
 .../java/org/apache/lens/rdd/LensRDDClient.java |  2 +-
 .../org/apache/lens/lib/query/CSVSerde.java     |  2 +-
 .../lens/lib/query/FilePersistentFormatter.java |  3 ++-
 .../org/apache/lens/server/LensService.java     | 14 +++++++-------
 .../server/query/QueryExecutionServiceImpl.java |  2 +-
 .../lens/server/session/HiveSessionService.java | 16 ++++++++--------
 .../user/PropertyBasedUserConfigLoader.java     |  2 +-
 pom.xml                                         |  4 ++--
 17 files changed, 58 insertions(+), 60 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/8f716a7d/checkstyle/src/main/resources/findbugs-exclude.xml
----------------------------------------------------------------------
diff --git a/checkstyle/src/main/resources/findbugs-exclude.xml b/checkstyle/src/main/resources/findbugs-exclude.xml
index f9c9d52..aade28d 100644
--- a/checkstyle/src/main/resources/findbugs-exclude.xml
+++ b/checkstyle/src/main/resources/findbugs-exclude.xml
@@ -14,4 +14,12 @@
    limitations under the License.
  -->
 <FindBugsFilter>
+  <!-- Enable only high priority warnings -->
+  <Match>
+    <Priority value="2"/>
+  </Match>
+
+  <Match>
+    <Priority value="3"/>
+  </Match>
 </FindBugsFilter>

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/8f716a7d/lens-client/src/main/java/org/apache/lens/client/jdbc/LensJdbcResultSet.java
----------------------------------------------------------------------
diff --git a/lens-client/src/main/java/org/apache/lens/client/jdbc/LensJdbcResultSet.java b/lens-client/src/main/java/org/apache/lens/client/jdbc/LensJdbcResultSet.java
index 0e2637c..ee5ed4d 100644
--- a/lens-client/src/main/java/org/apache/lens/client/jdbc/LensJdbcResultSet.java
+++ b/lens-client/src/main/java/org/apache/lens/client/jdbc/LensJdbcResultSet.java
@@ -245,7 +245,7 @@ public class LensJdbcResultSet implements ResultSet {
       } else if (obj == null) {
         return 0;
       } else if (String.class.isInstance(obj)) {
-        return Integer.valueOf((String) obj);
+        return Integer.parseInt((String) obj);
       }
       throw new Exception("Illegal Conversion");
     } catch (Exception e) {
@@ -277,7 +277,7 @@ public class LensJdbcResultSet implements ResultSet {
       } else if (obj == null) {
         return 0;
       } else if (String.class.isInstance(obj)) {
-        return Long.valueOf((String) obj);
+        return Long.parseLong((String) obj);
       }
       throw new Exception("Illegal Conversion");
     } catch (Exception e) {

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/8f716a7d/lens-cube/src/main/java/org/apache/lens/cube/metadata/FactPartition.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/metadata/FactPartition.java b/lens-cube/src/main/java/org/apache/lens/cube/metadata/FactPartition.java
index 729da30..44c6915 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/metadata/FactPartition.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/metadata/FactPartition.java
@@ -120,8 +120,9 @@ public class FactPartition implements Comparable<FactPartition> {
       if (this.partSpec != null) {
         if (o.partSpec == null) {
           partComp = 1;
+        } else {
+          partComp = this.partSpec.compareTo(o.partSpec);
         }
-        partComp = this.partSpec.compareTo(o.partSpec);
       } else {
         if (o.partSpec != null) {
           partComp = -1;

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/8f716a7d/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryContext.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryContext.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryContext.java
index 7ea67f4..0df4b2e 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryContext.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryContext.java
@@ -781,11 +781,7 @@ public class CubeQueryContext {
     } else {
       denormTables.addAll(deNormCtx.rewriteDenormctx(null, dimsToQuery, false));
     }
-    if (dimsToQuery == null) {
-      dimsToQuery = pickCandidateDimsToQuery(denormTables);
-    } else {
-      dimsToQuery.putAll(pickCandidateDimsToQuery(denormTables));
-    }
+    dimsToQuery.putAll(pickCandidateDimsToQuery(denormTables));
     // Prune join paths once denorm tables are picked
     if (autoJoinCtx != null) {
       // prune join paths for picked fact and dimensions
@@ -801,14 +797,9 @@ public class CubeQueryContext {
           joiningTables.addAll(factJoiningTables);
         }
       } else {
-        joiningTables.addAll(autoJoinCtx.pickOptionalTables(null,
-          dimsToQuery != null ? dimsToQuery.keySet() : null, this));
-      }
-      if (dimsToQuery == null) {
-        dimsToQuery = pickCandidateDimsToQuery(joiningTables);
-      } else {
-        dimsToQuery.putAll(pickCandidateDimsToQuery(joiningTables));
+        joiningTables.addAll(autoJoinCtx.pickOptionalTables(null, dimsToQuery.keySet(), this));
       }
+      dimsToQuery.putAll(pickCandidateDimsToQuery(joiningTables));
     }
     LOG.info("Picked Fact:" + cfacts + " dimsToQuery:" + dimsToQuery);
     pickedDimTables = dimsToQuery.values();

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/8f716a7d/lens-cube/src/main/java/org/apache/lens/cube/parse/JoinResolver.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/JoinResolver.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/JoinResolver.java
index bf57907..a41424a 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/JoinResolver.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/JoinResolver.java
@@ -937,14 +937,10 @@ class JoinResolver implements ContextRewriter {
   public void rewriteContext(CubeQueryContext cubeql) throws SemanticException {
     partialJoinConditions = new HashMap<AbstractCubeTable, String>();
     tableJoinTypeMap = new HashMap<AbstractCubeTable, JoinType>();
-    try {
-      resolveJoins(cubeql);
-    } catch (HiveException e) {
-      throw new SemanticException(e);
-    }
+    resolveJoins(cubeql);
   }
 
-  private void resolveJoins(CubeQueryContext cubeql) throws HiveException {
+  private void resolveJoins(CubeQueryContext cubeql) throws SemanticException {
     QB cubeQB = cubeql.getQb();
     boolean joinResolverDisabled =
       cubeql.getConf().getBoolean(CubeQueryConfUtil.DISABLE_AUTO_JOINS, CubeQueryConfUtil.DEFAULT_DISABLE_AUTO_JOINS);
@@ -953,7 +949,13 @@ class JoinResolver implements ContextRewriter {
         cubeQB.setQbJoinTree(genJoinTree(cubeQB, cubeql.getJoinTree(), cubeql));
       }
     } else {
-      autoResolveJoins(cubeql);
+      try {
+        autoResolveJoins(cubeql);
+      } catch (SemanticException e) {
+        throw e;
+      } catch (HiveException e) {
+        throw new SemanticException(e);
+      }
     }
   }
 
@@ -996,6 +998,7 @@ class JoinResolver implements ContextRewriter {
         String targetDimTable = cubeql.getQb().getTabNameForAlias(targetDimAlias);
         if (targetDimTable == null) {
           LOG.warn("Null table for alias " + targetDimAlias);
+          return;
         }
         target = cubeql.getMetastoreClient().getDimension(targetDimTable);
       }
@@ -1029,7 +1032,6 @@ class JoinResolver implements ContextRewriter {
     SchemaGraph graph = cubeql.getMetastoreClient().getSchemaGraph();
     Map<Aliased<Dimension>, List<SchemaGraph.JoinPath>> multipleJoinPaths =
       new LinkedHashMap<Aliased<Dimension>, List<SchemaGraph.JoinPath>>();
-    Map<Dimension, String> dimensionAliasMap = new HashMap<Dimension, String>();
 
     // Resolve join path for each dimension accessed in the query
     for (Dimension joinee : dimTables) {
@@ -1286,7 +1288,7 @@ class JoinResolver implements ContextRewriter {
       cubeql.setJoinCond(joinTree, HQLParser.getString(joinCond));
     } else {
       // No join condition specified. this should be an error
-      new SemanticException(ErrorMsg.NO_JOIN_CONDITION_AVAIABLE);
+      throw new SemanticException(ErrorMsg.NO_JOIN_CONDITION_AVAIABLE);
     }
     return joinTree;
   }

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/8f716a7d/lens-cube/src/test/java/org/apache/lens/cube/parse/TestJoinResolver.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestJoinResolver.java b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestJoinResolver.java
index dfa178b..cb078e3 100644
--- a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestJoinResolver.java
+++ b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestJoinResolver.java
@@ -578,8 +578,7 @@ public class TestJoinResolver extends TestQueryRewrite {
       Assert.fail("Should have failed. "
         + "The table citydim is getting accessed as both chain and without chain ");
     } catch (SemanticException e) {
-      Assert.assertNotNull(e.getCause());
-      Assert.assertEquals(e.getCause().getMessage().toLowerCase(),
+      Assert.assertEquals(e.getMessage().toLowerCase(),
         "Table citydim is getting accessed via joinchain: citystate and no chain at all".toLowerCase());
     }
 
@@ -590,8 +589,7 @@ public class TestJoinResolver extends TestQueryRewrite {
       Assert.fail("Should have failed. "
         + "The table citydim is getting accessed as both chain and without chain ");
     } catch (SemanticException e) {
-      Assert.assertNotNull(e.getCause());
-      Assert.assertEquals(e.getCause().getMessage().toLowerCase(),
+      Assert.assertEquals(e.getMessage().toLowerCase(),
         "Table citydim is getting accessed via joinchain: citystate and no chain at all".toLowerCase());
     }
 
@@ -604,9 +602,8 @@ public class TestJoinResolver extends TestQueryRewrite {
         + "It's not possible to resolve which statedim is being asked for when cityState and cubeState both end at"
         + " statedim table.");
     } catch (SemanticException e) {
-      Assert.assertNotNull(e.getCause());
       Assert.assertEquals(
-        e.getCause().getMessage().indexOf("Table statedim has 2 different paths through joinchains"), 0);
+        e.getMessage().indexOf("Table statedim has 2 different paths through joinchains"), 0);
     }
 
     // this test case should pass when default qualifiers for dimensions' chains are added
@@ -617,8 +614,7 @@ public class TestJoinResolver extends TestQueryRewrite {
       Assert.fail("Should have failed. "
         + "The table statedim is getting accessed as both cubeState and statedim ");
     } catch (SemanticException e) {
-      Assert.assertNotNull(e.getCause());
-      Assert.assertEquals(e.getCause().getMessage().toLowerCase(),
+      Assert.assertEquals(e.getMessage().toLowerCase(),
         "Table statedim is getting accessed via two different names: [cubestate, statedim]".toLowerCase());
     }
     // this should pass when default qualifiers are added
@@ -628,8 +624,7 @@ public class TestJoinResolver extends TestQueryRewrite {
       Assert.fail("Should have failed. "
         + "The table statedim is getting accessed as both cubeState and statedim ");
     } catch (SemanticException e) {
-      Assert.assertNotNull(e.getCause());
-      Assert.assertEquals(e.getCause().getMessage().toLowerCase(),
+      Assert.assertEquals(e.getMessage().toLowerCase(),
         "Table statedim is getting accessed via two different names: [citystate, statedim]".toLowerCase());
     }
 
@@ -642,8 +637,7 @@ public class TestJoinResolver extends TestQueryRewrite {
       Assert.fail("Should have failed. "
         + "The table citydim is getting accessed as both chain and without chain ");
     } catch (SemanticException e) {
-      Assert.assertNotNull(e.getCause());
-      Assert.assertEquals(e.getCause().getMessage().toLowerCase(),
+      Assert.assertEquals(e.getMessage().toLowerCase(),
         "Table citydim is getting accessed via joinchain: citystate and no chain at all".toLowerCase());
     }
   }

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/8f716a7d/lens-cube/src/test/java/org/apache/lens/cube/parse/TestQueryRewrite.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestQueryRewrite.java b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestQueryRewrite.java
index 6a37756..c4449da 100644
--- a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestQueryRewrite.java
+++ b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestQueryRewrite.java
@@ -89,6 +89,7 @@ public abstract class TestQueryRewrite {
       // unreachable
       return null;
     } catch (SemanticException e) {
+      e.printStackTrace();
       return e;
     }
   }

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/8f716a7d/lens-examples/src/main/java/org/apache/lens/examples/DatabaseUtil.java
----------------------------------------------------------------------
diff --git a/lens-examples/src/main/java/org/apache/lens/examples/DatabaseUtil.java b/lens-examples/src/main/java/org/apache/lens/examples/DatabaseUtil.java
index 6191aa5..f81504e 100644
--- a/lens-examples/src/main/java/org/apache/lens/examples/DatabaseUtil.java
+++ b/lens-examples/src/main/java/org/apache/lens/examples/DatabaseUtil.java
@@ -52,7 +52,7 @@ public final class DatabaseUtil {
     Statement statement = con.createStatement();
 
     InputStream file = DatabaseUtil.class.getClassLoader().getResourceAsStream("db-storage-schema.sql");
-    BufferedReader reader = new BufferedReader(new InputStreamReader(file));
+    BufferedReader reader = new BufferedReader(new InputStreamReader(file, "UTF-8"));
     String line;
     while ((line = reader.readLine()) != null) {
       if (line.trim().equals("") || line.startsWith("--")) {

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/8f716a7d/lens-examples/src/main/java/org/apache/lens/examples/SampleQueries.java
----------------------------------------------------------------------
diff --git a/lens-examples/src/main/java/org/apache/lens/examples/SampleQueries.java b/lens-examples/src/main/java/org/apache/lens/examples/SampleQueries.java
index eac0e0b..6c820a4 100644
--- a/lens-examples/src/main/java/org/apache/lens/examples/SampleQueries.java
+++ b/lens-examples/src/main/java/org/apache/lens/examples/SampleQueries.java
@@ -119,7 +119,7 @@ public class SampleQueries {
    */
   public void runQueries(String fileName) throws IOException {
     InputStream file = SampleMetastore.class.getClassLoader().getResourceAsStream(fileName);
-    BufferedReader reader = new BufferedReader(new InputStreamReader(file));
+    BufferedReader reader = new BufferedReader(new InputStreamReader(file, "UTF-8"));
     String query;
     while ((query = reader.readLine()) != null) {
       if (StringUtils.isBlank(query)) {

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/8f716a7d/lens-ml-lib/src/main/java/org/apache/lens/rdd/LensRDDClient.java
----------------------------------------------------------------------
diff --git a/lens-ml-lib/src/main/java/org/apache/lens/rdd/LensRDDClient.java b/lens-ml-lib/src/main/java/org/apache/lens/rdd/LensRDDClient.java
index ac89eee..cdcdec0 100644
--- a/lens-ml-lib/src/main/java/org/apache/lens/rdd/LensRDDClient.java
+++ b/lens-ml-lib/src/main/java/org/apache/lens/rdd/LensRDDClient.java
@@ -373,8 +373,8 @@ public class LensRDDClient {
         try {
           JavaPairRDD<WritableComparable, HCatRecord> javaPairRDD = HiveTableRDD.createHiveTableRDD(sparkContext,
             HIVE_CONF, "default", tempTableName, TEMP_TABLE_PART_COL + "='" + TEMP_TABLE_PART_VAL + "'");
-          LOG.info("Created RDD " + resultRDD.name() + " for table " + tempTableName);
           resultRDD = javaPairRDD.map(new HCatRecordToObjectListMapper()).rdd();
+          LOG.info("Created RDD " + resultRDD.name() + " for table " + tempTableName);
         } catch (IOException e) {
           throw new LensException("Error creating RDD for table " + tempTableName, e);
         }

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/8f716a7d/lens-query-lib/src/main/java/org/apache/lens/lib/query/CSVSerde.java
----------------------------------------------------------------------
diff --git a/lens-query-lib/src/main/java/org/apache/lens/lib/query/CSVSerde.java b/lens-query-lib/src/main/java/org/apache/lens/lib/query/CSVSerde.java
index 9831456..613d6ba 100644
--- a/lens-query-lib/src/main/java/org/apache/lens/lib/query/CSVSerde.java
+++ b/lens-query-lib/src/main/java/org/apache/lens/lib/query/CSVSerde.java
@@ -330,7 +330,7 @@ public final class CSVSerde extends AbstractSerDe {
         StringBuilder unionString = new StringBuilder();
         ByteArrayOutputStream tagStream = new ByteArrayOutputStream();
         LazyInteger.writeUTF8(tagStream, uoi.getTag(field));
-        unionString.append(new String(tagStream.toByteArray()));
+        unionString.append(new String(tagStream.toByteArray(), "UTF-8"));
         unionString.append(unionTagFieldSeperator);
         unionString.append(serializeField(uoi.getField(field), ois.get(uoi.getTag(field))));
         return unionString.toString();

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/8f716a7d/lens-query-lib/src/main/java/org/apache/lens/lib/query/FilePersistentFormatter.java
----------------------------------------------------------------------
diff --git a/lens-query-lib/src/main/java/org/apache/lens/lib/query/FilePersistentFormatter.java b/lens-query-lib/src/main/java/org/apache/lens/lib/query/FilePersistentFormatter.java
index 983580c..96476bf 100644
--- a/lens-query-lib/src/main/java/org/apache/lens/lib/query/FilePersistentFormatter.java
+++ b/lens-query-lib/src/main/java/org/apache/lens/lib/query/FilePersistentFormatter.java
@@ -126,7 +126,8 @@ public class FilePersistentFormatter extends WrappedFileFormatter implements Per
         LOG.info("Processing file:" + entry.getValue().getPath());
         BufferedReader in = null;
         try {
-          in = new BufferedReader(new InputStreamReader(persistFs.open(entry.getValue().getPath())));
+          // default encoding in hadoop filesystem is utf-8
+          in = new BufferedReader(new InputStreamReader(persistFs.open(entry.getValue().getPath()), "UTF-8"));
           String row = in.readLine();
           while (row != null) {
             writeRow(row);

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/8f716a7d/lens-server/src/main/java/org/apache/lens/server/LensService.java
----------------------------------------------------------------------
diff --git a/lens-server/src/main/java/org/apache/lens/server/LensService.java b/lens-server/src/main/java/org/apache/lens/server/LensService.java
index 2126a31..bd8699b 100644
--- a/lens-server/src/main/java/org/apache/lens/server/LensService.java
+++ b/lens-server/src/main/java/org/apache/lens/server/LensService.java
@@ -72,7 +72,7 @@ public abstract class LensService extends CompositeService implements Externaliz
   // Static session map which is used by query submission thread to get the
   // lens session before submitting a query to hive server
   /** The session map. */
-  protected static ConcurrentHashMap<String, LensSessionHandle> sessionMap
+  protected static final ConcurrentHashMap<String, LensSessionHandle> SESSION_MAP
     = new ConcurrentHashMap<String, LensSessionHandle>();
 
   /**
@@ -148,7 +148,7 @@ public abstract class LensService extends CompositeService implements Externaliz
     }
     LensSessionHandle lensSession = new LensSessionHandle(sessionHandle.getHandleIdentifier().getPublicId(),
       sessionHandle.getHandleIdentifier().getSecretId());
-    sessionMap.put(lensSession.getPublicId().toString(), lensSession);
+    SESSION_MAP.put(lensSession.getPublicId().toString(), lensSession);
     return lensSession;
   }
 
@@ -168,7 +168,7 @@ public abstract class LensService extends CompositeService implements Externaliz
         new HashMap<String, String>());
       LensSessionHandle restoredSession = new LensSessionHandle(restoredHandle.getHandleIdentifier().getPublicId(),
         restoredHandle.getHandleIdentifier().getSecretId());
-      sessionMap.put(restoredSession.getPublicId().toString(), restoredSession);
+      SESSION_MAP.put(restoredSession.getPublicId().toString(), restoredSession);
     } catch (HiveSQLException e) {
       throw new LensException("Error restoring session " + sessionHandle, e);
     }
@@ -212,7 +212,7 @@ public abstract class LensService extends CompositeService implements Externaliz
   public void closeSession(LensSessionHandle sessionHandle) throws LensException {
     try {
       cliService.closeSession(getHiveSessionHandle(sessionHandle));
-      sessionMap.remove(sessionHandle.getPublicId().toString());
+      SESSION_MAP.remove(sessionHandle.getPublicId().toString());
     } catch (Exception e) {
       throw new LensException(e);
     }
@@ -260,7 +260,7 @@ public abstract class LensService extends CompositeService implements Externaliz
    * @param sessionHandle public UUID of the session
    */
   public void acquire(String sessionHandle) {
-    LensSessionHandle handle = sessionMap.get(sessionHandle);
+    LensSessionHandle handle = SESSION_MAP.get(sessionHandle);
 
     if (handle == null) {
       throw new NotFoundException("Session handle not found " + sessionHandle);
@@ -288,7 +288,7 @@ public abstract class LensService extends CompositeService implements Externaliz
    * @throws LensException if session cannot be released
    */
   public void release(String sessionHandle) throws LensException {
-    LensSessionHandle handle = sessionMap.get(sessionHandle);
+    LensSessionHandle handle = SESSION_MAP.get(sessionHandle);
     if (handle != null) {
       getSession(handle).release();
     }
@@ -301,7 +301,7 @@ public abstract class LensService extends CompositeService implements Externaliz
    * @return the session handle
    */
   protected LensSessionHandle getSessionHandle(String sessionid) {
-    return sessionMap.get(sessionid);
+    return SESSION_MAP.get(sessionid);
   }
 
   /**

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/8f716a7d/lens-server/src/main/java/org/apache/lens/server/query/QueryExecutionServiceImpl.java
----------------------------------------------------------------------
diff --git a/lens-server/src/main/java/org/apache/lens/server/query/QueryExecutionServiceImpl.java b/lens-server/src/main/java/org/apache/lens/server/query/QueryExecutionServiceImpl.java
index d531010..b44ce7e 100644
--- a/lens-server/src/main/java/org/apache/lens/server/query/QueryExecutionServiceImpl.java
+++ b/lens-server/src/main/java/org/apache/lens/server/query/QueryExecutionServiceImpl.java
@@ -971,7 +971,7 @@ public class QueryExecutionServiceImpl extends LensService implements QueryExecu
     synchronized (allQueries) {
       for (QueryContext ctx : allQueries.values()) {
         try {
-          if (sessionMap.containsKey(ctx.getLensSessionIdentifier())) {
+          if (SESSION_MAP.containsKey(ctx.getLensSessionIdentifier())) {
             // try setting configuration if the query session is still not closed
             ctx.setConf(getLensConf(getSessionHandle(ctx.getLensSessionIdentifier()), ctx.getLensConf()));
           } else {

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/8f716a7d/lens-server/src/main/java/org/apache/lens/server/session/HiveSessionService.java
----------------------------------------------------------------------
diff --git a/lens-server/src/main/java/org/apache/lens/server/session/HiveSessionService.java b/lens-server/src/main/java/org/apache/lens/server/session/HiveSessionService.java
index 754cf23..e4fb812 100644
--- a/lens-server/src/main/java/org/apache/lens/server/session/HiveSessionService.java
+++ b/lens-server/src/main/java/org/apache/lens/server/session/HiveSessionService.java
@@ -232,8 +232,8 @@ public class HiveSessionService extends LensService implements SessionService {
     String[] auxJars = getSession(sessionid).getSessionConf().getStrings(LensConfConstants.AUX_JARS);
 
     if (auxJars != null) {
-      LOG.info("Adding aux jars:" + auxJars);
       for (String jar : auxJars) {
+        LOG.info("Adding aux jar:" + jar);
         addResourceToAllServices(sessionid, "jar", jar);
       }
     }
@@ -242,7 +242,7 @@ public class HiveSessionService extends LensService implements SessionService {
 
   @Override
   public boolean isOpen(LensSessionHandle sessionHandle) {
-    return sessionMap.containsKey(sessionHandle);
+    return SESSION_MAP.containsKey(sessionHandle.getPublicId().toString());
   }
 
   /**
@@ -407,12 +407,12 @@ public class HiveSessionService extends LensService implements SessionService {
   @Override
   public void writeExternal(ObjectOutput out) throws IOException {
     // Write out all the sessions
-    out.writeInt(sessionMap.size());
-    for (LensSessionHandle sessionHandle : sessionMap.values()) {
+    out.writeInt(SESSION_MAP.size());
+    for (LensSessionHandle sessionHandle : SESSION_MAP.values()) {
       LensSessionImpl session = getSession(sessionHandle);
       session.getLensSessionPersistInfo().writeExternal(out);
     }
-    LOG.info("Session service pesristed " + sessionMap.size() + " sessions");
+    LOG.info("Session service pesristed " + SESSION_MAP.size() + " sessions");
   }
 
   /*
@@ -429,9 +429,9 @@ public class HiveSessionService extends LensService implements SessionService {
       LensSessionImpl.LensSessionPersistInfo persistInfo = new LensSessionImpl.LensSessionPersistInfo();
       persistInfo.readExternal(in);
       restorableSessions.add(persistInfo);
-      sessionMap.put(persistInfo.getSessionHandle().getPublicId().toString(), persistInfo.getSessionHandle());
+      SESSION_MAP.put(persistInfo.getSessionHandle().getPublicId().toString(), persistInfo.getSessionHandle());
     }
-    LOG.info("Session service recovered " + sessionMap.size() + " sessions");
+    LOG.info("Session service recovered " + SESSION_MAP.size() + " sessions");
   }
 
   /**
@@ -474,7 +474,7 @@ public class HiveSessionService extends LensService implements SessionService {
      * Run internal.
      */
     public void runInternal() {
-      List<LensSessionHandle> sessionsToRemove = new ArrayList<LensSessionHandle>(sessionMap.values());
+      List<LensSessionHandle> sessionsToRemove = new ArrayList<LensSessionHandle>(SESSION_MAP.values());
       Iterator<LensSessionHandle> itr = sessionsToRemove.iterator();
       while (itr.hasNext()) {
         LensSessionHandle sessionHandle = itr.next();

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/8f716a7d/lens-server/src/main/java/org/apache/lens/server/user/PropertyBasedUserConfigLoader.java
----------------------------------------------------------------------
diff --git a/lens-server/src/main/java/org/apache/lens/server/user/PropertyBasedUserConfigLoader.java b/lens-server/src/main/java/org/apache/lens/server/user/PropertyBasedUserConfigLoader.java
index 46a0e37..16a9a70 100644
--- a/lens-server/src/main/java/org/apache/lens/server/user/PropertyBasedUserConfigLoader.java
+++ b/lens-server/src/main/java/org/apache/lens/server/user/PropertyBasedUserConfigLoader.java
@@ -54,7 +54,7 @@ public class PropertyBasedUserConfigLoader extends UserConfigLoader {
         + "Please set property " + LensConfConstants.USER_RESOLVER_PROPERTYBASED_FILENAME);
     }
     try {
-      properties.load(new InputStreamReader(new FileInputStream(new File(filename))));
+      properties.load(new InputStreamReader(new FileInputStream(new File(filename)), "UTF-8"));
     } catch (IOException e) {
       throw new UserConfigLoaderException("property file not found. Provided path was: " + filename);
     }

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/8f716a7d/pom.xml
----------------------------------------------------------------------
diff --git a/pom.xml b/pom.xml
index 7e42144..1ffb135 100644
--- a/pom.xml
+++ b/pom.xml
@@ -96,7 +96,7 @@
     <war.plugin.version>2.1.1</war.plugin.version>
     <license.plugin.version>2.6</license.plugin.version>
     <buildnumber.plugin.version>1.0</buildnumber.plugin.version>
-    <findbugs.plugin.version>3.0.1</findbugs.plugin.version>
+    <findbugs.plugin.version>2.5.5</findbugs.plugin.version>
 
     <!-- debian -->
     <mvn.deb.build.dir>${project.build.directory}/debian</mvn.deb.build.dir>
@@ -577,7 +577,7 @@
         <configuration>
           <xmlOutput>true</xmlOutput>
           <excludeFilterFile>checkstyle/src/main/resources/findbugs-exclude.xml</excludeFilterFile>
-          <failOnError>false</failOnError>
+          <failOnError>true</failOnError>
           <skip>${skipCheck}</skip>
         </configuration>
         <executions>


[10/50] [abbrv] incubator-lens git commit: LENS-424 : Fix logging in partition timeline (Rajat Khandelwal via amareshwari)

Posted by am...@apache.org.
LENS-424 : Fix logging in partition timeline (Rajat Khandelwal via amareshwari)


Project: http://git-wip-us.apache.org/repos/asf/incubator-lens/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-lens/commit/e5bc9e9b
Tree: http://git-wip-us.apache.org/repos/asf/incubator-lens/tree/e5bc9e9b
Diff: http://git-wip-us.apache.org/repos/asf/incubator-lens/diff/e5bc9e9b

Branch: refs/heads/current-release-line
Commit: e5bc9e9b791adf2e704bc3b0532ee0177ff8c69f
Parents: b86424c
Author: Rajat Khandelwal <pr...@apache.org>
Authored: Fri Mar 27 12:39:38 2015 +0530
Committer: Amareshwari Sriramadasu <am...@apache.org>
Committed: Fri Mar 27 12:39:38 2015 +0530

----------------------------------------------------------------------
 .../java/org/apache/lens/cube/metadata/CubeMetastoreClient.java   | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/e5bc9e9b/lens-cube/src/main/java/org/apache/lens/cube/metadata/CubeMetastoreClient.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/metadata/CubeMetastoreClient.java b/lens-cube/src/main/java/org/apache/lens/cube/metadata/CubeMetastoreClient.java
index 41b5b08..42bf98f 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/metadata/CubeMetastoreClient.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/metadata/CubeMetastoreClient.java
@@ -195,6 +195,7 @@ public class CubeMetastoreClient {
               // Not found in table properties either, compute from all partitions of the fact-storage table.
               // First make sure all combinations of update period and partition column have an entry even
               // if no partitions exist
+              log.info("loading from all partitions");
               if (getCubeFact(fact).getUpdatePeriods() != null && getCubeFact(fact).getUpdatePeriods().get(
                 storage) != null) {
                 for (UpdatePeriod updatePeriod : getCubeFact(fact).getUpdatePeriods().get(storage)) {
@@ -221,6 +222,7 @@ public class CubeMetastoreClient {
               commitAllBatchAdditions(storageTableName);
             } else {
               // found in table properties, load from there.
+              log.info("loading from table properties");
               for (UpdatePeriod updatePeriod : getCubeFact(fact).getUpdatePeriods().get(storage)) {
                 for (String partCol : getTimePartsOfTable(storageTableName)) {
                   ensureEntry(storageTableName, updatePeriod, partCol).init(storageTable);
@@ -277,7 +279,6 @@ public class CubeMetastoreClient {
           CubeMetastoreClient.this, storageTable, updatePeriod, partitionColumn));
       }
       PartitionTimeline ret = get(storageTable).get(updatePeriod).get(partitionColumn);
-      log.info("ensured entry " + ret);
       return ret;
     }
 


[06/50] [abbrv] incubator-lens git commit: LENS-366 : Document lens.server.ui.enable property (Raju Bairishetti via amareshwari)

Posted by am...@apache.org.
LENS-366 : Document lens.server.ui.enable property (Raju Bairishetti via amareshwari)


Project: http://git-wip-us.apache.org/repos/asf/incubator-lens/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-lens/commit/44f4faf8
Tree: http://git-wip-us.apache.org/repos/asf/incubator-lens/tree/44f4faf8
Diff: http://git-wip-us.apache.org/repos/asf/incubator-lens/diff/44f4faf8

Branch: refs/heads/current-release-line
Commit: 44f4faf8e0eb138c9989e860c05d293413a2124c
Parents: 46cf815
Author: Raju Bairishetti <ra...@gmail.com>
Authored: Wed Mar 25 12:54:51 2015 +0530
Committer: Amareshwari Sriramadasu <am...@apache.org>
Committed: Wed Mar 25 12:54:51 2015 +0530

----------------------------------------------------------------------
 .../src/main/resources/lensserver-default.xml   |  6 +++
 src/site/apt/admin/config.apt                   | 44 ++++++++++----------
 2 files changed, 29 insertions(+), 21 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/44f4faf8/lens-server/src/main/resources/lensserver-default.xml
----------------------------------------------------------------------
diff --git a/lens-server/src/main/resources/lensserver-default.xml b/lens-server/src/main/resources/lensserver-default.xml
index 9fe7ab5..c153193 100644
--- a/lens-server/src/main/resources/lensserver-default.xml
+++ b/lens-server/src/main/resources/lensserver-default.xml
@@ -83,6 +83,12 @@
   </property>
 
   <property>
+    <name>lens.server.ui.enable</name>
+    <value>true</value>
+    <description>Bringing up the ui server is optional. By default it brings up UI server.</description>
+  </property>
+
+  <property>
     <name>lens.server.ui.enable.caching</name>
     <value>true</value>
     <description>Set this to false to disable static file caching in the UI server</description>

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/44f4faf8/src/site/apt/admin/config.apt
----------------------------------------------------------------------
diff --git a/src/site/apt/admin/config.apt b/src/site/apt/admin/config.apt
index a017f1c..2462f85 100644
--- a/src/site/apt/admin/config.apt
+++ b/src/site/apt/admin/config.apt
@@ -165,46 +165,48 @@ Lens server configuration
 *--+--+---+--+
 |69|lens.server.ui.base.uri|http://0.0.0.0:19999/|The base url for the Lens UI Server|
 *--+--+---+--+
-|70|lens.server.ui.enable.caching|true|Set this to false to disable static file caching in the UI server|
+|70|lens.server.ui.enable|true|Bringing up the ui server is optional. By default it brings up UI server.|
 *--+--+---+--+
-|71|lens.server.ui.static.dir|webapp/lens-server/static|The base directory to server UI static files from|
+|71|lens.server.ui.enable.caching|true|Set this to false to disable static file caching in the UI server|
 *--+--+---+--+
-|72|lens.server.user.resolver.custom.class|full.package.name.Classname|Required for CUSTOM user resolver. In case the provided implementations are not sufficient for user config resolver, a custom classname can be provided. Class should extend org.apache.lens.server.user.UserConfigLoader|
+|72|lens.server.ui.static.dir|webapp/lens-server/static|The base directory to server UI static files from|
 *--+--+---+--+
-|73|lens.server.user.resolver.db.keys|lens.session.cluster.user,mapred.job.queue.name|Required for DATABASE and LDAP_BACKED_DATABASE user resolvers. For database based user config loaders, the conf keys that will be loaded from database.|
+|73|lens.server.user.resolver.custom.class|full.package.name.Classname|Required for CUSTOM user resolver. In case the provided implementations are not sufficient for user config resolver, a custom classname can be provided. Class should extend org.apache.lens.server.user.UserConfigLoader|
 *--+--+---+--+
-|74|lens.server.user.resolver.db.query|select clusteruser,queue from user_config_table where username=?|Required for DATABASE and LDAP_BACKED_DATABASE user resolvers. For database based user config loader, this query will be run with single argument = logged in user and the result columns will be assigned to lens.server.user.resolver.db.keys in order. For ldap backed database resolver, the argument to this query will be the intermediate values obtained from ldap.|
+|74|lens.server.user.resolver.db.keys|lens.session.cluster.user,mapred.job.queue.name|Required for DATABASE and LDAP_BACKED_DATABASE user resolvers. For database based user config loaders, the conf keys that will be loaded from database.|
 *--+--+---+--+
-|75|lens.server.user.resolver.fixed.value| |Required for FIXED user resolver. when lens.server.user.resolver.type=FIXED, This will be the value cluster user will resolve to.|
+|75|lens.server.user.resolver.db.query|select clusteruser,queue from user_config_table where username=?|Required for DATABASE and LDAP_BACKED_DATABASE user resolvers. For database based user config loader, this query will be run with single argument = logged in user and the result columns will be assigned to lens.server.user.resolver.db.keys in order. For ldap backed database resolver, the argument to this query will be the intermediate values obtained from ldap.|
 *--+--+---+--+
-|76|lens.server.user.resolver.ldap.bind.dn| |Required for LDAP_BACKED_DATABASE user resolvers. ldap dn for admin binding example: CN=company-it-admin,ou=service-account,ou=company-service-account,dc=dc1,dc=com...|
+|76|lens.server.user.resolver.fixed.value| |Required for FIXED user resolver. when lens.server.user.resolver.type=FIXED, This will be the value cluster user will resolve to.|
 *--+--+---+--+
-|77|lens.server.user.resolver.ldap.bind.password| |Required for LDAP_BACKED_DATABASE user resolvers. ldap password for admin binding above|
+|77|lens.server.user.resolver.ldap.bind.dn| |Required for LDAP_BACKED_DATABASE user resolvers. ldap dn for admin binding example: CN=company-it-admin,ou=service-account,ou=company-service-account,dc=dc1,dc=com...|
 *--+--+---+--+
-|78|lens.server.user.resolver.ldap.fields|department|Required for LDAP_BACKED_DATABASE user resolvers. list of fields to be obtained from ldap. These will be cached by the intermediate db.|
+|78|lens.server.user.resolver.ldap.bind.password| |Required for LDAP_BACKED_DATABASE user resolvers. ldap password for admin binding above|
 *--+--+---+--+
-|79|lens.server.user.resolver.ldap.intermediate.db.delete.sql|delete from user_department where username=?|Required for LDAP_BACKED_DATABASE user resolvers. query to delete intermediate values from database backing ldap as cache. one argument: logged in user.|
+|79|lens.server.user.resolver.ldap.fields|department|Required for LDAP_BACKED_DATABASE user resolvers. list of fields to be obtained from ldap. These will be cached by the intermediate db.|
 *--+--+---+--+
-|80|lens.server.user.resolver.ldap.intermediate.db.insert.sql|insert into user_department (username, department, expiry) values (?, ?, ?)|Required for LDAP_BACKED_DATABASE user resolvers. query to insert intermediate values from database backing ldap as cache. arguments: first logged in user, then all intermediate values, then current time + expiration time|
+|80|lens.server.user.resolver.ldap.intermediate.db.delete.sql|delete from user_department where username=?|Required for LDAP_BACKED_DATABASE user resolvers. query to delete intermediate values from database backing ldap as cache. one argument: logged in user.|
 *--+--+---+--+
-|81|lens.server.user.resolver.ldap.intermediate.db.query|select department from user_department where username=? and expiry>?|Required for LDAP_BACKED_DATABASE user resolvers. query to obtain intermediate values from database backing ldap as cache. two arguments: logged in user and current time.|
+|81|lens.server.user.resolver.ldap.intermediate.db.insert.sql|insert into user_department (username, department, expiry) values (?, ?, ?)|Required for LDAP_BACKED_DATABASE user resolvers. query to insert intermediate values from database backing ldap as cache. arguments: first logged in user, then all intermediate values, then current time + expiration time|
 *--+--+---+--+
-|82|lens.server.user.resolver.ldap.search.base| |Required for LDAP_BACKED_DATABASE user resolvers. for searching intermediate values for a user, the search keys. example: cn=users,dc=dc1,dc=dc2...|
+|82|lens.server.user.resolver.ldap.intermediate.db.query|select department from user_department where username=? and expiry>?|Required for LDAP_BACKED_DATABASE user resolvers. query to obtain intermediate values from database backing ldap as cache. two arguments: logged in user and current time.|
 *--+--+---+--+
-|83|lens.server.user.resolver.ldap.search.filter|(&(objectClass=user)(sAMAccountName=%s))|Required for LDAP_BACKED_DATABASE user resolvers. filter pattern for ldap search|
+|83|lens.server.user.resolver.ldap.search.base| |Required for LDAP_BACKED_DATABASE user resolvers. for searching intermediate values for a user, the search keys. example: cn=users,dc=dc1,dc=dc2...|
 *--+--+---+--+
-|84|lens.server.user.resolver.ldap.url| |Required for LDAP_BACKED_DATABASE user resolvers. ldap url to connect to.|
+|84|lens.server.user.resolver.ldap.search.filter|(&(objectClass=user)(sAMAccountName=%s))|Required for LDAP_BACKED_DATABASE user resolvers. filter pattern for ldap search|
 *--+--+---+--+
-|85|lens.server.user.resolver.propertybased.filename|/path/to/propertyfile|Required for PROPERTYBASED user resolver. when lens.server.user.resolver.type is PROPERTYBASED, then this file will be read and parsed to determine cluster user. Each line should contain username followed by DOT followed by property full name followed by equal-to sign and followed by value. example schema of the file is: user1.lens.server.cluster.user=clusteruser1 user1.mapred.job.queue.name=queue1 *.lens.server.cluster.user=defaultclusteruser *.mapred.job.queue.name=default|
+|85|lens.server.user.resolver.ldap.url| |Required for LDAP_BACKED_DATABASE user resolvers. ldap url to connect to.|
 *--+--+---+--+
-|86|lens.server.user.resolver.type|FIXED|Type of user config resolver. allowed values are FIXED, PROPERTYBASED, DATABASE, LDAP_BACKED_DATABASE, CUSTOM.|
+|86|lens.server.user.resolver.propertybased.filename|/path/to/propertyfile|Required for PROPERTYBASED user resolver. when lens.server.user.resolver.type is PROPERTYBASED, then this file will be read and parsed to determine cluster user. Each line should contain username followed by DOT followed by property full name followed by equal-to sign and followed by value. example schema of the file is: user1.lens.server.cluster.user=clusteruser1 user1.mapred.job.queue.name=queue1 *.lens.server.cluster.user=defaultclusteruser *.mapred.job.queue.name=default|
 *--+--+---+--+
-|87|lens.server.ws.featurenames|multipart|These JAX-RS Feature(s) would be started in the specified order when lens-server starts up|
+|87|lens.server.user.resolver.type|FIXED|Type of user config resolver. allowed values are FIXED, PROPERTYBASED, DATABASE, LDAP_BACKED_DATABASE, CUSTOM.|
 *--+--+---+--+
-|88|lens.server.ws.filternames|authentication,consistentState,serverMode|These JAX-RS filters would be started in the specified order when lens-server starts up|
+|88|lens.server.ws.featurenames|multipart|These JAX-RS Feature(s) would be started in the specified order when lens-server starts up|
 *--+--+---+--+
-|89|lens.server.ws.listenernames|appevent|These listeners would be called in the specified order when lens-server starts up|
+|89|lens.server.ws.filternames|authentication,consistentState,serverMode|These JAX-RS filters would be started in the specified order when lens-server starts up|
 *--+--+---+--+
-|90|lens.server.ws.resourcenames|session,metastore,query,quota,scheduler,index|These JAX-RS resources would be started in the specified order when lens-server starts up|
+|90|lens.server.ws.listenernames|appevent|These listeners would be called in the specified order when lens-server starts up|
+*--+--+---+--+
+|91|lens.server.ws.resourcenames|session,metastore,query,quota,scheduler,index|These JAX-RS resources would be started in the specified order when lens-server starts up|
 *--+--+---+--+
 The configuration parameters and their default values


[09/50] [abbrv] incubator-lens git commit: LENS-323: Use cube query context for setting priority(Amareshwari Sriramadasu via prongs)

Posted by am...@apache.org.
LENS-323: Use cube query context for setting priority(Amareshwari Sriramadasu via prongs)


Project: http://git-wip-us.apache.org/repos/asf/incubator-lens/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-lens/commit/b86424c8
Tree: http://git-wip-us.apache.org/repos/asf/incubator-lens/tree/b86424c8
Diff: http://git-wip-us.apache.org/repos/asf/incubator-lens/diff/b86424c8

Branch: refs/heads/current-release-line
Commit: b86424c8c4bc46a31582f5de78d91a7cb88f362d
Parents: 36764bb
Author: Amareshwari Sriramadasu <am...@apache.org>
Authored: Thu Mar 26 19:12:30 2015 +0530
Committer: Rajat Khandelwal <pr...@apache.org>
Committed: Thu Mar 26 19:12:30 2015 +0530

----------------------------------------------------------------------
 .../apache/lens/cube/parse/CandidateDim.java    |  29 +++-
 .../apache/lens/cube/parse/CandidateFact.java   |  28 +++-
 .../apache/lens/cube/parse/CandidateTable.java  |  12 ++
 .../lens/cube/parse/CandidateTableResolver.java |   2 +-
 .../lens/cube/parse/CubeQueryContext.java       |   5 +-
 .../apache/lens/cube/parse/DimHQLContext.java   |   2 +-
 .../apache/lens/cube/parse/JoinResolver.java    |   4 +-
 .../lens/cube/parse/LeastPartitionResolver.java |   2 +-
 .../lens/cube/parse/SingleFactHQLContext.java   |   2 +-
 .../lens/cube/parse/StorageTableResolver.java   |  17 ++-
 .../org/apache/lens/cube/parse/StorageUtil.java |   2 +-
 .../apache/lens/driver/cube/RewriteUtil.java    |  14 ++
 .../apache/lens/driver/cube/RewriterPlan.java   | 132 +++++++++++++++++
 .../apache/lens/cube/parse/CubeTestSetup.java   |   6 +
 .../lens/cube/parse/TestRewriterPlan.java       | 147 +++++++++++++++++++
 lens-driver-hive/pom.xml                        |   5 +
 .../org/apache/lens/driver/hive/HiveDriver.java |   7 +-
 .../apache/lens/driver/hive/HiveQueryPlan.java  |  18 +--
 .../DurationBasedQueryPriorityDecider.java      |  89 ++++++-----
 .../apache/lens/driver/hive/TestHiveDriver.java | 119 +++++++++++----
 .../src/test/resources/priority_tests.data      |   6 +-
 .../lens/server/api/driver/DriverQueryPlan.java |  47 ++++--
 .../server/api/query/AbstractQueryContext.java  |  10 ++
 .../api/query/DriverSelectorQueryContext.java   |  32 ++++
 .../lens/server/api/driver/MockDriver.java      |   8 -
 .../server/query/QueryExecutionServiceImpl.java |   2 +
 26 files changed, 613 insertions(+), 134 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/b86424c8/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateDim.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateDim.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateDim.java
index 3ac207e..90d0b6d 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateDim.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateDim.java
@@ -19,20 +19,30 @@
 package org.apache.lens.cube.parse;
 
 import java.util.Collection;
+import java.util.Collections;
+import java.util.Set;
 
 import org.apache.lens.cube.metadata.CubeDimensionTable;
 import org.apache.lens.cube.metadata.Dimension;
+import org.apache.lens.cube.metadata.StorageConstants;
 
 import org.apache.commons.lang.StringUtils;
 import org.apache.hadoop.hive.ql.session.SessionState;
 
+import lombok.Getter;
+import lombok.Setter;
+
 /**
  * Holds context of a candidate dim table.
  */
-class CandidateDim implements CandidateTable {
+public class CandidateDim implements CandidateTable {
   final CubeDimensionTable dimtable;
-  String storageTable;
-  String whereClause;
+  @Getter
+  @Setter
+  private String storageTable;
+  @Getter
+  @Setter
+  private String whereClause;
   private boolean dbResolved = false;
   private boolean whereClauseAdded = false;
   private Dimension baseTable;
@@ -109,4 +119,17 @@ class CandidateDim implements CandidateTable {
   public Collection<String> getColumns() {
     return dimtable.getAllFieldNames();
   }
+
+  @Override
+  public Set<String> getStorageTables() {
+    return Collections.singleton(storageTable);
+  }
+
+  @Override
+  public Set<String> getPartsQueried() {
+    if (StringUtils.isBlank(whereClause)) {
+      return Collections.emptySet();
+    }
+    return Collections.singleton(StorageConstants.LATEST_PARTITION_VALUE);
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/b86424c8/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateFact.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateFact.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateFact.java
index 66c3b25..47b9fc1 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateFact.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateFact.java
@@ -25,6 +25,7 @@ import java.util.*;
 import org.apache.lens.cube.metadata.AbstractCubeTable;
 import org.apache.lens.cube.metadata.CubeFactTable;
 import org.apache.lens.cube.metadata.CubeInterface;
+import org.apache.lens.cube.metadata.FactPartition;
 import org.apache.lens.cube.parse.HQLParser.ASTNodeVisitor;
 import org.apache.lens.cube.parse.HQLParser.TreeNode;
 
@@ -41,24 +42,35 @@ import org.apache.hadoop.hive.ql.session.SessionState;
 
 import org.antlr.runtime.CommonToken;
 
+import lombok.Getter;
+import lombok.Setter;
+
 /**
  * Holds context of a candidate fact table.
  */
-class CandidateFact implements CandidateTable {
+public class CandidateFact implements CandidateTable {
   public static final Log LOG = LogFactory.getLog(CandidateFact.class.getName());
   final CubeFactTable fact;
-  Set<String> storageTables;
+  @Getter
+  @Setter
+  private Set<String> storageTables;
   // flag to know if querying multiple storage tables is enabled for this fact
-  boolean enabledMultiTableSelect;
-  int numQueriedParts = 0;
-  final Map<TimeRange, String> rangeToWhereClause = new HashMap<TimeRange, String>();
+  @Getter
+  @Setter
+  private  boolean enabledMultiTableSelect;
+  @Getter
+  private int numQueriedParts = 0;
+  @Getter
+  private final Set<FactPartition> partsQueried = new HashSet<FactPartition>();
+  @Getter
+  private final Map<TimeRange, String> rangeToWhereClause = new HashMap<TimeRange, String>();
   private boolean dbResolved = false;
   private CubeInterface baseTable;
   private ASTNode selectAST;
   private ASTNode whereAST;
   private ASTNode groupbyAST;
   private ASTNode havingAST;
-  List<TimeRangeNode> timenodes = new ArrayList<TimeRangeNode>();
+  private List<TimeRangeNode> timenodes = new ArrayList<TimeRangeNode>();
   private final List<Integer> selectIndices = new ArrayList<Integer>();
   private final List<Integer> dimFieldIndices = new ArrayList<Integer>();
   private Collection<String> columns;
@@ -95,6 +107,10 @@ class CandidateFact implements CandidateTable {
     }
   }
 
+  void incrementPartsQueried(int incr) {
+    numQueriedParts += incr;
+  }
+
   private void updateTimeRanges(ASTNode root, ASTNode parent, int childIndex) throws SemanticException {
     if (root == null) {
       return;

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/b86424c8/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateTable.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateTable.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateTable.java
index 5eaf7ec..e001ca4 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateTable.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateTable.java
@@ -19,6 +19,7 @@
 package org.apache.lens.cube.parse;
 
 import java.util.Collection;
+import java.util.Set;
 
 import org.apache.lens.cube.metadata.AbstractCubeTable;
 
@@ -36,6 +37,12 @@ public interface CandidateTable {
   String getStorageString(String alias);
 
   /**
+   * Get storage tables corresponding to this candidate
+   * @return
+   */
+  Set<String> getStorageTables();
+
+  /**
    * Get candidate table
    *
    * @return Candidate fact or dim table
@@ -62,4 +69,9 @@ public interface CandidateTable {
    * @return set or list of columns
    */
   Collection<String> getColumns();
+
+  /**
+   * Get partitions queried
+   */
+  Set<?> getPartsQueried();
 }

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/b86424c8/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateTableResolver.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateTableResolver.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateTableResolver.java
index 9ee1667..a1022fd 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateTableResolver.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateTableResolver.java
@@ -96,7 +96,7 @@ class CandidateTableResolver implements ContextRewriter {
         }
         for (CubeFactTable fact : factTables) {
           CandidateFact cfact = new CandidateFact(fact, cubeql.getCube());
-          cfact.enabledMultiTableSelect = qlEnabledMultiTableSelect;
+          cfact.setEnabledMultiTableSelect(qlEnabledMultiTableSelect);
           cubeql.getCandidateFactTables().add(cfact);
         }
         LOG.info("Populated candidate facts:" + cubeql.getCandidateFactTables());

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/b86424c8/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryContext.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryContext.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryContext.java
index df5a212..e06022c 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryContext.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryContext.java
@@ -743,6 +743,8 @@ public class CubeQueryContext {
   }
 
   private HQLContextInterface hqlContext;
+  @Getter private Collection<CandidateFact> pickedFacts;
+  @Getter private Collection<CandidateDim> pickedDimTables;
 
   public String toHQL() throws SemanticException {
     Set<CandidateFact> cfacts = pickCandidateFactToQuery();
@@ -809,7 +811,8 @@ public class CubeQueryContext {
       }
     }
     LOG.info("Picked Fact:" + cfacts + " dimsToQuery:" + dimsToQuery);
-
+    pickedDimTables = dimsToQuery.values();
+    pickedFacts = cfacts;
     if (cfacts != null) {
       if (cfacts.size() > 1) {
         // Update ASTs for each fact

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/b86424c8/lens-cube/src/main/java/org/apache/lens/cube/parse/DimHQLContext.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/DimHQLContext.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/DimHQLContext.java
index d83db18..e200b12 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/DimHQLContext.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/DimHQLContext.java
@@ -102,7 +102,7 @@ abstract class DimHQLContext extends SimpleHQLContext {
       boolean added = (originalWhere != null);
       for (Dimension dim : queriedDims) {
         CandidateDim cdim = dimsToQuery.get(dim);
-        if (!cdim.isWhereClauseAdded() && !StringUtils.isBlank(cdim.whereClause)) {
+        if (!cdim.isWhereClauseAdded() && !StringUtils.isBlank(cdim.getWhereClause())) {
           appendWhereClause(whereBuf, StorageUtil.getWhereClause(cdim, query.getAliasForTabName(dim.getName())), added);
           added = true;
         }

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/b86424c8/lens-cube/src/main/java/org/apache/lens/cube/parse/JoinResolver.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/JoinResolver.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/JoinResolver.java
index ae7886b..7feb31b 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/JoinResolver.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/JoinResolver.java
@@ -576,8 +576,8 @@ class JoinResolver implements ContextRewriter {
     private String getStorageFilter(Map<Dimension, CandidateDim> dimsToQuery, AbstractCubeTable table, String alias) {
       String whereClause = "";
       if (dimsToQuery != null && dimsToQuery.get(table) != null) {
-        if (StringUtils.isNotBlank(dimsToQuery.get(table).whereClause)) {
-          whereClause = dimsToQuery.get(table).whereClause;
+        if (StringUtils.isNotBlank(dimsToQuery.get(table).getWhereClause())) {
+          whereClause = dimsToQuery.get(table).getWhereClause();
           if (alias != null) {
             whereClause = StorageUtil.getWhereClause(whereClause, alias);
           }

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/b86424c8/lens-cube/src/main/java/org/apache/lens/cube/parse/LeastPartitionResolver.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/LeastPartitionResolver.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/LeastPartitionResolver.java
index 0514ecf..d0b3ccb 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/LeastPartitionResolver.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/LeastPartitionResolver.java
@@ -62,7 +62,7 @@ class LeastPartitionResolver implements ContextRewriter {
   private int getPartCount(Set<CandidateFact> set) {
     int parts = 0;
     for (CandidateFact f : set) {
-      parts += f.numQueriedParts;
+      parts += f.getNumQueriedParts();
     }
     return parts;
   }

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/b86424c8/lens-cube/src/main/java/org/apache/lens/cube/parse/SingleFactHQLContext.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/SingleFactHQLContext.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/SingleFactHQLContext.java
index 5c39982..beaa551 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/SingleFactHQLContext.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/SingleFactHQLContext.java
@@ -56,7 +56,7 @@ class SingleFactHQLContext extends DimOnlyHQLContext {
       // resolve timerange positions and replace it by corresponding where
       // clause
       for (TimeRange range : query.getTimeRanges()) {
-        String rangeWhere = fact.rangeToWhereClause.get(range);
+        String rangeWhere = fact.getRangeToWhereClause().get(range);
         if (!StringUtils.isBlank(rangeWhere)) {
           ASTNode rangeAST;
           try {

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/b86424c8/lens-cube/src/main/java/org/apache/lens/cube/parse/StorageTableResolver.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/StorageTableResolver.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/StorageTableResolver.java
index c29d88f..23fd5a6 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/StorageTableResolver.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/StorageTableResolver.java
@@ -224,8 +224,8 @@ class StorageTableResolver implements ContextRewriter {
           continue;
         }
         // pick the first storage table
-        candidate.storageTable = storageTables.iterator().next();
-        candidate.whereClause = whereClauses.get(candidate.storageTable);
+        candidate.setStorageTable(storageTables.iterator().next());
+        candidate.setWhereClause(whereClauses.get(candidate.getStorageTable()));
       }
     }
   }
@@ -329,15 +329,16 @@ class StorageTableResolver implements ContextRewriter {
           noPartsForRange = true;
           continue;
         }
-        cfact.numQueriedParts += rangeParts.size();
+        cfact.incrementPartsQueried(rangeParts.size());
         answeringParts.addAll(rangeParts);
-        cfact.rangeToWhereClause.put(range, rangeWriter.getTimeRangeWhereClause(cubeql,
+        cfact.getPartsQueried().addAll(rangeParts);
+        cfact.getRangeToWhereClause().put(range, rangeWriter.getTimeRangeWhereClause(cubeql,
           cubeql.getAliasForTabName(cubeql.getCube().getName()), rangeParts));
       }
       if (!nonExistingParts.isEmpty()) {
         addNonExistingParts(cfact.fact.getName(), nonExistingParts);
       }
-      if (cfact.numQueriedParts == 0 || (failOnPartialData && (noPartsForRange || !nonExistingParts.isEmpty()))) {
+      if (cfact.getNumQueriedParts() == 0 || (failOnPartialData && (noPartsForRange || !nonExistingParts.isEmpty()))) {
         LOG.info("Not considering fact table:" + cfact.fact + " as it could" + " not find partition for given ranges: "
           + cubeql.getTimeRanges());
         /*
@@ -375,10 +376,10 @@ class StorageTableResolver implements ContextRewriter {
       }
       Set<String> storageTables = new LinkedHashSet<String>();
       storageTables.addAll(minimalStorageTables.keySet());
-      cfact.storageTables = storageTables;
+      cfact.setStorageTables(storageTables);
       // multi table select is already false, do not alter it
-      if (cfact.enabledMultiTableSelect) {
-        cfact.enabledMultiTableSelect = enabledMultiTableSelect;
+      if (cfact.isEnabledMultiTableSelect()) {
+        cfact.setEnabledMultiTableSelect(enabledMultiTableSelect);
       }
       LOG.info("Resolved partitions for fact " + cfact + ": " + answeringParts + " storageTables:" + storageTables);
     }

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/b86424c8/lens-cube/src/main/java/org/apache/lens/cube/parse/StorageUtil.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/StorageUtil.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/StorageUtil.java
index 354c735..24d9340 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/StorageUtil.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/StorageUtil.java
@@ -167,6 +167,6 @@ public final class StorageUtil {
   }
 
   public static String getWhereClause(CandidateDim dim, String alias) {
-    return getWhereClause(dim.whereClause, alias);
+    return getWhereClause(dim.getWhereClause(), alias);
   }
 }

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/b86424c8/lens-cube/src/main/java/org/apache/lens/driver/cube/RewriteUtil.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/driver/cube/RewriteUtil.java b/lens-cube/src/main/java/org/apache/lens/driver/cube/RewriteUtil.java
index 8ed4070..04be157 100644
--- a/lens-cube/src/main/java/org/apache/lens/driver/cube/RewriteUtil.java
+++ b/lens-cube/src/main/java/org/apache/lens/driver/cube/RewriteUtil.java
@@ -26,6 +26,7 @@ import org.apache.lens.api.LensException;
 import org.apache.lens.cube.parse.CubeQueryContext;
 import org.apache.lens.cube.parse.CubeQueryRewriter;
 import org.apache.lens.cube.parse.HQLParser;
+import org.apache.lens.server.api.driver.DriverQueryPlan;
 import org.apache.lens.server.api.driver.LensDriver;
 import org.apache.lens.server.api.metrics.MethodMetricsContext;
 import org.apache.lens.server.api.metrics.MethodMetricsFactory;
@@ -229,12 +230,19 @@ public final class RewriteUtil {
     }
   }
 
+  public static DriverQueryPlan getRewriterPlan(DriverRewriterRunnable rewriter) {
+    RewriterPlan plan = new RewriterPlan(rewriter.cubeQueryCtx);
+    return plan;
+  }
+
   public static class DriverRewriterRunnable implements Runnable {
     @Getter
     private final LensDriver driver;
     private final AbstractQueryContext ctx;
     private final List<CubeQueryInfo> cubeQueries;
     private final String replacedQuery;
+    /** Cube query context - set after rewriting */
+    private List<CubeQueryContext> cubeQueryCtx;
 
     @Getter
     /** Indicate if rewrite operation succeeded */
@@ -256,6 +264,9 @@ public final class RewriteUtil {
       this.ctx = ctx;
       this.cubeQueries = cubeQueries;
       this.replacedQuery = replacedQuery;
+      if (cubeQueries != null) {
+        cubeQueryCtx = new ArrayList<CubeQueryContext>(cubeQueries.size());
+      }
     }
 
     @Override
@@ -296,6 +307,7 @@ public final class RewriteUtil {
             qIndex + "-" + TOHQL_GAUGE);
           // toHQL actually generates the rewritten query
           String hqlQuery = cqc.toHQL();
+          cubeQueryCtx.add(cqc);
           toHQLGauge.markSuccess();
           qIndex++;
 
@@ -310,6 +322,8 @@ public final class RewriteUtil {
         builder.append(replacedQuery.substring(start));
 
         rewrittenQuery = builder.toString();
+        // set rewriter plan
+        ctx.getDriverContext().setDriverRewriterPlan(driver, getRewriterPlan(this));
         succeeded = true;
         ctx.setDriverQuery(driver, rewrittenQuery);
         LOG.info("Final rewritten query for driver:" + driver + " is: " + rewrittenQuery);

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/b86424c8/lens-cube/src/main/java/org/apache/lens/driver/cube/RewriterPlan.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/driver/cube/RewriterPlan.java b/lens-cube/src/main/java/org/apache/lens/driver/cube/RewriterPlan.java
new file mode 100644
index 0000000..b05814e
--- /dev/null
+++ b/lens-cube/src/main/java/org/apache/lens/driver/cube/RewriterPlan.java
@@ -0,0 +1,132 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.lens.driver.cube;
+
+import java.util.Collection;
+import java.util.HashSet;
+import java.util.Set;
+
+import org.apache.lens.api.query.QueryCost;
+import org.apache.lens.cube.metadata.FactPartition;
+import org.apache.lens.cube.parse.CandidateTable;
+import org.apache.lens.cube.parse.CubeQueryContext;
+import org.apache.lens.server.api.LensConfConstants;
+import org.apache.lens.server.api.driver.DriverQueryPlan;
+
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.metadata.Table;
+
+import lombok.extern.slf4j.Slf4j;
+
+/**
+ * The Rewriter plan
+ */
+@Slf4j
+public final class RewriterPlan extends DriverQueryPlan {
+
+  public RewriterPlan(Collection<CubeQueryContext> cubeQueries) {
+    extractPlan(cubeQueries);
+  }
+
+  @SuppressWarnings("unchecked") // required for (Set<FactPartition>) casting
+  void extractPlan(Collection<CubeQueryContext> cubeQueries) {
+    int selectCount = 0;
+    int havingCount = 0;
+    int joinCount = 0;
+    int groupbyCount = 0;
+    int orderbyCount = 0;
+
+    for (CubeQueryContext ctx : cubeQueries) {
+      if (ctx.getOrderByAST() != null) {
+        orderbyCount += ctx.getOrderByAST().getChildCount();
+      }
+      if (ctx.getGroupByAST() != null) {
+        groupbyCount += ctx.getGroupByAST().getChildCount();
+      }
+      if (ctx.getHavingAST() != null) {
+        havingCount += ctx.getHavingAST().getChildCount();
+      }
+      if (ctx.getSelectAST() != null) {
+        selectCount += ctx.getSelectAST().getChildCount();
+      }
+      if (ctx.getPickedDimTables() != null && !ctx.getPickedDimTables().isEmpty()) {
+        joinCount += ctx.getPickedDimTables().size();
+        for (CandidateTable dim : ctx.getPickedDimTables()) {
+          addTablesQueried(dim.getStorageTables());
+          if (partitions.get(dim.getName()) == null || partitions.get(dim.getName()).isEmpty()) {
+            // puts storage table to latest part
+            partitions.put(dim.getName(), dim.getPartsQueried());
+          }
+        }
+      }
+      if (ctx.getPickedFacts() != null && !ctx.getPickedFacts().isEmpty()) {
+        for (CandidateTable fact : ctx.getPickedFacts()) {
+          addTablesQueried(fact.getStorageTables());
+          Set<FactPartition> factParts = (Set<FactPartition>) partitions.get(fact.getName());
+          if (factParts == null) {
+            factParts = new HashSet<FactPartition>();
+            partitions.put(fact.getName(), factParts);
+          }
+          factParts.addAll((Set<FactPartition>) fact.getPartsQueried());
+        }
+      } else {
+        // if no facts are there, reducing join count by one, as target would be one of the dimtables picked
+        if (joinCount > 0) {
+          joinCount--;
+        }
+      }
+      for (String table : getTablesQueried()) {
+        if (!tableWeights.containsKey(table)) {
+          Table tbl;
+          try {
+            tbl = ctx.getMetastoreClient().getTable(table);
+          } catch (HiveException e) {
+            log.error("Error while getting table:" + table, e);
+            continue;
+          }
+          String costStr = tbl.getParameters().get(LensConfConstants.STORAGE_COST);
+          Double weight = 1d;
+          if (costStr != null) {
+            weight = Double.parseDouble(costStr);
+          }
+          tableWeights.put(table, weight);
+        }
+      }
+    }
+    setHasSubQuery(hasSubQuery || cubeQueries.size() > 1);
+    setNumGbys(groupbyCount);
+    setNumJoins(joinCount);
+    setNumOrderBys(orderbyCount);
+    setNumSels(selectCount);
+    setNumHaving(havingCount);
+    setNumAggreagateExprs(-1);
+    setNumSelDistincts(-1);
+    setNumFilters(-1);
+  }
+
+  @Override
+  public String getPlan() {
+    throw new UnsupportedOperationException("Not implemented");
+  }
+
+  @Override
+  public QueryCost getCost() {
+    throw new UnsupportedOperationException("Not implemented");
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/b86424c8/lens-cube/src/test/java/org/apache/lens/cube/parse/CubeTestSetup.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/cube/parse/CubeTestSetup.java b/lens-cube/src/test/java/org/apache/lens/cube/parse/CubeTestSetup.java
index 4888ae1..c9ac651 100644
--- a/lens-cube/src/test/java/org/apache/lens/cube/parse/CubeTestSetup.java
+++ b/lens-cube/src/test/java/org/apache/lens/cube/parse/CubeTestSetup.java
@@ -27,6 +27,7 @@ import org.apache.lens.api.LensException;
 import org.apache.lens.cube.metadata.*;
 import org.apache.lens.cube.metadata.timeline.EndsAndHolesPartitionTimeline;
 import org.apache.lens.cube.metadata.timeline.StoreAllPartitionTimeline;
+import org.apache.lens.server.api.LensConfConstants;
 
 import org.apache.commons.lang.StringUtils;
 import org.apache.hadoop.hive.conf.HiveConf;
@@ -45,6 +46,8 @@ import org.apache.hadoop.mapred.TextInputFormat;
 import org.testng.Assert;
 
 import com.google.common.collect.Lists;
+import com.google.common.collect.Maps;
+
 import lombok.extern.slf4j.Slf4j;
 
 /*
@@ -1326,6 +1329,9 @@ public class CubeTestSetup {
     s1.setOutputFormat(HiveIgnoreKeyTextOutputFormat.class.getCanonicalName());
     s1.setPartCols(partCols);
     s1.setTimePartCols(timePartCols);
+    Map<String, String> tblPros = Maps.newHashMap();
+    tblPros.put(LensConfConstants.STORAGE_COST, "100");
+    s1.setTblProps(tblPros);
     dumpPeriods.put(c1, UpdatePeriod.HOURLY);
 
     StorageTableDesc s2 = new StorageTableDesc();

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/b86424c8/lens-cube/src/test/java/org/apache/lens/cube/parse/TestRewriterPlan.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestRewriterPlan.java b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestRewriterPlan.java
new file mode 100644
index 0000000..0738b27
--- /dev/null
+++ b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestRewriterPlan.java
@@ -0,0 +1,147 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.lens.cube.parse;
+
+import static org.apache.lens.cube.parse.CubeTestSetup.TWO_DAYS_RANGE;
+
+import java.util.Arrays;
+import java.util.Collections;
+
+import org.apache.lens.driver.cube.RewriterPlan;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.ql.parse.ParseException;
+import org.apache.hadoop.hive.ql.parse.SemanticException;
+
+import org.testng.Assert;
+import org.testng.annotations.Test;
+
+public class TestRewriterPlan extends TestQueryRewrite {
+
+  Configuration conf = new Configuration();
+
+  TestRewriterPlan() {
+    conf.set(CubeQueryConfUtil.DRIVER_SUPPORTED_STORAGES, "C1,C2");
+    conf.setBoolean(CubeQueryConfUtil.DISABLE_AUTO_JOINS, false);
+    conf.setBoolean(CubeQueryConfUtil.ENABLE_SELECT_TO_GROUPBY, true);
+    conf.setBoolean(CubeQueryConfUtil.ENABLE_GROUP_BY_TO_SELECT, true);
+    conf.setBoolean(CubeQueryConfUtil.DISABLE_AGGREGATE_RESOLVER, false);
+  }
+
+  @Test
+  public void testPlanExtractionForSimpleQuery() throws Exception {
+    // simple query
+    CubeQueryContext ctx = rewriteCtx("cube select SUM(msr2) from testCube where " + TWO_DAYS_RANGE, conf);
+    ctx.toHQL();
+    RewriterPlan plan = new RewriterPlan(Collections.singleton(ctx));
+    Assert.assertNotNull(plan);
+    Assert.assertEquals(plan.getNumSels(), 1);
+    Assert.assertEquals(plan.getNumGbys(), 0);
+    Assert.assertEquals(plan.getNumJoins(), 0);
+    Assert.assertEquals(plan.getNumHaving(), 0);
+    Assert.assertEquals(plan.getNumFilters(), -1);
+    Assert.assertEquals(plan.getNumOrderBys(), 0);
+    Assert.assertEquals(plan.getNumAggreagateExprs(), -1);
+    Assert.assertFalse(plan.getTablesQueried().isEmpty());
+    Assert.assertTrue(plan.getTablesQueried().contains("TestQueryRewrite.c2_testfact"));
+    Assert.assertEquals(plan.getTableWeights().get("TestQueryRewrite.c2_testfact"), 1.0);
+    Assert.assertFalse(plan.getPartitions().isEmpty());
+    Assert.assertFalse(plan.getPartitions().get("testfact").isEmpty());
+    Assert.assertTrue(plan.getPartitions().get("testfact").size() > 1);
+  }
+
+  @Test
+  public void testPlanExtractionForComplexQuery() throws Exception {
+    // complex query
+    CubeQueryContext ctx = rewriteCtx("cube select citydim.name, SUM(msr2) from testCube where citydim.name != \"XYZ\""
+      + " and " + TWO_DAYS_RANGE + " having sum(msr2) > 1000 order by citydim.name limit 50", conf);
+    ctx.toHQL();
+    RewriterPlan plan = new RewriterPlan(Collections.singleton(ctx));
+    Assert.assertNotNull(plan);
+    Assert.assertEquals(plan.getNumSels(), 2);
+    Assert.assertEquals(plan.getNumGbys(), 1);
+    Assert.assertEquals(plan.getNumJoins(), 1);
+    Assert.assertEquals(plan.getNumHaving(), 1);
+    Assert.assertEquals(plan.getNumOrderBys(), 1);
+    Assert.assertEquals(plan.getNumFilters(), -1);
+    Assert.assertEquals(plan.getNumAggreagateExprs(), -1);
+    Assert.assertFalse(plan.getTablesQueried().isEmpty());
+    Assert.assertTrue(plan.getTablesQueried().contains("TestQueryRewrite.c2_testfact"));
+    Assert.assertTrue(plan.getTablesQueried().contains("TestQueryRewrite.c1_citytable"));
+    Assert.assertEquals(plan.getTableWeights().get("TestQueryRewrite.c2_testfact"), 1.0);
+    Assert.assertEquals(plan.getTableWeights().get("TestQueryRewrite.c1_citytable"), 100.0);
+    Assert.assertFalse(plan.getPartitions().isEmpty());
+    Assert.assertFalse(plan.getPartitions().get("testfact").isEmpty());
+    Assert.assertTrue(plan.getPartitions().get("testfact").size() > 1);
+    Assert.assertFalse(plan.getPartitions().get("citytable").isEmpty());
+    Assert.assertEquals(plan.getPartitions().get("citytable").size(), 1);
+  }
+
+  @Test
+  public void testPlanExtractionForMultipleQueries() throws Exception {
+    // simple query
+    CubeQueryContext ctx1 = rewriteCtx("cube select SUM(msr2) from testCube where " + TWO_DAYS_RANGE, conf);
+    ctx1.toHQL();
+    CubeQueryContext ctx2 = rewriteCtx("cube select citydim.name, SUM(msr2) from testCube where citydim.name != \"XYZ\""
+      + " and " + TWO_DAYS_RANGE + " having sum(msr2) > 1000 order by citydim.name limit 50", conf);
+    ctx2.toHQL();
+    RewriterPlan plan = new RewriterPlan(Arrays.asList(ctx1, ctx2));
+    Assert.assertNotNull(plan);
+    Assert.assertEquals(plan.getNumSels(), 3);
+    Assert.assertEquals(plan.getNumGbys(), 1);
+    Assert.assertEquals(plan.getNumJoins(), 1);
+    Assert.assertEquals(plan.getNumHaving(), 1);
+    Assert.assertEquals(plan.getNumOrderBys(), 1);
+    Assert.assertEquals(plan.getNumFilters(), -1);
+    Assert.assertEquals(plan.getNumAggreagateExprs(), -1);
+    Assert.assertFalse(plan.getTablesQueried().isEmpty());
+    Assert.assertTrue(plan.getTablesQueried().contains("TestQueryRewrite.c2_testfact"));
+    Assert.assertTrue(plan.getTablesQueried().contains("TestQueryRewrite.c1_citytable"));
+    Assert.assertEquals(plan.getTableWeights().get("TestQueryRewrite.c2_testfact"), 1.0);
+    Assert.assertEquals(plan.getTableWeights().get("TestQueryRewrite.c1_citytable"), 100.0);
+    Assert.assertFalse(plan.getPartitions().isEmpty());
+    Assert.assertFalse(plan.getPartitions().get("testfact").isEmpty());
+    Assert.assertTrue(plan.getPartitions().get("testfact").size() > 1);
+    Assert.assertFalse(plan.getPartitions().get("citytable").isEmpty());
+    Assert.assertEquals(plan.getPartitions().get("citytable").size(), 1);
+  }
+
+  @Test
+  public void testUnimplemented() throws SemanticException, ParseException {
+    CubeQueryContext ctx = rewriteCtx("cube select SUM(msr2) from testCube where " + TWO_DAYS_RANGE, conf);
+    ctx.toHQL();
+    RewriterPlan plan = new RewriterPlan(Collections.singleton(ctx));
+    Assert.assertNotNull(plan);
+    try {
+      plan.getPlan();
+      Assert.fail("getPlan is not implemented");
+    } catch (UnsupportedOperationException e) {
+      Assert.assertEquals(e.getMessage(), "Not implemented");
+    }
+
+    try {
+      plan.getCost();
+      Assert.fail("getCost is not implemented");
+    } catch (UnsupportedOperationException e) {
+      Assert.assertEquals(e.getMessage(), "Not implemented");
+    }
+
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/b86424c8/lens-driver-hive/pom.xml
----------------------------------------------------------------------
diff --git a/lens-driver-hive/pom.xml b/lens-driver-hive/pom.xml
index 3b8e73a..080c8f5 100644
--- a/lens-driver-hive/pom.xml
+++ b/lens-driver-hive/pom.xml
@@ -82,6 +82,11 @@
       <version>${project.version}</version>
     </dependency>
     <dependency>
+      <groupId>org.apache.lens</groupId>
+      <artifactId>lens-cube</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <dependency>
       <groupId>org.apache.hive</groupId>
       <artifactId>hive-common</artifactId>
     </dependency>

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/b86424c8/lens-driver-hive/src/main/java/org/apache/lens/driver/hive/HiveDriver.java
----------------------------------------------------------------------
diff --git a/lens-driver-hive/src/main/java/org/apache/lens/driver/hive/HiveDriver.java b/lens-driver-hive/src/main/java/org/apache/lens/driver/hive/HiveDriver.java
index 1dc87b6..218dc53 100644
--- a/lens-driver-hive/src/main/java/org/apache/lens/driver/hive/HiveDriver.java
+++ b/lens-driver-hive/src/main/java/org/apache/lens/driver/hive/HiveDriver.java
@@ -333,6 +333,7 @@ public class HiveDriver implements LensDriver {
     connectionExpiryTimeout = this.driverConf.getLong(HS2_CONNECTION_EXPIRY_DELAY, DEFAULT_EXPIRY_DELAY);
     whetherCalculatePriority = this.driverConf.getBoolean(HS2_CALCULATE_PRIORITY, true);
     queryPriorityDecider = new DurationBasedQueryPriorityDecider(
+      this,
       this.driverConf.get(HS2_PRIORITY_RANGES, HS2_PRIORITY_DEFAULT_RANGES),
       this.driverConf.getFloat(HS2_PARTITION_WEIGHT_MONTHLY, MONTHLY_PARTITION_WEIGHT_DEFAULT),
       this.driverConf.getFloat(HS2_PARTITION_WEIGHT_DAILY, DAILY_PARTITION_WEIGHT_DEFAULT),
@@ -490,13 +491,13 @@ public class HiveDriver implements LensDriver {
       LOG.info("whetherCalculatePriority: " + whetherCalculatePriority);
       if (whetherCalculatePriority) {
         try {
-          // call explain for the plan to be filled
-          explain(ctx);
           // Inside try since non-data fetching queries can also be executed by async method.
           String priority = queryPriorityDecider.decidePriority(ctx).toString();
           ctx.getSelectedDriverConf().set("mapred.job.priority", priority);
           LOG.info("set priority to " + priority);
-        } catch (LensException e) {
+        } catch (Exception e) {
+          // not failing query launch when setting priority fails
+          // priority will be set to usually NORMAL - the default in underlying system.
           LOG.error("could not set priority for lens session id:" + ctx.getLensSessionIdentifier()
             + "User query: " + ctx.getUserQuery(), e);
         }

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/b86424c8/lens-driver-hive/src/main/java/org/apache/lens/driver/hive/HiveQueryPlan.java
----------------------------------------------------------------------
diff --git a/lens-driver-hive/src/main/java/org/apache/lens/driver/hive/HiveQueryPlan.java b/lens-driver-hive/src/main/java/org/apache/lens/driver/hive/HiveQueryPlan.java
index 64a6bba..fce93a9 100644
--- a/lens-driver-hive/src/main/java/org/apache/lens/driver/hive/HiveQueryPlan.java
+++ b/lens-driver-hive/src/main/java/org/apache/lens/driver/hive/HiveQueryPlan.java
@@ -19,9 +19,9 @@
 package org.apache.lens.driver.hive;
 
 import java.util.ArrayList;
-import java.util.LinkedHashMap;
+import java.util.HashSet;
 import java.util.List;
-import java.util.Map;
+import java.util.Set;
 
 import org.apache.lens.api.query.QueryCost;
 import org.apache.lens.api.query.QueryPrepareHandle;
@@ -42,9 +42,6 @@ public class HiveQueryPlan extends DriverQueryPlan {
   /** The explain output. */
   private String explainOutput;
 
-  /** The partitions. */
-  private Map<String, List<String>> partitions;
-
   static final QueryCost HIVE_DRIVER_COST = new QueryCost(1, 1.0);
 
   /**
@@ -105,7 +102,6 @@ public class HiveQueryPlan extends DriverQueryPlan {
     setPrepareHandle(prepared);
     setExecMode(ExecMode.BATCH);
     setScanMode(ScanMode.PARTIAL_SCAN);
-    partitions = new LinkedHashMap<String, List<String>>();
     this.explainOutput = StringUtils.join(explainOutput, '\n');
     extractPlanDetails(explainOutput, metastoreConf);
   }
@@ -122,7 +118,6 @@ public class HiveQueryPlan extends DriverQueryPlan {
     ParserState prevState = state;
     ArrayList<ParserState> states = new ArrayList<ParserState>();
     Hive metastore = Hive.get(metastoreConf);
-    List<String> partList = null;
 
     for (int i = 0; i < explainOutput.size(); i++) {
       String line = explainOutput.get(i);
@@ -203,9 +198,9 @@ public class HiveQueryPlan extends DriverQueryPlan {
             }
 
             if (partConditionStr != null) {
-              List<String> tablePartitions = partitions.get(table);
+              Set<String> tablePartitions = (Set<String>) partitions.get(table);
               if (tablePartitions == null) {
-                tablePartitions = new ArrayList<String>();
+                tablePartitions = new HashSet<String>();
                 partitions.put(table, tablePartitions);
               }
               tablePartitions.add(partConditionStr);
@@ -270,9 +265,4 @@ public class HiveQueryPlan extends DriverQueryPlan {
      */
     return HIVE_DRIVER_COST;
   }
-
-  @Override
-  public Map<String, List<String>> getPartitions() {
-    return partitions;
-  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/b86424c8/lens-driver-hive/src/main/java/org/apache/lens/driver/hive/priority/DurationBasedQueryPriorityDecider.java
----------------------------------------------------------------------
diff --git a/lens-driver-hive/src/main/java/org/apache/lens/driver/hive/priority/DurationBasedQueryPriorityDecider.java b/lens-driver-hive/src/main/java/org/apache/lens/driver/hive/priority/DurationBasedQueryPriorityDecider.java
index 4f471cb..c0156e5 100644
--- a/lens-driver-hive/src/main/java/org/apache/lens/driver/hive/priority/DurationBasedQueryPriorityDecider.java
+++ b/lens-driver-hive/src/main/java/org/apache/lens/driver/hive/priority/DurationBasedQueryPriorityDecider.java
@@ -18,13 +18,14 @@
  */
 package org.apache.lens.driver.hive.priority;
 
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.List;
 import java.util.Map;
+import java.util.Set;
 
 import org.apache.lens.api.LensException;
 import org.apache.lens.api.Priority;
+import org.apache.lens.cube.metadata.FactPartition;
+import org.apache.lens.cube.metadata.UpdatePeriod;
+import org.apache.lens.server.api.driver.LensDriver;
 import org.apache.lens.server.api.priority.CostToPriorityRangeConf;
 import org.apache.lens.server.api.priority.QueryPriorityDecider;
 import org.apache.lens.server.api.query.AbstractQueryContext;
@@ -32,27 +33,30 @@ import org.apache.lens.server.api.query.AbstractQueryContext;
 public class DurationBasedQueryPriorityDecider implements QueryPriorityDecider {
 
 
-  CostToPriorityRangeConf costToPriorityRangeMap;
+  private final CostToPriorityRangeConf costToPriorityRangeMap;
 
   /** Partition Weights for priority calculation based on selected partitions **/
 
   /** weight of monthly partition * */
-  private float monthlyPartitionWeight;
+  private final float monthlyPartitionWeight;
   /** weight of daily partition * */
-  private float dailyPartitionWeight;
+  private final float dailyPartitionWeight;
   /** weight of hourly partition * */
-  private float hourlyPartitionWeight;
+  private final float hourlyPartitionWeight;
+  private final LensDriver driver;
+
 
   /**
    * Constructor. Takes three weights for partitions.
-   *
+   * @param driver
    * @param ranges
    * @param monthlyPartitoinWeight
    * @param dailyPartitionWeight
    * @param hourlyPartitionWeight
    */
-  public DurationBasedQueryPriorityDecider(String ranges,
-    float monthlyPartitoinWeight, float dailyPartitionWeight, float hourlyPartitionWeight) {
+  public DurationBasedQueryPriorityDecider(LensDriver driver,
+    String ranges, float monthlyPartitoinWeight, float dailyPartitionWeight, float hourlyPartitionWeight) {
+    this.driver = driver;
     this.costToPriorityRangeMap = new CostToPriorityRangeConf(ranges);
     this.monthlyPartitionWeight = monthlyPartitoinWeight;
     this.dailyPartitionWeight = dailyPartitionWeight;
@@ -74,27 +78,6 @@ public class DurationBasedQueryPriorityDecider implements QueryPriorityDecider {
   }
 
   /**
-   * Extract partitions from AbstractQueryContext. Hive currently gives partitions in the format
-   * {a:[dt partition1, dt partition2]...}. This method removes the "dt"
-   *
-   * @param queryContext
-   * @return all the tables along with their selected partitions.
-   * @throws LensException
-   */
-  protected Map<String, List<String>> extractPartitions(AbstractQueryContext queryContext) throws LensException {
-    Map<String, List<String>> partitions = new HashMap<String, List<String>>();
-    for (Map.Entry<String, List<String>> entry : queryContext.getDriverContext().getSelectedDriverQueryPlan()
-      .getPartitions().entrySet()) {
-      partitions.put(entry.getKey(), new ArrayList<String>());
-      for (String s : entry.getValue()) {
-        String[] splits = s.split("\\s+");
-        partitions.get(entry.getKey()).add(splits[splits.length - 1]); //last split.
-      }
-    }
-    return partitions;
-  }
-
-  /**
    * Calculates total cost based on weights of selected tables and their selected partitions
    *
    * @param queryContext
@@ -102,21 +85,35 @@ public class DurationBasedQueryPriorityDecider implements QueryPriorityDecider {
    * @throws LensException
    */
 
-  float getDurationCost(AbstractQueryContext queryContext) throws LensException {
-    final Map<String, List<String>> partitions = extractPartitions(queryContext);
-    LOG.info("partitions picked: " + partitions);
+  @SuppressWarnings("unchecked") // required for (Set<FactPartition>) casting
+  private float getDurationCost(AbstractQueryContext queryContext) throws LensException {
     float cost = 0;
-    for (String table : partitions.keySet()) {
-      for (String partition : partitions.get(table)) {
-        if (!partition.equals("latest")) {
-          cost += queryContext.getDriverContext().getSelectedDriverQueryPlan().getTableWeight(table)
-            * getNormalizedPartitionCost(partition);
+    if (queryContext.getDriverContext().getDriverRewriterPlan(driver) != null) {
+      // the calculation is done only for cube queries involving fact tables
+      // for all other native table queries and dimension only queries, the cost will be zero and priority will
+      // be the highest one associated with zero cost
+      for (Map.Entry<String, Set<?>> entry : queryContext.getDriverContext().getDriverRewriterPlan(driver)
+        .getPartitions().entrySet()) {
+        if (!entry.getValue().isEmpty() && entry.getValue().iterator().next() instanceof FactPartition) {
+          Set<FactPartition> factParts = (Set<FactPartition>)entry.getValue();
+          for (FactPartition partition : factParts) {
+            cost += getTableWeights(partition.getStorageTables(), queryContext) * getNormalizedPartitionCost(
+              partition.getPeriod());
+          }
         }
       }
     }
     return cost;
   }
 
+  private float getTableWeights(Set<String> tables, AbstractQueryContext queryContext) {
+    float weight = 0;
+    for (String tblName : tables) {
+      weight += queryContext.getDriverContext().getDriverRewriterPlan(driver).getTableWeight(tblName);
+    }
+    return weight == 0 ? 1 : weight;
+  }
+
   /**
    * Normalized cost of a partition. PartitionWeight multiplied by number of days in that partition.
    *
@@ -124,16 +121,16 @@ public class DurationBasedQueryPriorityDecider implements QueryPriorityDecider {
    * @return normalized cost.
    * @throws LensException
    */
-  float getNormalizedPartitionCost(String partition) throws LensException {
-    switch (partition.length()) {
-    case 7: //monthly
+  private float getNormalizedPartitionCost(UpdatePeriod updatePeriod) throws LensException {
+    switch (updatePeriod) {
+    case MONTHLY: //monthly
       return 30 * monthlyPartitionWeight;
-    case 10: // daily
+    case DAILY: // daily
       return 1 * dailyPartitionWeight;
-    case 13: // hourly
-      return (1 / 24) * hourlyPartitionWeight;
+    case HOURLY: // hourly
+      return (1.0f / 24) * hourlyPartitionWeight;
     default:
-      throw new LensException("Could not recognize partition: " + partition);
+      throw new LensException("Weight not defined for " + updatePeriod);
     }
   }
 }

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/b86424c8/lens-driver-hive/src/test/java/org/apache/lens/driver/hive/TestHiveDriver.java
----------------------------------------------------------------------
diff --git a/lens-driver-hive/src/test/java/org/apache/lens/driver/hive/TestHiveDriver.java b/lens-driver-hive/src/test/java/org/apache/lens/driver/hive/TestHiveDriver.java
index 8a5839b..b16c346 100644
--- a/lens-driver-hive/src/test/java/org/apache/lens/driver/hive/TestHiveDriver.java
+++ b/lens-driver-hive/src/test/java/org/apache/lens/driver/hive/TestHiveDriver.java
@@ -21,6 +21,7 @@ package org.apache.lens.driver.hive;
 import static org.testng.Assert.*;
 
 import java.io.*;
+import java.text.ParseException;
 import java.util.*;
 
 import org.apache.lens.api.LensConf;
@@ -28,13 +29,14 @@ import org.apache.lens.api.LensException;
 import org.apache.lens.api.Priority;
 import org.apache.lens.api.query.QueryCost;
 import org.apache.lens.api.query.QueryHandle;
+import org.apache.lens.cube.metadata.FactPartition;
+import org.apache.lens.cube.metadata.UpdatePeriod;
 import org.apache.lens.driver.hive.priority.DurationBasedQueryPriorityDecider;
 import org.apache.lens.server.api.LensConfConstants;
 import org.apache.lens.server.api.driver.*;
 import org.apache.lens.server.api.driver.DriverQueryStatus.DriverQueryState;
 import org.apache.lens.server.api.query.AbstractQueryContext;
 import org.apache.lens.server.api.query.ExplainQueryContext;
-import org.apache.lens.server.api.query.MockQueryContext;
 import org.apache.lens.server.api.query.PreparedQueryContext;
 import org.apache.lens.server.api.query.QueryContext;
 import org.apache.lens.server.api.util.LensUtil;
@@ -173,7 +175,7 @@ public class TestHiveDriver {
   protected void createTestTable(String tableName) throws Exception {
     System.out.println("Hadoop Location: " + System.getProperty("hadoop.bin.path"));
     String createTable = "CREATE TABLE IF NOT EXISTS " + tableName + "(ID STRING)" + " TBLPROPERTIES ('"
-        + LensConfConstants.STORAGE_COST + "'='500')";
+      + LensConfConstants.STORAGE_COST + "'='500')";
     // Create test table
     QueryContext context = createContext(createTable, conf);
     LensResultSet resultSet = driver.execute(context);
@@ -727,8 +729,9 @@ public class TestHiveDriver {
     assertEquals(plan.getTableWeight(dataBase + ".test_part_table"), 500.0);
     System.out.println("Parts:" + plan.getPartitions());
     assertFalse(plan.getPartitions().isEmpty());
-    assertTrue(plan.getPartitions().get(dataBase + ".test_part_table").get(0).contains("today"));
-    assertTrue(plan.getPartitions().get(dataBase + ".test_part_table").get(0).contains("dt"));
+    assertEquals(plan.getPartitions().size(), 1);
+    assertTrue(((String)plan.getPartitions().get(dataBase + ".test_part_table").iterator().next()).contains("today"));
+    assertTrue(((String)plan.getPartitions().get(dataBase + ".test_part_table").iterator().next()).contains("dt"));
   }
 
   /**
@@ -792,44 +795,110 @@ public class TestHiveDriver {
    *
    * @throws IOException
    * @throws LensException
+   * @throws ParseException
    */
   @Test
-  public void testPriority() throws IOException, LensException {
+  public void testPriority() throws IOException, LensException, ParseException {
     Configuration conf = new Configuration();
-    final MockDriver mockDriver = new MockDriver();
-    mockDriver.configure(conf);
-    DurationBasedQueryPriorityDecider alwaysNormalPriorityDecider = new DurationBasedQueryPriorityDecider("",
+    DurationBasedQueryPriorityDecider alwaysNormalPriorityDecider = new DurationBasedQueryPriorityDecider(driver,
+      "",
       HiveDriver.MONTHLY_PARTITION_WEIGHT_DEFAULT,
-      HiveDriver.DAILY_PARTITION_WEIGHT_DEFAULT,
-      HiveDriver.HOURLY_PARTITION_WEIGHT_DEFAULT
-    );
+      HiveDriver.DAILY_PARTITION_WEIGHT_DEFAULT, HiveDriver.HOURLY_PARTITION_WEIGHT_DEFAULT);
     BufferedReader br = new BufferedReader(new InputStreamReader(
       TestHiveDriver.class.getResourceAsStream("/priority_tests.data")));
     String line;
     while ((line = br.readLine()) != null) {
       String[] kv = line.split("\\s*:\\s*");
 
-      final List<String> partitions = Arrays.asList(kv[0].trim().split("\\s*,\\s*"));
+      final Set<FactPartition> partitions = getFactParts(Arrays.asList(kv[0].trim().split("\\s*,\\s*")));
       final Priority expected = Priority.valueOf(kv[1]);
-      final HashMap<LensDriver, String> driverQuery1 = new HashMap<LensDriver, String>() {
-        {
-          put(mockDriver, "driverQuery1");
+      AbstractQueryContext ctx = createContext("test priority query", conf);
+      ctx.getDriverContext().setDriverRewriterPlan(driver, new DriverQueryPlan() {
+
+        @Override
+        public String getPlan() {
+          return null;
         }
-      };
-      AbstractQueryContext ctx = new MockQueryContext("driverQuery1", new LensConf(), conf,
-        driverQuery1.keySet());
-      ctx.getDriverContext().setDriverQueryPlans(driverQuery1, ctx);
-      ctx.setSelectedDriver(mockDriver);
-
-      ((MockDriver.MockQueryPlan) ctx.getDriverContext().getDriverQueryPlan(mockDriver)).setPartitions(
-        new HashMap<String, List<String>>() {
+
+        @Override
+        public QueryCost getCost() {
+          return null;
+        }
+      });
+
+      ctx.getDriverContext().getDriverRewriterPlan(driver).getPartitions().putAll(
+        new HashMap<String, Set<FactPartition>>() {
           {
             put("table1", partitions);
           }
-        }
-      );
+        });
+      ctx.getDriverContext().getDriverRewriterPlan(driver).getTableWeights().putAll(
+        new HashMap<String, Double>() {
+          {
+            put("table1", 1.0);
+          }
+        });
       Assert.assertEquals(expected, driver.queryPriorityDecider.decidePriority(ctx));
       Assert.assertEquals(Priority.NORMAL, alwaysNormalPriorityDecider.decidePriority(ctx));
     }
+    // test priority without fact partitions
+    AbstractQueryContext ctx = createContext("test priority query", conf);
+    ctx.getDriverContext().setDriverRewriterPlan(driver, new DriverQueryPlan() {
+
+      @Override
+      public String getPlan() {
+        return null;
+      }
+
+      @Override
+      public QueryCost getCost() {
+        return null;
+      }
+    });
+
+    ctx.getDriverContext().getDriverRewriterPlan(driver).getPartitions().putAll(
+      new HashMap<String, Set<String>>() {
+        {
+          put("table1", new HashSet<String>());
+        }
+      });
+    ctx.getDriverContext().getDriverRewriterPlan(driver).getTableWeights().putAll(
+      new HashMap<String, Double>() {
+        {
+          put("table1", 1.0);
+        }
+      });
+    Assert.assertEquals(Priority.VERY_HIGH, driver.queryPriorityDecider.decidePriority(ctx));
+    Assert.assertEquals(Priority.NORMAL, alwaysNormalPriorityDecider.decidePriority(ctx));
+
+    // test priority without rewriter plan
+    ctx = createContext("test priority query", conf);
+    Assert.assertEquals(Priority.VERY_HIGH, driver.queryPriorityDecider.decidePriority(ctx));
+    Assert.assertEquals(Priority.NORMAL, alwaysNormalPriorityDecider.decidePriority(ctx));
+
+  }
+
+  private Set<FactPartition> getFactParts(List<String> partStrings) throws ParseException {
+    Set<FactPartition> factParts = new HashSet<FactPartition>();
+    for (String partStr : partStrings) {
+      String[] partEls = partStr.split(" ");
+      UpdatePeriod p = null;
+      String partSpec = partEls[1];
+      switch (partSpec.length()) {
+      case 7: //monthly
+        p = UpdatePeriod.MONTHLY;
+        break;
+      case 10: // daily
+        p = UpdatePeriod.DAILY;
+        break;
+      case 13: // hourly
+        p = UpdatePeriod.HOURLY;
+        break;
+      }
+      FactPartition part = new FactPartition(partEls[0], p.format().parse(partSpec), p, null, p.format(),
+        Collections.singleton("table1"));
+      factParts.add(part);
+    }
+    return factParts;
   }
 }

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/b86424c8/lens-driver-hive/src/test/resources/priority_tests.data
----------------------------------------------------------------------
diff --git a/lens-driver-hive/src/test/resources/priority_tests.data b/lens-driver-hive/src/test/resources/priority_tests.data
index 230df95..98b82ef 100644
--- a/lens-driver-hive/src/test/resources/priority_tests.data
+++ b/lens-driver-hive/src/test/resources/priority_tests.data
@@ -1,4 +1,4 @@
 dt 2014-01-02-01: VERY_HIGH
-dt 2013-12,2014-01-01, dt 2014-01-02-00, dt 2014-01-02-01: HIGH
-dt 2013-12,2014-01, dt 2014-02, dt 2014-02-01-00: NORMAL
-dt 2013-12,2014-01, dt 2014-02, dt 2014-03, dt 2014-04, dt 2014-05: LOW
\ No newline at end of file
+dt 2013-12,dt 2014-01-01, dt 2014-01-02-00, dt 2014-01-02-01: HIGH
+dt 2013-12,dt 2014-01, dt 2014-02, dt 2014-02-01-00: NORMAL
+dt 2013-12,dt 2014-01, dt 2014-02, dt 2014-03, dt 2014-04, dt 2014-05: LOW
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/b86424c8/lens-server-api/src/main/java/org/apache/lens/server/api/driver/DriverQueryPlan.java
----------------------------------------------------------------------
diff --git a/lens-server-api/src/main/java/org/apache/lens/server/api/driver/DriverQueryPlan.java b/lens-server-api/src/main/java/org/apache/lens/server/api/driver/DriverQueryPlan.java
index 38ebc55..1d41720 100644
--- a/lens-server-api/src/main/java/org/apache/lens/server/api/driver/DriverQueryPlan.java
+++ b/lens-server-api/src/main/java/org/apache/lens/server/api/driver/DriverQueryPlan.java
@@ -22,8 +22,9 @@ import java.io.UnsupportedEncodingException;
 import java.net.URLEncoder;
 import java.util.ArrayList;
 import java.util.HashMap;
-import java.util.List;
+import java.util.HashSet;
 import java.util.Map;
+import java.util.Set;
 
 import org.apache.lens.api.query.QueryCost;
 import org.apache.lens.api.query.QueryHandle;
@@ -125,7 +126,7 @@ public abstract class DriverQueryPlan {
   /**
    * The tables queried.
    */
-  protected final List<String> tablesQueried = new ArrayList<String>();
+  protected final Set<String> tablesQueried = new HashSet<String>();
 
   /**
    * The has sub query.
@@ -187,6 +188,8 @@ public abstract class DriverQueryPlan {
    */
   protected QueryPrepareHandle handle;
 
+  protected Map<String, Set<?>> partitions = new HashMap<String, Set<?>>();
+
   /**
    * Get the query plan
    *
@@ -314,7 +317,7 @@ public abstract class DriverQueryPlan {
    *
    * @return the tablesQueried
    */
-  public List<String> getTablesQueried() {
+  public Set<String> getTablesQueried() {
     return tablesQueried;
   }
 
@@ -323,11 +326,20 @@ public abstract class DriverQueryPlan {
    *
    * @param table the table
    */
-  protected void addTablesQueries(String table) {
+  protected void addTablesQueried(String table) {
     this.tablesQueried.add(table);
   }
 
   /**
+   * Set the list of table names to be queried.
+   *
+   * @param table the table
+   */
+  protected void addTablesQueried(Set<String> tables) {
+    this.tablesQueried.addAll(tables);
+  }
+
+  /**
    * Get the number of filters in query
    *
    * @return the numFilters
@@ -357,8 +369,8 @@ public abstract class DriverQueryPlan {
   /**
    * Set if query has subquery.
    */
-  protected void setHasSubQuery() {
-    this.hasSubQuery = true;
+  protected void setHasSubQuery(boolean hasSubQuery) {
+    this.hasSubQuery = hasSubQuery;
   }
 
   /**
@@ -579,17 +591,31 @@ public abstract class DriverQueryPlan {
     this.handle = handle;
   }
 
+  /**
+   * Get number of aggregate expressions.
+   *
+   * @return
+   */
   public int getNumAggreagateExprs() {
     return numAggrExprs;
   }
 
   /**
+   * Set num aggregate expressions
+   *
+   * @param numAggrs
+   */
+  protected void setNumAggreagateExprs(int numAggrs) {
+    numAggrExprs = numAggrs;
+  }
+
+  /**
    * Get list of partitions queried for each table
    *
    * @return
    */
-  public Map<String, List<String>> getPartitions() {
-    return null;
+  public Map<String, Set<?>> getPartitions() {
+    return partitions;
   }
 
   /**
@@ -600,8 +626,9 @@ public abstract class DriverQueryPlan {
    */
   public QueryPlan toQueryPlan() throws UnsupportedEncodingException {
     return new QueryPlan(numJoins, numGbys, numSels, numSelDi, numHaving, numObys, numAggrExprs, numFilters,
-      tablesQueried, hasSubQuery, execMode != null ? execMode.name() : null, scanMode != null ? scanMode.name()
-      : null, tableWeights, joinWeight, gbyWeight, filterWeight, havingWeight, obyWeight, selectWeight, null,
+      new ArrayList<String>(tablesQueried), hasSubQuery, execMode != null ? execMode.name() : null,
+      scanMode != null ? scanMode.name() : null, tableWeights, joinWeight, gbyWeight, filterWeight, havingWeight,
+      obyWeight, selectWeight, null,
       URLEncoder.encode(getPlan(), "UTF-8"), getCost(), false, null);
   }
 }

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/b86424c8/lens-server-api/src/main/java/org/apache/lens/server/api/query/AbstractQueryContext.java
----------------------------------------------------------------------
diff --git a/lens-server-api/src/main/java/org/apache/lens/server/api/query/AbstractQueryContext.java b/lens-server-api/src/main/java/org/apache/lens/server/api/query/AbstractQueryContext.java
index 5233569..6799e0c 100644
--- a/lens-server-api/src/main/java/org/apache/lens/server/api/query/AbstractQueryContext.java
+++ b/lens-server-api/src/main/java/org/apache/lens/server/api/query/AbstractQueryContext.java
@@ -374,6 +374,7 @@ public abstract class AbstractQueryContext implements Serializable {
    * Gets HiveConf corresponding to query conf.
    *
    * Should be called judiciously, because constructing HiveConf from conf object is costly.
+   * The field is set to null after query completion. Should not be accessed after completion.
    * @return
    */
   public HiveConf getHiveConf() {
@@ -417,4 +418,13 @@ public abstract class AbstractQueryContext implements Serializable {
   public String getLogHandle() {
     return this.getUserQuery();
   }
+
+  public void clearTransientStateAfterLaunch() {
+    driverContext.clearTransientStateAfterLaunch();
+  }
+
+  public void clearTransientStateAfterCompleted() {
+    driverContext.clearTransientStateAfterCompleted();
+    hiveConf = null;
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/b86424c8/lens-server-api/src/main/java/org/apache/lens/server/api/query/DriverSelectorQueryContext.java
----------------------------------------------------------------------
diff --git a/lens-server-api/src/main/java/org/apache/lens/server/api/query/DriverSelectorQueryContext.java b/lens-server-api/src/main/java/org/apache/lens/server/api/query/DriverSelectorQueryContext.java
index fef834f..2fd592a 100644
--- a/lens-server-api/src/main/java/org/apache/lens/server/api/query/DriverSelectorQueryContext.java
+++ b/lens-server-api/src/main/java/org/apache/lens/server/api/query/DriverSelectorQueryContext.java
@@ -136,6 +136,14 @@ public class DriverSelectorQueryContext {
     @Setter
     /** final driver query - after driver rewrites the query*/
     protected String finalDriverQuery;
+
+    @Getter
+    @Setter
+    /**
+     * Plan generated from rewriter. This does not contain final driver plan. This contains the information
+     * extracted at rewriter only
+     */
+    protected DriverQueryPlan rewriterPlan;
   }
 
   /**
@@ -295,6 +303,17 @@ public class DriverSelectorQueryContext {
     return queries;
   }
 
+  public void setDriverRewriterPlan(LensDriver driver, DriverQueryPlan rewriterPlan) {
+    if (driverQueryContextMap.get(driver) != null) {
+      driverQueryContextMap.get(driver).setRewriterPlan(rewriterPlan);
+    }
+  }
+
+  public DriverQueryPlan getDriverRewriterPlan(LensDriver driver) {
+    return driverQueryContextMap.get(driver) != null
+      ? driverQueryContextMap.get(driver).getRewriterPlan() : null;
+  }
+
   public DriverQueryPlan getDriverQueryPlan(LensDriver driver) {
     return driverQueryContextMap.get(driver) != null
       ? driverQueryContextMap.get(driver).getDriverQueryPlan() : null;
@@ -323,4 +342,17 @@ public class DriverSelectorQueryContext {
   public void setDriverQueryPlan(LensDriver driver, DriverQueryPlan qp) {
     driverQueryContextMap.get(driver).setDriverQueryPlan(qp);
   }
+
+  void clearTransientStateAfterLaunch() {
+    for (DriverQueryContext driverCtx : driverQueryContextMap.values()) {
+      driverCtx.driverQueryPlan = null;
+      driverCtx.rewriterPlan = null;
+    }
+  }
+
+  void clearTransientStateAfterCompleted() {
+    for (DriverQueryContext driverCtx : driverQueryContextMap.values()) {
+      driverCtx.driverSpecificConf = null;
+    }
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/b86424c8/lens-server-api/src/test/java/org/apache/lens/server/api/driver/MockDriver.java
----------------------------------------------------------------------
diff --git a/lens-server-api/src/test/java/org/apache/lens/server/api/driver/MockDriver.java b/lens-server-api/src/test/java/org/apache/lens/server/api/driver/MockDriver.java
index 67c73b8..406e2ac 100644
--- a/lens-server-api/src/test/java/org/apache/lens/server/api/driver/MockDriver.java
+++ b/lens-server-api/src/test/java/org/apache/lens/server/api/driver/MockDriver.java
@@ -22,7 +22,6 @@ import java.io.IOException;
 import java.io.ObjectInput;
 import java.io.ObjectOutput;
 import java.util.List;
-import java.util.Map;
 import java.util.UUID;
 import java.util.concurrent.atomic.AtomicInteger;
 
@@ -40,9 +39,6 @@ import org.apache.lens.server.api.query.QueryContext;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hive.service.cli.ColumnDescriptor;
 
-import lombok.Getter;
-import lombok.Setter;
-
 /**
  * The Class MockDriver.
  */
@@ -117,10 +113,6 @@ public class MockDriver implements LensDriver {
       tableWeights.put("table3", 3.0);
     }
 
-    @Getter
-    @Setter
-    private Map<String, List<String>> partitions;
-
     @Override
     public String getPlan() {
       return query;

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/b86424c8/lens-server/src/main/java/org/apache/lens/server/query/QueryExecutionServiceImpl.java
----------------------------------------------------------------------
diff --git a/lens-server/src/main/java/org/apache/lens/server/query/QueryExecutionServiceImpl.java b/lens-server/src/main/java/org/apache/lens/server/query/QueryExecutionServiceImpl.java
index 390071c..ea2da14 100644
--- a/lens-server/src/main/java/org/apache/lens/server/query/QueryExecutionServiceImpl.java
+++ b/lens-server/src/main/java/org/apache/lens/server/query/QueryExecutionServiceImpl.java
@@ -578,6 +578,7 @@ public class QueryExecutionServiceImpl extends LensService implements QueryExecu
     launchedQueries.add(ctx);
     ctx.setLaunchTime(System.currentTimeMillis());
     fireStatusChangeEvent(ctx, ctx.getStatus(), before);
+    ctx.clearTransientStateAfterLaunch();
   }
 
   /**
@@ -610,6 +611,7 @@ public class QueryExecutionServiceImpl extends LensService implements QueryExecu
       }
     }
     finishedQueries.add(new FinishedQuery(ctx));
+    ctx.clearTransientStateAfterLaunch();
   }
 
   void setSuccessState(QueryContext ctx) throws LensException {


[04/50] [abbrv] incubator-lens git commit: LENS-439 : Fix synchronization bug wrt get methods on CubeMetastoreClient (amareshwari)

Posted by am...@apache.org.
LENS-439 : Fix synchronization bug wrt get methods on CubeMetastoreClient (amareshwari)


Project: http://git-wip-us.apache.org/repos/asf/incubator-lens/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-lens/commit/057872b3
Tree: http://git-wip-us.apache.org/repos/asf/incubator-lens/tree/057872b3
Diff: http://git-wip-us.apache.org/repos/asf/incubator-lens/diff/057872b3

Branch: refs/heads/current-release-line
Commit: 057872b3b3780c221c667eccbf3188f253087970
Parents: 1b811de
Author: Amareshwari Sriramadasu <am...@apache.org>
Authored: Wed Mar 25 12:48:58 2015 +0530
Committer: Amareshwari Sriramadasu <am...@apache.org>
Committed: Wed Mar 25 12:48:58 2015 +0530

----------------------------------------------------------------------
 .../apache/lens/cube/metadata/CubeMetastoreClient.java  | 12 ++++++++++++
 1 file changed, 12 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/057872b3/lens-cube/src/main/java/org/apache/lens/cube/metadata/CubeMetastoreClient.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/metadata/CubeMetastoreClient.java b/lens-cube/src/main/java/org/apache/lens/cube/metadata/CubeMetastoreClient.java
index 8758986..41b5b08 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/metadata/CubeMetastoreClient.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/metadata/CubeMetastoreClient.java
@@ -1093,6 +1093,8 @@ public class CubeMetastoreClient {
             if (enableCaching) {
               allHiveTables.put(tableName, tbl);
             }
+          } else {
+            tbl = allHiveTables.get(tableName);
           }
         }
       }
@@ -1294,6 +1296,8 @@ public class CubeMetastoreClient {
               }
             }
           }
+        } else {
+          dimTable = allDimTables.get(tableName);
         }
       }
     }
@@ -1324,6 +1328,8 @@ public class CubeMetastoreClient {
               allStorages.put(storageName, storage);
             }
           }
+        } else {
+          storage = allStorages.get(storageName);
         }
       }
     }
@@ -1354,6 +1360,8 @@ public class CubeMetastoreClient {
               allCubes.put(tableName, cube);
             }
           }
+        } else {
+          cube = allCubes.get(tableName);
         }
       }
     }
@@ -1380,6 +1388,8 @@ public class CubeMetastoreClient {
               allDims.put(tableName, dim);
             }
           }
+        } else {
+          dim = allDims.get(tableName);
         }
       }
     }
@@ -1403,6 +1413,8 @@ public class CubeMetastoreClient {
           if (enableCaching && fact != null) {
             allFactTables.put(tableName, fact);
           }
+        } else {
+          fact = allFactTables.get(tableName);
         }
       }
     }


[16/50] [abbrv] incubator-lens git commit: LENS-342 : Fail query when a dimension is accessed with chain and without chain in the same query (amareshwari)

Posted by am...@apache.org.
LENS-342 : Fail query when a dimension is accessed with chain and without chain in the same query (amareshwari)


Project: http://git-wip-us.apache.org/repos/asf/incubator-lens/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-lens/commit/f57a9a14
Tree: http://git-wip-us.apache.org/repos/asf/incubator-lens/tree/f57a9a14
Diff: http://git-wip-us.apache.org/repos/asf/incubator-lens/diff/f57a9a14

Branch: refs/heads/current-release-line
Commit: f57a9a149f0e670f47b586080f14113ccd533ed8
Parents: 4a5c02a
Author: Amareshwari Sriramadasu <am...@apache.org>
Authored: Tue Mar 31 10:05:56 2015 +0530
Committer: Amareshwari Sriramadasu <am...@apache.org>
Committed: Tue Mar 31 10:05:56 2015 +0530

----------------------------------------------------------------------
 .../cube/metadata/ReferencedDimAtrribute.java   |   2 +-
 .../cube/parse/DenormalizationResolver.java     |   5 +-
 .../apache/lens/cube/parse/JoinResolver.java    |   5 +
 .../apache/lens/cube/parse/CubeTestSetup.java   |   9 +-
 .../lens/cube/parse/TestBaseCubeQueries.java    |   6 +-
 .../lens/cube/parse/TestJoinResolver.java       | 152 +++++++++++--------
 6 files changed, 106 insertions(+), 73 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/f57a9a14/lens-cube/src/main/java/org/apache/lens/cube/metadata/ReferencedDimAtrribute.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/metadata/ReferencedDimAtrribute.java b/lens-cube/src/main/java/org/apache/lens/cube/metadata/ReferencedDimAtrribute.java
index 859b344..a8ece2d 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/metadata/ReferencedDimAtrribute.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/metadata/ReferencedDimAtrribute.java
@@ -28,7 +28,7 @@ import lombok.Getter;
 import lombok.ToString;
 
 @EqualsAndHashCode(callSuper = true)
-@ToString
+@ToString(callSuper = true)
 public class ReferencedDimAtrribute extends BaseDimAttribute {
   @Getter
   private final List<TableReference> references = new ArrayList<TableReference>();

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/f57a9a14/lens-cube/src/main/java/org/apache/lens/cube/parse/DenormalizationResolver.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/DenormalizationResolver.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/DenormalizationResolver.java
index 2d239e1..f5d2115 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/DenormalizationResolver.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/DenormalizationResolver.java
@@ -232,7 +232,7 @@ public class DenormalizationResolver implements ContextRewriter {
       return false;
     }
 
-    private void pickColumnsForTable(String tbl) {
+    private void pickColumnsForTable(String tbl) throws SemanticException {
       if (tableToRefCols.containsKey(tbl)) {
         for (ReferencedQueriedColumn refered : tableToRefCols.get(tbl)) {
           if (!refered.col.isChainedColumn()) {
@@ -245,6 +245,9 @@ public class DenormalizationResolver implements ContextRewriter {
                 iter.remove();
               }
             }
+            if (refered.references.isEmpty()) {
+              throw new SemanticException("No reference column available for " + refered);
+            }
             PickedReference picked = new PickedReference(refered.references.iterator().next(),
               cubeql.getAliasForTabName(refered.srcTable.getName()), tbl);
             addPickedReference(refered.col.getName(), picked);

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/f57a9a14/lens-cube/src/main/java/org/apache/lens/cube/parse/JoinResolver.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/JoinResolver.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/JoinResolver.java
index 32a4b85..bf57907 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/JoinResolver.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/JoinResolver.java
@@ -1082,6 +1082,11 @@ class JoinResolver implements ContextRewriter {
           throw new SemanticException("Table " + joinee.getName() + " is getting accessed via two different names: "
             + "[" + dimensionInJoinChain.get(joinee).get(0).getName() + ", " + joinee.getName() + "]");
         }
+        // table is accessed with chain and no chain
+        if (cubeql.getNonChainedDimensions().contains(joinee)) {
+          throw new SemanticException("Table " + joinee.getName() + " is getting accessed via joinchain: "
+            + dimensionInJoinChain.get(joinee).get(0).getName() + " and no chain at all");
+        }
       }
     }
     // populate paths from joinchains

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/f57a9a14/lens-cube/src/test/java/org/apache/lens/cube/parse/CubeTestSetup.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/cube/parse/CubeTestSetup.java b/lens-cube/src/test/java/org/apache/lens/cube/parse/CubeTestSetup.java
index 7d08212..b2ae9b5 100644
--- a/lens-cube/src/test/java/org/apache/lens/cube/parse/CubeTestSetup.java
+++ b/lens-cube/src/test/java/org/apache/lens/cube/parse/CubeTestSetup.java
@@ -1362,6 +1362,7 @@ public class CubeTestSetup {
 
     dimColumns = new ArrayList<FieldSchema>();
     dimColumns.add(new FieldSchema("id", "int", "code"));
+    dimColumns.add(new FieldSchema("name", "string", "name"));
 
     client.createCubeDimensionTable(cityDim.getName(), dimName, dimColumns, 0L, dumpPeriods, dimProps, storageTables);
 
@@ -1402,7 +1403,10 @@ public class CubeTestSetup {
     dimAttrs.add(new BaseDimAttribute(new FieldSchema("name", "string", "name")));
     dimAttrs.add(new ReferencedDimAtrribute(new FieldSchema("testDim3id", "string", "f-key to testdim3"), "Dim3 refer",
       new TableReference("testdim3", "id")));
-    dimAttrs.add(new BaseDimAttribute(new FieldSchema("cityId ", "string", "name")));
+    dimAttrs.add(new ReferencedDimAtrribute(new FieldSchema("cityId", "string", "f-key to citydim"), "cityid",
+      new TableReference("citydim", "id")));
+    dimAttrs.add(new ReferencedDimAtrribute(new FieldSchema("cityname", "string", "name"), "cityid",
+      new TableReference("citydim", "name"), null, null, 0.0, false));
 
     // add ref dim through chain
     dimAttrs.add(new ReferencedDimAtrribute(
@@ -1450,6 +1454,9 @@ public class CubeTestSetup {
     dimColumns.add(new FieldSchema("id", "int", "code"));
     dimColumns.add(new FieldSchema("bigid1", "int", "code"));
     dimColumns.add(new FieldSchema("name", "string", "field1"));
+    dimColumns.add(new FieldSchema("cityId", "string", "f-key to cityDim"));
+    storageTables.put(c3, s1);
+    dumpPeriods.put(c3, UpdatePeriod.HOURLY);
 
     client.createCubeDimensionTable(dimName, dimTblName, dimColumns, 10L, dumpPeriods, dimProps, storageTables);
 

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/f57a9a14/lens-cube/src/test/java/org/apache/lens/cube/parse/TestBaseCubeQueries.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestBaseCubeQueries.java b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestBaseCubeQueries.java
index 1f03db6..632829f 100644
--- a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestBaseCubeQueries.java
+++ b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestBaseCubeQueries.java
@@ -89,7 +89,7 @@ public class TestBaseCubeQueries extends TestQueryRewrite {
       ErrorMsg.EXPRESSION_NOT_IN_ANY_FACT.getErrorCode());
 
     // no fact has the all the dimensions queried
-    e = getSemanticExceptionInRewrite("select dim1, cityid, msr3, msr13 from basecube" + " where " + TWO_DAYS_RANGE,
+    e = getSemanticExceptionInRewrite("select dim1, stateid, msr3, msr13 from basecube" + " where " + TWO_DAYS_RANGE,
       conf);
     Assert.assertEquals(e.getCanonicalErrorMsg().getErrorCode(),
       ErrorMsg.NO_CANDIDATE_FACT_AVAILABLE.getErrorCode());
@@ -100,12 +100,12 @@ public class TestBaseCubeQueries extends TestQueryRewrite {
     Assert.assertTrue(matcher.matches());
     Assert.assertEquals(matcher.groupCount(), 1);
     String columnSetsStr = matcher.group(1);
-    Assert.assertNotEquals(columnSetsStr.indexOf("cityid"), -1);
+    Assert.assertNotEquals(columnSetsStr.indexOf("stateid"), -1);
     Assert.assertNotEquals(columnSetsStr.indexOf("msr3, msr13"), -1);
     Assert.assertEquals(pruneCauses.getDetails(),
       new HashMap<String, List<CandidateTablePruneCause>>() {
         {
-          put("testfact3_base,testfact3_raw_base", Arrays.asList(CandidateTablePruneCause.columnNotFound("cityid")));
+          put("testfact3_base,testfact3_raw_base", Arrays.asList(CandidateTablePruneCause.columnNotFound("stateid")));
           put("testfact2_raw_base,testfact2_base",
             Arrays.asList(CandidateTablePruneCause.columnNotFound("msr3", "msr13")));
         }

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/f57a9a14/lens-cube/src/test/java/org/apache/lens/cube/parse/TestJoinResolver.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestJoinResolver.java b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestJoinResolver.java
index 0e5978e..dfa178b 100644
--- a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestJoinResolver.java
+++ b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestJoinResolver.java
@@ -118,13 +118,18 @@ public class TestJoinResolver extends TestQueryRewrite {
     CubeInterface testCube = metastore.getCube("testcube");
     Dimension zipDim = metastore.getDimension("zipdim");
     Dimension cityDim = metastore.getDimension("citydim");
+    Dimension testDim2 = metastore.getDimension("testDim2");
 
     SchemaGraph.GraphSearch search = new SchemaGraph.GraphSearch(zipDim, (AbstractCubeTable) testCube, schemaGraph);
 
     List<SchemaGraph.JoinPath> paths = search.findAllPathsToTarget();
-    Assert.assertEquals(2, paths.size());
+    Assert.assertEquals(6, paths.size());
     validatePath(paths.get(0), zipDim, (AbstractCubeTable) testCube);
     validatePath(paths.get(1), zipDim, cityDim, (AbstractCubeTable) testCube);
+    validatePath(paths.get(2), zipDim, cityDim, testDim2, (AbstractCubeTable) testCube);
+    validatePath(paths.get(3), zipDim, cityDim, testDim2, (AbstractCubeTable) testCube);
+    validatePath(paths.get(4), zipDim, cityDim, testDim2, (AbstractCubeTable) testCube);
+    validatePath(paths.get(5), zipDim, cityDim, testDim2, (AbstractCubeTable) testCube);
   }
 
   private void validatePath(SchemaGraph.JoinPath jp, AbstractCubeTable... tables) {
@@ -450,34 +455,6 @@ public class TestJoinResolver extends TestQueryRewrite {
     );
     TestCubeRewriter.compareQueries(expected, hqlQuery);
 
-    // Single joinchain with two paths, intermediate dimension accessed separately by name.
-    query = "select cityState.name, citydim.name, sum(msr2) from basecube where " + TWO_DAYS_RANGE;
-    hqlQuery = rewrite(query, hconf);
-    expected = getExpectedQuery("basecube",
-      "select citystate.name, citydim.name, sum(basecube.msr2) FROM ",
-      " join " + getDbName() + "c1_citytable citydim on basecube.cityid = citydim.id and "
-        + "citydim.dt = 'latest'"
-        + " join " + getDbName() + "c1_statetable citystate on citydim.stateid = citystate.id and "
-        + "citystate.dt = 'latest'", null, "group by citystate.name,citydim.name", null,
-      getWhereForDailyAndHourly2days("basecube", "c1_testfact1_base")
-    );
-    TestCubeRewriter.compareQueries(expected, hqlQuery);
-
-    // Multi joinchains + a dimension part of one of the chains.
-    query = "select cityState.name, cubeState.name, citydim.name, sum(msr2) from basecube where " + TWO_DAYS_RANGE;
-    hqlQuery = rewrite(query, hconf);
-    expected = getExpectedQuery("basecube",
-      "select citystate.name, cubestate.name, citydim.name, sum(basecube.msr2) FROM ",
-      " join " + getDbName() + "c1_citytable citydim on basecube.cityid = citydim.id and "
-        + "citydim.dt = 'latest'"
-        + " join " + getDbName() + "c1_statetable citystate on citydim.stateid = citystate.id and "
-        + "citystate.dt = 'latest'"
-        + " join " + getDbName() + "c1_statetable cubestate on basecube.stateid=cubestate.id and cubestate.dt='latest'"
-      , null, "group by citystate.name,cubestate.name,citydim.name", null,
-      getWhereForDailyAndHourly2days("basecube", "c1_testfact1_base")
-    );
-    TestCubeRewriter.compareQueries(expected, hqlQuery);
-
     // Two joinchains, one accessed as refcol.
     query = "select cubestate.name, cityStateCapital, sum(msr2) from basecube where " + TWO_DAYS_RANGE;
     hqlQuery = rewrite(query, hconf);
@@ -526,44 +503,6 @@ public class TestJoinResolver extends TestQueryRewrite {
     );
     TestCubeRewriter.compareQueries(expected, hqlQuery);
 
-    // this test case should pass when default qualifiers for dimensions' chains are added
-    // Two joinchains with same destination, and the destination table accessed separately
-    query = "select cityState.name, cubeState.name, statedim.name, sum(msr2) from basecube where " + TWO_DAYS_RANGE;
-    try {
-      rewrite(query, hconf);
-      Assert.fail("Should have failed. "
-        + "It's not possible to resolve which statedim is being asked for when cityState and cubeState both end at"
-        + " statedim table.");
-    } catch (SemanticException e) {
-      Assert.assertNotNull(e.getCause());
-      Assert.assertEquals(
-        e.getCause().getMessage().indexOf("Table statedim has 2 different paths through joinchains"), 0);
-    }
-
-    // this test case should pass when default qualifiers for dimensions' chains are added
-    // Two Single joinchain, And dest table accessed separately.
-    query = "select cubeState.name, statedim.name, sum(msr2) from basecube where " + TWO_DAYS_RANGE;
-    try {
-      rewrite(query, hconf);
-      Assert.fail("Should have failed. "
-        + "The table statedim is getting accessed as both cubeState and statedim ");
-    } catch (SemanticException e) {
-      Assert.assertNotNull(e.getCause());
-      Assert.assertEquals(e.getCause().getMessage().toLowerCase(),
-        "Table statedim is getting accessed via two different names: [cubestate, statedim]".toLowerCase());
-    }
-    // this should pass when default qualifiers are added
-    query = "select cityStateCapital, statedim.name, sum(msr2) from basecube where " + TWO_DAYS_RANGE;
-    try {
-      rewrite(query, hconf);
-      Assert.fail("Should have failed. "
-        + "The table statedim is getting accessed as both cubeState and statedim ");
-    } catch (SemanticException e) {
-      Assert.assertNotNull(e.getCause());
-      Assert.assertEquals(e.getCause().getMessage().toLowerCase(),
-        "Table statedim is getting accessed via two different names: [citystate, statedim]".toLowerCase());
-    }
-
     // Test 4 Dim only query with join chains
 
     List<String> expectedClauses = new ArrayList<String>();
@@ -631,6 +570,85 @@ public class TestJoinResolver extends TestQueryRewrite {
   }
 
   @Test
+  public void testConflictingJoins() throws ParseException {
+    // Single joinchain with two paths, intermediate dimension accessed separately by name.
+    String query = "select cityState.name, citydim.name, sum(msr2) from basecube where " + TWO_DAYS_RANGE;
+    try {
+      rewrite(query, hconf);
+      Assert.fail("Should have failed. "
+        + "The table citydim is getting accessed as both chain and without chain ");
+    } catch (SemanticException e) {
+      Assert.assertNotNull(e.getCause());
+      Assert.assertEquals(e.getCause().getMessage().toLowerCase(),
+        "Table citydim is getting accessed via joinchain: citystate and no chain at all".toLowerCase());
+    }
+
+    // Multi joinchains + a dimension part of one of the chains.
+    query = "select cityState.name, cubeState.name, citydim.name, sum(msr2) from basecube where " + TWO_DAYS_RANGE;
+    try {
+      rewrite(query, hconf);
+      Assert.fail("Should have failed. "
+        + "The table citydim is getting accessed as both chain and without chain ");
+    } catch (SemanticException e) {
+      Assert.assertNotNull(e.getCause());
+      Assert.assertEquals(e.getCause().getMessage().toLowerCase(),
+        "Table citydim is getting accessed via joinchain: citystate and no chain at all".toLowerCase());
+    }
+
+    // this test case should pass when default qualifiers for dimensions' chains are added
+    // Two joinchains with same destination, and the destination table accessed separately
+    query = "select cityState.name, cubeState.name, statedim.name, sum(msr2) from basecube where " + TWO_DAYS_RANGE;
+    try {
+      rewrite(query, hconf);
+      Assert.fail("Should have failed. "
+        + "It's not possible to resolve which statedim is being asked for when cityState and cubeState both end at"
+        + " statedim table.");
+    } catch (SemanticException e) {
+      Assert.assertNotNull(e.getCause());
+      Assert.assertEquals(
+        e.getCause().getMessage().indexOf("Table statedim has 2 different paths through joinchains"), 0);
+    }
+
+    // this test case should pass when default qualifiers for dimensions' chains are added
+    // Two Single joinchain, And dest table accessed separately.
+    query = "select cubeState.name, statedim.name, sum(msr2) from basecube where " + TWO_DAYS_RANGE;
+    try {
+      rewrite(query, hconf);
+      Assert.fail("Should have failed. "
+        + "The table statedim is getting accessed as both cubeState and statedim ");
+    } catch (SemanticException e) {
+      Assert.assertNotNull(e.getCause());
+      Assert.assertEquals(e.getCause().getMessage().toLowerCase(),
+        "Table statedim is getting accessed via two different names: [cubestate, statedim]".toLowerCase());
+    }
+    // this should pass when default qualifiers are added
+    query = "select cityStateCapital, statedim.name, sum(msr2) from basecube where " + TWO_DAYS_RANGE;
+    try {
+      rewrite(query, hconf);
+      Assert.fail("Should have failed. "
+        + "The table statedim is getting accessed as both cubeState and statedim ");
+    } catch (SemanticException e) {
+      Assert.assertNotNull(e.getCause());
+      Assert.assertEquals(e.getCause().getMessage().toLowerCase(),
+        "Table statedim is getting accessed via two different names: [citystate, statedim]".toLowerCase());
+    }
+
+    // table accessed through denorm column and chain column
+    Configuration conf = new Configuration(hconf);
+    conf.set(CubeQueryConfUtil.DRIVER_SUPPORTED_STORAGES, "C3, C4");
+    String failingQuery = "select testDim2.cityname, testDim2.cityStateCapital FROM testDim2 where " + TWO_DAYS_RANGE;
+    try {
+      rewrite(failingQuery, conf);
+      Assert.fail("Should have failed. "
+        + "The table citydim is getting accessed as both chain and without chain ");
+    } catch (SemanticException e) {
+      Assert.assertNotNull(e.getCause());
+      Assert.assertEquals(e.getCause().getMessage().toLowerCase(),
+        "Table citydim is getting accessed via joinchain: citystate and no chain at all".toLowerCase());
+    }
+  }
+
+  @Test
   public void testMultiPaths() throws SemanticException, ParseException {
     String query, hqlQuery, expected;
 


[22/50] [abbrv] incubator-lens git commit: LENS-473 : Change the queryable field validation logic

Posted by am...@apache.org.
LENS-473 : Change the queryable field validation logic


Project: http://git-wip-us.apache.org/repos/asf/incubator-lens/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-lens/commit/019eb949
Tree: http://git-wip-us.apache.org/repos/asf/incubator-lens/tree/019eb949
Diff: http://git-wip-us.apache.org/repos/asf/incubator-lens/diff/019eb949

Branch: refs/heads/current-release-line
Commit: 019eb9498b54512e28e0e85a33a9afad814b1c84
Parents: 511418a
Author: jdhok <jd...@apache.org>
Authored: Thu Apr 2 16:46:13 2015 +0530
Committer: jdhok <jd...@apache.org>
Committed: Thu Apr 2 16:46:13 2015 +0530

----------------------------------------------------------------------
 .../apache/lens/cube/parse/AliasReplacer.java   |  75 +---------
 .../lens/cube/parse/CubeQueryRewriter.java      |   8 +-
 .../apache/lens/cube/parse/FieldValidator.java  | 150 +++++++++++++++++++
 .../lens/cube/parse/TestBaseCubeQueries.java    |  25 +++-
 .../lens/cube/parse/TestQueryMetrics.java       |  25 ++--
 .../lens/cube/parse/TestQueryRewrite.java       |   1 -
 6 files changed, 198 insertions(+), 86 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/019eb949/lens-cube/src/main/java/org/apache/lens/cube/parse/AliasReplacer.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/AliasReplacer.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/AliasReplacer.java
index d81fab1..961faad 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/AliasReplacer.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/AliasReplacer.java
@@ -30,7 +30,6 @@ import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.ql.ErrorMsg;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.parse.ASTNode;
 import org.apache.hadoop.hive.ql.parse.HiveParser;
 import org.apache.hadoop.hive.ql.parse.SemanticException;
@@ -59,7 +58,7 @@ class AliasReplacer implements ContextRewriter {
     colToTableAlias = new HashMap<String, String>();
 
     extractTabAliasForCol(cubeql);
-    doFieldValidation(cubeql);
+    findDimAttributesAndMeasures(cubeql);
 
     // Rewrite the all the columns in the query with table alias prefixed.
     // If col1 of table tab1 is accessed, it would be changed as tab1.col1.
@@ -97,10 +96,12 @@ class AliasReplacer implements ContextRewriter {
 
   }
 
-  // Finds all queried dim-attributes and measures from cube
-  // If all fields in cube are not queryable together, does the validation
-  // wrt to derived cubes.
-  private void doFieldValidation(CubeQueryContext cubeql) throws SemanticException {
+  /**
+   * Figure out queried dim attributes and measures from the cube query context
+   * @param cubeql
+   * @throws SemanticException
+   */
+  private void findDimAttributesAndMeasures(CubeQueryContext cubeql) throws SemanticException {
     CubeInterface cube = cubeql.getCube();
     if (cube != null) {
       Set<String> cubeColsQueried = cubeql.getColumnsQueried(cube.getName());
@@ -117,69 +118,7 @@ class AliasReplacer implements ContextRewriter {
       }
       cubeql.addQueriedDimAttrs(queriedDimAttrs);
       cubeql.addQueriedMsrs(queriedMsrs);
-      if (!cube.allFieldsQueriable()) {
-        // do queried field validation
-        List<DerivedCube> dcubes;
-        try {
-          dcubes = cubeql.getMetastoreClient().getAllDerivedQueryableCubes(cube);
-        } catch (HiveException e) {
-          throw new SemanticException(e);
-        }
-        // remove chained ref columns from field validation
-        Iterator<String> iter = queriedDimAttrs.iterator();
-        Set<String> chainedSrcColumns = new HashSet<String>();
-        while (iter.hasNext()) {
-          String attr = iter.next();
-          if (cube.getDimAttributeByName(attr) instanceof ReferencedDimAtrribute
-            && ((ReferencedDimAtrribute) cube.getDimAttributeByName(attr)).isChainedColumn()) {
-            iter.remove();
-            ReferencedDimAtrribute rdim = (ReferencedDimAtrribute)cube.getDimAttributeByName(attr);
-            chainedSrcColumns.addAll(cube.getChainByName(rdim.getChainName()).getSourceColumns());
-          }
-        }
-        for (JoinChain chainQueried : cubeql.getJoinchains().values()) {
-          chainedSrcColumns.addAll(chainQueried.getSourceColumns());
-        }
-        // do validation
-        // Find atleast one derived cube which contains all the dimensions
-        // queried.
-        boolean derivedCubeFound = false;
-        for (DerivedCube dcube : dcubes) {
-          if (dcube.getDimAttributeNames().containsAll(chainedSrcColumns)
-              && dcube.getDimAttributeNames().containsAll(queriedDimAttrs)) {
-            // remove all the measures that are covered
-            queriedMsrs.removeAll(dcube.getMeasureNames());
-            derivedCubeFound = true;
-          }
-        }
-        Set<String> nonQueryableFields = getNonQueryableAttributes(cubeql);
-        if (!derivedCubeFound && !nonQueryableFields.isEmpty()) {
-          throw new SemanticException(ErrorMsg.FIELDS_NOT_QUERYABLE, nonQueryableFields.toString());
-        }
-        if (!queriedMsrs.isEmpty()) {
-          // Add appropriate message to know which fields are not queryable together
-          if (!nonQueryableFields.isEmpty()) {
-            throw new SemanticException(ErrorMsg.FIELDS_NOT_QUERYABLE, nonQueryableFields.toString() + " and "
-              + queriedMsrs.toString());
-          } else {
-            throw new SemanticException(ErrorMsg.FIELDS_NOT_QUERYABLE, queriedMsrs.toString());
-          }
-        }
-      }
-    }
-  }
-
-  private Set<String> getNonQueryableAttributes(CubeQueryContext cubeql) {
-    Set<String> nonQueryableFields = new LinkedHashSet<String>();
-    nonQueryableFields.addAll(cubeql.getQueriedDimAttrs());
-    for (String joinChainAlias : cubeql.getJoinchains().keySet()) {
-      if (cubeql.getColumnsQueried(joinChainAlias) != null) {
-        for (String chaincol : cubeql.getColumnsQueried(joinChainAlias)) {
-          nonQueryableFields.add(joinChainAlias + "." + chaincol);
-        }
-      }
     }
-    return nonQueryableFields;
   }
 
   private void extractTabAliasForCol(CubeQueryContext cubeql) throws SemanticException {

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/019eb949/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryRewriter.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryRewriter.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryRewriter.java
index 1833cde..24c6ab1 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryRewriter.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryRewriter.java
@@ -133,6 +133,10 @@ public class CubeQueryRewriter {
     rewriters.add(denormResolver);
     // Resolve candidate fact tables and dimension tables for columns queried
     rewriters.add(candidateTblResolver);
+    // Resolve aggregations and generate base select tree
+    rewriters.add(new AggregateResolver(conf));
+    rewriters.add(new GroupbyResolver(conf));
+    rewriters.add(new FieldValidator());
     // Resolve joins and generate base join tree
     rewriters.add(new JoinResolver(conf));
     // resolve time ranges and do col life validation
@@ -140,9 +144,7 @@ public class CubeQueryRewriter {
     // Resolve candidate fact tables and dimension tables for columns included
     // in join and denorm resolvers
     rewriters.add(candidateTblResolver);
-    // Resolve aggregations and generate base select tree
-    rewriters.add(new AggregateResolver(conf));
-    rewriters.add(new GroupbyResolver(conf));
+
     // Phase 1: resolve fact tables.
     rewriters.add(storageTableResolver);
     if (lightFactFirst) {

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/019eb949/lens-cube/src/main/java/org/apache/lens/cube/parse/FieldValidator.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/FieldValidator.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/FieldValidator.java
new file mode 100644
index 0000000..eafd661
--- /dev/null
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/FieldValidator.java
@@ -0,0 +1,150 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.lens.cube.parse;
+
+import java.util.*;
+
+import org.apache.lens.cube.metadata.CubeInterface;
+import org.apache.lens.cube.metadata.DerivedCube;
+import org.apache.lens.cube.metadata.ReferencedDimAtrribute;
+
+import org.apache.hadoop.hive.ql.ErrorMsg;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.parse.ASTNode;
+import org.apache.hadoop.hive.ql.parse.HiveParser;
+import org.apache.hadoop.hive.ql.parse.SemanticException;
+
+/**
+ * Validate fields based on cube queryability
+ */
+public class FieldValidator implements ContextRewriter {
+
+  @Override
+  public void rewriteContext(CubeQueryContext cubeql) throws SemanticException {
+    validateFields(cubeql);
+  }
+
+  public void validateFields(CubeQueryContext cubeql) throws SemanticException {
+    CubeInterface cube = cubeql.getCube();
+    if (cube == null) {
+      return;
+    }
+
+    if (!cube.allFieldsQueriable()) {
+      // do queried field validation
+      List<DerivedCube> dcubes;
+      try {
+        dcubes = cubeql.getMetastoreClient().getAllDerivedQueryableCubes(cube);
+      } catch (HiveException e) {
+        throw new SemanticException(e);
+      }
+
+      // dim attributes and chained source columns should only come from WHERE and GROUP BY ASTs
+      Set<String> queriedDimAttrs = new LinkedHashSet<String>();
+      Set<String> queriedMsrs = new LinkedHashSet<String>(cubeql.getQueriedMsrs());
+      Set<String> chainedSrcColumns = new HashSet<String>();
+      Set<String> nonQueryableFields = new LinkedHashSet<String>();
+
+      findDimAttrsAndChainSourceColumns(cubeql, cubeql.getGroupByAST(), queriedDimAttrs,
+        chainedSrcColumns, nonQueryableFields);
+      findDimAttrsAndChainSourceColumns(cubeql, cubeql.getWhereAST(), queriedDimAttrs,
+        chainedSrcColumns, nonQueryableFields);
+
+      // do validation
+      // Find atleast one derived cube which contains all the dimensions
+      // queried.
+      boolean derivedCubeFound = false;
+      for (DerivedCube dcube : dcubes) {
+        if (dcube.getDimAttributeNames().containsAll(chainedSrcColumns)
+          && dcube.getDimAttributeNames().containsAll(queriedDimAttrs)) {
+          // remove all the measures that are covered
+          queriedMsrs.removeAll(dcube.getMeasureNames());
+          derivedCubeFound = true;
+        }
+      }
+
+      if (!derivedCubeFound && !nonQueryableFields.isEmpty()) {
+        throw new SemanticException(ErrorMsg.FIELDS_NOT_QUERYABLE, nonQueryableFields.toString());
+      }
+
+      if (!queriedMsrs.isEmpty()) {
+        // Add appropriate message to know which fields are not queryable together
+        if (!nonQueryableFields.isEmpty()) {
+          throw new SemanticException(ErrorMsg.FIELDS_NOT_QUERYABLE, nonQueryableFields.toString() + " and "
+            + queriedMsrs.toString());
+        } else {
+          throw new SemanticException(ErrorMsg.FIELDS_NOT_QUERYABLE, queriedMsrs.toString());
+        }
+      }
+    }
+  }
+
+  // Traverse parse tree to figure out dimension attributes of the cubes and join chains
+  // present in the AST.
+  private void findDimAttrsAndChainSourceColumns(final CubeQueryContext cubeql,
+                                                 final ASTNode tree,
+                                                 final Set<String> dimAttributes,
+                                                 final Set<String> chainSourceColumns,
+                                                 final Set<String> nonQueryableColumns) throws SemanticException {
+    if (tree == null || !cubeql.hasCubeInQuery()) {
+      return;
+    }
+
+    final CubeInterface cube = cubeql.getCube();
+
+    HQLParser.bft(tree, new HQLParser.ASTNodeVisitor() {
+      @Override
+      public void visit(HQLParser.TreeNode treeNode) throws SemanticException {
+        ASTNode astNode = treeNode.getNode();
+        if (astNode.getToken().getType() == HiveParser.DOT) {
+          // At this point alias replacer has run, so all columns are of the type table.column name
+          ASTNode aliasNode = HQLParser.findNodeByPath((ASTNode) astNode.getChild(0), HiveParser.Identifier);
+          String tabName = aliasNode.getText().toLowerCase().trim();
+          ASTNode colNode = (ASTNode) astNode.getChild(1);
+          String colName = colNode.getText().toLowerCase().trim();
+
+          // Check if table is a join chain
+          if (cubeql.getJoinchains().containsKey(tabName)) {
+            // this 'tabName' is a join chain, so add all source columns
+            chainSourceColumns.addAll(cubeql.getJoinchains().get(tabName).getSourceColumns());
+            nonQueryableColumns.add(tabName + "." + colName);
+          } else if (tabName.equalsIgnoreCase(cubeql.getAliasForTabName(cube.getName()))
+            && cube.getDimAttributeNames().contains(colName)) {
+            // Alternatively, check if this is a dimension attribute, if yes add it to the dim attribute set
+            // and non queryable fields set
+            nonQueryableColumns.add(colName);
+
+            // If this is a referenced dim attribute leading to a chain, then instead of adding this
+            // column, we add the source columns of the chain.
+            if (cube.getDimAttributeByName(colName) instanceof ReferencedDimAtrribute
+              && ((ReferencedDimAtrribute) cube.getDimAttributeByName(colName)).isChainedColumn()) {
+              ReferencedDimAtrribute rdim = (ReferencedDimAtrribute) cube.getDimAttributeByName(colName);
+              chainSourceColumns.addAll(cube.getChainByName(rdim.getChainName()).getSourceColumns());
+            } else {
+              // This is a dim attribute, needs to be validated
+              dimAttributes.add(colName);
+            }
+          }
+        }
+      }
+    });
+
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/019eb949/lens-cube/src/test/java/org/apache/lens/cube/parse/TestBaseCubeQueries.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestBaseCubeQueries.java b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestBaseCubeQueries.java
index 632829f..69451ad 100644
--- a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestBaseCubeQueries.java
+++ b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestBaseCubeQueries.java
@@ -58,10 +58,29 @@ public class TestBaseCubeQueries extends TestQueryRewrite {
     Assert.assertEquals(e.getCanonicalErrorMsg().getErrorCode(), ErrorMsg.FIELDS_NOT_QUERYABLE.getErrorCode());
     Assert.assertTrue(e.getMessage().contains("dim2") && e.getMessage().contains("msr1"));
 
+    // Query with only measure should pass, since dim is not in where or group by
+    String hql = rewrite("select SUM(msr1), "
+      + "SUM(CASE WHEN cityState.name ='foo' THEN msr2"
+      + " WHEN dim2 = 'bar' THEN msr1 ELSE msr2 END) "
+      + "from basecube where " + TWO_DAYS_RANGE, conf);
+    Assert.assertNotNull(hql);
+
+    // This query should fail because chain ref in where clause
+    e = getSemanticExceptionInRewrite("select SUM(msr1), "
+      + "SUM(case WHEN cityState.capital ='foo' THEN msr2 ELSE msr1 END) "
+      + "from basecube where " + TWO_DAYS_RANGE + " AND cityState.name='foo'", conf);
+    Assert.assertEquals(e.getCanonicalErrorMsg().getErrorCode(), ErrorMsg.FIELDS_NOT_QUERYABLE.getErrorCode());
+    // Error message should contain chain_name.col_name and it should not contain dim attributes in select clause
+    // it should also contain the measure name
+    Assert.assertTrue(e.getMessage().contains("citystate.name")
+      && e.getMessage().contains("msr1")
+      && !e.getMessage().contains("capital"), e.getMessage());
+
+
     e = getSemanticExceptionInRewrite("select cityStateCapital, SUM(msr1) from basecube" + " where " + TWO_DAYS_RANGE,
       conf);
     Assert.assertEquals(e.getCanonicalErrorMsg().getErrorCode(), ErrorMsg.FIELDS_NOT_QUERYABLE.getErrorCode());
-    Assert.assertTrue(e.getMessage().contains("citystatecapital") && e.getMessage().contains("msr1"));
+    Assert.assertTrue(e.getMessage().contains("citystatecapital") && e.getMessage().contains("msr1"), e.getMessage());
 
     e = getSemanticExceptionInRewrite("select cityState.name, SUM(msr1) from basecube" + " where " + TWO_DAYS_RANGE,
       conf);
@@ -87,7 +106,6 @@ public class TestBaseCubeQueries extends TestQueryRewrite {
     e = getSemanticExceptionInRewrite("select msr11 + msr2 from basecube" + " where " + TWO_DAYS_RANGE, conf);
     Assert.assertEquals(e.getCanonicalErrorMsg().getErrorCode(),
       ErrorMsg.EXPRESSION_NOT_IN_ANY_FACT.getErrorCode());
-
     // no fact has the all the dimensions queried
     e = getSemanticExceptionInRewrite("select dim1, stateid, msr3, msr13 from basecube" + " where " + TWO_DAYS_RANGE,
       conf);
@@ -111,6 +129,9 @@ public class TestBaseCubeQueries extends TestQueryRewrite {
         }
       }
     );
+
+
+
   }
 
 

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/019eb949/lens-cube/src/test/java/org/apache/lens/cube/parse/TestQueryMetrics.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestQueryMetrics.java b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestQueryMetrics.java
index aec0828..5cf76ca 100644
--- a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestQueryMetrics.java
+++ b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestQueryMetrics.java
@@ -50,19 +50,20 @@ public class TestQueryMetrics extends TestQueryRewrite {
       "lens.MethodMetricGauge.testCubeRewriteStackName-org.apache.lens.cube.parse.AliasReplacer-ITER-2",
       "lens.MethodMetricGauge.testCubeRewriteStackName-org.apache.lens.cube.parse.DenormalizationResolver-ITER-3",
       "lens.MethodMetricGauge.testCubeRewriteStackName-org.apache.lens.cube.parse.CandidateTableResolver-ITER-4",
-      "lens.MethodMetricGauge.testCubeRewriteStackName-org.apache.lens.cube.parse.JoinResolver-ITER-5",
-      "lens.MethodMetricGauge.testCubeRewriteStackName-org.apache.lens.cube.parse.TimerangeResolver-ITER-6",
-      "lens.MethodMetricGauge.testCubeRewriteStackName-org.apache.lens.cube.parse.CandidateTableResolver-ITER-7",
-      "lens.MethodMetricGauge.testCubeRewriteStackName-org.apache.lens.cube.parse.AggregateResolver-ITER-8",
-      "lens.MethodMetricGauge.testCubeRewriteStackName-org.apache.lens.cube.parse.GroupbyResolver-ITER-9",
-      "lens.MethodMetricGauge.testCubeRewriteStackName-org.apache.lens.cube.parse.StorageTableResolver-ITER-10",
+      "lens.MethodMetricGauge.testCubeRewriteStackName-org.apache.lens.cube.parse.AggregateResolver-ITER-5",
+      "lens.MethodMetricGauge.testCubeRewriteStackName-org.apache.lens.cube.parse.GroupbyResolver-ITER-6",
+      "lens.MethodMetricGauge.testCubeRewriteStackName-org.apache.lens.cube.parse.FieldValidator-ITER-7",
+      "lens.MethodMetricGauge.testCubeRewriteStackName-org.apache.lens.cube.parse.JoinResolver-ITER-8",
+      "lens.MethodMetricGauge.testCubeRewriteStackName-org.apache.lens.cube.parse.TimerangeResolver-ITER-9",
+      "lens.MethodMetricGauge.testCubeRewriteStackName-org.apache.lens.cube.parse.CandidateTableResolver-ITER-10",
       "lens.MethodMetricGauge.testCubeRewriteStackName-org.apache.lens.cube.parse.StorageTableResolver-ITER-11",
-      "lens.MethodMetricGauge.testCubeRewriteStackName-org.apache.lens.cube.parse.LightestFactResolver-ITER-12",
-      "lens.MethodMetricGauge.testCubeRewriteStackName-org.apache.lens.cube.parse.StorageTableResolver-ITER-13",
-      "lens.MethodMetricGauge.testCubeRewriteStackName-org.apache.lens.cube.parse.DenormalizationResolver-ITER-14",
-      "lens.MethodMetricGauge.testCubeRewriteStackName-org.apache.lens.cube.parse.LeastPartitionResolver-ITER-15",
-      "lens.MethodMetricGauge.testCubeRewriteStackName-org.apache.lens.cube.parse.LightestFactResolver-ITER-16",
-      "lens.MethodMetricGauge.testCubeRewriteStackName-org.apache.lens.cube.parse.LightestDimensionResolver-ITER-17"
+      "lens.MethodMetricGauge.testCubeRewriteStackName-org.apache.lens.cube.parse.StorageTableResolver-ITER-12",
+      "lens.MethodMetricGauge.testCubeRewriteStackName-org.apache.lens.cube.parse.LightestFactResolver-ITER-13",
+      "lens.MethodMetricGauge.testCubeRewriteStackName-org.apache.lens.cube.parse.StorageTableResolver-ITER-14",
+      "lens.MethodMetricGauge.testCubeRewriteStackName-org.apache.lens.cube.parse.DenormalizationResolver-ITER-15",
+      "lens.MethodMetricGauge.testCubeRewriteStackName-org.apache.lens.cube.parse.LeastPartitionResolver-ITER-16",
+      "lens.MethodMetricGauge.testCubeRewriteStackName-org.apache.lens.cube.parse.LightestFactResolver-ITER-17",
+      "lens.MethodMetricGauge.testCubeRewriteStackName-org.apache.lens.cube.parse.LightestDimensionResolver-ITER-18"
     )));
   }
 }

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/019eb949/lens-cube/src/test/java/org/apache/lens/cube/parse/TestQueryRewrite.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestQueryRewrite.java b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestQueryRewrite.java
index c4449da..6a37756 100644
--- a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestQueryRewrite.java
+++ b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestQueryRewrite.java
@@ -89,7 +89,6 @@ public abstract class TestQueryRewrite {
       // unreachable
       return null;
     } catch (SemanticException e) {
-      e.printStackTrace();
       return e;
     }
   }


[11/50] [abbrv] incubator-lens git commit: LENS-449 : Fix JoinResolver skipping some tables while pruning (amareshwari)

Posted by am...@apache.org.
LENS-449 : Fix JoinResolver skipping some tables while pruning (amareshwari)


Project: http://git-wip-us.apache.org/repos/asf/incubator-lens/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-lens/commit/c6c593c6
Tree: http://git-wip-us.apache.org/repos/asf/incubator-lens/tree/c6c593c6
Diff: http://git-wip-us.apache.org/repos/asf/incubator-lens/diff/c6c593c6

Branch: refs/heads/current-release-line
Commit: c6c593c669a2ae2364371f8fee45c39c66fd2871
Parents: e5bc9e9
Author: Amareshwari Sriramadasu <am...@apache.org>
Authored: Fri Mar 27 13:00:02 2015 +0530
Committer: Amareshwari Sriramadasu <am...@apache.org>
Committed: Fri Mar 27 13:00:02 2015 +0530

----------------------------------------------------------------------
 .../apache/lens/cube/parse/JoinResolver.java    |  1 -
 .../apache/lens/cube/parse/CubeTestSetup.java   |  8 ++++++++
 .../lens/cube/parse/TestJoinResolver.java       | 21 ++++++++++++++++++++
 3 files changed, 29 insertions(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/c6c593c6/lens-cube/src/main/java/org/apache/lens/cube/parse/JoinResolver.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/JoinResolver.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/JoinResolver.java
index 7feb31b..32a4b85 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/JoinResolver.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/JoinResolver.java
@@ -879,7 +879,6 @@ class JoinResolver implements ContextRewriter {
               + " not part of any join paths. Join columns:" + minCostClause.chainColumns.get(dim));
             cubeql.addDimPruningMsgs(dim, cdim.dimtable,
               CandidateTablePruneCause.noColumnPartOfAJoinPath(minCostClause.chainColumns.get(dim)));
-            break;
           }
         }
         if (cubeql.getCandidateDimTables().get(dim).size() == 0) {

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/c6c593c6/lens-cube/src/test/java/org/apache/lens/cube/parse/CubeTestSetup.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/cube/parse/CubeTestSetup.java b/lens-cube/src/test/java/org/apache/lens/cube/parse/CubeTestSetup.java
index c9ac651..7d08212 100644
--- a/lens-cube/src/test/java/org/apache/lens/cube/parse/CubeTestSetup.java
+++ b/lens-cube/src/test/java/org/apache/lens/cube/parse/CubeTestSetup.java
@@ -1364,6 +1364,14 @@ public class CubeTestSetup {
     dimColumns.add(new FieldSchema("id", "int", "code"));
 
     client.createCubeDimensionTable(cityDim.getName(), dimName, dimColumns, 0L, dumpPeriods, dimProps, storageTables);
+
+    dimName = "citytable4";
+
+    dimColumns = new ArrayList<FieldSchema>();
+    dimColumns.add(new FieldSchema("id", "int", "code"));
+
+    client.createCubeDimensionTable(cityDim.getName(), dimName, dimColumns, 0L, dumpPeriods, dimProps, storageTables);
+
   }
 
   private void createTestDim2(CubeMetastoreClient client) throws HiveException {

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/c6c593c6/lens-cube/src/test/java/org/apache/lens/cube/parse/TestJoinResolver.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestJoinResolver.java b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestJoinResolver.java
index df594bd..0e5978e 100644
--- a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestJoinResolver.java
+++ b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestJoinResolver.java
@@ -27,7 +27,9 @@ import org.apache.lens.cube.metadata.*;
 import org.apache.lens.cube.metadata.SchemaGraph.TableRelationship;
 
 import org.apache.commons.lang.StringUtils;
+import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.parse.ParseException;
 import org.apache.hadoop.hive.ql.parse.SemanticException;
 
@@ -705,4 +707,23 @@ public class TestJoinResolver extends TestQueryRewrite {
       getWhereForDailyAndHourly2days("testcube", "c1_summary1"));
     TestCubeRewriter.compareQueries(expected, hqlQuery);
   }
+
+  @Test
+  public void testChainsWithMultipleStorage() throws ParseException, HiveException {
+    Configuration conf = new Configuration(hconf);
+    conf.unset(CubeQueryConfUtil.DRIVER_SUPPORTED_STORAGES); // supports all storages
+    String dimOnlyQuery = "select testDim2.name, testDim2.cityStateCapital FROM testDim2 where " + TWO_DAYS_RANGE;
+    CubeQueryRewriter driver = new CubeQueryRewriter(conf, hconf);
+    CubeQueryContext rewrittenQuery = driver.rewrite(dimOnlyQuery);
+    rewrittenQuery.toHQL();
+    Dimension citydim = CubeMetastoreClient.getInstance(hconf).getDimension("citydim");
+    Set<String> cdimTables = new HashSet<String>();
+    for (CandidateDim cdim : rewrittenQuery.getCandidateDims().get(citydim)) {
+      cdimTables.add(cdim.getName());
+    }
+    Assert.assertTrue(cdimTables.contains("citytable"));
+    Assert.assertTrue(cdimTables.contains("citytable2"));
+    Assert.assertFalse(cdimTables.contains("citytable3"));
+    Assert.assertFalse(cdimTables.contains("citytable4"));
+  }
 }


[31/50] [abbrv] incubator-lens git commit: LENS-472 : Fix adding single partition cli command (Rajat Khandelwal via amareshwari)

Posted by am...@apache.org.
LENS-472 : Fix adding single partition cli command (Rajat Khandelwal via amareshwari)


Project: http://git-wip-us.apache.org/repos/asf/incubator-lens/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-lens/commit/d597314c
Tree: http://git-wip-us.apache.org/repos/asf/incubator-lens/tree/d597314c
Diff: http://git-wip-us.apache.org/repos/asf/incubator-lens/diff/d597314c

Branch: refs/heads/current-release-line
Commit: d597314cb2fa8a26c64e050b80978c6024ae6eb7
Parents: 5e492d9
Author: Rajat Khandelwal <pr...@apache.org>
Authored: Tue Apr 7 17:42:51 2015 +0530
Committer: Amareshwari Sriramadasu <am...@apache.org>
Committed: Tue Apr 7 17:42:51 2015 +0530

----------------------------------------------------------------------
 .../commands/LensDimensionTableCommands.java    |   6 +-
 .../lens/cli/commands/LensFactCommands.java     |   8 +-
 .../apache/lens/cli/TestLensFactCommands.java   |  16 ++
 .../apache/lens/client/LensMetadataClient.java  | 256 ++++++++++---------
 4 files changed, 165 insertions(+), 121 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/d597314c/lens-cli/src/main/java/org/apache/lens/cli/commands/LensDimensionTableCommands.java
----------------------------------------------------------------------
diff --git a/lens-cli/src/main/java/org/apache/lens/cli/commands/LensDimensionTableCommands.java b/lens-cli/src/main/java/org/apache/lens/cli/commands/LensDimensionTableCommands.java
index 5a3fef3..c61be09 100644
--- a/lens-cli/src/main/java/org/apache/lens/cli/commands/LensDimensionTableCommands.java
+++ b/lens-cli/src/main/java/org/apache/lens/cli/commands/LensDimensionTableCommands.java
@@ -333,7 +333,7 @@ public class LensDimensionTableCommands extends BaseLensCommand implements Comma
    * @param specPair the spec pair
    * @return the string
    */
-  @CliCommand(value = "dimtable add partition", help = "add a partition to dim table")
+  @CliCommand(value = "dimtable add single-partition", help = "add a partition to dim table")
   public String addPartitionToDimTable(
     @CliOption(key = {"", "table"}, mandatory = true, help = "<dimension-table-name> <storage-name>"
       + " <path to partition specification>") String specPair) {
@@ -342,7 +342,7 @@ public class LensDimensionTableCommands extends BaseLensCommand implements Comma
     APIResult result;
     if (pair.length != 3) {
       return "Syntax error, please try in following "
-        + "format. dimtable add partition <table> <storage> <partition spec>";
+        + "format. dimtable add single-partition <table> <storage> <partition spec>";
     }
 
     File f = new File(pair[2]);
@@ -373,7 +373,7 @@ public class LensDimensionTableCommands extends BaseLensCommand implements Comma
     APIResult result;
     if (pair.length != 3) {
       return "Syntax error, please try in following "
-        + "format. dimtable add partition <table> <storage> <partition spec>";
+        + "format. dimtable add partitions <table> <storage> <partition spec>";
     }
 
     File f = new File(pair[2]);

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/d597314c/lens-cli/src/main/java/org/apache/lens/cli/commands/LensFactCommands.java
----------------------------------------------------------------------
diff --git a/lens-cli/src/main/java/org/apache/lens/cli/commands/LensFactCommands.java b/lens-cli/src/main/java/org/apache/lens/cli/commands/LensFactCommands.java
index 47686e9..a69d361 100644
--- a/lens-cli/src/main/java/org/apache/lens/cli/commands/LensFactCommands.java
+++ b/lens-cli/src/main/java/org/apache/lens/cli/commands/LensFactCommands.java
@@ -311,7 +311,7 @@ public class LensFactCommands extends BaseLensCommand implements CommandMarker {
    * @param specPair the spec pair
    * @return the string
    */
-  @CliCommand(value = "fact add partition", help = "add a partition to fact table")
+  @CliCommand(value = "fact add single-partition", help = "add a partition to fact table")
   public String addPartitionToFact(
     @CliOption(key = {"", "table"}, mandatory = true, help
       = "<table> <storage> <path to partition spec>") String specPair) {
@@ -319,7 +319,8 @@ public class LensFactCommands extends BaseLensCommand implements CommandMarker {
     String[] pair = Iterables.toArray(parts, String.class);
     APIResult result;
     if (pair.length != 3) {
-      return "Syntax error, please try in following " + "format. fact add partition <table> <storage> <partition spec>";
+      return "Syntax error, please try in following " + "format. fact add single-partition "
+        + "<table> <storage> <partition spec>";
     }
 
     File f = new File(pair[2]);
@@ -349,7 +350,8 @@ public class LensFactCommands extends BaseLensCommand implements CommandMarker {
     String[] pair = Iterables.toArray(parts, String.class);
     APIResult result;
     if (pair.length != 3) {
-      return "Syntax error, please try in following " + "format. fact add partition <table> <storage> <partition spec>";
+      return "Syntax error, please try in following "
+        + "format. fact add partitions <table> <storage> <partitions spec>";
     }
 
     File f = new File(pair[2]);

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/d597314c/lens-cli/src/test/java/org/apache/lens/cli/TestLensFactCommands.java
----------------------------------------------------------------------
diff --git a/lens-cli/src/test/java/org/apache/lens/cli/TestLensFactCommands.java b/lens-cli/src/test/java/org/apache/lens/cli/TestLensFactCommands.java
index f225155..f056bb7 100644
--- a/lens-cli/src/test/java/org/apache/lens/cli/TestLensFactCommands.java
+++ b/lens-cli/src/test/java/org/apache/lens/cli/TestLensFactCommands.java
@@ -199,6 +199,22 @@ public class TestLensFactCommands extends LensCliApplicationTest {
       Assert.fail("Unable to locate the storage part file for adding new storage to fact table fact1");
     }
     verifyAndDeletePartitions();
+
+    // Wrong files:
+    try {
+      command.addPartitionToFact("fact1 " + FACT_LOCAL + " " + new File(
+        TestLensFactCommands.class.getClassLoader().getResource("fact1-local-parts.xml").toURI()).getAbsolutePath());
+      Assert.fail("Should fail");
+    } catch (Throwable t) {
+      // pass
+    }
+    try {
+      command.addPartitionsToFact("fact1 " + FACT_LOCAL + " " + new File(
+        TestLensFactCommands.class.getClassLoader().getResource("fact1-local-part.xml").toURI()).getAbsolutePath());
+      Assert.fail("Should fail");
+    } catch (Throwable t) {
+      // pass
+    }
   }
 
   private void verifyAndDeletePartitions() {

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/d597314c/lens-client/src/main/java/org/apache/lens/client/LensMetadataClient.java
----------------------------------------------------------------------
diff --git a/lens-client/src/main/java/org/apache/lens/client/LensMetadataClient.java b/lens-client/src/main/java/org/apache/lens/client/LensMetadataClient.java
index 3462dc3..ad79cf2 100644
--- a/lens-client/src/main/java/org/apache/lens/client/LensMetadataClient.java
+++ b/lens-client/src/main/java/org/apache/lens/client/LensMetadataClient.java
@@ -186,23 +186,22 @@ public class LensMetadataClient {
     return result;
   }
 
-  private Object readFromXML(String filename) throws JAXBException, IOException {
+  private <T> T readFromXML(String filename) throws JAXBException, IOException {
     if (filename.startsWith("/")) {
-      return ((JAXBElement) JAXB_UNMARSHALLER.unmarshal(new File(filename))).getValue();
+      return ((JAXBElement<T>) JAXB_UNMARSHALLER.unmarshal(new File(filename))).getValue();
     } else {
       // load from classpath
       InputStream file = Thread.currentThread().getContextClassLoader().getResourceAsStream(filename);
       if (file == null) {
         throw new IOException("File not found:" + filename);
       }
-      return ((JAXBElement) JAXB_UNMARSHALLER.unmarshal(file)).getValue();
+      return ((JAXBElement<T>) JAXB_UNMARSHALLER.unmarshal(file)).getValue();
     }
   }
 
   public APIResult createCube(String cubeSpec) {
-    XCube cube;
     try {
-      cube = (XCube) readFromXML(cubeSpec);
+      return createCube(this.<XCube>readFromXML(cubeSpec));
     } catch (JAXBException e) {
       LOG.info("Unmarshalling error:", e);
       return new APIResult(Status.FAILED, "Unmarshalling failed");
@@ -210,7 +209,6 @@ public class LensMetadataClient {
       LOG.info("File error:", e);
       return new APIResult(Status.FAILED, "File not found");
     }
-    return createCube(cube);
   }
 
   public APIResult updateCube(String cubeName, XCube cube) {
@@ -223,9 +221,8 @@ public class LensMetadataClient {
   }
 
   public APIResult updateCube(String cubeName, String cubeSpec) {
-    XCube cube;
     try {
-      cube = (XCube) readFromXML(cubeSpec);
+      return updateCube(cubeName, this.<XCube>readFromXML(cubeSpec));
     } catch (JAXBException e) {
       LOG.info("Unmarshalling error:", e);
       return new APIResult(Status.FAILED, "Unmarshalling failed");
@@ -233,7 +230,6 @@ public class LensMetadataClient {
       LOG.info("File error:", e);
       return new APIResult(Status.FAILED, "File not found");
     }
-    return updateCube(cubeName, cube);
   }
 
   public XCube getCube(String cubeName) {
@@ -279,12 +275,15 @@ public class LensMetadataClient {
   }
 
   public APIResult createDimension(String dimSpec) {
-    WebTarget target = getMetastoreWebTarget();
-    APIResult result = target.path("dimensions")
-      .queryParam("sessionid", this.connection.getSessionHandle())
-      .request(MediaType.APPLICATION_XML)
-      .post(Entity.xml(getContent(dimSpec)), APIResult.class);
-    return result;
+    try {
+      return createDimension(this.<XDimension>readFromXML(dimSpec));
+    } catch (JAXBException e) {
+      LOG.info("Unmarshalling error:", e);
+      return new APIResult(Status.FAILED, "Unmarshalling failed");
+    } catch (IOException e) {
+      LOG.info("File error:", e);
+      return new APIResult(Status.FAILED, "File not found");
+    }
   }
 
   public APIResult updateDimension(String dimName, XDimension dimension) {
@@ -297,12 +296,15 @@ public class LensMetadataClient {
   }
 
   public APIResult updateDimension(String dimName, String dimSpec) {
-    WebTarget target = getMetastoreWebTarget();
-    APIResult result = target.path("dimensions").path(dimName)
-      .queryParam("sessionid", this.connection.getSessionHandle())
-      .request(MediaType.APPLICATION_XML)
-      .put(Entity.xml(getContent(dimSpec)), APIResult.class);
-    return result;
+    try {
+      return updateDimension(dimName, this.<XDimension>readFromXML(dimSpec));
+    } catch (JAXBException e) {
+      LOG.info("Unmarshalling error:", e);
+      return new APIResult(Status.FAILED, "Unmarshalling failed");
+    } catch (IOException e) {
+      LOG.info("File error:", e);
+      return new APIResult(Status.FAILED, "File not found");
+    }
   }
 
   public XDimension getDimension(String dimName) {
@@ -342,12 +344,15 @@ public class LensMetadataClient {
 
 
   public APIResult createNewStorage(String storage) {
-    WebTarget target = getMetastoreWebTarget();
-    APIResult result = target.path("storages")
-      .queryParam("sessionid", this.connection.getSessionHandle())
-      .request(MediaType.APPLICATION_XML)
-      .post(Entity.xml(getContent(storage)), APIResult.class);
-    return result;
+    try {
+      return createNewStorage(this.<XStorage>readFromXML(storage));
+    } catch (JAXBException e) {
+      LOG.info("Unmarshalling error:", e);
+      return new APIResult(Status.FAILED, "Unmarshalling failed");
+    } catch (IOException e) {
+      LOG.info("File error:", e);
+      return new APIResult(Status.FAILED, "File not found");
+    }
   }
 
   public APIResult dropAllStorages() {
@@ -369,12 +374,15 @@ public class LensMetadataClient {
   }
 
   public APIResult updateStorage(String storageName, String storage) {
-    WebTarget target = getMetastoreWebTarget();
-    APIResult result = target.path("storages").path(storageName)
-      .queryParam("sessionid", this.connection.getSessionHandle())
-      .request(MediaType.APPLICATION_XML)
-      .put(Entity.xml(getContent(storage)), APIResult.class);
-    return result;
+    try {
+      return updateStorage(storageName, this.<XStorage>readFromXML(storage));
+    } catch (JAXBException e) {
+      LOG.info("Unmarshalling error:", e);
+      return new APIResult(Status.FAILED, "Unmarshalling failed");
+    } catch (IOException e) {
+      LOG.info("File error:", e);
+      return new APIResult(Status.FAILED, "File not found");
+    }
   }
 
   public XStorage getStorage(String storageName) {
@@ -452,18 +460,15 @@ public class LensMetadataClient {
   }
 
   public APIResult createFactTable(String factSpec) {
-    WebTarget target = getMetastoreWebTarget();
-    FormDataMultiPart mp = new FormDataMultiPart();
-    mp.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("sessionid")
-      .build(), this.connection.getSessionHandle(), MediaType.APPLICATION_XML_TYPE));
-    mp.bodyPart(new FormDataBodyPart(
-      FormDataContentDisposition.name("fact").fileName("fact").build(),
-      getContent(factSpec), MediaType.APPLICATION_XML_TYPE));
-    APIResult result = target.path("facts")
-      .request(MediaType.APPLICATION_XML_TYPE)
-      .post(Entity.entity(mp, MediaType.MULTIPART_FORM_DATA_TYPE),
-        APIResult.class);
-    return result;
+    try {
+      return createFactTable(this.<XFactTable>readFromXML(factSpec));
+    } catch (JAXBException e) {
+      LOG.info("Unmarshalling error:", e);
+      return new APIResult(Status.FAILED, "Unmarshalling failed");
+    } catch (IOException e) {
+      LOG.info("File error:", e);
+      return new APIResult(Status.FAILED, "File not found");
+    }
   }
 
   private String getContent(String path) {
@@ -499,12 +504,15 @@ public class LensMetadataClient {
   }
 
   public APIResult updateFactTable(String factName, String table) {
-    WebTarget target = getMetastoreWebTarget();
-    APIResult result = target.path("facts").path(factName)
-      .queryParam("sessionid", this.connection.getSessionHandle())
-      .request(MediaType.APPLICATION_XML_TYPE)
-      .put(Entity.xml(getContent(table)), APIResult.class);
-    return result;
+    try {
+      return updateFactTable(factName, this.<XFactTable>readFromXML(table));
+    } catch (JAXBException e) {
+      LOG.info("Unmarshalling error:", e);
+      return new APIResult(Status.FAILED, "Unmarshalling failed");
+    } catch (IOException e) {
+      LOG.info("File error:", e);
+      return new APIResult(Status.FAILED, "File not found");
+    }
   }
 
   public APIResult dropFactTable(String factName, boolean cascade) {
@@ -548,13 +556,16 @@ public class LensMetadataClient {
     return result;
   }
 
-  public APIResult addStorageToFactTable(String factname, String storage) {
-    WebTarget target = getMetastoreWebTarget();
-    APIResult result = target.path("facts").path(factname).path("storages")
-      .queryParam("sessionid", this.connection.getSessionHandle())
-      .request(MediaType.APPLICATION_XML)
-      .post(Entity.xml(getContent(storage)), APIResult.class);
-    return result;
+  public APIResult addStorageToFactTable(String factname, String storageSpec) {
+    try {
+      return addStorageToFactTable(factname, this.<XStorageTableElement>readFromXML(storageSpec));
+    } catch (JAXBException e) {
+      LOG.info("Unmarshalling error:", e);
+      return new APIResult(Status.FAILED, "Unmarshalling failed");
+    } catch (IOException e) {
+      LOG.info("File error:", e);
+      return new APIResult(Status.FAILED, "File not found");
+    }
   }
 
   public APIResult dropStorageFromFactTable(String factName, String storageName) {
@@ -649,19 +660,15 @@ public class LensMetadataClient {
   }
 
   public APIResult createDimensionTable(String tableXml) {
-    WebTarget target = getMetastoreWebTarget();
-
-    FormDataMultiPart mp = new FormDataMultiPart();
-    mp.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("sessionid").build(),
-      this.connection.getSessionHandle(), MediaType.APPLICATION_XML_TYPE));
-    mp.bodyPart(new FormDataBodyPart(
-      FormDataContentDisposition.name("dimensionTable").fileName("dimtable").build(),
-      getContent(tableXml), MediaType.APPLICATION_XML_TYPE));
-
-    APIResult result = target.path("dimtables")
-      .request(MediaType.APPLICATION_XML)
-      .post(Entity.entity(mp, MediaType.MULTIPART_FORM_DATA_TYPE), APIResult.class);
-    return result;
+    try {
+      return createDimensionTable(this.<XDimensionTable>readFromXML(tableXml));
+    } catch (JAXBException e) {
+      LOG.info("Unmarshalling error:", e);
+      return new APIResult(Status.FAILED, "Unmarshalling failed");
+    } catch (IOException e) {
+      LOG.info("File error:", e);
+      return new APIResult(Status.FAILED, "File not found");
+    }
   }
 
 
@@ -676,12 +683,18 @@ public class LensMetadataClient {
   }
 
   public APIResult updateDimensionTable(String dimTblName, String dimSpec) {
-    WebTarget target = getMetastoreWebTarget();
-    APIResult result = target.path("dimtables").path(dimTblName)
-      .queryParam("sessionid", this.connection.getSessionHandle())
-      .request(MediaType.APPLICATION_XML)
-      .put(Entity.xml(getContent(dimSpec)), APIResult.class);
-    return result;
+    try {
+      XDimensionTable dimensionTable = readFromXML(dimSpec);
+      dimensionTable.setTableName(dimTblName);
+      return updateDimensionTable(dimensionTable);
+    } catch (JAXBException e) {
+      LOG.info("Unmarshalling error:", e);
+      return new APIResult(Status.FAILED, "Unmarshalling failed");
+    } catch (IOException e) {
+      LOG.info("File error:", e);
+      return new APIResult(Status.FAILED, "File not found");
+    }
+
   }
 
   public APIResult dropDimensionTable(String table, boolean cascade) {
@@ -728,12 +741,15 @@ public class LensMetadataClient {
   }
 
   public APIResult addStorageToDimTable(String dimTblName, String table) {
-    WebTarget target = getMetastoreWebTarget();
-    APIResult result = target.path("dimtables").path(dimTblName).path("storages")
-      .queryParam("sessionid", this.connection.getSessionHandle())
-      .request(MediaType.APPLICATION_XML)
-      .post(Entity.xml(getContent(table)), APIResult.class);
-    return result;
+    try {
+      return addStorageToDimTable(dimTblName, this.<XStorageTableElement>readFromXML(table));
+    } catch (JAXBException e) {
+      LOG.info("Unmarshalling error:", e);
+      return new APIResult(Status.FAILED, "Unmarshalling failed");
+    } catch (IOException e) {
+      LOG.info("File error:", e);
+      return new APIResult(Status.FAILED, "File not found");
+    }
   }
 
   public XStorageTableElement getStorageOfDimensionTable(String dimTblName, String storage) {
@@ -825,15 +841,18 @@ public class LensMetadataClient {
   }
 
   public APIResult addPartitionToDimensionTable(String dimTblName, String storage,
-    String partition) {
-    WebTarget target = getMetastoreWebTarget();
-    APIResult result = target.path("dimtables").path(dimTblName)
-      .path("storages").path(storage).path("partition")
-      .queryParam("sessionid", this.connection.getSessionHandle())
-      .request(MediaType.APPLICATION_XML)
-      .post(Entity.xml(getContent(partition)), APIResult.class);
-    return result;
+    String partitionSpec) {
+    try {
+      return addPartitionToDimensionTable(dimTblName, storage, (XPartition) readFromXML(partitionSpec));
+    } catch (JAXBException e) {
+      LOG.info("Unmarshalling error:", e);
+      return new APIResult(Status.FAILED, "Unmarshalling failed");
+    } catch (IOException e) {
+      LOG.info("File error:", e);
+      return new APIResult(Status.FAILED, "File not found");
+    }
   }
+
   public APIResult addPartitionsToDimensionTable(String dimTblName, String storage,
     XPartitionList partitions) {
     WebTarget target = getMetastoreWebTarget();
@@ -846,14 +865,16 @@ public class LensMetadataClient {
   }
 
   public APIResult addPartitionsToDimensionTable(String dimTblName, String storage,
-    String partitions) {
-    WebTarget target = getMetastoreWebTarget();
-    APIResult result = target.path("dimtables").path(dimTblName)
-      .path("storages").path(storage).path("partitions")
-      .queryParam("sessionid", this.connection.getSessionHandle())
-      .request(MediaType.APPLICATION_XML)
-      .post(Entity.xml(getContent(partitions)), APIResult.class);
-    return result;
+    String partitionsSpec) {
+    try {
+      return addPartitionsToDimensionTable(dimTblName, storage, (XPartitionList) readFromXML(partitionsSpec));
+    } catch (JAXBException e) {
+      LOG.info("Unmarshalling error:", e);
+      return new APIResult(Status.FAILED, "Unmarshalling failed");
+    } catch (IOException e) {
+      LOG.info("File error:", e);
+      return new APIResult(Status.FAILED, "File not found");
+    }
   }
 
   public APIResult addPartitionToFactTable(String fact, String storage,
@@ -868,15 +889,18 @@ public class LensMetadataClient {
   }
 
   public APIResult addPartitionToFactTable(String fact, String storage,
-    String partition) {
-    WebTarget target = getMetastoreWebTarget();
-    APIResult result = target.path("facts").path(fact)
-      .path("storages").path(storage).path("partition")
-      .queryParam("sessionid", this.connection.getSessionHandle())
-      .request(MediaType.APPLICATION_XML)
-      .post(Entity.xml(getContent(partition)), APIResult.class);
-    return result;
+    String partitionSpec) {
+    try {
+      return addPartitionToFactTable(fact, storage, (XPartition) readFromXML(partitionSpec));
+    } catch (JAXBException e) {
+      LOG.info("Unmarshalling error:", e);
+      return new APIResult(Status.FAILED, "Unmarshalling failed");
+    } catch (IOException e) {
+      LOG.info("File error:", e);
+      return new APIResult(Status.FAILED, "File not found");
+    }
   }
+
   public APIResult addPartitionsToFactTable(String fact, String storage,
     XPartitionList partitions) {
     WebTarget target = getMetastoreWebTarget();
@@ -889,13 +913,15 @@ public class LensMetadataClient {
   }
 
   public APIResult addPartitionsToFactTable(String fact, String storage,
-    String partitions) {
-    WebTarget target = getMetastoreWebTarget();
-    APIResult result = target.path("facts").path(fact)
-      .path("storages").path(storage).path("partitions")
-      .queryParam("sessionid", this.connection.getSessionHandle())
-      .request(MediaType.APPLICATION_XML)
-      .post(Entity.xml(getContent(partitions)), APIResult.class);
-    return result;
+    String partitionsSpec) {
+    try {
+      return addPartitionsToFactTable(fact, storage, (XPartitionList) readFromXML(partitionsSpec));
+    } catch (JAXBException e) {
+      LOG.info("Unmarshalling error:", e);
+      return new APIResult(Status.FAILED, "Unmarshalling failed");
+    } catch (IOException e) {
+      LOG.info("File error:", e);
+      return new APIResult(Status.FAILED, "File not found");
+    }
   }
 }


[17/50] [abbrv] incubator-lens git commit: LENS-437 : Remove counts and weights from QueryPlan (amareshwari)

Posted by am...@apache.org.
LENS-437 : Remove counts and weights from QueryPlan (amareshwari)


Project: http://git-wip-us.apache.org/repos/asf/incubator-lens/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-lens/commit/e3e45aef
Tree: http://git-wip-us.apache.org/repos/asf/incubator-lens/tree/e3e45aef
Diff: http://git-wip-us.apache.org/repos/asf/incubator-lens/diff/e3e45aef

Branch: refs/heads/current-release-line
Commit: e3e45aef2b596ad18c1579256ad3e6884b6e93ad
Parents: f57a9a1
Author: Amareshwari Sriramadasu <am...@apache.org>
Authored: Tue Mar 31 10:07:31 2015 +0530
Committer: Amareshwari Sriramadasu <am...@apache.org>
Committed: Tue Mar 31 10:07:31 2015 +0530

----------------------------------------------------------------------
 .../org/apache/lens/api/query/QueryPlan.java    | 136 --------
 .../apache/lens/driver/cube/RewriterPlan.java   |  31 --
 .../lens/cube/parse/TestRewriterPlan.java       |  21 --
 .../apache/lens/driver/hive/HiveQueryPlan.java  |  22 +-
 .../apache/lens/driver/hive/TestHiveDriver.java |   2 -
 .../lens/server/api/driver/DriverQueryPlan.java | 328 +------------------
 .../lens/server/query/TestQueryService.java     |   3 -
 7 files changed, 4 insertions(+), 539 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/e3e45aef/lens-api/src/main/java/org/apache/lens/api/query/QueryPlan.java
----------------------------------------------------------------------
diff --git a/lens-api/src/main/java/org/apache/lens/api/query/QueryPlan.java b/lens-api/src/main/java/org/apache/lens/api/query/QueryPlan.java
index a578759..a836b1e 100644
--- a/lens-api/src/main/java/org/apache/lens/api/query/QueryPlan.java
+++ b/lens-api/src/main/java/org/apache/lens/api/query/QueryPlan.java
@@ -24,7 +24,6 @@ package org.apache.lens.api.query;
 import java.io.UnsupportedEncodingException;
 import java.net.URLDecoder;
 import java.util.List;
-import java.util.Map;
 
 import javax.xml.bind.annotation.XmlElement;
 import javax.xml.bind.annotation.XmlElementWrapper;
@@ -39,22 +38,6 @@ import lombok.*;
 /**
  * Instantiates a new query plan.
  *
- * @param numJoins
- *          the num joins
- * @param numGbys
- *          the num gbys
- * @param numSels
- *          the num sels
- * @param numSelDi
- *          the num sel di
- * @param numHaving
- *          the num having
- * @param numObys
- *          the num obys
- * @param numAggrExprs
- *          the num aggr exprs
- * @param numFilters
- *          the num filters
  * @param tablesQueried
  *          the tables queried
  * @param hasSubQuery
@@ -63,20 +46,6 @@ import lombok.*;
  *          the exec mode
  * @param scanMode
  *          the scan mode
- * @param tableWeights
- *          the table weights
- * @param joinWeight
- *          the join weight
- * @param gbyWeight
- *          the gby weight
- * @param filterWeight
- *          the filter weight
- * @param havingWeight
- *          the having weight
- * @param obyWeight
- *          the oby weight
- * @param selectWeight
- *          the select weight
  * @param prepareHandle
  *          the prepare handle
  * @param planString
@@ -96,62 +65,6 @@ import lombok.*;
 public class QueryPlan extends QuerySubmitResult {
 
   /**
-   * The num joins.
-   */
-  @XmlElement
-  @Getter
-  private int numJoins = 0;
-
-  /**
-   * The num gbys.
-   */
-  @XmlElement
-  @Getter
-  private int numGbys = 0;
-
-  /**
-   * The num sels.
-   */
-  @XmlElement
-  @Getter
-  private int numSels = 0;
-
-  /**
-   * The num sel di.
-   */
-  @XmlElement
-  @Getter
-  private int numSelDi = 0;
-
-  /**
-   * The num having.
-   */
-  @XmlElement
-  @Getter
-  private int numHaving = 0;
-
-  /**
-   * The num obys.
-   */
-  @XmlElement
-  @Getter
-  private int numObys = 0;
-
-  /**
-   * The num aggr exprs.
-   */
-  @XmlElement
-  @Getter
-  private int numAggrExprs = 0;
-
-  /**
-   * The num filters.
-   */
-  @XmlElement
-  @Getter
-  private int numFilters = 0;
-
-  /**
    * The tables queried.
    */
   @XmlElementWrapper
@@ -180,55 +93,6 @@ public class QueryPlan extends QuerySubmitResult {
   private String scanMode;
 
   /**
-   * The table weights.
-   */
-  @XmlElementWrapper
-  @Getter
-  private Map<String, Double> tableWeights;
-
-  /**
-   * The join weight.
-   */
-  @XmlElement
-  @Getter
-  private Double joinWeight;
-
-  /**
-   * The gby weight.
-   */
-  @XmlElement
-  @Getter
-  private Double gbyWeight;
-
-  /**
-   * The filter weight.
-   */
-  @XmlElement
-  @Getter
-  private Double filterWeight;
-
-  /**
-   * The having weight.
-   */
-  @XmlElement
-  @Getter
-  private Double havingWeight;
-
-  /**
-   * The oby weight.
-   */
-  @XmlElement
-  @Getter
-  private Double obyWeight;
-
-  /**
-   * The select weight.
-   */
-  @XmlElement
-  @Getter
-  private Double selectWeight;
-
-  /**
    * The prepare handle.
    */
   @Getter

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/e3e45aef/lens-cube/src/main/java/org/apache/lens/driver/cube/RewriterPlan.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/driver/cube/RewriterPlan.java b/lens-cube/src/main/java/org/apache/lens/driver/cube/RewriterPlan.java
index b05814e..841946d 100644
--- a/lens-cube/src/main/java/org/apache/lens/driver/cube/RewriterPlan.java
+++ b/lens-cube/src/main/java/org/apache/lens/driver/cube/RewriterPlan.java
@@ -46,27 +46,9 @@ public final class RewriterPlan extends DriverQueryPlan {
 
   @SuppressWarnings("unchecked") // required for (Set<FactPartition>) casting
   void extractPlan(Collection<CubeQueryContext> cubeQueries) {
-    int selectCount = 0;
-    int havingCount = 0;
-    int joinCount = 0;
-    int groupbyCount = 0;
-    int orderbyCount = 0;
 
     for (CubeQueryContext ctx : cubeQueries) {
-      if (ctx.getOrderByAST() != null) {
-        orderbyCount += ctx.getOrderByAST().getChildCount();
-      }
-      if (ctx.getGroupByAST() != null) {
-        groupbyCount += ctx.getGroupByAST().getChildCount();
-      }
-      if (ctx.getHavingAST() != null) {
-        havingCount += ctx.getHavingAST().getChildCount();
-      }
-      if (ctx.getSelectAST() != null) {
-        selectCount += ctx.getSelectAST().getChildCount();
-      }
       if (ctx.getPickedDimTables() != null && !ctx.getPickedDimTables().isEmpty()) {
-        joinCount += ctx.getPickedDimTables().size();
         for (CandidateTable dim : ctx.getPickedDimTables()) {
           addTablesQueried(dim.getStorageTables());
           if (partitions.get(dim.getName()) == null || partitions.get(dim.getName()).isEmpty()) {
@@ -85,11 +67,6 @@ public final class RewriterPlan extends DriverQueryPlan {
           }
           factParts.addAll((Set<FactPartition>) fact.getPartsQueried());
         }
-      } else {
-        // if no facts are there, reducing join count by one, as target would be one of the dimtables picked
-        if (joinCount > 0) {
-          joinCount--;
-        }
       }
       for (String table : getTablesQueried()) {
         if (!tableWeights.containsKey(table)) {
@@ -110,14 +87,6 @@ public final class RewriterPlan extends DriverQueryPlan {
       }
     }
     setHasSubQuery(hasSubQuery || cubeQueries.size() > 1);
-    setNumGbys(groupbyCount);
-    setNumJoins(joinCount);
-    setNumOrderBys(orderbyCount);
-    setNumSels(selectCount);
-    setNumHaving(havingCount);
-    setNumAggreagateExprs(-1);
-    setNumSelDistincts(-1);
-    setNumFilters(-1);
   }
 
   @Override

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/e3e45aef/lens-cube/src/test/java/org/apache/lens/cube/parse/TestRewriterPlan.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestRewriterPlan.java b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestRewriterPlan.java
index 0738b27..544f4a0 100644
--- a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestRewriterPlan.java
+++ b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestRewriterPlan.java
@@ -52,13 +52,6 @@ public class TestRewriterPlan extends TestQueryRewrite {
     ctx.toHQL();
     RewriterPlan plan = new RewriterPlan(Collections.singleton(ctx));
     Assert.assertNotNull(plan);
-    Assert.assertEquals(plan.getNumSels(), 1);
-    Assert.assertEquals(plan.getNumGbys(), 0);
-    Assert.assertEquals(plan.getNumJoins(), 0);
-    Assert.assertEquals(plan.getNumHaving(), 0);
-    Assert.assertEquals(plan.getNumFilters(), -1);
-    Assert.assertEquals(plan.getNumOrderBys(), 0);
-    Assert.assertEquals(plan.getNumAggreagateExprs(), -1);
     Assert.assertFalse(plan.getTablesQueried().isEmpty());
     Assert.assertTrue(plan.getTablesQueried().contains("TestQueryRewrite.c2_testfact"));
     Assert.assertEquals(plan.getTableWeights().get("TestQueryRewrite.c2_testfact"), 1.0);
@@ -75,13 +68,6 @@ public class TestRewriterPlan extends TestQueryRewrite {
     ctx.toHQL();
     RewriterPlan plan = new RewriterPlan(Collections.singleton(ctx));
     Assert.assertNotNull(plan);
-    Assert.assertEquals(plan.getNumSels(), 2);
-    Assert.assertEquals(plan.getNumGbys(), 1);
-    Assert.assertEquals(plan.getNumJoins(), 1);
-    Assert.assertEquals(plan.getNumHaving(), 1);
-    Assert.assertEquals(plan.getNumOrderBys(), 1);
-    Assert.assertEquals(plan.getNumFilters(), -1);
-    Assert.assertEquals(plan.getNumAggreagateExprs(), -1);
     Assert.assertFalse(plan.getTablesQueried().isEmpty());
     Assert.assertTrue(plan.getTablesQueried().contains("TestQueryRewrite.c2_testfact"));
     Assert.assertTrue(plan.getTablesQueried().contains("TestQueryRewrite.c1_citytable"));
@@ -104,13 +90,6 @@ public class TestRewriterPlan extends TestQueryRewrite {
     ctx2.toHQL();
     RewriterPlan plan = new RewriterPlan(Arrays.asList(ctx1, ctx2));
     Assert.assertNotNull(plan);
-    Assert.assertEquals(plan.getNumSels(), 3);
-    Assert.assertEquals(plan.getNumGbys(), 1);
-    Assert.assertEquals(plan.getNumJoins(), 1);
-    Assert.assertEquals(plan.getNumHaving(), 1);
-    Assert.assertEquals(plan.getNumOrderBys(), 1);
-    Assert.assertEquals(plan.getNumFilters(), -1);
-    Assert.assertEquals(plan.getNumAggreagateExprs(), -1);
     Assert.assertFalse(plan.getTablesQueried().isEmpty());
     Assert.assertTrue(plan.getTablesQueried().contains("TestQueryRewrite.c2_testfact"));
     Assert.assertTrue(plan.getTablesQueried().contains("TestQueryRewrite.c1_citytable"));

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/e3e45aef/lens-driver-hive/src/main/java/org/apache/lens/driver/hive/HiveQueryPlan.java
----------------------------------------------------------------------
diff --git a/lens-driver-hive/src/main/java/org/apache/lens/driver/hive/HiveQueryPlan.java b/lens-driver-hive/src/main/java/org/apache/lens/driver/hive/HiveQueryPlan.java
index fce93a9..9d43fa8 100644
--- a/lens-driver-hive/src/main/java/org/apache/lens/driver/hive/HiveQueryPlan.java
+++ b/lens-driver-hive/src/main/java/org/apache/lens/driver/hive/HiveQueryPlan.java
@@ -139,26 +139,6 @@ public class HiveQueryPlan extends DriverQueryPlan {
       case TABLE_SCAN:
         // no op
         break;
-      case JOIN:
-        if (tr.equals("condition map:")) {
-          numJoins++;
-        }
-        break;
-      case SELECT:
-        if (tr.startsWith("expressions:") && states.get(states.size() - 1) == ParserState.TABLE_SCAN) {
-          numSels += StringUtils.split(tr, ",").length;
-        }
-        break;
-      case GROUPBY_EXPRS:
-        if (tr.startsWith("aggregations:")) {
-          numAggrExprs += StringUtils.split(tr, ",").length;
-        }
-        break;
-      case GROUPBY_KEYS:
-        if (tr.startsWith("keys:")) {
-          numGbys += StringUtils.split(tr, ",").length;
-        }
-        break;
       case PARTITION:
         String partConditionStr = null;
         for (; i < explainOutput.size(); i++) {
@@ -213,6 +193,8 @@ public class HiveQueryPlan extends DriverQueryPlan {
           }
         }
         break;
+      default :
+        break;
       }
     }
   }

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/e3e45aef/lens-driver-hive/src/test/java/org/apache/lens/driver/hive/TestHiveDriver.java
----------------------------------------------------------------------
diff --git a/lens-driver-hive/src/test/java/org/apache/lens/driver/hive/TestHiveDriver.java b/lens-driver-hive/src/test/java/org/apache/lens/driver/hive/TestHiveDriver.java
index 089c496..f02490b 100644
--- a/lens-driver-hive/src/test/java/org/apache/lens/driver/hive/TestHiveDriver.java
+++ b/lens-driver-hive/src/test/java/org/apache/lens/driver/hive/TestHiveDriver.java
@@ -759,7 +759,6 @@ public class TestHiveDriver {
     assertNotNull(plan.getTableWeights());
     assertTrue(plan.getTableWeights().containsKey(dataBase + ".explain_test_1"));
     assertTrue(plan.getTableWeights().containsKey(dataBase + ".explain_test_2"));
-    assertEquals(plan.getNumJoins(), 1);
     assertTrue(plan.getPlan() != null && !plan.getPlan().isEmpty());
     driver.closeQuery(plan.getHandle());
   }
@@ -784,7 +783,6 @@ public class TestHiveDriver {
     assertNotNull(plan2.getTablesQueried());
     assertEquals(plan2.getTablesQueried().size(), 1);
     assertTrue(plan2.getTableWeights().containsKey(dataBase + ".explain_test_1"));
-    assertEquals(plan2.getNumSels(), 1);
     QueryContext ctx = createContext(pctx, conf);
     LensResultSet resultSet = driver.execute(ctx);
     Assert.assertEquals(0, driver.getHiveHandleSize());

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/e3e45aef/lens-server-api/src/main/java/org/apache/lens/server/api/driver/DriverQueryPlan.java
----------------------------------------------------------------------
diff --git a/lens-server-api/src/main/java/org/apache/lens/server/api/driver/DriverQueryPlan.java b/lens-server-api/src/main/java/org/apache/lens/server/api/driver/DriverQueryPlan.java
index 1d41720..9de22c5 100644
--- a/lens-server-api/src/main/java/org/apache/lens/server/api/driver/DriverQueryPlan.java
+++ b/lens-server-api/src/main/java/org/apache/lens/server/api/driver/DriverQueryPlan.java
@@ -84,46 +84,6 @@ public abstract class DriverQueryPlan {
   }
 
   /**
-   * The num joins.
-   */
-  protected int numJoins = 0;
-
-  /**
-   * The num gbys.
-   */
-  protected int numGbys = 0;
-
-  /**
-   * The num sels.
-   */
-  protected int numSels = 0;
-
-  /**
-   * The num sel di.
-   */
-  protected int numSelDi = 0;
-
-  /**
-   * The num having.
-   */
-  protected int numHaving = 0;
-
-  /**
-   * The num obys.
-   */
-  protected int numObys = 0;
-
-  /**
-   * The num aggr exprs.
-   */
-  protected int numAggrExprs = 0;
-
-  /**
-   * The num filters.
-   */
-  protected int numFilters = 0;
-
-  /**
    * The tables queried.
    */
   protected final Set<String> tablesQueried = new HashSet<String>();
@@ -154,36 +114,6 @@ public abstract class DriverQueryPlan {
   protected final Map<String, Double> tableWeights = new HashMap<String, Double>();
 
   /**
-   * The join weight.
-   */
-  protected Double joinWeight;
-
-  /**
-   * The gby weight.
-   */
-  protected Double gbyWeight;
-
-  /**
-   * The filter weight.
-   */
-  protected Double filterWeight;
-
-  /**
-   * The having weight.
-   */
-  protected Double havingWeight;
-
-  /**
-   * The oby weight.
-   */
-  protected Double obyWeight;
-
-  /**
-   * The select weight.
-   */
-  protected Double selectWeight;
-
-  /**
    * The handle.
    */
   protected QueryPrepareHandle handle;
@@ -205,114 +135,6 @@ public abstract class DriverQueryPlan {
   public abstract QueryCost getCost();
 
   /**
-   * Get the number of group by expressions on query
-   *
-   * @return the numGbys
-   */
-  public int getNumGbys() {
-    return numGbys;
-  }
-
-  /**
-   * Set the number of groupbys
-   *
-   * @param numGbys the numGbys to set
-   */
-  protected void setNumGbys(int numGbys) {
-    this.numGbys = numGbys;
-  }
-
-  /**
-   * Get the number of select expressions
-   *
-   * @return the numSels
-   */
-  public int getNumSels() {
-    return numSels;
-  }
-
-  /**
-   * Set the number of select expressions
-   *
-   * @param numSels the numSels to set
-   */
-  protected void setNumSels(int numSels) {
-    this.numSels = numSels;
-  }
-
-  /**
-   * Get the number distinct select expressions
-   *
-   * @return the numSelDi
-   */
-  public int getNumSelDistincts() {
-    return numSelDi;
-  }
-
-  /**
-   * Set the number of distinct select expressions
-   *
-   * @param numSelDi the numSelDi to set
-   */
-  protected void setNumSelDistincts(int numSelDi) {
-    this.numSelDi = numSelDi;
-  }
-
-  /**
-   * Get number of joins in the query
-   *
-   * @return the numJoins
-   */
-  public int getNumJoins() {
-    return numJoins;
-  }
-
-  /**
-   * Set the number of join expressions on query
-   *
-   * @param numJoins the numJoins to set
-   */
-  protected void setNumJoins(int numJoins) {
-    this.numJoins = numJoins;
-  }
-
-  /**
-   * Get the number of having expressions on query
-   *
-   * @return the numHaving
-   */
-  public int getNumHaving() {
-    return numHaving;
-  }
-
-  /**
-   * Set the number of having expressions on query
-   *
-   * @param numHaving the numHaving to set
-   */
-  protected void setNumHaving(int numHaving) {
-    this.numHaving = numHaving;
-  }
-
-  /**
-   * Get the number of order by expressions on query
-   *
-   * @return the numObys
-   */
-  public int getNumOrderBys() {
-    return numObys;
-  }
-
-  /**
-   * Set the number of order by expressions on query
-   *
-   * @param numObys the numObys to set
-   */
-  protected void setNumOrderBys(int numObys) {
-    this.numObys = numObys;
-  }
-
-  /**
    * Get the list of tables to be queried
    *
    * @return the tablesQueried
@@ -340,24 +162,6 @@ public abstract class DriverQueryPlan {
   }
 
   /**
-   * Get the number of filters in query
-   *
-   * @return the numFilters
-   */
-  public int getNumFilters() {
-    return numFilters;
-  }
-
-  /**
-   * Set the number of filters in query
-   *
-   * @param numFilters the numFilters to set
-   */
-  protected void setNumFilters(int numFilters) {
-    this.numFilters = numFilters;
-  }
-
-  /**
    * Get if the query has a subquery or not.
    *
    * @return the hasSubQuery true if query has subquery, false otherwise
@@ -458,114 +262,6 @@ public abstract class DriverQueryPlan {
   }
 
   /**
-   * Get the weight associated with joins
-   *
-   * @return the joinWeight
-   */
-  public Double getJoinWeight() {
-    return joinWeight;
-  }
-
-  /**
-   * Set the weight associated with joins
-   *
-   * @param joinWeight the joinWeight to set
-   */
-  protected void setJoinWeight(Double joinWeight) {
-    this.joinWeight = joinWeight;
-  }
-
-  /**
-   * Set the weight associated with group by expressions.
-   *
-   * @return the gbyWeight
-   */
-  public Double getGbyWeight() {
-    return gbyWeight;
-  }
-
-  /**
-   * Set the weight associated with group by expressions.
-   *
-   * @param gbyWeight the gbyWeight to set
-   */
-  protected void setGbyWeight(Double gbyWeight) {
-    this.gbyWeight = gbyWeight;
-  }
-
-  /**
-   * Set the weight associated with filter expressions.
-   *
-   * @return the filterWeight
-   */
-  public Double getFilterWeight() {
-    return filterWeight;
-  }
-
-  /**
-   * Set the weight associated with filter expressions.
-   *
-   * @param filterWeight the filterWeight to set
-   */
-  protected void setFilterWeight(Double filterWeight) {
-    this.filterWeight = filterWeight;
-  }
-
-  /**
-   * Get the weight associated with order by expressions.
-   *
-   * @return the obyWeight
-   */
-  public Double getObyWeight() {
-    return obyWeight;
-  }
-
-  /**
-   * Set the weight associated with order by expressions.
-   *
-   * @param obyWeight the obyWeight to set
-   */
-  protected void setObyWeight(Double obyWeight) {
-    this.obyWeight = obyWeight;
-  }
-
-  /**
-   * Set the weight associated with having expressions.
-   *
-   * @return the havingWeight
-   */
-  public Double getHavingWeight() {
-    return havingWeight;
-  }
-
-  /**
-   * Set the weight associated with having expressions.
-   *
-   * @param havingWeight the havingWeight to set
-   */
-  protected void setHavingWeight(Double havingWeight) {
-    this.havingWeight = havingWeight;
-  }
-
-  /**
-   * Get the weight associated with select expressions.
-   *
-   * @return the selectWeight
-   */
-  public Double getSelectWeight() {
-    return selectWeight;
-  }
-
-  /**
-   * Set the weight associated with select expressions.
-   *
-   * @param selectWeight the selectWeight to set
-   */
-  protected void setSelectWeight(Double selectWeight) {
-    this.selectWeight = selectWeight;
-  }
-
-  /**
    * @return the handle
    * @deprecated
    */
@@ -592,24 +288,6 @@ public abstract class DriverQueryPlan {
   }
 
   /**
-   * Get number of aggregate expressions.
-   *
-   * @return
-   */
-  public int getNumAggreagateExprs() {
-    return numAggrExprs;
-  }
-
-  /**
-   * Set num aggregate expressions
-   *
-   * @param numAggrs
-   */
-  protected void setNumAggreagateExprs(int numAggrs) {
-    numAggrExprs = numAggrs;
-  }
-
-  /**
    * Get list of partitions queried for each table
    *
    * @return
@@ -625,10 +303,8 @@ public abstract class DriverQueryPlan {
    * @throws UnsupportedEncodingException the unsupported encoding exception
    */
   public QueryPlan toQueryPlan() throws UnsupportedEncodingException {
-    return new QueryPlan(numJoins, numGbys, numSels, numSelDi, numHaving, numObys, numAggrExprs, numFilters,
-      new ArrayList<String>(tablesQueried), hasSubQuery, execMode != null ? execMode.name() : null,
-      scanMode != null ? scanMode.name() : null, tableWeights, joinWeight, gbyWeight, filterWeight, havingWeight,
-      obyWeight, selectWeight, null,
+    return new QueryPlan(new ArrayList<String>(tablesQueried), hasSubQuery, execMode != null ? execMode.name() : null,
+      scanMode != null ? scanMode.name() : null, handle,
       URLEncoder.encode(getPlan(), "UTF-8"), getCost(), false, null);
   }
 }

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/e3e45aef/lens-server/src/test/java/org/apache/lens/server/query/TestQueryService.java
----------------------------------------------------------------------
diff --git a/lens-server/src/test/java/org/apache/lens/server/query/TestQueryService.java b/lens-server/src/test/java/org/apache/lens/server/query/TestQueryService.java
index a8df41d..bce0ca9 100644
--- a/lens-server/src/test/java/org/apache/lens/server/query/TestQueryService.java
+++ b/lens-server/src/test/java/org/apache/lens/server/query/TestQueryService.java
@@ -389,7 +389,6 @@ public class TestQueryService extends LensJerseyTest {
 
     final QueryPlan plan = target.request()
       .post(Entity.entity(mp, MediaType.MULTIPART_FORM_DATA_TYPE), QueryPlan.class);
-    Assert.assertEquals(plan.getNumSels(), 1);
     Assert.assertEquals(plan.getTablesQueried().size(), 1);
     Assert.assertTrue(plan.getTablesQueried().get(0).endsWith(TEST_TABLE.toLowerCase()));
     Assert.assertNull(plan.getPrepareHandle());
@@ -408,7 +407,6 @@ public class TestQueryService extends LensJerseyTest {
 
     final QueryPlan plan2 = ptarget.request().post(Entity.entity(mp2, MediaType.MULTIPART_FORM_DATA_TYPE),
       QueryPlan.class);
-    Assert.assertEquals(plan2.getNumSels(), 1);
     Assert.assertEquals(plan2.getTablesQueried().size(), 1);
     Assert.assertTrue(plan2.getTablesQueried().get(0).endsWith(TEST_TABLE.toLowerCase()));
     Assert.assertNotNull(plan2.getPrepareHandle());
@@ -588,7 +586,6 @@ public class TestQueryService extends LensJerseyTest {
 
     final QueryPlan plan = target.request()
       .post(Entity.entity(mp, MediaType.MULTIPART_FORM_DATA_TYPE), QueryPlan.class);
-    Assert.assertEquals(plan.getNumSels(), 1);
     Assert.assertEquals(plan.getTablesQueried().size(), 1);
     Assert.assertTrue(plan.getTablesQueried().get(0).endsWith(TEST_TABLE.toLowerCase()));
     Assert.assertNotNull(plan.getPrepareHandle());


[03/50] [abbrv] incubator-lens git commit: LENS-435 : Add test jars to fix test failures

Posted by am...@apache.org.
LENS-435 : Add test jars to fix test failures


Project: http://git-wip-us.apache.org/repos/asf/incubator-lens/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-lens/commit/1b811def
Tree: http://git-wip-us.apache.org/repos/asf/incubator-lens/tree/1b811def
Diff: http://git-wip-us.apache.org/repos/asf/incubator-lens/diff/1b811def

Branch: refs/heads/current-release-line
Commit: 1b811def33feee58b2e58d7445d0e19e24aa57d2
Parents: b01f726
Author: jdhok <ja...@inmobi.com>
Authored: Tue Mar 24 15:04:31 2015 +0530
Committer: jdhok <ja...@inmobi.com>
Committed: Tue Mar 24 15:04:31 2015 +0530

----------------------------------------------------------------------
 lens-ml-lib/testjars/serde.jar | Bin 0 -> 1369 bytes
 lens-ml-lib/testjars/test.jar  | Bin 0 -> 697 bytes
 2 files changed, 0 insertions(+), 0 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/1b811def/lens-ml-lib/testjars/serde.jar
----------------------------------------------------------------------
diff --git a/lens-ml-lib/testjars/serde.jar b/lens-ml-lib/testjars/serde.jar
new file mode 100644
index 0000000..ec86e49
Binary files /dev/null and b/lens-ml-lib/testjars/serde.jar differ

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/1b811def/lens-ml-lib/testjars/test.jar
----------------------------------------------------------------------
diff --git a/lens-ml-lib/testjars/test.jar b/lens-ml-lib/testjars/test.jar
new file mode 100644
index 0000000..1644d8c
Binary files /dev/null and b/lens-ml-lib/testjars/test.jar differ