Posted to commits@atlas.apache.org by sh...@apache.org on 2015/07/22 10:52:13 UTC

[2/2] incubator-atlas git commit: ATLAS-37 atlas repository, webapp, hive-bridge tests fails with Hbase and Solr as Titan storage backend (suma.shivaprasad via shwethags)

ATLAS-37 atlas repository, webapp, hive-bridge tests fails with Hbase and Solr as Titan storage backend (suma.shivaprasad via shwethags)


Project: http://git-wip-us.apache.org/repos/asf/incubator-atlas/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-atlas/commit/266d7cc0
Tree: http://git-wip-us.apache.org/repos/asf/incubator-atlas/tree/266d7cc0
Diff: http://git-wip-us.apache.org/repos/asf/incubator-atlas/diff/266d7cc0

Branch: refs/heads/master
Commit: 266d7cc00bef49356bcdb1290930e05748182b12
Parents: b832faf
Author: Shwetha GS <ss...@hortonworks.com>
Authored: Wed Jul 22 14:22:03 2015 +0530
Committer: Shwetha GS <ss...@hortonworks.com>
Committed: Wed Jul 22 14:22:03 2015 +0530

----------------------------------------------------------------------
 addons/hive-bridge/pom.xml                      |  57 +++--
 .../atlas/hive/hook/BaseSSLAndKerberosTest.java |   5 +-
 .../hook/NegativeSSLAndKerberosHiveHookIT.java  |   2 +-
 .../hive/hook/SSLAndKerberosHiveHookIT.java     |   2 +-
 .../apache/atlas/hive/hook/SSLHiveHookIT.java   |   4 +-
 .../src/test/resources/application.properties   |  34 ++-
 .../apache/atlas/security/BaseSecurityTest.java | 108 ---------
 pom.xml                                         | 191 ++++++++++++++--
 release-log.txt                                 |   1 +
 repository/pom.xml                              |  16 ++
 .../apache/atlas/RepositoryMetadataModule.java  |   1 +
 .../atlas/repository/graph/GraphProvider.java   |   1 -
 .../repository/graph/TitanGraphProvider.java    |  28 ++-
 .../atlas/services/DefaultMetadataService.java  |   3 +-
 .../atlas/RepositoryServiceLoadingTest.java     |   3 +
 .../GraphBackedDiscoveryServiceTest.java        |   7 +
 .../atlas/discovery/HiveLineageServiceTest.java | 223 ++++++++++---------
 .../GraphBackedMetadataRepositoryTest.java      |  28 ++-
 .../typestore/GraphBackedTypeStoreTest.java     |  13 ++
 .../src/test/resources/application.properties   |  26 ++-
 src/conf/application.properties                 |  17 ++
 webapp/pom.xml                                  | 139 ++++++------
 .../atlas/web/listeners/GuiceServletConfig.java |  15 +-
 .../atlas/web/service/EmbeddedServer.java       |  28 ++-
 .../atlas/web/service/SecureEmbeddedServer.java |  54 +++--
 .../src/main/resources/application.properties   |  19 +-
 .../MetadataAuthenticationKerberosFilterIT.java |   4 +-
 .../MetadataAuthenticationSimpleFilterIT.java   |   4 +-
 .../atlas/web/listeners/LoginProcessorIT.java   |   2 +-
 .../web/listeners/TestGuiceServletConfig.java   |  57 +++++
 .../atlas/web/security/BaseSecurityTest.java    | 108 +++++++++
 .../web/service/SecureEmbeddedServerIT.java     |   3 +
 .../web/service/SecureEmbeddedServerITBase.java |   2 +-
 webapp/src/test/webapp/WEB-INF/web.xml          |  53 +++++
 34 files changed, 885 insertions(+), 373 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/266d7cc0/addons/hive-bridge/pom.xml
----------------------------------------------------------------------
diff --git a/addons/hive-bridge/pom.xml b/addons/hive-bridge/pom.xml
index 2617aeb..117d76c 100755
--- a/addons/hive-bridge/pom.xml
+++ b/addons/hive-bridge/pom.xml
@@ -59,6 +59,13 @@
             <artifactId>hive-metastore</artifactId>
             <version>${hive.version}</version>
             <scope>provided</scope>
+            <exclusions>
+                <exclusion>
+                    <groupId>org.mortbay.jetty</groupId>
+                    <artifactId>*</artifactId>
+                </exclusion>
+            </exclusions>
+
         </dependency>
 
         <dependency>
@@ -73,6 +80,17 @@
             <artifactId>hive-cli</artifactId>
             <version>${hive.version}</version>
             <scope>test</scope>
+            <exclusions>
+                <exclusion>
+                    <groupId>javax.servlet</groupId>
+                    <artifactId>*</artifactId>
+                </exclusion>
+                <exclusion>
+                    <groupId>org.eclipse.jetty.aggregate</groupId>
+                    <artifactId>*</artifactId>
+                </exclusion>
+            </exclusions>
+
         </dependency>
 
         <dependency>
@@ -116,8 +134,16 @@
         </dependency>
 
         <dependency>
-            <groupId>org.mortbay.jetty</groupId>
-            <artifactId>jetty</artifactId>
+            <groupId>org.apache.atlas</groupId>
+            <artifactId>atlas-webapp</artifactId>
+            <type>test-jar</type>
+            <version>${project.version}</version>
+            <scope>test</scope>
+        </dependency>
+
+        <dependency>
+            <groupId>org.eclipse.jetty</groupId>
+            <artifactId>jetty-server</artifactId>
             <scope>test</scope>
         </dependency>
     </dependencies>
@@ -229,21 +255,22 @@
             </plugin>
 
             <plugin>
-                <groupId>org.mortbay.jetty</groupId>
-                <artifactId>maven-jetty-plugin</artifactId>
-                <version>${jetty.version}</version>
+                <groupId>org.eclipse.jetty</groupId>
+                <artifactId>jetty-maven-plugin</artifactId>
                 <configuration>
                     <skip>${skipTests}</skip>
                     <!--only skip int tests -->
-                    <connectors>
-                        <connector implementation="org.mortbay.jetty.nio.SelectChannelConnector">
-                            <port>21000</port>
-                            <maxIdleTime>60000</maxIdleTime>
-                        </connector>
-                    </connectors>
-                    <webApp>../../webapp/target/atlas-webapp-${project.version}.war</webApp>
-                    <contextPath>/</contextPath>
-                    <useTestClasspath>true</useTestClasspath>
+                    <httpConnector>
+                        <port>21000</port>
+                        <idleTimeout>60000</idleTimeout>
+                    </httpConnector>
+                    <war>../../webapp/target/atlas-webapp-${project.version}.war</war>
+                    <daemon>true</daemon>
+                    <webApp>
+                        <contextPath>/</contextPath>
+                        <descriptor>../../webapp/src/test/webapp/WEB-INF/web.xml</descriptor>
+                    </webApp>
+                    <useTestScope>true</useTestScope>
                     <systemProperties>
                         <systemProperty>
                             <name>atlas.log.dir</name>
@@ -251,7 +278,7 @@
                         </systemProperty>
                         <systemProperty>
                             <name>atlas.conf</name>
-                            <value>addons/hive-bridge/src/test/resources</value>
+                            <value>${project.build.directory}/test-classes</value>
                         </systemProperty>
                     </systemProperties>
                     <stopKey>atlas-stop</stopKey>

http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/266d7cc0/addons/hive-bridge/src/test/java/org/apache/atlas/hive/hook/BaseSSLAndKerberosTest.java
----------------------------------------------------------------------
diff --git a/addons/hive-bridge/src/test/java/org/apache/atlas/hive/hook/BaseSSLAndKerberosTest.java b/addons/hive-bridge/src/test/java/org/apache/atlas/hive/hook/BaseSSLAndKerberosTest.java
index 5ae6fe2..11163c8 100644
--- a/addons/hive-bridge/src/test/java/org/apache/atlas/hive/hook/BaseSSLAndKerberosTest.java
+++ b/addons/hive-bridge/src/test/java/org/apache/atlas/hive/hook/BaseSSLAndKerberosTest.java
@@ -16,8 +16,7 @@
  */
 package org.apache.atlas.hive.hook;
 
-import org.apache.atlas.hive.bridge.HiveMetaStoreBridge;
-import org.apache.atlas.security.BaseSecurityTest;
+import org.apache.atlas.web.security.BaseSecurityTest;
 import org.apache.atlas.web.service.SecureEmbeddedServer;
 import org.apache.commons.configuration.PropertiesConfiguration;
 import org.apache.commons.io.FileUtils;
@@ -26,7 +25,7 @@ import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.security.alias.CredentialProvider;
 import org.apache.hadoop.security.alias.CredentialProviderFactory;
-import org.mortbay.jetty.Server;
+import org.eclipse.jetty.server.Server;
 
 import java.io.File;
 import java.io.IOException;

http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/266d7cc0/addons/hive-bridge/src/test/java/org/apache/atlas/hive/hook/NegativeSSLAndKerberosHiveHookIT.java
----------------------------------------------------------------------
diff --git a/addons/hive-bridge/src/test/java/org/apache/atlas/hive/hook/NegativeSSLAndKerberosHiveHookIT.java b/addons/hive-bridge/src/test/java/org/apache/atlas/hive/hook/NegativeSSLAndKerberosHiveHookIT.java
index 629a9f3..072c36b 100755
--- a/addons/hive-bridge/src/test/java/org/apache/atlas/hive/hook/NegativeSSLAndKerberosHiveHookIT.java
+++ b/addons/hive-bridge/src/test/java/org/apache/atlas/hive/hook/NegativeSSLAndKerberosHiveHookIT.java
@@ -29,7 +29,7 @@ import org.apache.hadoop.hive.ql.session.SessionState;
 import org.apache.hadoop.security.alias.JavaKeyStoreProvider;
 import org.apache.hadoop.security.ssl.SSLFactory;
 import org.apache.hadoop.security.ssl.SSLHostnameVerifier;
-import org.mortbay.jetty.webapp.WebAppContext;
+import org.eclipse.jetty.webapp.WebAppContext;
 import org.testng.Assert;
 import org.testng.annotations.AfterClass;
 import org.testng.annotations.BeforeClass;

http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/266d7cc0/addons/hive-bridge/src/test/java/org/apache/atlas/hive/hook/SSLAndKerberosHiveHookIT.java
----------------------------------------------------------------------
diff --git a/addons/hive-bridge/src/test/java/org/apache/atlas/hive/hook/SSLAndKerberosHiveHookIT.java b/addons/hive-bridge/src/test/java/org/apache/atlas/hive/hook/SSLAndKerberosHiveHookIT.java
index b8f29aa..6fab2ee 100755
--- a/addons/hive-bridge/src/test/java/org/apache/atlas/hive/hook/SSLAndKerberosHiveHookIT.java
+++ b/addons/hive-bridge/src/test/java/org/apache/atlas/hive/hook/SSLAndKerberosHiveHookIT.java
@@ -34,7 +34,7 @@ import org.apache.hadoop.security.alias.JavaKeyStoreProvider;
 import org.apache.hadoop.security.ssl.SSLFactory;
 import org.apache.hadoop.security.ssl.SSLHostnameVerifier;
 import org.codehaus.jettison.json.JSONArray;
-import org.mortbay.jetty.webapp.WebAppContext;
+import org.eclipse.jetty.webapp.WebAppContext;
 import org.testng.Assert;
 import org.testng.annotations.AfterClass;
 import org.testng.annotations.BeforeClass;

http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/266d7cc0/addons/hive-bridge/src/test/java/org/apache/atlas/hive/hook/SSLHiveHookIT.java
----------------------------------------------------------------------
diff --git a/addons/hive-bridge/src/test/java/org/apache/atlas/hive/hook/SSLHiveHookIT.java b/addons/hive-bridge/src/test/java/org/apache/atlas/hive/hook/SSLHiveHookIT.java
index 54bfaf6..b114d5a 100755
--- a/addons/hive-bridge/src/test/java/org/apache/atlas/hive/hook/SSLHiveHookIT.java
+++ b/addons/hive-bridge/src/test/java/org/apache/atlas/hive/hook/SSLHiveHookIT.java
@@ -37,8 +37,8 @@ import org.apache.hadoop.security.alias.JavaKeyStoreProvider;
 import org.apache.hadoop.security.ssl.SSLFactory;
 import org.apache.hadoop.security.ssl.SSLHostnameVerifier;
 import org.codehaus.jettison.json.JSONArray;
-import org.mortbay.jetty.Server;
-import org.mortbay.jetty.webapp.WebAppContext;
+import org.eclipse.jetty.server.Server;
+import org.eclipse.jetty.webapp.WebAppContext;
 import org.testng.Assert;
 import org.testng.annotations.AfterClass;
 import org.testng.annotations.BeforeClass;

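The four test classes above switch their imports from the Jetty 6 packages (org.mortbay.*) to Jetty 9 (org.eclipse.jetty.*). The embedded-server pattern they rely on carries over almost unchanged; the following is a minimal Java sketch of booting a Jetty 9 server with a WebAppContext, mirroring what BaseSecurityTest.startEmbeddedServer does after the migration. The port and war path are illustrative assumptions, not values taken from the tests.

    import org.eclipse.jetty.server.Server;
    import org.eclipse.jetty.webapp.WebAppContext;

    public class EmbeddedJettySketch {
        public static void main(String[] args) throws Exception {
            Server server = new Server(21443);            // illustrative port

            WebAppContext webapp = new WebAppContext();
            webapp.setContextPath("/");
            // Illustrative war location; the tests derive the real path from system properties.
            webapp.setWar("target/atlas-webapp.war");
            server.setHandler(webapp);

            server.start();
            server.join();                                // block until the server is stopped
        }
    }
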
http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/266d7cc0/addons/hive-bridge/src/test/resources/application.properties
----------------------------------------------------------------------
diff --git a/addons/hive-bridge/src/test/resources/application.properties b/addons/hive-bridge/src/test/resources/application.properties
index 50eefed..dda9a18 100644
--- a/addons/hive-bridge/src/test/resources/application.properties
+++ b/addons/hive-bridge/src/test/resources/application.properties
@@ -17,23 +17,45 @@
 #
 
 #########  Graph Database Configs  #########
+#Refer http://s3.thinkaurelius.com/docs/titan/0.5.1/titan-config-ref.html
 # Graph Storage
-atlas.graph.storage.backend=inmemory
+atlas.graph.storage.backend=${titan.storage.backend}
 
-# Graph Search Index
-atlas.graph.index.search.backend=lucene
-atlas.graph.index.search.directory=target/data/lucene
+#Berkeley storage directory
+atlas.graph.storage.directory=target/data/berkley
 
+#hbase
+#For standalone mode , specify localhost
+#for distributed mode, specify zookeeper quorum here - For more information refer http://s3.thinkaurelius.com/docs/titan/current/hbase.html#_remote_server_mode_2
+atlas.graph.storage.hostname=${titan.storage.hostname}
+
+# Graph Search Index Backend
+atlas.graph.index.search.backend=${titan.index.backend}
+
+#lucene
+#atlas.graph.index.search.directory=target/data/lucene
+
+#elasticsearch
+atlas.graph.index.search.directory=./target/data/es
+atlas.graph.index.search.elasticsearch.client-only=false
+atlas.graph.index.search.elasticsearch.local-mode=true
+atlas.graph.index.search.elasticsearch.create.sleep=2000
+
+#solr in cloud mode
+atlas.graph.index.search.solr.mode=cloud
+atlas.graph.index.search.solr.zookeeper-url=${solr.zk.address}
+
+#solr in http mode
+atlas.graph.index.search.solr.http-urls=http://localhost:8983/solr
 
 #########  Hive Lineage Configs  #########
-# This models reflects the base super types for Data and Process
 #atlas.lineage.hive.table.type.name=DataSet
 #atlas.lineage.hive.process.type.name=Process
 #atlas.lineage.hive.process.inputs.name=inputs
 #atlas.lineage.hive.process.outputs.name=outputs
 
 ## Schema
-#atlas.lineage.hive.table.schema.query=hive_table where name=?, columns
+#atlas.lineage.hive.table.schema.query.hive_table=hive_table where name='%s'\, columns
 
 
 #########  Security Properties  #########

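This test application.properties now takes its storage and index backends from Maven placeholders (${titan.storage.backend}, ${titan.index.backend}, ${solr.zk.address}) that are resolved at build time by test-resource filtering (enabled in the root pom.xml change further down). One way to confirm which values a test run was filtered with is to read the resolved copy with Commons Configuration, which Atlas already uses elsewhere in this commit; the snippet below is a hedged sketch, and the resource name on the test classpath is an assumption.

    import org.apache.commons.configuration.PropertiesConfiguration;

    public class FilteredConfigCheck {
        public static void main(String[] args) throws Exception {
            // Reads the filtered copy (e.g. from target/test-classes); with the default profile
            // this prints "berkeleyje"/"elasticsearch", with -Pdistributed it prints "hbase"/"solr".
            PropertiesConfiguration conf = new PropertiesConfiguration("application.properties");
            System.out.println("storage backend = " + conf.getString("atlas.graph.storage.backend"));
            System.out.println("index backend   = " + conf.getString("atlas.graph.index.search.backend"));
        }
    }
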
http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/266d7cc0/client/src/test/java/org/apache/atlas/security/BaseSecurityTest.java
----------------------------------------------------------------------
diff --git a/client/src/test/java/org/apache/atlas/security/BaseSecurityTest.java b/client/src/test/java/org/apache/atlas/security/BaseSecurityTest.java
deleted file mode 100644
index 598f56b..0000000
--- a/client/src/test/java/org/apache/atlas/security/BaseSecurityTest.java
+++ /dev/null
@@ -1,108 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.atlas.security;
-
-import org.apache.commons.configuration.ConfigurationException;
-import org.apache.commons.configuration.PropertiesConfiguration;
-import org.apache.hadoop.minikdc.MiniKdc;
-import org.apache.zookeeper.Environment;
-import org.mortbay.jetty.Server;
-import org.mortbay.jetty.webapp.WebAppContext;
-import org.testng.Assert;
-
-import java.io.File;
-import java.io.FileWriter;
-import java.io.IOException;
-import java.io.Writer;
-import java.nio.file.Files;
-import java.util.Locale;
-import java.util.Properties;
-
-/**
- *
- */
-public class BaseSecurityTest {
-    private static final String JAAS_ENTRY = "%s { \n" + " %s required\n"
-            // kerberos module
-            + " keyTab=\"%s\"\n" + " debug=true\n" + " principal=\"%s\"\n" + " useKeyTab=true\n"
-            + " useTicketCache=false\n" + " doNotPrompt=true\n" + " storeKey=true;\n" + "}; \n";
-    protected MiniKdc kdc;
-
-    protected String getWarPath() {
-        return String.format("/target/atlas-webapp-%s.war",
-                System.getProperty("release.version"));
-    }
-
-    protected void generateTestProperties(Properties props) throws ConfigurationException, IOException {
-        PropertiesConfiguration config =
-                new PropertiesConfiguration(System.getProperty("user.dir") + "/../src/conf/application.properties");
-        for (String propName : props.stringPropertyNames()) {
-            config.setProperty(propName, props.getProperty(propName));
-        }
-        File file = new File(System.getProperty("user.dir"), "application.properties");
-        file.deleteOnExit();
-        Writer fileWriter = new FileWriter(file);
-        config.save(fileWriter);
-    }
-
-    protected void startEmbeddedServer(Server server) throws Exception {
-        WebAppContext webapp = new WebAppContext();
-        webapp.setContextPath("/");
-        webapp.setWar(System.getProperty("user.dir") + getWarPath());
-        server.setHandler(webapp);
-
-        server.start();
-    }
-
-    protected File startKDC() throws Exception {
-        File target = Files.createTempDirectory("sectest").toFile();
-        File kdcWorkDir = new File(target, "kdc");
-        Properties kdcConf = MiniKdc.createConf();
-        kdcConf.setProperty(MiniKdc.DEBUG, "true");
-        kdc = new MiniKdc(kdcConf, kdcWorkDir);
-        kdc.start();
-
-        Assert.assertNotNull(kdc.getRealm());
-        return kdcWorkDir;
-    }
-
-    public String createJAASEntry(String context, String principal, File keytab) {
-        String keytabpath = keytab.getAbsolutePath();
-        // fix up for windows; no-op on unix
-        keytabpath = keytabpath.replace('\\', '/');
-        return String.format(Locale.ENGLISH, JAAS_ENTRY, context, getKerberosAuthModuleForJVM(), keytabpath, principal);
-    }
-
-    protected String getKerberosAuthModuleForJVM() {
-        if (System.getProperty("java.vendor").contains("IBM")) {
-            return "com.ibm.security.auth.module.Krb5LoginModule";
-        } else {
-            return "com.sun.security.auth.module.Krb5LoginModule";
-        }
-    }
-
-    protected void bindJVMtoJAASFile(File jaasFile) {
-        String path = jaasFile.getAbsolutePath();
-        System.setProperty(Environment.JAAS_CONF_KEY, path);
-    }
-
-    protected File createKeytab(MiniKdc kdc, File kdcWorkDir, String principal, String filename) throws Exception {
-        File keytab = new File(kdcWorkDir, filename);
-        kdc.createPrincipal(keytab, principal, principal + "/localhost", principal + "/127.0.0.1");
-        return keytab;
-    }
-}

http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/266d7cc0/pom.xml
----------------------------------------------------------------------
diff --git a/pom.xml b/pom.xml
index b94c92b..a8afcc3 100755
--- a/pom.xml
+++ b/pom.xml
@@ -322,11 +322,13 @@
         <project.reporting.outputEncoding>UTF-8</project.reporting.outputEncoding>
 
         <slf4j.version>1.7.7</slf4j.version>
-        <jetty.version>6.1.26</jetty.version>
+        <jetty.version>9.2.12.v20150709</jetty.version>
         <jersey.version>1.9</jersey.version>
+        <jackson.version>1.8.3</jackson.version>
         <tinkerpop.version>2.6.0</tinkerpop.version>
         <titan.version>0.5.4</titan.version>
         <hadoop.version>2.6.0</hadoop.version>
+        <hbase.version>0.98.9-hadoop2</hbase.version>
 
         <!-- scala versions -->
         <scala.version>2.10.4</scala.version>
@@ -351,6 +353,8 @@
 
         <!-- skips checkstyle and find bugs -->
         <skipCheck>false</skipCheck>
+        <titan.storage.backend>berkeleyje</titan.storage.backend>
+        <titan.index.backend>elasticsearch</titan.index.backend>
     </properties>
 
     <profiles>
@@ -381,8 +385,22 @@
                 </python.path.l>
             </properties>
         </profile>
-    </profiles>
 
+        <!-- Turning on this profile affects only tests and does not affect packaging -->
+        <profile>
+            <id>distributed</id>
+            <activation>
+                <activeByDefault>false</activeByDefault>
+            </activation>
+            <properties>
+                <titan.storage.backend>hbase</titan.storage.backend>
+                <titan.index.backend>solr</titan.index.backend>
+                <solr.zk.address>localhost:9983</solr.zk.address>
+                <titan.storage.hostname>localhost</titan.storage.hostname>
+            </properties>
+        </profile>
+
+    </profiles>
     <modules>
         <module>typesystem</module>
         <module>client</module>
@@ -455,9 +473,15 @@
     <dependencyManagement>
         <dependencies>
             <dependency>
-                <groupId>org.mortbay.jetty</groupId>
-                <artifactId>jsp-2.1</artifactId>
-                <version>6.0.0</version>
+                <groupId>org.eclipse.jetty</groupId>
+                <artifactId>jetty-jsp</artifactId>
+                <version>${jetty.version}</version>
+                <exclusions>
+                    <exclusion>
+                        <groupId>javax.servlet</groupId>
+                        <artifactId>*</artifactId>
+                    </exclusion>
+                </exclusions>
             </dependency>
 
             <!-- Logging -->
@@ -507,25 +531,68 @@
                 <groupId>org.apache.hadoop</groupId>
                 <artifactId>hadoop-common</artifactId>
                 <version>${hadoop.version}</version>
+                <exclusions>
+                    <exclusion>
+                        <groupId>javax.servlet</groupId>
+                        <artifactId>*</artifactId>
+                    </exclusion>
+                    <exclusion>
+                        <groupId>tomcat</groupId>
+                        <artifactId>*</artifactId>
+                    </exclusion>
+                    <exclusion>
+                        <groupId>org.mortbay.jetty</groupId>
+                        <artifactId>*</artifactId>
+                    </exclusion>
+                    <exclusion>
+                        <groupId>org.codehaus.jackson</groupId>
+                        <artifactId>*</artifactId>
+                    </exclusion>
+                    <exclusion>
+                        <groupId>org.htrace</groupId>
+                        <artifactId>*</artifactId>
+                    </exclusion>
+                </exclusions>
             </dependency>
 
             <dependency>
                 <groupId>org.apache.hadoop</groupId>
-                <artifactId>hadoop-client</artifactId>
+                <artifactId>hadoop-hdfs</artifactId>
                 <version>${hadoop.version}</version>
+                <scope>test</scope>
+                <exclusions>
+                    <exclusion>
+                        <groupId>javax.servlet</groupId>
+                        <artifactId>*</artifactId>
+                    </exclusion>
+                    <exclusion>
+                        <groupId>tomcat</groupId>
+                        <artifactId>*</artifactId>
+                    </exclusion>
+                    <exclusion>
+                        <groupId>org.mortbay.jetty</groupId>
+                        <artifactId>*</artifactId>
+                    </exclusion>
+                </exclusions>
             </dependency>
 
+
             <dependency>
                 <groupId>org.apache.hadoop</groupId>
-                <artifactId>hadoop-annotations</artifactId>
+                <artifactId>hadoop-client</artifactId>
                 <version>${hadoop.version}</version>
+                <exclusions>
+                    <exclusion>
+                        <groupId>org.mortbay.jetty</groupId>
+                        <artifactId>*</artifactId>
+                    </exclusion>
+                </exclusions>
             </dependency>
 
             <dependency>
                 <groupId>org.apache.hadoop</groupId>
-                <artifactId>hadoop-hdfs</artifactId>
+                <artifactId>hadoop-annotations</artifactId>
                 <version>${hadoop.version}</version>
-                <scope>test</scope>
             </dependency>
 
             <dependency>
@@ -615,6 +682,12 @@
                 <groupId>com.sun.jersey</groupId>
                 <artifactId>jersey-json</artifactId>
                 <version>${jersey.version}</version>
+                <exclusions>
+                    <exclusion>
+                        <groupId>javax.xml.stream</groupId>
+                        <artifactId>*</artifactId>
+                    </exclusion>
+                </exclusions>
             </dependency>
 
             <dependency>
@@ -639,19 +712,19 @@
             <dependency>
                 <groupId>org.codehaus.jackson</groupId>
                 <artifactId>jackson-core-asl</artifactId>
-                <version>1.5.2</version>
+                <version>${jackson.version}</version>
             </dependency>
 
             <dependency>
                 <groupId>org.codehaus.jackson</groupId>
                 <artifactId>jackson-mapper-asl</artifactId>
-                <version>1.5.2</version>
+                <version>${jackson.version}</version>
             </dependency>
 
             <dependency>
                 <groupId>org.codehaus.jettison</groupId>
                 <artifactId>jettison</artifactId>
-                <version>1.3</version>
+                <version>1.3.7</version>
             </dependency>
 
             <dependency>
@@ -668,15 +741,15 @@
 
             <!-- Jetty -->
             <dependency>
-                <groupId>org.mortbay.jetty</groupId>
-                <artifactId>jetty</artifactId>
+                <groupId>org.eclipse.jetty</groupId>
+                <artifactId>jetty-server</artifactId>
                 <version>${jetty.version}</version>
                 <scope>compile</scope>
             </dependency>
 
             <dependency>
-                <groupId>org.mortbay.jetty</groupId>
-                <artifactId>jetty-plus</artifactId>
+                <groupId>org.eclipse.jetty</groupId>
+                <artifactId>jetty-webapp</artifactId>
                 <version>${jetty.version}</version>
                 <scope>compile</scope>
             </dependency>
@@ -736,12 +809,80 @@
 
             <dependency>
                 <groupId>com.thinkaurelius.titan</groupId>
+                <artifactId>titan-hbase</artifactId>
+                <version>${titan.version}</version>
+            </dependency>
+
+            <dependency>
+                <groupId>org.apache.hbase</groupId>
+                <artifactId>hbase-client</artifactId>
+                <version>${hbase.version}</version>
+                <exclusions>
+                    <exclusion>
+                        <artifactId>avro</artifactId>
+                        <groupId>org.apache.avro</groupId>
+                    </exclusion>
+                    <exclusion>
+                        <artifactId>jruby-complete</artifactId>
+                        <groupId>org.jruby</groupId>
+                    </exclusion>
+                    <exclusion>
+                        <artifactId>asm</artifactId>
+                        <groupId>asm</groupId>
+                    </exclusion>
+                    <exclusion>
+                        <artifactId>*</artifactId>
+                        <groupId>org.apache.hadoop</groupId>
+                    </exclusion>
+                    <exclusion>
+                        <artifactId>*</artifactId>
+                        <groupId>org.mortbay.jetty</groupId>
+                    </exclusion>
+                </exclusions>
+            </dependency>
+
+            <dependency>
+                <groupId>com.thinkaurelius.titan</groupId>
                 <artifactId>titan-es</artifactId>
                 <version>${titan.version}</version>
             </dependency>
 
             <dependency>
                 <groupId>com.thinkaurelius.titan</groupId>
+                <artifactId>titan-solr</artifactId>
+                <version>${titan.version}</version>
+                <exclusions>
+                    <exclusion>
+                        <artifactId>*</artifactId>
+                        <groupId>org.eclipse.jetty</groupId>
+                    </exclusion>
+                    <exclusion>
+                        <artifactId>*</artifactId>
+                        <groupId>javax.servlet</groupId>
+                    </exclusion>
+                    <exclusion>
+                        <artifactId>*</artifactId>
+                        <groupId>org.jruby.joni</groupId>
+                    </exclusion>
+                    <exclusion>
+                        <artifactId>*</artifactId>
+                        <groupId>org.eclipse.jetty.orbit</groupId>
+                    </exclusion>
+
+                    <exclusion>
+                        <artifactId>*</artifactId>
+                        <groupId>org.restlet.jee</groupId>
+                    </exclusion>
+                    <exclusion>
+                        <artifactId>*</artifactId>
+                        <groupId>org.ow2.asm</groupId>
+                    </exclusion>
+                </exclusions>
+
+            </dependency>
+
+            <dependency>
+                <groupId>com.thinkaurelius.titan</groupId>
                 <artifactId>titan-lucene</artifactId>
                 <version>${titan.version}</version>
                 <!--<scope>test</scope>-->
@@ -982,6 +1123,7 @@
         <testResources>
             <testResource>
                 <directory>src/test/resources</directory>
+                <filtering>true</filtering>
             </testResource>
         </testResources>
 
@@ -1072,6 +1214,15 @@
                 </plugin>
 
                 <plugin>
+                    <groupId>org.apache.maven.plugins</groupId>
+                    <artifactId>maven-resources-plugin</artifactId>
+                    <version>2.7</version>
+                    <configuration>
+                        <encoding>UTF-8</encoding>
+                    </configuration>
+                </plugin>
+
+                <plugin>
                     <groupId>net.alchim31.maven</groupId>
                     <artifactId>scala-maven-plugin</artifactId>
                     <version>3.2.0</version>
@@ -1222,12 +1373,12 @@
 
             <!-- Run the application using "mvn jetty:run" -->
             <plugin>
-                <groupId>org.mortbay.jetty</groupId>
-                <artifactId>maven-jetty-plugin</artifactId>
-                <version>6.1.16</version>
+                <groupId>org.eclipse.jetty</groupId>
+                <artifactId>jetty-maven-plugin</artifactId>
+                <version>${jetty.version}</version>
                 <configuration>
                     <!-- Log to the console. -->
-                    <requestLog implementation="org.mortbay.jetty.NCSARequestLog">
+                    <requestLog implementation="org.eclipse.jetty.server.NCSARequestLog">
                         <!-- This doesn't do anything for Jetty, but is a workaround for a Maven bug
                              that prevents the requestLog from being set. -->
                         <append>true</append>

http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/266d7cc0/release-log.txt
----------------------------------------------------------------------
diff --git a/release-log.txt b/release-log.txt
index d655478..76b8778 100644
--- a/release-log.txt
+++ b/release-log.txt
@@ -8,6 +8,7 @@ ATLAS-54 Rename configs in hive hook (shwethags)
 ATLAS-3 Mixed Index creation fails with Date types (suma.shivaprasad via shwethags)
 
 ALL CHANGES:
+ATLAS-37 atlas repository, webapp, hive-bridge tests fails with Hbase and Solr as Titan storage backend (suma.shivaprasad via shwethags)
 ATLAS-56 atlas_config.py should give an informative error if jar or java binaries can't be found (dossett@gmail.com via shwethags)
 ATLAS-45 Entity submit fails (suma.shivaprasad via shwethags)
 ATLAS-46 Different data directory with restart (shwethags)

http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/266d7cc0/repository/pom.xml
----------------------------------------------------------------------
diff --git a/repository/pom.xml b/repository/pom.xml
index b23c289..03aa792 100755
--- a/repository/pom.xml
+++ b/repository/pom.xml
@@ -101,11 +101,26 @@
 
         <dependency>
             <groupId>com.thinkaurelius.titan</groupId>
+            <artifactId>titan-solr</artifactId>
+        </dependency>
+
+        <dependency>
+            <groupId>com.thinkaurelius.titan</groupId>
             <artifactId>titan-berkeleyje</artifactId>
         </dependency>
 
         <dependency>
             <groupId>com.thinkaurelius.titan</groupId>
+            <artifactId>titan-hbase</artifactId>
+        </dependency>
+
+        <dependency>
+            <groupId>org.apache.hbase</groupId>
+            <artifactId>hbase-client</artifactId>
+        </dependency>
+
+        <dependency>
+            <groupId>com.thinkaurelius.titan</groupId>
             <artifactId>titan-lucene</artifactId>
         </dependency>
 
@@ -172,6 +187,7 @@
     </dependencies>
 
     <build>
+
         <plugins>
             <plugin>
                 <groupId>org.apache.maven.plugins</groupId>

http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/266d7cc0/repository/src/main/java/org/apache/atlas/RepositoryMetadataModule.java
----------------------------------------------------------------------
diff --git a/repository/src/main/java/org/apache/atlas/RepositoryMetadataModule.java b/repository/src/main/java/org/apache/atlas/RepositoryMetadataModule.java
index e7353f9..fbd01de 100755
--- a/repository/src/main/java/org/apache/atlas/RepositoryMetadataModule.java
+++ b/repository/src/main/java/org/apache/atlas/RepositoryMetadataModule.java
@@ -22,6 +22,7 @@ import com.google.inject.matcher.Matchers;
 import com.google.inject.multibindings.Multibinder;
 import com.google.inject.throwingproviders.ThrowingProviderBinder;
 import com.thinkaurelius.titan.core.TitanGraph;
+import com.tinkerpop.blueprints.Graph;
 import org.aopalliance.intercept.MethodInterceptor;
 import org.apache.atlas.discovery.DiscoveryService;
 import org.apache.atlas.discovery.HiveLineageService;

http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/266d7cc0/repository/src/main/java/org/apache/atlas/repository/graph/GraphProvider.java
----------------------------------------------------------------------
diff --git a/repository/src/main/java/org/apache/atlas/repository/graph/GraphProvider.java b/repository/src/main/java/org/apache/atlas/repository/graph/GraphProvider.java
index 5472081..f89bdf5 100755
--- a/repository/src/main/java/org/apache/atlas/repository/graph/GraphProvider.java
+++ b/repository/src/main/java/org/apache/atlas/repository/graph/GraphProvider.java
@@ -22,7 +22,6 @@ import com.google.inject.throwingproviders.CheckedProvider;
 import com.tinkerpop.blueprints.Graph;
 
 public interface GraphProvider<T extends Graph> extends CheckedProvider<T> {
-
     @Override
     T get();
 }

http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/266d7cc0/repository/src/main/java/org/apache/atlas/repository/graph/TitanGraphProvider.java
----------------------------------------------------------------------
diff --git a/repository/src/main/java/org/apache/atlas/repository/graph/TitanGraphProvider.java b/repository/src/main/java/org/apache/atlas/repository/graph/TitanGraphProvider.java
index 3c1d151..d7ed1f1 100755
--- a/repository/src/main/java/org/apache/atlas/repository/graph/TitanGraphProvider.java
+++ b/repository/src/main/java/org/apache/atlas/repository/graph/TitanGraphProvider.java
@@ -30,6 +30,7 @@ import org.slf4j.LoggerFactory;
 
 import javax.inject.Singleton;
 import java.util.Iterator;
+import java.util.Properties;
 
 /**
  * Default implementation for Graph Provider that doles out Titan Graph.
@@ -43,11 +44,17 @@ public class TitanGraphProvider implements GraphProvider<TitanGraph> {
      */
     private static final String ATLAS_PREFIX = "atlas.graph.";
 
+    private static TitanGraph graphInstance;
+
     private static Configuration getConfiguration() throws AtlasException {
         PropertiesConfiguration configProperties = PropertiesUtil.getApplicationProperties();
 
         Configuration graphConfig = new PropertiesConfiguration();
 
+        Properties sysProperties = System.getProperties();
+        LOG.info("System properties: ");
+        LOG.info(sysProperties.toString());
+
         final Iterator<String> iterator = configProperties.getKeys();
         while (iterator.hasNext()) {
             String key = iterator.next();
@@ -66,13 +73,20 @@ public class TitanGraphProvider implements GraphProvider<TitanGraph> {
     @Singleton
     @Provides
     public TitanGraph get() {
-        Configuration config;
-        try {
-            config = getConfiguration();
-        } catch (AtlasException e) {
-            throw new RuntimeException(e);
-        }
+        if(graphInstance == null) {
+            synchronized (TitanGraphProvider.class) {
+                if(graphInstance == null) {
+                    Configuration config;
+                    try {
+                        config = getConfiguration();
+                    } catch (AtlasException e) {
+                        throw new RuntimeException(e);
+                    }
 
-        return TitanFactory.open(config);
+                    graphInstance = TitanFactory.open(config);
+                }
+            }
+        }
+        return graphInstance;
     }
 }

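TitanGraphProvider now hands out one shared TitanGraph, lazily created with double-checked locking, and getConfiguration() forwards the atlas.graph.* properties to Titan. The prefix-stripping step itself sits in the unchanged context of this hunk, so the sketch below is an assumption about its shape, based only on the ATLAS_PREFIX constant and the key names Titan expects (e.g. storage.backend, index.search.backend).

    import java.util.Iterator;
    import org.apache.commons.configuration.Configuration;
    import org.apache.commons.configuration.PropertiesConfiguration;

    final class GraphConfigSketch {
        private static final String ATLAS_PREFIX = "atlas.graph.";

        // Copy atlas.graph.* entries into a Titan-style configuration with the prefix removed,
        // e.g. atlas.graph.storage.backend -> storage.backend.
        static Configuration toTitanConfig(Configuration atlasProperties) {
            Configuration graphConfig = new PropertiesConfiguration();
            Iterator<String> keys = atlasProperties.getKeys();
            while (keys.hasNext()) {
                String key = keys.next();
                if (key.startsWith(ATLAS_PREFIX)) {
                    graphConfig.setProperty(key.substring(ATLAS_PREFIX.length()),
                            atlasProperties.getProperty(key));
                }
            }
            return graphConfig;
        }
    }
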
http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/266d7cc0/repository/src/main/java/org/apache/atlas/services/DefaultMetadataService.java
----------------------------------------------------------------------
diff --git a/repository/src/main/java/org/apache/atlas/services/DefaultMetadataService.java b/repository/src/main/java/org/apache/atlas/services/DefaultMetadataService.java
index 9556272..56168db 100755
--- a/repository/src/main/java/org/apache/atlas/services/DefaultMetadataService.java
+++ b/repository/src/main/java/org/apache/atlas/services/DefaultMetadataService.java
@@ -81,7 +81,8 @@ public class DefaultMetadataService implements MetadataService {
 
     @Inject
     DefaultMetadataService(final MetadataRepository repository, final ITypeStore typeStore,
-            final Collection<Provider<TypesChangeListener>> typeChangeListeners) throws AtlasException {
+        final Collection<Provider<TypesChangeListener>> typeChangeListeners) throws AtlasException {
+
         this.typeStore = typeStore;
         this.typeSystem = TypeSystem.getInstance();
         this.repository = repository;

http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/266d7cc0/repository/src/test/java/org/apache/atlas/RepositoryServiceLoadingTest.java
----------------------------------------------------------------------
diff --git a/repository/src/test/java/org/apache/atlas/RepositoryServiceLoadingTest.java b/repository/src/test/java/org/apache/atlas/RepositoryServiceLoadingTest.java
index 4195955..18b1d1b 100755
--- a/repository/src/test/java/org/apache/atlas/RepositoryServiceLoadingTest.java
+++ b/repository/src/test/java/org/apache/atlas/RepositoryServiceLoadingTest.java
@@ -19,8 +19,11 @@
 package org.apache.atlas;
 
 import com.thinkaurelius.titan.core.TitanGraph;
+import com.thinkaurelius.titan.core.util.TitanCleanup;
 import org.apache.atlas.repository.graph.GraphProvider;
+import org.apache.atlas.typesystem.types.TypeSystem;
 import org.testng.Assert;
+import org.testng.annotations.AfterClass;
 import org.testng.annotations.Guice;
 import org.testng.annotations.Test;
 

http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/266d7cc0/repository/src/test/java/org/apache/atlas/discovery/GraphBackedDiscoveryServiceTest.java
----------------------------------------------------------------------
diff --git a/repository/src/test/java/org/apache/atlas/discovery/GraphBackedDiscoveryServiceTest.java b/repository/src/test/java/org/apache/atlas/discovery/GraphBackedDiscoveryServiceTest.java
index 67cdd78..127e6d7 100755
--- a/repository/src/test/java/org/apache/atlas/discovery/GraphBackedDiscoveryServiceTest.java
+++ b/repository/src/test/java/org/apache/atlas/discovery/GraphBackedDiscoveryServiceTest.java
@@ -20,6 +20,7 @@ package org.apache.atlas.discovery;
 
 import com.google.common.collect.ImmutableList;
 import com.thinkaurelius.titan.core.TitanGraph;
+import com.thinkaurelius.titan.core.util.TitanCleanup;
 import com.tinkerpop.blueprints.Edge;
 import com.tinkerpop.blueprints.Vertex;
 import org.apache.atlas.RepositoryMetadataModule;
@@ -117,6 +118,12 @@ public class GraphBackedDiscoveryServiceTest {
     @AfterClass
     public void tearDown() throws Exception {
         TypeSystem.getInstance().reset();
+        graphProvider.get().shutdown();
+        try {
+            TitanCleanup.clear(graphProvider.get());
+        } catch(Exception e) {
+            e.printStackTrace();
+        }
     }
 
     @Test

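The new tearDown() shuts the graph down and then wipes its storage with TitanCleanup.clear, which keeps runs isolated now that the backend can be a persistent store (BerkeleyDB or HBase) instead of inmemory. Several test classes in this commit repeat the same block; a small shared helper along these lines would be one way to factor it out (the class and method names are hypothetical).

    import com.thinkaurelius.titan.core.TitanGraph;
    import com.thinkaurelius.titan.core.util.TitanCleanup;

    final class GraphTestCleanup {
        // Hypothetical helper mirroring the tearDown() blocks added in this commit:
        // the graph must be shut down before TitanCleanup.clear() can drop its storage.
        static void shutdownAndClear(TitanGraph graph) {
            graph.shutdown();
            try {
                TitanCleanup.clear(graph);
            } catch (Exception e) {
                e.printStackTrace();   // best effort, matching the tests
            }
        }
    }
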
http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/266d7cc0/repository/src/test/java/org/apache/atlas/discovery/HiveLineageServiceTest.java
----------------------------------------------------------------------
diff --git a/repository/src/test/java/org/apache/atlas/discovery/HiveLineageServiceTest.java b/repository/src/test/java/org/apache/atlas/discovery/HiveLineageServiceTest.java
index ce00f8c..2c7d61a 100644
--- a/repository/src/test/java/org/apache/atlas/discovery/HiveLineageServiceTest.java
+++ b/repository/src/test/java/org/apache/atlas/discovery/HiveLineageServiceTest.java
@@ -7,7 +7,7 @@
  * "License"); you may not use this file except in compliance
  * with the License.  You may obtain a copy of the License at
  *
- *     http://www.apache.org/licenses/LICENSE-2.0
+ * http://www.apache.org/licenses/LICENSE-2.0
  *
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
@@ -20,9 +20,12 @@ package org.apache.atlas.discovery;
 
 import com.google.common.base.Preconditions;
 import com.google.common.collect.ImmutableList;
+import com.thinkaurelius.titan.core.TitanGraph;
+import com.thinkaurelius.titan.core.util.TitanCleanup;
 import org.apache.atlas.RepositoryMetadataModule;
 import org.apache.atlas.discovery.graph.GraphBackedDiscoveryService;
 import org.apache.atlas.repository.EntityNotFoundException;
+import org.apache.atlas.repository.graph.GraphProvider;
 import org.apache.atlas.services.DefaultMetadataService;
 import org.apache.atlas.typesystem.Referenceable;
 import org.apache.atlas.typesystem.TypesDef;
@@ -38,17 +41,22 @@ import org.apache.atlas.typesystem.types.IDataType;
 import org.apache.atlas.typesystem.types.Multiplicity;
 import org.apache.atlas.typesystem.types.StructTypeDefinition;
 import org.apache.atlas.typesystem.types.TraitType;
+import org.apache.atlas.typesystem.types.TypeSystem;
 import org.apache.atlas.typesystem.types.TypeUtils;
 import org.apache.atlas.typesystem.types.utils.TypesUtil;
 import org.codehaus.jettison.json.JSONArray;
 import org.codehaus.jettison.json.JSONObject;
 import org.testng.Assert;
+import org.testng.annotations.AfterClass;
 import org.testng.annotations.BeforeClass;
 import org.testng.annotations.DataProvider;
 import org.testng.annotations.Guice;
 import org.testng.annotations.Test;
 
 import javax.inject.Inject;
+import java.io.File;
+import java.io.IOException;
+import java.util.HashSet;
 import java.util.List;
 
 /**
@@ -58,65 +66,76 @@ import java.util.List;
 public class HiveLineageServiceTest {
 
     @Inject
+    private GraphBackedDiscoveryService discoveryService;
+
+    @Inject
     private DefaultMetadataService metadataService;
 
     @Inject
-    private GraphBackedDiscoveryService discoveryService;
+    private GraphProvider<TitanGraph> graphProvider;
 
     @Inject
     private HiveLineageService hiveLineageService;
 
-    //    @Inject
-    //    private GraphProvider<TitanGraph> graphProvider;
 
     @BeforeClass
     public void setUp() throws Exception {
         setUpTypes();
         setupInstances();
-
         // TestUtils.dumpGraph(graphProvider.get());
     }
 
+    @AfterClass
+    public void tearDown() throws Exception {
+        TypeSystem.getInstance().reset();
+        graphProvider.get().shutdown();
+        try {
+            TitanCleanup.clear(graphProvider.get());
+        } catch (Exception e) {
+            e.printStackTrace();
+        }
+    }
+
     @DataProvider(name = "dslQueriesProvider")
     private Object[][] createDSLQueries() {
         return new String[][]{
-                // joins
-                {"hive_table where name=\"sales_fact\", columns"},
-                {"hive_table where name=\"sales_fact\", columns select name, dataType, comment"},
-                {"hive_table where name=\"sales_fact\", columns as c select c.name, c.dataType, c.comment"},
-                //            {"hive_db as db where (db.name=\"Reporting\"), hive_table as table select db.name,
-                // table.name"},
-                {"from hive_db"}, {"hive_db"}, {"hive_db where hive_db.name=\"Reporting\""},
-                {"hive_db hive_db.name = \"Reporting\""},
-                {"hive_db where hive_db.name=\"Reporting\" select name, owner"}, {"hive_db has name"},
-                //            {"hive_db, hive_table"},
-                //            {"hive_db, hive_process has name"},
-                //            {"hive_db as db1, hive_table where db1.name = \"Reporting\""},
-                //            {"hive_db where hive_db.name=\"Reporting\" and hive_db.createTime < " + System
-                // .currentTimeMillis()},
-                {"from hive_table"}, {"hive_table"}, {"hive_table is Dimension"},
-                {"hive_column where hive_column isa PII"},
-                //            {"hive_column where hive_column isa PII select hive_column.name"},
-                {"hive_column select hive_column.name"}, {"hive_column select name"},
-                {"hive_column where hive_column.name=\"customer_id\""}, {"from hive_table select hive_table.name"},
-                {"hive_db where (name = \"Reporting\")"},
-                {"hive_db where (name = \"Reporting\") select name as _col_0, owner as _col_1"},
-                {"hive_db where hive_db has name"},
-                //            {"hive_db hive_table"},
-                {"hive_db where hive_db has name"},
-                //            {"hive_db as db1 hive_table where (db1.name = \"Reporting\")"},
-                {"hive_db where (name = \"Reporting\") select name as _col_0, (createTime + 1) as _col_1 "},
-                //            {"hive_db where (name = \"Reporting\") and ((createTime + 1) > 0)"},
-                //            {"hive_db as db1 hive_table as tab where ((db1.createTime + 1) > 0) and (db1.name =
-                // \"Reporting\") select db1.name as dbName, tab.name as tabName"},
-                //            {"hive_db as db1 hive_table as tab where ((db1.createTime + 1) > 0) or (db1.name =
-                // \"Reporting\") select db1.name as dbName, tab.name as tabName"},
-                //            {"hive_db as db1 hive_table as tab where ((db1.createTime + 1) > 0) and (db1.name =
-                // \"Reporting\") or db1 has owner select db1.name as dbName, tab.name as tabName"},
-                //            {"hive_db as db1 hive_table as tab where ((db1.createTime + 1) > 0) and (db1.name =
-                // \"Reporting\") or db1 has owner select db1.name as dbName, tab.name as tabName"},
-                // trait searches
-                {"Dimension"}, {"Fact"}, {"ETL"}, {"Metric"}, {"PII"},};
+            // joins
+            {"hive_table where name=\"sales_fact\", columns"},
+            {"hive_table where name=\"sales_fact\", columns select name, dataType, comment"},
+            {"hive_table where name=\"sales_fact\", columns as c select c.name, c.dataType, c.comment"},
+            //            {"hive_db as db where (db.name=\"Reporting\"), hive_table as table select db.name,
+            // table.name"},
+            {"from hive_db"}, {"hive_db"}, {"hive_db where hive_db.name=\"Reporting\""},
+            {"hive_db hive_db.name = \"Reporting\""},
+            {"hive_db where hive_db.name=\"Reporting\" select name, owner"}, {"hive_db has name"},
+            //            {"hive_db, hive_table"},
+            //            {"hive_db, hive_process has name"},
+            //            {"hive_db as db1, hive_table where db1.name = \"Reporting\""},
+            //            {"hive_db where hive_db.name=\"Reporting\" and hive_db.createTime < " + System
+            // .currentTimeMillis()},
+            {"from hive_table"}, {"hive_table"}, {"hive_table is Dimension"},
+            {"hive_column where hive_column isa PII"},
+            //            {"hive_column where hive_column isa PII select hive_column.name"},
+            {"hive_column select hive_column.name"}, {"hive_column select name"},
+            {"hive_column where hive_column.name=\"customer_id\""}, {"from hive_table select hive_table.name"},
+            {"hive_db where (name = \"Reporting\")"},
+            {"hive_db where (name = \"Reporting\") select name as _col_0, owner as _col_1"},
+            {"hive_db where hive_db has name"},
+            //            {"hive_db hive_table"},
+            {"hive_db where hive_db has name"},
+            //            {"hive_db as db1 hive_table where (db1.name = \"Reporting\")"},
+            {"hive_db where (name = \"Reporting\") select name as _col_0, (createTime + 1) as _col_1 "},
+            //            {"hive_db where (name = \"Reporting\") and ((createTime + 1) > 0)"},
+            //            {"hive_db as db1 hive_table as tab where ((db1.createTime + 1) > 0) and (db1.name =
+            // \"Reporting\") select db1.name as dbName, tab.name as tabName"},
+            //            {"hive_db as db1 hive_table as tab where ((db1.createTime + 1) > 0) or (db1.name =
+            // \"Reporting\") select db1.name as dbName, tab.name as tabName"},
+            //            {"hive_db as db1 hive_table as tab where ((db1.createTime + 1) > 0) and (db1.name =
+            // \"Reporting\") or db1 has owner select db1.name as dbName, tab.name as tabName"},
+            //            {"hive_db as db1 hive_table as tab where ((db1.createTime + 1) > 0) and (db1.name =
+            // \"Reporting\") or db1 has owner select db1.name as dbName, tab.name as tabName"},
+            // trait searches
+            {"Dimension"}, {"Fact"}, {"ETL"}, {"Metric"}, {"PII"},};
     }
 
     @Test(dataProvider = "dslQueriesProvider")
@@ -242,7 +261,7 @@ public class HiveLineageServiceTest {
     @DataProvider(name = "tableNamesProvider")
     private Object[][] tableNames() {
         return new String[][]{{"sales_fact", "4"}, {"time_dim", "3"}, {"sales_fact_daily_mv", "4"},
-                {"sales_fact_monthly_mv", "4"}};
+            {"sales_fact_monthly_mv", "4"}};
     }
 
     @Test(dataProvider = "tableNamesProvider")
@@ -296,45 +315,45 @@ public class HiveLineageServiceTest {
 
     private TypesDef createTypeDefinitions() {
         HierarchicalTypeDefinition<ClassType> dbClsDef = TypesUtil
-                .createClassTypeDef(DATABASE_TYPE, null, attrDef("name", DataTypes.STRING_TYPE),
-                        attrDef("description", DataTypes.STRING_TYPE), attrDef("locationUri", DataTypes.STRING_TYPE),
-                        attrDef("owner", DataTypes.STRING_TYPE), attrDef("createTime", DataTypes.LONG_TYPE));
+            .createClassTypeDef(DATABASE_TYPE, null, attrDef("name", DataTypes.STRING_TYPE),
+                attrDef("description", DataTypes.STRING_TYPE), attrDef("locationUri", DataTypes.STRING_TYPE),
+                attrDef("owner", DataTypes.STRING_TYPE), attrDef("createTime", DataTypes.LONG_TYPE));
 
         HierarchicalTypeDefinition<ClassType> storageDescClsDef = TypesUtil
-                .createClassTypeDef(STORAGE_DESC_TYPE, null, attrDef("location", DataTypes.STRING_TYPE),
-                        attrDef("inputFormat", DataTypes.STRING_TYPE), attrDef("outputFormat", DataTypes.STRING_TYPE),
-                        attrDef("compressed", DataTypes.STRING_TYPE, Multiplicity.REQUIRED, false, null));
+            .createClassTypeDef(STORAGE_DESC_TYPE, null, attrDef("location", DataTypes.STRING_TYPE),
+                attrDef("inputFormat", DataTypes.STRING_TYPE), attrDef("outputFormat", DataTypes.STRING_TYPE),
+                attrDef("compressed", DataTypes.STRING_TYPE, Multiplicity.REQUIRED, false, null));
 
         HierarchicalTypeDefinition<ClassType> columnClsDef = TypesUtil
-                .createClassTypeDef(COLUMN_TYPE, null, attrDef("name", DataTypes.STRING_TYPE),
-                        attrDef("dataType", DataTypes.STRING_TYPE), attrDef("comment", DataTypes.STRING_TYPE));
+            .createClassTypeDef(COLUMN_TYPE, null, attrDef("name", DataTypes.STRING_TYPE),
+                attrDef("dataType", DataTypes.STRING_TYPE), attrDef("comment", DataTypes.STRING_TYPE));
 
         HierarchicalTypeDefinition<ClassType> tblClsDef = TypesUtil
-                .createClassTypeDef(HIVE_TABLE_TYPE, ImmutableList.of("DataSet"),
-                        attrDef("owner", DataTypes.STRING_TYPE), attrDef("createTime", DataTypes.LONG_TYPE),
-                        attrDef("lastAccessTime", DataTypes.LONG_TYPE), attrDef("tableType", DataTypes.STRING_TYPE),
-                        attrDef("temporary", DataTypes.BOOLEAN_TYPE),
-                        new AttributeDefinition("db", DATABASE_TYPE, Multiplicity.REQUIRED, false, null),
-                        // todo - uncomment this, something is broken
-                        //                        new AttributeDefinition("sd", STORAGE_DESC_TYPE,
-                        //                                Multiplicity.REQUIRED, true, null),
-                        new AttributeDefinition("columns", DataTypes.arrayTypeName(COLUMN_TYPE),
-                                Multiplicity.COLLECTION, true, null));
+            .createClassTypeDef(HIVE_TABLE_TYPE, ImmutableList.of("DataSet"),
+                attrDef("owner", DataTypes.STRING_TYPE), attrDef("createTime", DataTypes.LONG_TYPE),
+                attrDef("lastAccessTime", DataTypes.LONG_TYPE), attrDef("tableType", DataTypes.STRING_TYPE),
+                attrDef("temporary", DataTypes.BOOLEAN_TYPE),
+                new AttributeDefinition("db", DATABASE_TYPE, Multiplicity.REQUIRED, false, null),
+                // todo - uncomment this, something is broken
+                //                        new AttributeDefinition("sd", STORAGE_DESC_TYPE,
+                //                                Multiplicity.REQUIRED, true, null),
+                new AttributeDefinition("columns", DataTypes.arrayTypeName(COLUMN_TYPE),
+                    Multiplicity.COLLECTION, true, null));
 
         HierarchicalTypeDefinition<ClassType> loadProcessClsDef = TypesUtil
-                .createClassTypeDef(HIVE_PROCESS_TYPE, ImmutableList.of("Process"),
-                        attrDef("userName", DataTypes.STRING_TYPE), attrDef("startTime", DataTypes.LONG_TYPE),
-                        attrDef("endTime", DataTypes.LONG_TYPE),
-                        attrDef("queryText", DataTypes.STRING_TYPE, Multiplicity.REQUIRED),
-                        attrDef("queryPlan", DataTypes.STRING_TYPE, Multiplicity.REQUIRED),
-                        attrDef("queryId", DataTypes.STRING_TYPE, Multiplicity.REQUIRED),
-                        attrDef("queryGraph", DataTypes.STRING_TYPE, Multiplicity.REQUIRED));
+            .createClassTypeDef(HIVE_PROCESS_TYPE, ImmutableList.of("Process"),
+                attrDef("userName", DataTypes.STRING_TYPE), attrDef("startTime", DataTypes.LONG_TYPE),
+                attrDef("endTime", DataTypes.LONG_TYPE),
+                attrDef("queryText", DataTypes.STRING_TYPE, Multiplicity.REQUIRED),
+                attrDef("queryPlan", DataTypes.STRING_TYPE, Multiplicity.REQUIRED),
+                attrDef("queryId", DataTypes.STRING_TYPE, Multiplicity.REQUIRED),
+                attrDef("queryGraph", DataTypes.STRING_TYPE, Multiplicity.REQUIRED));
 
         HierarchicalTypeDefinition<ClassType> viewClsDef = TypesUtil
-                .createClassTypeDef(VIEW_TYPE, null, attrDef("name", DataTypes.STRING_TYPE),
-                        new AttributeDefinition("db", DATABASE_TYPE, Multiplicity.REQUIRED, false, null),
-                        new AttributeDefinition("inputTables", DataTypes.arrayTypeName(HIVE_TABLE_TYPE),
-                                Multiplicity.COLLECTION, false, null));
+            .createClassTypeDef(VIEW_TYPE, null, attrDef("name", DataTypes.STRING_TYPE),
+                new AttributeDefinition("db", DATABASE_TYPE, Multiplicity.REQUIRED, false, null),
+                new AttributeDefinition("inputTables", DataTypes.arrayTypeName(HIVE_TABLE_TYPE),
+                    Multiplicity.COLLECTION, false, null));
 
         HierarchicalTypeDefinition<TraitType> dimTraitDef = TypesUtil.createTraitTypeDef("Dimension", null);
 
@@ -349,8 +368,8 @@ public class HiveLineageServiceTest {
         HierarchicalTypeDefinition<TraitType> jdbcTraitDef = TypesUtil.createTraitTypeDef("JdbcAccess", null);
 
         return TypeUtils.getTypesDef(ImmutableList.<EnumTypeDefinition>of(), ImmutableList.<StructTypeDefinition>of(),
-                ImmutableList.of(dimTraitDef, factTraitDef, piiTraitDef, metricTraitDef, etlTraitDef, jdbcTraitDef),
-                ImmutableList.of(dbClsDef, storageDescClsDef, columnClsDef, tblClsDef, loadProcessClsDef, viewClsDef));
+            ImmutableList.of(dimTraitDef, factTraitDef, piiTraitDef, metricTraitDef, etlTraitDef, jdbcTraitDef),
+            ImmutableList.of(dbClsDef, storageDescClsDef, columnClsDef, tblClsDef, loadProcessClsDef, viewClsDef));
     }
 
     AttributeDefinition attrDef(String name, IDataType dT) {
@@ -362,7 +381,7 @@ public class HiveLineageServiceTest {
     }
 
     AttributeDefinition attrDef(String name, IDataType dT, Multiplicity m, boolean isComposite,
-            String reverseAttributeName) {
+        String reverseAttributeName) {
         Preconditions.checkNotNull(name);
         Preconditions.checkNotNull(dT);
         return new AttributeDefinition(name, dT.getName(), m, isComposite, reverseAttributeName);
@@ -372,62 +391,62 @@ public class HiveLineageServiceTest {
         Id salesDB = database("Sales", "Sales Database", "John ETL", "hdfs://host:8000/apps/warehouse/sales");
 
         Referenceable sd =
-                storageDescriptor("hdfs://host:8000/apps/warehouse/sales", "TextInputFormat", "TextOutputFormat", true);
+            storageDescriptor("hdfs://host:8000/apps/warehouse/sales", "TextInputFormat", "TextOutputFormat", true);
 
         List<Referenceable> salesFactColumns = ImmutableList
-                .of(column("time_id", "int", "time id"), column("product_id", "int", "product id"),
-                        column("customer_id", "int", "customer id", "PII"),
-                        column("sales", "double", "product id", "Metric"));
+            .of(column("time_id", "int", "time id"), column("product_id", "int", "product id"),
+                column("customer_id", "int", "customer id", "PII"),
+                column("sales", "double", "product id", "Metric"));
 
         Id salesFact = table("sales_fact", "sales fact table", salesDB, sd, "Joe", "Managed", salesFactColumns, "Fact");
 
         List<Referenceable> timeDimColumns = ImmutableList
-                .of(column("time_id", "int", "time id"), column("dayOfYear", "int", "day Of Year"),
-                        column("weekDay", "int", "week Day"));
+            .of(column("time_id", "int", "time id"), column("dayOfYear", "int", "day Of Year"),
+                column("weekDay", "int", "week Day"));
 
         Id timeDim = table("time_dim", "time dimension table", salesDB, sd, "John Doe", "External", timeDimColumns,
-                "Dimension");
+            "Dimension");
 
         Id reportingDB =
-                database("Reporting", "reporting database", "Jane BI", "hdfs://host:8000/apps/warehouse/reporting");
+            database("Reporting", "reporting database", "Jane BI", "hdfs://host:8000/apps/warehouse/reporting");
 
         Id salesFactDaily =
-                table("sales_fact_daily_mv", "sales fact daily materialized view", reportingDB, sd, "Joe BI", "Managed",
-                        salesFactColumns, "Metric");
+            table("sales_fact_daily_mv", "sales fact daily materialized view", reportingDB, sd, "Joe BI", "Managed",
+                salesFactColumns, "Metric");
 
         loadProcess("loadSalesDaily", "hive query for daily summary", "John ETL", ImmutableList.of(salesFact, timeDim),
-                ImmutableList.of(salesFactDaily), "create table as select ", "plan", "id", "graph", "ETL");
+            ImmutableList.of(salesFactDaily), "create table as select ", "plan", "id", "graph", "ETL");
 
         List<Referenceable> productDimColumns = ImmutableList
-                .of(column("product_id", "int", "product id"), column("product_name", "string", "product name"),
-                        column("brand_name", "int", "brand name"));
+            .of(column("product_id", "int", "product id"), column("product_name", "string", "product name"),
+                column("brand_name", "int", "brand name"));
 
         Id productDim =
-                table("product_dim", "product dimension table", salesDB, sd, "John Doe", "Managed", productDimColumns,
-                        "Dimension");
+            table("product_dim", "product dimension table", salesDB, sd, "John Doe", "Managed", productDimColumns,
+                "Dimension");
 
         view("product_dim_view", reportingDB, ImmutableList.of(productDim), "Dimension", "JdbcAccess");
 
         List<Referenceable> customerDimColumns = ImmutableList.of(column("customer_id", "int", "customer id", "PII"),
-                column("name", "string", "customer name", "PII"),
-                column("address", "string", "customer address", "PII"));
+            column("name", "string", "customer name", "PII"),
+            column("address", "string", "customer address", "PII"));
 
         Id customerDim =
-                table("customer_dim", "customer dimension table", salesDB, sd, "fetl", "External", customerDimColumns,
-                        "Dimension");
+            table("customer_dim", "customer dimension table", salesDB, sd, "fetl", "External", customerDimColumns,
+                "Dimension");
 
         view("customer_dim_view", reportingDB, ImmutableList.of(customerDim), "Dimension", "JdbcAccess");
 
         Id salesFactMonthly =
-                table("sales_fact_monthly_mv", "sales fact monthly materialized view", reportingDB, sd, "Jane BI",
-                        "Managed", salesFactColumns, "Metric");
+            table("sales_fact_monthly_mv", "sales fact monthly materialized view", reportingDB, sd, "Jane BI",
+                "Managed", salesFactColumns, "Metric");
 
         loadProcess("loadSalesMonthly", "hive query for monthly summary", "John ETL", ImmutableList.of(salesFactDaily),
-                ImmutableList.of(salesFactMonthly), "create table as select ", "plan", "id", "graph", "ETL");
+            ImmutableList.of(salesFactMonthly), "create table as select ", "plan", "id", "graph", "ETL");
     }
 
     Id database(String name, String description, String owner, String locationUri, String... traitNames)
-    throws Exception {
+        throws Exception {
         Referenceable referenceable = new Referenceable(DATABASE_TYPE, traitNames);
         referenceable.set("name", name);
         referenceable.set("description", description);
@@ -439,7 +458,7 @@ public class HiveLineageServiceTest {
     }
 
     Referenceable storageDescriptor(String location, String inputFormat, String outputFormat, boolean compressed)
-    throws Exception {
+        throws Exception {
         Referenceable referenceable = new Referenceable(STORAGE_DESC_TYPE);
         referenceable.set("location", location);
         referenceable.set("inputFormat", inputFormat);
@@ -459,7 +478,7 @@ public class HiveLineageServiceTest {
     }
 
     Id table(String name, String description, Id dbId, Referenceable sd, String owner, String tableType,
-            List<Referenceable> columns, String... traitNames) throws Exception {
+        List<Referenceable> columns, String... traitNames) throws Exception {
         Referenceable referenceable = new Referenceable(HIVE_TABLE_TYPE, traitNames);
         referenceable.set("name", name);
         referenceable.set("description", description);
@@ -479,8 +498,8 @@ public class HiveLineageServiceTest {
     }
 
     Id loadProcess(String name, String description, String user, List<Id> inputTables, List<Id> outputTables,
-            String queryText, String queryPlan, String queryId, String queryGraph, String... traitNames)
-    throws Exception {
+        String queryText, String queryPlan, String queryId, String queryGraph, String... traitNames)
+        throws Exception {
         Referenceable referenceable = new Referenceable(HIVE_PROCESS_TYPE, traitNames);
         referenceable.set("name", name);
         referenceable.set("description", description);

http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/266d7cc0/repository/src/test/java/org/apache/atlas/repository/graph/GraphBackedMetadataRepositoryTest.java
----------------------------------------------------------------------
diff --git a/repository/src/test/java/org/apache/atlas/repository/graph/GraphBackedMetadataRepositoryTest.java b/repository/src/test/java/org/apache/atlas/repository/graph/GraphBackedMetadataRepositoryTest.java
index fb697d0..487ac84 100755
--- a/repository/src/test/java/org/apache/atlas/repository/graph/GraphBackedMetadataRepositoryTest.java
+++ b/repository/src/test/java/org/apache/atlas/repository/graph/GraphBackedMetadataRepositoryTest.java
@@ -20,9 +20,11 @@ package org.apache.atlas.repository.graph;
 
 import com.google.common.collect.ImmutableList;
 import com.thinkaurelius.titan.core.TitanGraph;
+import com.thinkaurelius.titan.core.util.TitanCleanup;
 import com.tinkerpop.blueprints.Compare;
 import com.tinkerpop.blueprints.GraphQuery;
 import com.tinkerpop.blueprints.Vertex;
+import org.apache.atlas.GraphTransaction;
 import org.apache.atlas.RepositoryMetadataModule;
 import org.apache.atlas.TestUtils;
 import org.apache.atlas.discovery.graph.GraphBackedDiscoveryService;
@@ -51,6 +53,7 @@ import org.apache.commons.lang.RandomStringUtils;
 import org.codehaus.jettison.json.JSONArray;
 import org.codehaus.jettison.json.JSONObject;
 import org.testng.Assert;
+import org.testng.annotations.AfterClass;
 import org.testng.annotations.BeforeClass;
 import org.testng.annotations.Guice;
 import org.testng.annotations.Test;
@@ -105,12 +108,23 @@ public class GraphBackedMetadataRepositoryTest {
         createHiveTypes();
     }
 
-/*
-    @AfterMethod
+
+    @AfterClass
     public void tearDown() throws Exception {
-         TestUtils.dumpGraph(graphProvider.get());
+        TypeSystem.getInstance().reset();
+        try {
+            //TODO - Fix failure during shutdown while using BDB
+            graphProvider.get().shutdown();
+        } catch(Exception e) {
+            e.printStackTrace();
+        }
+        try {
+            TitanCleanup.clear(graphProvider.get());
+        } catch(Exception e) {
+            e.printStackTrace();
+        }
     }
-*/
+
 
     @Test
     public void testSubmitEntity() throws Exception {
@@ -189,7 +203,8 @@ public class GraphBackedMetadataRepositoryTest {
         System.out.println("*** table = " + table);
     }
 
-    private String getGUID() {
+    @GraphTransaction
+    String getGUID() {
         Vertex tableVertex = getTableEntityVertex();
 
         String guid = tableVertex.getProperty(Constants.GUID_PROPERTY_KEY);
@@ -199,7 +214,8 @@ public class GraphBackedMetadataRepositoryTest {
         return guid;
     }
 
-    private Vertex getTableEntityVertex() {
+    @GraphTransaction
+    Vertex getTableEntityVertex() {
         TitanGraph graph = graphProvider.get();
         GraphQuery query = graph.query().has(Constants.ENTITY_TYPE_PROPERTY_KEY, Compare.EQUAL, TABLE_TYPE);
         Iterator<Vertex> results = query.vertices().iterator();
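
Worth noting in the hunk above: getGUID() and getTableEntityVertex() are widened
from private to package-private at the same time @GraphTransaction is added.
Atlas applies @GraphTransaction through a Guice method interceptor, and Guice
AOP cannot intercept private methods, so the visibility change is presumably
what lets the interceptor wrap these helpers in a transaction. For reference,
the usual way such an annotation is bound to an interceptor in a Guice module
(an illustrative sketch, not Atlas's actual interceptor wiring):

    import com.google.inject.AbstractModule;
    import com.google.inject.matcher.Matchers;
    import org.aopalliance.intercept.MethodInterceptor;
    import org.aopalliance.intercept.MethodInvocation;
    import org.apache.atlas.GraphTransaction;

    public class GraphTransactionSketchModule extends AbstractModule {
        @Override
        protected void configure() {
            MethodInterceptor interceptor = new MethodInterceptor() {
                @Override
                public Object invoke(MethodInvocation invocation) throws Throwable {
                    // Open a graph transaction, proceed, then commit/rollback (details omitted).
                    return invocation.proceed();
                }
            };
            // Guice AOP only sees non-private methods, hence the visibility change above.
            bindInterceptor(Matchers.any(), Matchers.annotatedWith(GraphTransaction.class), interceptor);
        }
    }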

http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/266d7cc0/repository/src/test/java/org/apache/atlas/repository/typestore/GraphBackedTypeStoreTest.java
----------------------------------------------------------------------
diff --git a/repository/src/test/java/org/apache/atlas/repository/typestore/GraphBackedTypeStoreTest.java b/repository/src/test/java/org/apache/atlas/repository/typestore/GraphBackedTypeStoreTest.java
index c774123..6a8129f 100755
--- a/repository/src/test/java/org/apache/atlas/repository/typestore/GraphBackedTypeStoreTest.java
+++ b/repository/src/test/java/org/apache/atlas/repository/typestore/GraphBackedTypeStoreTest.java
@@ -19,6 +19,7 @@
 package org.apache.atlas.repository.typestore;
 
 import com.thinkaurelius.titan.core.TitanGraph;
+import com.thinkaurelius.titan.core.util.TitanCleanup;
 import com.tinkerpop.blueprints.Direction;
 import com.tinkerpop.blueprints.Edge;
 import com.tinkerpop.blueprints.Vertex;
@@ -39,6 +40,7 @@ import org.apache.atlas.typesystem.types.HierarchicalTypeDefinition;
 import org.apache.atlas.typesystem.types.StructTypeDefinition;
 import org.apache.atlas.typesystem.types.TraitType;
 import org.apache.atlas.typesystem.types.TypeSystem;
+import org.testng.annotations.AfterClass;
 import org.testng.annotations.BeforeClass;
 import org.testng.annotations.Guice;
 import org.testng.annotations.Test;
@@ -63,6 +65,17 @@ public class GraphBackedTypeStoreTest {
         TestUtils.defineDeptEmployeeTypes(ts);
     }
 
+    @AfterClass
+    public void tearDown() throws Exception {
+        ts.reset();
+        graphProvider.get().shutdown();
+        try {
+            TitanCleanup.clear(graphProvider.get());
+        } catch(Exception e) {
+            e.printStackTrace();
+        }
+    }
+
     @Test
     @GraphTransaction
     public void testStore() throws AtlasException {
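
This test and GraphBackedMetadataRepositoryTest above now share the same
@AfterClass shape: reset the type system, shut the Titan graph down, then call
TitanCleanup.clear(...) so a persistent backend (HBase, BerkeleyDB) does not
carry state into the next run. Condensed into one helper, the pattern looks like
this (a sketch of what the two hunks above do, not code from this patch):

    import com.thinkaurelius.titan.core.TitanGraph;
    import com.thinkaurelius.titan.core.util.TitanCleanup;

    static void wipeGraph(TitanGraph graph) {
        try {
            graph.shutdown();          // clear() expects the instance to be shut down first
        } catch (Exception e) {
            e.printStackTrace();       // shutdown can fail with BDB (see TODO in the patch)
        }
        try {
            TitanCleanup.clear(graph); // drops the graph's stored data (HBase tables, BDB files)
        } catch (Exception e) {
            e.printStackTrace();
        }
    }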

http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/266d7cc0/repository/src/test/resources/application.properties
----------------------------------------------------------------------
diff --git a/repository/src/test/resources/application.properties b/repository/src/test/resources/application.properties
index 20c0c51..d0eaa8c 100755
--- a/repository/src/test/resources/application.properties
+++ b/repository/src/test/resources/application.properties
@@ -19,15 +19,35 @@
 #########  Graph Database Configs  #########
 #Refer http://s3.thinkaurelius.com/docs/titan/0.5.1/titan-config-ref.html
 # Graph Storage
-atlas.graph.storage.backend=inmemory
 
+atlas.graph.storage.backend=${titan.storage.backend}
 
-# Graph Search Index
-atlas.graph.index.search.backend=elasticsearch
+#Berkeley storage directory
+atlas.graph.storage.directory=target/data/berkley
+
+#hbase
+#For standalone mode, specify localhost
+#For distributed mode, specify the zookeeper quorum here. For more information, refer to http://s3.thinkaurelius.com/docs/titan/current/hbase.html#_remote_server_mode_2
+atlas.graph.storage.hostname=${titan.storage.hostname}
+
+# Graph Search Index Backend
+atlas.graph.index.search.backend=${titan.index.backend}
+
+#lucene
+#atlas.graph.index.search.directory=target/data/lucene
+
+#elasticsearch
 atlas.graph.index.search.directory=./target/data/es
 atlas.graph.index.search.elasticsearch.client-only=false
 atlas.graph.index.search.elasticsearch.local-mode=true
+atlas.graph.index.search.elasticsearch.create.sleep=2000
+
+#solr in cloud mode
+atlas.graph.index.search.solr.mode=cloud
+atlas.graph.index.search.solr.zookeeper-url=${solr.zk.address}
 
+#solr in http mode
+atlas.graph.index.search.solr.http-urls=http://localhost:8983/solr
 
 #########  Hive Lineage Configs  #########
 #atlas.lineage.hive.table.type.name=DataSet
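
The test copy of application.properties now carries Maven-style placeholders
(${titan.storage.backend}, ${titan.index.backend}, ${solr.zk.address});
presumably resource filtering, driven by the storage/index settings this commit
adds in the parent pom, substitutes concrete values at build time so the same
file can target BerkeleyDB+Elasticsearch or HBase+Solr. At run time the tests
only ever see the resolved values, read through commons-configuration along
these lines (property names are from the file above; the lookup helper itself
is illustrative):

    import org.apache.commons.configuration.ConfigurationException;
    import org.apache.commons.configuration.PropertiesConfiguration;

    static String configuredStorageBackend() throws ConfigurationException {
        // After filtering this returns e.g. "berkeleyje" or "hbase".
        PropertiesConfiguration conf = new PropertiesConfiguration("application.properties");
        return conf.getString("atlas.graph.storage.backend");
    }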

http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/266d7cc0/src/conf/application.properties
----------------------------------------------------------------------
diff --git a/src/conf/application.properties b/src/conf/application.properties
index c6b16cc..5c8c59a 100755
--- a/src/conf/application.properties
+++ b/src/conf/application.properties
@@ -21,6 +21,23 @@
 atlas.graph.storage.backend=berkeleyje
 atlas.graph.storage.directory=data/berkley
 
+#HBase as storage backend
+#hbase
+#For standalone mode, specify localhost
+#For distributed mode, specify the zookeeper quorum here. For more information, refer to http://s3.thinkaurelius.com/docs/titan/current/hbase.html#_remote_server_mode_2
+#atlas.graph.storage.hostname=localhost
+
+#Solr
+#atlas.graph.index.search.backend=solr
+
+# Solr cloud mode properties
+#atlas.graph.index.search.solr.mode=cloud
+#atlas.graph.index.search.solr.zookeeper-url=localhost:2181
+
+#Solr http mode properties
+#atlas.graph.index.search.solr.mode=http
+#atlas.graph.index.search.solr.http-urls=http://localhost:8983/solr
+
 # Graph Search Index
 atlas.graph.index.search.backend=elasticsearch
 atlas.graph.index.search.directory=data/es

http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/266d7cc0/webapp/pom.xml
----------------------------------------------------------------------
diff --git a/webapp/pom.xml b/webapp/pom.xml
index 0ae4a05..86e1ebf 100755
--- a/webapp/pom.xml
+++ b/webapp/pom.xml
@@ -116,13 +116,13 @@
         </dependency>
 
         <dependency>
-            <groupId>org.mortbay.jetty</groupId>
-            <artifactId>jetty</artifactId>
+            <groupId>org.eclipse.jetty</groupId>
+            <artifactId>jetty-server</artifactId>
         </dependency>
 
         <dependency>
-            <groupId>org.mortbay.jetty</groupId>
-            <artifactId>jetty-plus</artifactId>
+            <groupId>org.eclipse.jetty</groupId>
+            <artifactId>jetty-webapp</artifactId>
         </dependency>
 
         <dependency>
@@ -171,8 +171,13 @@
         </dependency>
 
         <dependency>
-            <groupId>org.mortbay.jetty</groupId>
-            <artifactId>jsp-2.1</artifactId>
+            <groupId>org.eclipse.jetty</groupId>
+            <artifactId>jetty-jsp</artifactId>
+        </dependency>
+
+        <dependency>
+            <groupId>org.codehaus.jackson</groupId>
+            <artifactId>jackson-core-asl</artifactId>
         </dependency>
 
         <dependency>
@@ -192,36 +197,36 @@
                     <workingDirectory>../dashboard/v2/</workingDirectory>
                 </configuration>
                 <executions>
-                  <execution>
-                    <id>install node and npm</id>
-                    <goals>
-                      <goal>install-node-and-npm</goal>
-                    </goals>
-                    <configuration>
-                      <nodeVersion>v0.10.30</nodeVersion>
-                      <npmVersion>1.4.3</npmVersion>
-                    </configuration>
-                  </execution>
-
-                  <execution>
-                    <id>npm install</id>
-                    <goals>
-                      <goal>npm</goal>
-                    </goals>
-                    <configuration>
-                      <arguments>install</arguments>
-                    </configuration>
-                  </execution>
-
-                  <execution>
-                    <id>grunt dist</id>
-                    <goals>
-                      <goal>grunt</goal>
-                    </goals>
-                    <configuration>
-                      <arguments>build</arguments>
-                    </configuration>
-                  </execution>
+                    <execution>
+                        <id>install node and npm</id>
+                        <goals>
+                            <goal>install-node-and-npm</goal>
+                        </goals>
+                        <configuration>
+                            <nodeVersion>v0.10.30</nodeVersion>
+                            <npmVersion>1.4.3</npmVersion>
+                        </configuration>
+                    </execution>
+
+                    <execution>
+                        <id>npm install</id>
+                        <goals>
+                            <goal>npm</goal>
+                        </goals>
+                        <configuration>
+                            <arguments>install</arguments>
+                        </configuration>
+                    </execution>
+
+                    <execution>
+                        <id>grunt dist</id>
+                        <goals>
+                            <goal>grunt</goal>
+                        </goals>
+                        <configuration>
+                            <arguments>build</arguments>
+                        </configuration>
+                    </execution>
                 </executions>
             </plugin>
             <plugin>
@@ -245,7 +250,17 @@
                     </execution>
                 </executions>
             </plugin>
-
+            <plugin>
+                <groupId>org.apache.maven.plugins</groupId>
+                <artifactId>maven-jar-plugin</artifactId>
+                <executions>
+                    <execution>
+                        <goals>
+                            <goal>test-jar</goal>
+                        </goals>
+                    </execution>
+                </executions>
+            </plugin>
             <plugin>
                 <groupId>org.apache.maven.plugins</groupId>
                 <artifactId>maven-war-plugin</artifactId>
@@ -260,7 +275,16 @@
                             <directory>src/main/webapp/WEB-INF</directory>
                             <targetPath>WEB-INF</targetPath>
                         </resource>
+                        <resource>
+                            <directory>${project.build.directory}/test-classes</directory>
+                            <targetPath>WEB-INF/classes</targetPath>
+                        </resource>
                     </webResources>
+                    <archive>
+                        <manifest>
+                            <addClasspath>true</addClasspath>
+                        </manifest>
+                    </archive>
                 </configuration>
             </plugin>
 
@@ -312,30 +336,24 @@
             </plugin>
 
             <plugin>
-                <groupId>org.mortbay.jetty</groupId>
-                <artifactId>maven-jetty-plugin</artifactId>
-                <version>${jetty.version}</version>
+                <groupId>org.eclipse.jetty</groupId>
+                <artifactId>jetty-maven-plugin</artifactId>
                 <configuration>
                     <skip>${skipTests}</skip>
                     <!--only skip int tests -->
-                    <connectors>
-                        <!--
-                        <connector implementation="org.mortbay.jetty.security.SslSocketConnector">
-                            <port>21443</port>
-                            <maxIdleTime>60000</maxIdleTime>
-                            <keystore>${project.build.directory}/../../webapp/target/atlas.keystore</keystore>
-                            <keyPassword>atlas-passwd</keyPassword>
-                            <password>atlas-passwd</password>
-                        </connector>
-                        -->
-                        <connector implementation="org.mortbay.jetty.nio.SelectChannelConnector">
-                            <port>21000</port>
-                            <maxIdleTime>60000</maxIdleTime>
-                        </connector>
-                    </connectors>
-                    <webApp>${project.build.directory}/atlas-webapp-${project.version}</webApp>
-                    <contextPath>/</contextPath>
-                    <useTestClasspath>true</useTestClasspath>
+                    <httpConnector>
+                        <port>21000</port>
+                        <idleTimeout>60000</idleTimeout>
+                    </httpConnector>
+                    <war>${project.build.directory}/atlas-webapp-${project.version}.war</war>
+                    <daemon>true</daemon>
+                    <!--<webAppSourceDirectory>webapp/src/test/webapp</webAppSourceDirectory>-->
+                    <webApp>
+                        <contextPath>/</contextPath>
+                        <descriptor>webapp/src/test/webapp/WEB-INF/web.xml</descriptor>
+                        <!-- ${project.build.directory}/atlas-webapp-${project.version} -->
+                    </webApp>
+                    <useTestScope>true</useTestScope>
                     <systemProperties>
                         <systemProperty>
                             <name>atlas.log.dir</name>
@@ -364,11 +382,8 @@
                         <id>start-jetty</id>
                         <phase>pre-integration-test</phase>
                         <goals>
-                            <goal>run</goal>
+                            <goal>deploy-war</goal>
                         </goals>
-                        <configuration>
-                            <daemon>true</daemon>
-                        </configuration>
                     </execution>
                     <execution>
                         <id>stop-jetty</id>

http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/266d7cc0/webapp/src/main/java/org/apache/atlas/web/listeners/GuiceServletConfig.java
----------------------------------------------------------------------
diff --git a/webapp/src/main/java/org/apache/atlas/web/listeners/GuiceServletConfig.java b/webapp/src/main/java/org/apache/atlas/web/listeners/GuiceServletConfig.java
index 18ce17c..b643f13 100755
--- a/webapp/src/main/java/org/apache/atlas/web/listeners/GuiceServletConfig.java
+++ b/webapp/src/main/java/org/apache/atlas/web/listeners/GuiceServletConfig.java
@@ -20,14 +20,20 @@ package org.apache.atlas.web.listeners;
 
 import com.google.inject.Guice;
 import com.google.inject.Injector;
+import com.google.inject.Key;
+import com.google.inject.Provider;
+import com.google.inject.TypeLiteral;
 import com.google.inject.servlet.GuiceServletContextListener;
 import com.sun.jersey.api.core.PackagesResourceConfig;
 import com.sun.jersey.guice.JerseyServletModule;
 import com.sun.jersey.guice.spi.container.servlet.GuiceContainer;
+import com.thinkaurelius.titan.core.TitanGraph;
+import com.tinkerpop.blueprints.Graph;
 import org.apache.atlas.AtlasClient;
 import org.apache.atlas.AtlasException;
 import org.apache.atlas.PropertiesUtil;
 import org.apache.atlas.RepositoryMetadataModule;
+import org.apache.atlas.repository.graph.GraphProvider;
 import org.apache.atlas.web.filters.AtlasAuthenticationFilter;
 import org.apache.atlas.web.filters.AuditFilter;
 import org.apache.commons.configuration.ConfigurationException;
@@ -35,6 +41,7 @@ import org.apache.commons.configuration.PropertiesConfiguration;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import javax.inject.Inject;
 import javax.servlet.ServletContextEvent;
 import java.util.HashMap;
 import java.util.Map;
@@ -45,7 +52,7 @@ public class GuiceServletConfig extends GuiceServletContextListener {
 
     private static final String GUICE_CTX_PARAM = "guice.packages";
     static final String HTTP_AUTHENTICATION_ENABLED = "atlas.http.authentication.enabled";
-    private Injector injector;
+    protected Injector injector;
 
     @Override
     protected Injector getInjector() {
@@ -106,5 +113,11 @@ public class GuiceServletConfig extends GuiceServletContextListener {
     @Override
     public void contextDestroyed(ServletContextEvent servletContextEvent) {
         super.contextDestroyed(servletContextEvent);
+        if(injector != null) {
+            TypeLiteral<GraphProvider<TitanGraph>> graphProviderType = new TypeLiteral<GraphProvider<TitanGraph>>() {};
+            Provider<GraphProvider<TitanGraph>> graphProvider = injector.getProvider(Key.get(graphProviderType));
+            final Graph graph = graphProvider.get().get();
+            graph.shutdown();
+        }
     }
 }
\ No newline at end of file
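
Two related changes in this file: the injector field is widened from private to
protected, and contextDestroyed() now shuts down the Guice-provided TitanGraph
when the web context goes away. The wider visibility is presumably what lets the
TestGuiceServletConfig listener added elsewhere in this commit subclass this
class from test code. A purely hypothetical subclass shape (illustrative only;
this is not the contents of the actual TestGuiceServletConfig.java):

    // Hypothetical sketch -- not the TestGuiceServletConfig added by this commit.
    public class SampleTestServletConfig extends GuiceServletConfig {
        @Override
        public void contextDestroyed(javax.servlet.ServletContextEvent event) {
            // A subclass can use the protected injector for test-specific cleanup,
            // e.g. injector.getInstance(...) to reset shared state between runs.
            super.contextDestroyed(event);  // shuts down the shared TitanGraph (see diff above)
        }
    }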

http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/266d7cc0/webapp/src/main/java/org/apache/atlas/web/service/EmbeddedServer.java
----------------------------------------------------------------------
diff --git a/webapp/src/main/java/org/apache/atlas/web/service/EmbeddedServer.java b/webapp/src/main/java/org/apache/atlas/web/service/EmbeddedServer.java
index 88200f0..16591d8 100755
--- a/webapp/src/main/java/org/apache/atlas/web/service/EmbeddedServer.java
+++ b/webapp/src/main/java/org/apache/atlas/web/service/EmbeddedServer.java
@@ -20,10 +20,13 @@ package org.apache.atlas.web.service;
 
 import org.apache.commons.configuration.ConfigurationException;
 import org.apache.commons.configuration.PropertiesConfiguration;
-import org.mortbay.jetty.Connector;
-import org.mortbay.jetty.Server;
-import org.mortbay.jetty.bio.SocketConnector;
-import org.mortbay.jetty.webapp.WebAppContext;
+import org.eclipse.jetty.server.Connector;
+import org.eclipse.jetty.server.HttpConfiguration;
+import org.eclipse.jetty.server.HttpConnectionFactory;
+import org.eclipse.jetty.server.ServerConnector;
+import org.eclipse.jetty.server.Server;
+import org.eclipse.jetty.webapp.WebAppClassLoader;
+import org.eclipse.jetty.webapp.WebAppContext;
 
 import java.io.IOException;
 
@@ -40,6 +43,7 @@ public class EmbeddedServer {
         server.addConnector(connector);
 
         WebAppContext application = new WebAppContext(path, "/");
+        application.setClassLoader(Thread.currentThread().getContextClassLoader());
         server.setHandler(application);
     }
 
@@ -52,19 +56,21 @@ public class EmbeddedServer {
     }
 
     protected Connector getConnector(int port) throws IOException {
-        Connector connector = new SocketConnector();
-        connector.setPort(port);
-        connector.setHost("0.0.0.0");
 
+        HttpConfiguration http_config = new HttpConfiguration();
         // this is to enable large header sizes when Kerberos is enabled with AD
-        final Integer bufferSize = getBufferSize();
-        connector.setHeaderBufferSize(bufferSize);
-        connector.setRequestBufferSize(bufferSize);
+        final int bufferSize = getBufferSize();
+        http_config.setResponseHeaderSize(bufferSize);
+        http_config.setRequestHeaderSize(bufferSize);
 
+        ServerConnector connector = new ServerConnector(server, new HttpConnectionFactory(http_config));
+        connector.setPort(port);
+        connector.setHost("0.0.0.0");
+        server.addConnector(connector);
         return connector;
     }
 
-    private Integer getBufferSize() {
+    protected Integer getBufferSize() {
         try {
             PropertiesConfiguration configuration = new PropertiesConfiguration("application.properties");
             return configuration.getInt("atlas.jetty.request.buffer.size", DEFAULT_BUFFER_SIZE);