You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@atlas.apache.org by ap...@apache.org on 2017/05/23 22:07:05 UTC

[07/12] incubator-atlas git commit: ATLAS-1198: Spring Framework (v4 with Spring security) over Guice

http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/35e5828f/webapp/src/main/java/org/apache/atlas/web/service/ServiceState.java
----------------------------------------------------------------------
diff --git a/webapp/src/main/java/org/apache/atlas/web/service/ServiceState.java b/webapp/src/main/java/org/apache/atlas/web/service/ServiceState.java
index 2d9e00a..3fe8d18 100644
--- a/webapp/src/main/java/org/apache/atlas/web/service/ServiceState.java
+++ b/webapp/src/main/java/org/apache/atlas/web/service/ServiceState.java
@@ -19,13 +19,15 @@
 package org.apache.atlas.web.service;
 
 import com.google.common.base.Preconditions;
-import com.google.inject.Singleton;
 import org.apache.atlas.ApplicationProperties;
 import org.apache.atlas.AtlasException;
 import org.apache.atlas.ha.HAConfiguration;
 import org.apache.commons.configuration.Configuration;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
+import org.springframework.stereotype.Component;
+
+import javax.inject.Singleton;
 
 /**
  * A class that maintains the state of this instance.
@@ -34,6 +36,7 @@ import org.slf4j.LoggerFactory;
  * directed by {@link ActiveInstanceElectorService}.
  */
 @Singleton
+@Component
 public class ServiceState {
 
     private static final Logger LOG = LoggerFactory.getLogger(ServiceState.class);

http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/35e5828f/webapp/src/main/java/org/apache/atlas/web/service/UserService.java
----------------------------------------------------------------------
diff --git a/webapp/src/main/java/org/apache/atlas/web/service/UserService.java b/webapp/src/main/java/org/apache/atlas/web/service/UserService.java
index 6e5c210..24fd7cf 100644
--- a/webapp/src/main/java/org/apache/atlas/web/service/UserService.java
+++ b/webapp/src/main/java/org/apache/atlas/web/service/UserService.java
@@ -17,18 +17,23 @@
 
 package org.apache.atlas.web.service;
 
-import org.springframework.beans.factory.annotation.Autowired;
+import org.apache.atlas.web.dao.UserDao;
+import org.apache.atlas.web.model.User;
 import org.springframework.security.core.userdetails.UserDetailsService;
 import org.springframework.security.core.userdetails.UsernameNotFoundException;
 import org.springframework.stereotype.Service;
-import org.apache.atlas.web.dao.UserDao;
-import org.apache.atlas.web.model.User;
+
+import javax.inject.Inject;
 
 @Service
 public class UserService implements UserDetailsService {
 
-    @Autowired
-    private UserDao userDao;
+    private final UserDao userDao;
+
+    @Inject
+    public UserService(UserDao userDao) {
+        this.userDao = userDao;
+    }
 
     @Override
     public User loadUserByUsername(final String username)

http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/35e5828f/webapp/src/main/java/org/apache/atlas/web/setup/AtlasSetup.java
----------------------------------------------------------------------
diff --git a/webapp/src/main/java/org/apache/atlas/web/setup/AtlasSetup.java b/webapp/src/main/java/org/apache/atlas/web/setup/AtlasSetup.java
deleted file mode 100644
index 41eccd1..0000000
--- a/webapp/src/main/java/org/apache/atlas/web/setup/AtlasSetup.java
+++ /dev/null
@@ -1,65 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.atlas.web.setup;
-
-import com.google.inject.Guice;
-import com.google.inject.Injector;
-import org.apache.atlas.ApplicationProperties;
-import org.apache.atlas.AtlasException;
-import org.apache.atlas.setup.SetupException;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-/**
- * An application that is used to setup dependencies for the Atlas web service.
- *
- * This should be executed immediately after installation with the same configuration
- * as the Atlas web service itself. The application runs all steps registered with {@link SetupSteps}.
- */
-public class AtlasSetup {
-
-    private static final Logger LOG = LoggerFactory.getLogger(AtlasSetup.class);
-
-    private final Injector injector;
-
-    public AtlasSetup() {
-        injector = Guice.createInjector(new AtlasSetupModule());
-        LOG.info("Got injector: {}", injector);
-    }
-
-    public static void main(String[] args) {
-        try {
-            AtlasSetup atlasSetup = new AtlasSetup();
-            atlasSetup.run();
-            LOG.info("Finished running all setup steps.");
-        } catch (SetupException e) {
-            LOG.error("Could not run setup step.", e);
-        }
-    }
-
-    public void run() throws SetupException {
-        SetupSteps setupSteps = injector.getInstance(SetupSteps.class);
-        LOG.info("Got setup steps.");
-        try {
-            setupSteps.runSetup(ApplicationProperties.get());
-        } catch (AtlasException e) {
-            throw new SetupException("Cannot get application properties.", e);
-        }
-    }
-}

http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/35e5828f/webapp/src/main/java/org/apache/atlas/web/setup/AtlasSetupModule.java
----------------------------------------------------------------------
diff --git a/webapp/src/main/java/org/apache/atlas/web/setup/AtlasSetupModule.java b/webapp/src/main/java/org/apache/atlas/web/setup/AtlasSetupModule.java
deleted file mode 100644
index 6edfb12..0000000
--- a/webapp/src/main/java/org/apache/atlas/web/setup/AtlasSetupModule.java
+++ /dev/null
@@ -1,32 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.atlas.web.setup;
-
-import com.google.inject.AbstractModule;
-import com.google.inject.multibindings.Multibinder;
-import org.apache.atlas.repository.graph.GraphSchemaInitializer;
-import org.apache.atlas.setup.SetupStep;
-
-public class AtlasSetupModule extends AbstractModule {
-    @Override
-    protected void configure() {
-        Multibinder<SetupStep> setupStepMultibinder = Multibinder.newSetBinder(binder(), SetupStep.class);
-        setupStepMultibinder.addBinding().to(GraphSchemaInitializer.class);
-    }
-}

http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/35e5828f/webapp/src/main/java/org/apache/atlas/web/setup/SetupSteps.java
----------------------------------------------------------------------
diff --git a/webapp/src/main/java/org/apache/atlas/web/setup/SetupSteps.java b/webapp/src/main/java/org/apache/atlas/web/setup/SetupSteps.java
index eadd5ce..cfb49b6 100644
--- a/webapp/src/main/java/org/apache/atlas/web/setup/SetupSteps.java
+++ b/webapp/src/main/java/org/apache/atlas/web/setup/SetupSteps.java
@@ -19,8 +19,7 @@
 package org.apache.atlas.web.setup;
 
 import com.google.common.base.Charsets;
-import com.google.inject.Inject;
-import com.google.inject.Singleton;
+import org.apache.atlas.ApplicationProperties;
 import org.apache.atlas.AtlasConstants;
 import org.apache.atlas.AtlasException;
 import org.apache.atlas.ha.AtlasServerIdSelector;
@@ -37,32 +36,44 @@ import org.apache.zookeeper.data.ACL;
 import org.apache.zookeeper.data.Stat;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
+import org.springframework.context.annotation.Condition;
+import org.springframework.context.annotation.ConditionContext;
+import org.springframework.context.annotation.Conditional;
+import org.springframework.core.type.AnnotatedTypeMetadata;
+import org.springframework.stereotype.Component;
 
+import javax.annotation.PostConstruct;
+import javax.inject.Inject;
+import javax.inject.Singleton;
 import java.util.Arrays;
 import java.util.List;
 import java.util.Set;
 
 @Singleton
+@Component
+@Conditional(SetupSteps.SetupRequired.class)
 public class SetupSteps {
     private static final Logger LOG = LoggerFactory.getLogger(SetupSteps.class);
     public static final String SETUP_IN_PROGRESS_NODE = "/setup_in_progress";
 
     private final Set<SetupStep> setupSteps;
+    private final Configuration configuration;
     private CuratorFactory curatorFactory;
 
     @Inject
-    public SetupSteps(Set<SetupStep> steps, CuratorFactory curatorFactory) {
+    public SetupSteps(Set<SetupStep> steps, CuratorFactory curatorFactory, Configuration configuration) {
         setupSteps = steps;
         this.curatorFactory = curatorFactory;
+        this.configuration = configuration;
     }
 
     /**
      * Call each registered {@link SetupStep} one after the other.
      * @throws SetupException Thrown with any error during running setup, including Zookeeper interactions, and
      *                          individual failures in the {@link SetupStep}.
-     * @param configuration Configuration for Atlas server.
      */
-    public void runSetup(Configuration configuration) throws SetupException {
+    @PostConstruct
+    public void runSetup() throws SetupException {
         HAConfiguration.ZookeeperProperties zookeeperProperties = HAConfiguration.getZookeeperProperties(configuration);
         InterProcessMutex lock = curatorFactory.lockInstance(zookeeperProperties.getZkRoot());
         try {
@@ -162,4 +173,25 @@ public class SetupSteps {
             throw new SetupException("Could not create lock node before running setup.", e);
         }
     }
+
+    static class SetupRequired implements Condition {
+        private static final String ATLAS_SERVER_RUN_SETUP_KEY = "atlas.server.run.setup.on.start";
+
+        @Override
+        public boolean matches(ConditionContext context, AnnotatedTypeMetadata metadata) {
+            try {
+                Configuration configuration = ApplicationProperties.get();
+                boolean shouldRunSetup = configuration.getBoolean(ATLAS_SERVER_RUN_SETUP_KEY, false);
+                if (shouldRunSetup) {
+                    LOG.warn("Running setup per configuration {}.", ATLAS_SERVER_RUN_SETUP_KEY);
+                    return true;
+                } else {
+                    LOG.info("Not running setup per configuration {}.", ATLAS_SERVER_RUN_SETUP_KEY);
+                }
+            } catch (AtlasException e) {
+                LOG.error("Unable to read config to determine if setup is needed. Not running setup.");
+            }
+            return false;
+        }
+    }
 }

http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/35e5828f/webapp/src/main/resources/spring-security.xml
----------------------------------------------------------------------
diff --git a/webapp/src/main/resources/spring-security.xml b/webapp/src/main/resources/spring-security.xml
index 208c325..1743218 100644
--- a/webapp/src/main/resources/spring-security.xml
+++ b/webapp/src/main/resources/spring-security.xml
@@ -1,30 +1,23 @@
 <?xml version="1.0" encoding="UTF-8"?>
-<!-- Licensed to the Apache Software Foundation (ASF) under one or more contributor 
-    license agreements. See the NOTICE file distributed with this work for additional 
-    information regarding copyright ownership. The ASF licenses this file to 
-    You under the Apache License, Version 2.0 (the "License"); you may not use 
-    this file except in compliance with the License. You may obtain a copy of 
-    the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required 
-    by applicable law or agreed to in writing, software distributed under the 
-    License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS 
-    OF ANY KIND, either express or implied. See the License for the specific 
+<!-- Licensed to the Apache Software Foundation (ASF) under one or more contributor
+    license agreements. See the NOTICE file distributed with this work for additional
+    information regarding copyright ownership. The ASF licenses this file to
+    You under the Apache License, Version 2.0 (the "License"); you may not use
+    this file except in compliance with the License. You may obtain a copy of
+    the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required
+    by applicable law or agreed to in writing, software distributed under the
+    License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS
+    OF ANY KIND, either express or implied. See the License for the specific
     language governing permissions and limitations under the License. -->
 
 <beans:beans xmlns="http://www.springframework.org/schema/security"
              xmlns:beans="http://www.springframework.org/schema/beans"
-             xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
              xmlns:security="http://www.springframework.org/schema/security"
-             xmlns:context="http://www.springframework.org/schema/context"
-             xsi:schemaLocation="http://www.springframework.org/schema/beans
-    http://www.springframework.org/schema/beans/spring-beans-3.1.xsd
-    http://www.springframework.org/schema/security
-    http://www.springframework.org/schema/security/spring-security-3.1.xsd
-
-
-
+             xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+             xsi:schemaLocation="http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans.xsd
+						http://www.springframework.org/schema/security http://www.springframework.org/schema/security/spring-security.xsd">
 
-    http://www.springframework.org/schema/context 
-    http://www.springframework.org/schema/context/spring-context-3.1.xsd">
+    <!-- This XML is no longer being used, @see AtlasSecurityConfig for the equivalent java config -->
 
     <security:http pattern="/login.jsp" security="none" />
     <security:http pattern="/css/**" security="none" />
@@ -35,8 +28,7 @@
     <security:http pattern="/api/atlas/admin/status" security="none" />
     <security:http pattern="/api/atlas/admin/metrics" security="none" />
 
-    <security:http disable-url-rewriting="true"
-                   use-expressions="true" create-session="always"
+    <security:http create-session="always"
                    entry-point-ref="entryPoint">
         <security:session-management
                 session-fixation-protection="newSession" />
@@ -48,6 +40,7 @@
 
         <form-login
                 login-page="/login.jsp"
+                login-processing-url="/j_spring_security_check"
                 authentication-success-handler-ref="atlasAuthenticationSuccessHandler"
                 authentication-failure-handler-ref="atlasAuthenticationFailureHandler"
                 username-parameter="j_username"
@@ -56,6 +49,8 @@
         <security:logout logout-success-url="/login.jsp" delete-cookies="ATLASSESSIONID"
                          logout-url="/logout.html" />
         <http-basic />
+        <headers disabled="true"/>
+        <csrf disabled="true"/>
         <security:custom-filter position="LAST" ref="atlasAuthorizationFilter"/>
     </security:http>
 
@@ -76,7 +71,7 @@
 
     <beans:bean id="formAuthenticationEntryPoint"
                 class="org.apache.atlas.web.filters.AtlasAuthenticationEntryPoint">
-        <beans:property name="loginFormUrl" value="/login.jsp" />
+        <beans:constructor-arg value="/login.jsp"/>
     </beans:bean>
 
     <beans:bean id="authenticationEntryPoint"
@@ -108,7 +103,5 @@
     <security:global-method-security
             pre-post-annotations="enabled" />
 
-    <context:component-scan base-package="org.apache.atlas.web" />
-    
     <beans:bean id = "atlasAuthorizationFilter" class="org.apache.atlas.web.filters.AtlasAuthorizationFilter"/>
-</beans:beans>
+</beans:beans>
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/35e5828f/webapp/src/main/webapp/WEB-INF/applicationContext.xml
----------------------------------------------------------------------
diff --git a/webapp/src/main/webapp/WEB-INF/applicationContext.xml b/webapp/src/main/webapp/WEB-INF/applicationContext.xml
index d4ad14e..aae2aa0 100644
--- a/webapp/src/main/webapp/WEB-INF/applicationContext.xml
+++ b/webapp/src/main/webapp/WEB-INF/applicationContext.xml
@@ -12,9 +12,12 @@
 
 <beans xmlns="http://www.springframework.org/schema/beans"
        xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+       xmlns:context="http://www.springframework.org/schema/context"
+       xmlns:aop="http://www.springframework.org/schema/aop"
        xsi:schemaLocation="http://www.springframework.org/schema/beans
-	http://www.springframework.org/schema/beans/spring-beans-3.1.xsd">
-
-        <import resource="classpath:/spring-security.xml" />
+	http://www.springframework.org/schema/beans/spring-beans-3.1.xsd http://www.springframework.org/schema/context http://www.springframework.org/schema/context/spring-context.xsd http://www.springframework.org/schema/aop http://www.springframework.org/schema/aop/spring-aop.xsd">
 
+        <context:annotation-config/>
+        <aop:config proxy-target-class="true"/>
+        <context:component-scan base-package="org.apache.atlas" />
 </beans>
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/35e5828f/webapp/src/main/webapp/WEB-INF/web.xml
----------------------------------------------------------------------
diff --git a/webapp/src/main/webapp/WEB-INF/web.xml b/webapp/src/main/webapp/WEB-INF/web.xml
index f7e2028..d2d08f5 100755
--- a/webapp/src/main/webapp/WEB-INF/web.xml
+++ b/webapp/src/main/webapp/WEB-INF/web.xml
@@ -24,51 +24,46 @@
     <display-name>Apache Atlas</display-name>
     <description>Metadata Management and Data Governance Platform over Hadoop</description>
 
-    <context-param>
-        <param-name>guice.packages</param-name>
-        <param-value>
-            org.apache.atlas.web.resources,org.apache.atlas.web.params,org.apache.atlas.web.rest,org.apache.atlas.web.errors
-        </param-value>
-    </context-param>
-  
-    <context-param>
-        <param-name>com.sun.jersey.api.json.POJOMappingFeature</param-name>
-        <param-value>true</param-value>
-    </context-param>
+    <servlet>
+        <servlet-name>jersey-servlet</servlet-name>
+        <servlet-class>
+            com.sun.jersey.spi.spring.container.servlet.SpringServlet
+        </servlet-class>
+        <init-param>
+            <param-name>com.sun.jersey.api.json.POJOMappingFeature</param-name>
+            <param-value>true</param-value>
+        </init-param>
+        <load-on-startup>1</load-on-startup>
+    </servlet>
 
-    <!--
-        More information can be found here:
-
-        https://jersey.java.net/nonav/apidocs/1.11/contribs/jersey-guice/com/sun/jersey/guice/spi/container/servlet/package-summary.html
-     -->
+    <servlet-mapping>
+        <servlet-name>jersey-servlet</servlet-name>
+        <url-pattern>/api/atlas/*</url-pattern>
+    </servlet-mapping>
 
     <filter>
         <filter-name>springSecurityFilterChain</filter-name>
         <filter-class>org.springframework.web.filter.DelegatingFilterProxy</filter-class>
     </filter>
-    
-    <filter>
-        <filter-name>guiceFilter</filter-name>
-        <filter-class>com.google.inject.servlet.GuiceFilter</filter-class>
-    </filter>
-    
+
     <filter-mapping>
         <filter-name>springSecurityFilterChain</filter-name>
         <url-pattern>/*</url-pattern>
     </filter-mapping>
-    
+
+    <filter>
+        <filter-name>AuditFilter</filter-name>
+        <filter-class>org.apache.atlas.web.filters.AuditFilter</filter-class>
+    </filter>
+
     <filter-mapping>
-        <filter-name>guiceFilter</filter-name>
+        <filter-name>AuditFilter</filter-name>
         <url-pattern>/*</url-pattern>
     </filter-mapping>
 
     <listener>
         <listener-class>org.springframework.web.util.Log4jConfigListener</listener-class>
     </listener>
-    
-    <listener>
-        <listener-class>org.apache.atlas.web.listeners.GuiceServletConfig</listener-class>
-    </listener>
 
     <listener>
         <listener-class>org.springframework.web.context.request.RequestContextListener</listener-class>
@@ -88,5 +83,5 @@
         </cookie-config>
     </session-config>
 
-	
+
 </web-app>

http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/35e5828f/webapp/src/test/java/org/apache/atlas/examples/QuickStartIT.java
----------------------------------------------------------------------
diff --git a/webapp/src/test/java/org/apache/atlas/examples/QuickStartIT.java b/webapp/src/test/java/org/apache/atlas/examples/QuickStartIT.java
index 06c78be..592c2a6 100644
--- a/webapp/src/test/java/org/apache/atlas/examples/QuickStartIT.java
+++ b/webapp/src/test/java/org/apache/atlas/examples/QuickStartIT.java
@@ -22,7 +22,7 @@ import org.apache.atlas.AtlasClient;
 import org.apache.atlas.AtlasServiceException;
 import org.apache.atlas.typesystem.Referenceable;
 import org.apache.atlas.typesystem.persistence.Id;
-import org.apache.atlas.web.resources.BaseResourceIT;
+import org.apache.atlas.web.integration.BaseResourceIT;
 import org.codehaus.jettison.json.JSONArray;
 import org.codehaus.jettison.json.JSONException;
 import org.codehaus.jettison.json.JSONObject;

http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/35e5828f/webapp/src/test/java/org/apache/atlas/examples/QuickStartV2IT.java
----------------------------------------------------------------------
diff --git a/webapp/src/test/java/org/apache/atlas/examples/QuickStartV2IT.java b/webapp/src/test/java/org/apache/atlas/examples/QuickStartV2IT.java
index 7f3192a..773a514 100644
--- a/webapp/src/test/java/org/apache/atlas/examples/QuickStartV2IT.java
+++ b/webapp/src/test/java/org/apache/atlas/examples/QuickStartV2IT.java
@@ -26,9 +26,8 @@ import org.apache.atlas.model.instance.AtlasEntityHeader;
 import org.apache.atlas.model.lineage.AtlasLineageInfo;
 import org.apache.atlas.model.lineage.AtlasLineageInfo.LineageDirection;
 import org.apache.atlas.model.lineage.AtlasLineageInfo.LineageRelation;
-import org.apache.atlas.web.resources.BaseResourceIT;
+import org.apache.atlas.web.integration.BaseResourceIT;
 import org.codehaus.jettison.json.JSONException;
-import org.testng.Assert;
 import org.testng.annotations.BeforeClass;
 import org.testng.annotations.Test;
 

http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/35e5828f/webapp/src/test/java/org/apache/atlas/notification/EntityNotificationIT.java
----------------------------------------------------------------------
diff --git a/webapp/src/test/java/org/apache/atlas/notification/EntityNotificationIT.java b/webapp/src/test/java/org/apache/atlas/notification/EntityNotificationIT.java
index 29be942..ac3b538 100644
--- a/webapp/src/test/java/org/apache/atlas/notification/EntityNotificationIT.java
+++ b/webapp/src/test/java/org/apache/atlas/notification/EntityNotificationIT.java
@@ -19,8 +19,8 @@
 package org.apache.atlas.notification;
 
 import com.google.common.collect.ImmutableSet;
-import com.google.inject.Inject;
 import org.apache.atlas.AtlasClient;
+import org.apache.atlas.kafka.NotificationProvider;
 import org.apache.atlas.notification.entity.EntityNotification;
 import org.apache.atlas.typesystem.IReferenceableInstance;
 import org.apache.atlas.typesystem.IStruct;
@@ -32,9 +32,8 @@ import org.apache.atlas.typesystem.persistence.Id;
 import org.apache.atlas.typesystem.types.HierarchicalTypeDefinition;
 import org.apache.atlas.typesystem.types.TraitType;
 import org.apache.atlas.typesystem.types.utils.TypesUtil;
-import org.apache.atlas.web.resources.BaseResourceIT;
+import org.apache.atlas.web.integration.BaseResourceIT;
 import org.testng.annotations.BeforeClass;
-import org.testng.annotations.Guice;
 import org.testng.annotations.Test;
 
 import java.util.Collections;
@@ -48,13 +47,11 @@ import static org.testng.Assert.assertTrue;
 /**
  * Entity Notification Integration Tests.
  */
-@Guice(modules = NotificationModule.class)
 public class EntityNotificationIT extends BaseResourceIT {
 
     private final String DATABASE_NAME = "db" + randomString();
     private final String TABLE_NAME = "table" + randomString();
-    @Inject
-    private NotificationInterface notificationInterface;
+    private NotificationInterface notificationInterface = NotificationProvider.get();
     private Id tableId;
     private Id dbId;
     private String traitName;

http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/35e5828f/webapp/src/test/java/org/apache/atlas/notification/NotificationHookConsumerIT.java
----------------------------------------------------------------------
diff --git a/webapp/src/test/java/org/apache/atlas/notification/NotificationHookConsumerIT.java b/webapp/src/test/java/org/apache/atlas/notification/NotificationHookConsumerIT.java
index 1c2cdc6..9c5597e 100644
--- a/webapp/src/test/java/org/apache/atlas/notification/NotificationHookConsumerIT.java
+++ b/webapp/src/test/java/org/apache/atlas/notification/NotificationHookConsumerIT.java
@@ -18,23 +18,21 @@
 
 package org.apache.atlas.notification;
 
-import com.google.inject.Inject;
 import org.apache.atlas.EntityAuditEvent;
+import org.apache.atlas.kafka.NotificationProvider;
 import org.apache.atlas.notification.hook.HookNotification;
 import org.apache.atlas.typesystem.Referenceable;
 import org.apache.atlas.typesystem.persistence.Id;
-import org.apache.atlas.web.resources.BaseResourceIT;
+import org.apache.atlas.web.integration.BaseResourceIT;
 import org.codehaus.jettison.json.JSONArray;
 import org.testng.annotations.AfterClass;
 import org.testng.annotations.BeforeClass;
-import org.testng.annotations.Guice;
 import org.testng.annotations.Test;
 
 import java.util.List;
 
 import static org.testng.Assert.assertEquals;
 
-@Guice(modules = NotificationModule.class)
 public class NotificationHookConsumerIT extends BaseResourceIT {
 
     private static final String TEST_USER = "testuser";
@@ -43,8 +41,7 @@ public class NotificationHookConsumerIT extends BaseResourceIT {
     public static final String QUALIFIED_NAME = "qualifiedName";
     public static final String CLUSTER_NAME = "clusterName";
 
-    @Inject
-    private NotificationInterface kafka;
+    private NotificationInterface notificationInterface = NotificationProvider.get();
 
     @BeforeClass
     public void setUp() throws Exception {
@@ -54,11 +51,11 @@ public class NotificationHookConsumerIT extends BaseResourceIT {
 
     @AfterClass
     public void teardown() throws Exception {
-        kafka.close();
+        notificationInterface.close();
     }
 
     private void sendHookMessage(HookNotification.HookNotificationMessage message) throws NotificationException {
-        kafka.send(NotificationInterface.NotificationType.HOOK, message);
+        notificationInterface.send(NotificationInterface.NotificationType.HOOK, message);
     }
 
     @Test

http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/35e5828f/webapp/src/test/java/org/apache/atlas/notification/NotificationHookConsumerKafkaTest.java
----------------------------------------------------------------------
diff --git a/webapp/src/test/java/org/apache/atlas/notification/NotificationHookConsumerKafkaTest.java b/webapp/src/test/java/org/apache/atlas/notification/NotificationHookConsumerKafkaTest.java
index e744e2e..18fd2ee 100644
--- a/webapp/src/test/java/org/apache/atlas/notification/NotificationHookConsumerKafkaTest.java
+++ b/webapp/src/test/java/org/apache/atlas/notification/NotificationHookConsumerKafkaTest.java
@@ -18,12 +18,12 @@
 
 package org.apache.atlas.notification;
 
-import com.google.inject.Inject;
 import org.apache.atlas.AtlasClient;
 import org.apache.atlas.AtlasException;
 import org.apache.atlas.AtlasServiceException;
 import org.apache.atlas.exception.AtlasBaseException;
 import org.apache.atlas.kafka.KafkaNotification;
+import org.apache.atlas.kafka.NotificationProvider;
 import org.apache.atlas.model.instance.AtlasEntity;
 import org.apache.atlas.notification.hook.HookNotification;
 import org.apache.atlas.repository.converters.AtlasInstanceConverter;
@@ -39,7 +39,6 @@ import org.mockito.MockitoAnnotations;
 import org.testng.Assert;
 import org.testng.annotations.AfterTest;
 import org.testng.annotations.BeforeTest;
-import org.testng.annotations.Guice;
 import org.testng.annotations.Test;
 
 import static org.mockito.Matchers.any;
@@ -47,14 +46,12 @@ import static org.mockito.Matchers.anyBoolean;
 import static org.mockito.Matchers.anyString;
 import static org.mockito.Mockito.*;
 
-@Guice(modules = NotificationModule.class)
 public class NotificationHookConsumerKafkaTest {
 
     public static final String NAME = "name";
     public static final String DESCRIPTION = "description";
     public static final String QUALIFIED_NAME = "qualifiedName";
-    @Inject
-    private NotificationInterface notificationInterface;
+    private NotificationInterface notificationInterface = NotificationProvider.get();
 
 
     @Mock

http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/35e5828f/webapp/src/test/java/org/apache/atlas/web/adapters/TestEntitiesREST.java
----------------------------------------------------------------------
diff --git a/webapp/src/test/java/org/apache/atlas/web/adapters/TestEntitiesREST.java b/webapp/src/test/java/org/apache/atlas/web/adapters/TestEntitiesREST.java
index 9272203..e753881 100644
--- a/webapp/src/test/java/org/apache/atlas/web/adapters/TestEntitiesREST.java
+++ b/webapp/src/test/java/org/apache/atlas/web/adapters/TestEntitiesREST.java
@@ -20,7 +20,7 @@ package org.apache.atlas.web.adapters;
 import org.apache.atlas.AtlasClient;
 import org.apache.atlas.RequestContext;
 import org.apache.atlas.RequestContextV1;
-import org.apache.atlas.TestOnlyModule;
+import org.apache.atlas.TestModules;
 import org.apache.atlas.TestUtilsV2;
 import org.apache.atlas.model.instance.AtlasClassification;
 import org.apache.atlas.model.instance.AtlasEntity;
@@ -32,7 +32,6 @@ import org.apache.atlas.model.instance.ClassificationAssociateRequest;
 import org.apache.atlas.model.instance.EntityMutationResponse;
 import org.apache.atlas.model.instance.EntityMutations;
 import org.apache.atlas.model.typedef.AtlasTypesDef;
-import org.apache.atlas.repository.graph.AtlasGraphProvider;
 import org.apache.atlas.repository.store.bootstrap.AtlasTypeDefStoreInitializer;
 import org.apache.atlas.store.AtlasTypeDefStore;
 import org.apache.atlas.type.AtlasType;
@@ -42,7 +41,6 @@ import org.apache.atlas.web.rest.EntityREST;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.testng.Assert;
-import org.testng.annotations.AfterClass;
 import org.testng.annotations.AfterMethod;
 import org.testng.annotations.BeforeClass;
 import org.testng.annotations.Guice;
@@ -57,7 +55,7 @@ import java.util.List;
 import java.util.Map;
 
 
-@Guice(modules = {TestOnlyModule.class})
+@Guice(modules = {TestModules.TestOnlyModule.class})
 public class TestEntitiesREST {
 
     private static final Logger LOG = LoggerFactory.getLogger(TestEntitiesREST.class);
@@ -105,11 +103,6 @@ public class TestEntitiesREST {
         RequestContextV1.clear();
     }
 
-    @AfterClass
-    public void tearDown() throws Exception {
-        AtlasGraphProvider.cleanup();
-    }
-
     @Test
     public void testCreateOrUpdateEntities() throws Exception {
         AtlasEntitiesWithExtInfo entities = new AtlasEntitiesWithExtInfo();

http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/35e5828f/webapp/src/test/java/org/apache/atlas/web/adapters/TestEntityREST.java
----------------------------------------------------------------------
diff --git a/webapp/src/test/java/org/apache/atlas/web/adapters/TestEntityREST.java b/webapp/src/test/java/org/apache/atlas/web/adapters/TestEntityREST.java
index cadf0ff..b90ea64 100644
--- a/webapp/src/test/java/org/apache/atlas/web/adapters/TestEntityREST.java
+++ b/webapp/src/test/java/org/apache/atlas/web/adapters/TestEntityREST.java
@@ -17,9 +17,9 @@
  */
 package org.apache.atlas.web.adapters;
 
+import org.apache.atlas.TestModules;
 import org.apache.atlas.RequestContext;
 import org.apache.atlas.RequestContextV1;
-import org.apache.atlas.TestOnlyModule;
 import org.apache.atlas.TestUtilsV2;
 import org.apache.atlas.model.instance.AtlasClassification;
 import org.apache.atlas.model.instance.AtlasClassification.AtlasClassifications;
@@ -30,7 +30,6 @@ import org.apache.atlas.model.instance.AtlasEntityHeader;
 import org.apache.atlas.model.instance.EntityMutationResponse;
 import org.apache.atlas.model.instance.EntityMutations;
 import org.apache.atlas.model.typedef.AtlasTypesDef;
-import org.apache.atlas.repository.graph.AtlasGraphProvider;
 import org.apache.atlas.store.AtlasTypeDefStore;
 import org.apache.atlas.type.AtlasTypeUtil;
 import org.apache.atlas.web.rest.EntityREST;
@@ -50,7 +49,7 @@ import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 
-@Guice(modules = {TestOnlyModule.class})
+@Guice(modules = {TestModules.TestOnlyModule.class})
 public class TestEntityREST {
 
     @Inject
@@ -73,7 +72,7 @@ public class TestEntityREST {
 
     @AfterClass
     public void tearDown() throws Exception {
-        AtlasGraphProvider.cleanup();
+//        AtlasGraphProvider.cleanup();
     }
 
     @AfterMethod

http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/35e5828f/webapp/src/test/java/org/apache/atlas/web/integration/AdminJerseyResourceIT.java
----------------------------------------------------------------------
diff --git a/webapp/src/test/java/org/apache/atlas/web/integration/AdminJerseyResourceIT.java b/webapp/src/test/java/org/apache/atlas/web/integration/AdminJerseyResourceIT.java
new file mode 100755
index 0000000..cfe09d6
--- /dev/null
+++ b/webapp/src/test/java/org/apache/atlas/web/integration/AdminJerseyResourceIT.java
@@ -0,0 +1,49 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.atlas.web.integration;
+
+import org.apache.atlas.AtlasClient;
+import org.apache.commons.configuration.PropertiesConfiguration;
+import org.codehaus.jettison.json.JSONObject;
+import org.testng.Assert;
+import org.testng.annotations.BeforeClass;
+import org.testng.annotations.Test;
+
+/**
+ * Integration test for Admin jersey resource.
+ */
+public class AdminJerseyResourceIT extends BaseResourceIT {
+
+    @BeforeClass
+    public void setUp() throws Exception {
+        // Delegates to BaseResourceIT to initialize the V1/V2 REST clients.
+        super.setUp();
+    }
+
+    @Test
+    public void testGetVersion() throws Exception {
+        // Query the running server's version endpoint via the V1 admin API.
+        JSONObject response = atlasClientV1.callAPIWithBodyAndParams(AtlasClient.API.VERSION, null, (String[]) null);
+        Assert.assertNotNull(response);
+
+        // Compare the server-reported values with the build-time properties;
+        // assumes atlas-buildinfo.properties is on the test classpath -- TODO confirm.
+        PropertiesConfiguration buildConfiguration = new PropertiesConfiguration("atlas-buildinfo.properties");
+
+        Assert.assertEquals(response.get("Version"), buildConfiguration.getString("build.version"));
+        Assert.assertEquals(response.get("Name"), buildConfiguration.getString("project.name"));
+        Assert.assertEquals(response.get("Description"), buildConfiguration.getString("project.description"));
+    }
+}

http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/35e5828f/webapp/src/test/java/org/apache/atlas/web/integration/BaseResourceIT.java
----------------------------------------------------------------------
diff --git a/webapp/src/test/java/org/apache/atlas/web/integration/BaseResourceIT.java b/webapp/src/test/java/org/apache/atlas/web/integration/BaseResourceIT.java
new file mode 100755
index 0000000..b59d3ee
--- /dev/null
+++ b/webapp/src/test/java/org/apache/atlas/web/integration/BaseResourceIT.java
@@ -0,0 +1,669 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.atlas.web.integration;
+
+import com.google.common.base.Preconditions;
+import com.google.common.collect.ImmutableList;
+import com.google.common.collect.ImmutableSet;
+import kafka.consumer.ConsumerTimeoutException;
+import org.apache.atlas.ApplicationProperties;
+import org.apache.atlas.AtlasClient;
+import org.apache.atlas.AtlasClientV2;
+import org.apache.atlas.AtlasServiceException;
+import org.apache.atlas.model.instance.AtlasClassification;
+import org.apache.atlas.model.instance.AtlasEntity;
+import org.apache.atlas.model.instance.AtlasEntity.AtlasEntityWithExtInfo;
+import org.apache.atlas.model.instance.AtlasEntityHeader;
+import org.apache.atlas.model.instance.AtlasStruct;
+import org.apache.atlas.model.instance.EntityMutationResponse;
+import org.apache.atlas.model.instance.EntityMutations;
+import org.apache.atlas.model.typedef.AtlasClassificationDef;
+import org.apache.atlas.model.typedef.AtlasEntityDef;
+import org.apache.atlas.model.typedef.AtlasEnumDef;
+import org.apache.atlas.model.typedef.AtlasStructDef;
+import org.apache.atlas.model.typedef.AtlasStructDef.AtlasAttributeDef;
+import org.apache.atlas.model.typedef.AtlasStructDef.AtlasAttributeDef.Cardinality;
+import org.apache.atlas.model.typedef.AtlasStructDef.AtlasConstraintDef;
+import org.apache.atlas.model.typedef.AtlasTypesDef;
+import org.apache.atlas.notification.NotificationConsumer;
+import org.apache.atlas.notification.entity.EntityNotification;
+import org.apache.atlas.type.AtlasTypeUtil;
+import org.apache.atlas.typesystem.Referenceable;
+import org.apache.atlas.typesystem.Struct;
+import org.apache.atlas.typesystem.TypesDef;
+import org.apache.atlas.typesystem.json.TypesSerialization;
+import org.apache.atlas.typesystem.persistence.Id;
+import org.apache.atlas.typesystem.types.*;
+import org.apache.atlas.typesystem.types.utils.TypesUtil;
+import org.apache.atlas.utils.AuthenticationUtil;
+import org.apache.atlas.utils.ParamChecker;
+import org.apache.commons.configuration.Configuration;
+import org.apache.commons.lang.RandomStringUtils;
+import org.codehaus.jettison.json.JSONArray;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.testng.Assert;
+import org.testng.annotations.BeforeClass;
+
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import static org.apache.atlas.model.typedef.AtlasStructDef.AtlasConstraintDef.CONSTRAINT_PARAM_ATTRIBUTE;
+import static org.apache.atlas.model.typedef.AtlasStructDef.AtlasConstraintDef.CONSTRAINT_TYPE_INVERSE_REF;
+import static org.apache.atlas.model.typedef.AtlasStructDef.AtlasConstraintDef.CONSTRAINT_TYPE_OWNED_REF;
+import static org.testng.Assert.assertNotNull;
+import static org.testng.Assert.assertTrue;
+
+/**
+ * Base class for integration tests.
+ * Sets up the web resource and has helper methods to created type and entity.
+ */
+public abstract class BaseResourceIT {
+
+    public static final String ATLAS_REST_ADDRESS = "atlas.rest.address";
+    public static final String NAME = "name";
+    public static final String QUALIFIED_NAME = "qualifiedName";
+    public static final String CLUSTER_NAME = "clusterName";
+    public static final String DESCRIPTION = "description";
+
+    // All service clients
+    protected AtlasClient atlasClientV1;
+    protected AtlasClientV2 atlasClientV2;
+
+    public static final Logger LOG = LoggerFactory.getLogger(BaseResourceIT.class);
+    protected static final int MAX_WAIT_TIME = 60000;
+    protected String[] atlasUrls;
+
+    /**
+     * Initializes the V1 and V2 REST clients against the configured Atlas
+     * server (falling back to http://localhost:21000/) before any test runs.
+     */
+    @BeforeClass
+    public void setUp() throws Exception {
+
+        //set high timeouts so that tests do not fail due to read timeouts while you
+        //are stepping through the code in a debugger
+        ApplicationProperties.get().setProperty("atlas.client.readTimeoutMSecs", "100000000");
+        ApplicationProperties.get().setProperty("atlas.client.connectTimeoutMSecs", "100000000");
+
+
+        Configuration configuration = ApplicationProperties.get();
+        atlasUrls = configuration.getStringArray(ATLAS_REST_ADDRESS);
+
+        // Fall back to the default local server when no address is configured.
+        if (atlasUrls == null || atlasUrls.length == 0) {
+            atlasUrls = new String[] { "http://localhost:21000/" };
+        }
+
+        // Without Kerberos, authenticate with default admin/admin credentials;
+        // with Kerberos, the clients pick up credentials from the environment.
+        if (!AuthenticationUtil.isKerberosAuthenticationEnabled()) {
+            atlasClientV1 = new AtlasClient(atlasUrls, new String[]{"admin", "admin"});
+            atlasClientV2 = new AtlasClientV2(atlasUrls, new String[]{"admin", "admin"});
+        } else {
+            atlasClientV1 = new AtlasClient(atlasUrls);
+            atlasClientV2 = new AtlasClientV2(atlasUrls);
+        }
+    }
+
+    /**
+     * Bulk-registers the given V2 type definitions, silently skipping any
+     * type whose name is already registered on the server (the bulk create
+     * API fails wholesale on a single duplicate).
+     *
+     * @param typesDef enum/struct/entity/classification definitions to create
+     * @throws AtlasServiceException if the final bulk create call fails
+     */
+    protected void batchCreateTypes(AtlasTypesDef typesDef) throws AtlasServiceException {
+        AtlasTypesDef toCreate = new AtlasTypesDef();
+        for (AtlasEnumDef enumDef : typesDef.getEnumDefs()) {
+            if (atlasClientV2.typeWithNameExists(enumDef.getName())) {
+                LOG.warn("Type with name {} already exists. Skipping", enumDef.getName());
+            } else {
+                toCreate.getEnumDefs().add(enumDef);
+            }
+        }
+
+        for (AtlasStructDef structDef : typesDef.getStructDefs()) {
+            if (atlasClientV2.typeWithNameExists(structDef.getName())) {
+                LOG.warn("Type with name {} already exists. Skipping", structDef.getName());
+            } else {
+                toCreate.getStructDefs().add(structDef);
+            }
+        }
+
+        for (AtlasEntityDef entityDef : typesDef.getEntityDefs()) {
+            if (atlasClientV2.typeWithNameExists(entityDef.getName())) {
+                LOG.warn("Type with name {} already exists. Skipping", entityDef.getName());
+            } else  {
+                toCreate.getEntityDefs().add(entityDef);
+            }
+        }
+
+        for (AtlasClassificationDef classificationDef : typesDef.getClassificationDefs()) {
+            if (atlasClientV2.typeWithNameExists(classificationDef.getName())) {
+                LOG.warn("Type with name {} already exists. Skipping", classificationDef.getName());
+            } else  {
+                toCreate.getClassificationDefs().add(classificationDef);
+            }
+        }
+
+        // Single bulk call with only the genuinely new definitions.
+        atlasClientV2.createAtlasTypeDefs(toCreate);
+    }
+
+    /** Creates the given V2 type definitions, skipping already-registered names. */
+    protected void createType(AtlasTypesDef typesDef) throws AtlasServiceException {
+        // Since the bulk create bails out on a single failure, this has to be done as a workaround
+        batchCreateTypes(typesDef);
+    }
+
+    /**
+     * Registers V1 (typesystem) type definitions via the V1 client, filtering
+     * out any enum/struct/trait/class type whose name already exists.
+     *
+     * @param typesDef V1 type definitions to create
+     * @return names of the types created by the server
+     */
+    protected List<String> createType(TypesDef typesDef) throws Exception {
+        List<EnumTypeDefinition> enumTypes = new ArrayList<>();
+        List<StructTypeDefinition> structTypes = new ArrayList<>();
+        List<HierarchicalTypeDefinition<TraitType>> traitTypes = new ArrayList<>();
+        List<HierarchicalTypeDefinition<ClassType>> classTypes = new ArrayList<>();
+
+        for (EnumTypeDefinition enumTypeDefinition : typesDef.enumTypesAsJavaList()) {
+            if (atlasClientV2.typeWithNameExists(enumTypeDefinition.name)) {
+                LOG.warn("Type with name {} already exists. Skipping", enumTypeDefinition.name);
+            } else {
+                enumTypes.add(enumTypeDefinition);
+            }
+        }
+        for (StructTypeDefinition structTypeDefinition : typesDef.structTypesAsJavaList()) {
+            if (atlasClientV2.typeWithNameExists(structTypeDefinition.typeName)) {
+                LOG.warn("Type with name {} already exists. Skipping", structTypeDefinition.typeName);
+            } else {
+                structTypes.add(structTypeDefinition);
+            }
+        }
+        for (HierarchicalTypeDefinition<TraitType> hierarchicalTypeDefinition : typesDef.traitTypesAsJavaList()) {
+            if (atlasClientV2.typeWithNameExists(hierarchicalTypeDefinition.typeName)) {
+                LOG.warn("Type with name {} already exists. Skipping", hierarchicalTypeDefinition.typeName);
+            } else {
+                traitTypes.add(hierarchicalTypeDefinition);
+            }
+        }
+        for (HierarchicalTypeDefinition<ClassType> hierarchicalTypeDefinition : typesDef.classTypesAsJavaList()) {
+            if (atlasClientV2.typeWithNameExists(hierarchicalTypeDefinition.typeName)) {
+                LOG.warn("Type with name {} already exists. Skipping", hierarchicalTypeDefinition.typeName);
+            } else {
+                classTypes.add(hierarchicalTypeDefinition);
+            }
+        }
+
+        // Rebuild a TypesDef holding only the not-yet-registered definitions.
+        TypesDef toCreate = TypesUtil.getTypesDef(ImmutableList.copyOf(enumTypes),
+                ImmutableList.copyOf(structTypes),
+                ImmutableList.copyOf(traitTypes),
+                ImmutableList.copyOf(classTypes));
+        return atlasClientV1.createType(toCreate);
+    }
+
+    /** Parses a JSON-serialized V1 TypesDef and registers it (duplicates skipped). */
+    protected List<String> createType(String typesAsJSON) throws Exception {
+        return createType(TypesSerialization.fromJson(typesAsJSON));
+    }
+
+    /**
+     * Creates a single entity instance via the V1 REST client.
+     *
+     * @param referenceable the typed instance to create
+     * @return the Id of the created root instance, or null if the server
+     *         reported no created guids
+     */
+    protected Id createInstance(Referenceable referenceable) throws Exception {
+        String typeName = referenceable.getTypeName();
+        // Fix: use the class's SLF4J logger instead of raw System.out so the
+        // output goes through the test logging configuration.
+        LOG.info("creating instance of type {}", typeName);
+
+        List<String> guids = atlasClientV1.createEntity(referenceable);
+        LOG.info("created instance for type {}, guid: {}", typeName, guids);
+
+        // The server may return several guids (entities created transitively);
+        // the last one corresponds to the root instance being created here.
+        if (guids.size() > 0) {
+            return new Id(guids.get(guids.size() - 1), 0, referenceable.getTypeName());
+        }
+        return null;
+    }
+
+    /**
+     * Null-tolerant wrapper over {@code TypesUtil.getTypesDef}: any null
+     * argument is replaced with an empty immutable list before delegating.
+     */
+    protected TypesDef getTypesDef(ImmutableList<EnumTypeDefinition> enums,
+                                   ImmutableList<StructTypeDefinition> structs,
+                                   ImmutableList<HierarchicalTypeDefinition<TraitType>> traits,
+                                   ImmutableList<HierarchicalTypeDefinition<ClassType>> classes){
+        enums = (enums != null) ? enums : ImmutableList
+                .<EnumTypeDefinition>of();
+        structs =
+                (structs != null) ? structs : ImmutableList.<StructTypeDefinition>of();
+
+        traits = (traits != null) ? traits : ImmutableList
+                .<HierarchicalTypeDefinition<TraitType>>of();
+
+        classes = (classes != null) ? classes : ImmutableList
+                .<HierarchicalTypeDefinition<ClassType>>of();
+        return TypesUtil.getTypesDef(enums, structs, traits, classes);
+
+    }
+
+    /**
+     * Creates or updates the given entity via the V2 API and returns the
+     * mutated entity header.
+     *
+     * @param atlasEntity entity to persist
+     * @param update      true to update an existing entity, false to create
+     * @return header of the created/updated entity, or null if the call failed
+     */
+    protected AtlasEntityHeader modifyEntity(AtlasEntity atlasEntity, boolean update) {
+        EntityMutationResponse entity = null;
+        try {
+            if (!update) {
+                entity = atlasClientV2.createEntity(new AtlasEntityWithExtInfo(atlasEntity));
+                assertNotNull(entity);
+                assertNotNull(entity.getEntitiesByOperation(EntityMutations.EntityOperation.CREATE));
+                assertTrue(entity.getEntitiesByOperation(EntityMutations.EntityOperation.CREATE).size() > 0);
+                return entity.getEntitiesByOperation(EntityMutations.EntityOperation.CREATE).get(0);
+            } else {
+                entity = atlasClientV2.updateEntity(new AtlasEntityWithExtInfo(atlasEntity));
+                assertNotNull(entity);
+                assertNotNull(entity.getEntitiesByOperation(EntityMutations.EntityOperation.UPDATE));
+                assertTrue(entity.getEntitiesByOperation(EntityMutations.EntityOperation.UPDATE).size() > 0);
+                return entity.getEntitiesByOperation(EntityMutations.EntityOperation.UPDATE).get(0);
+            }
+
+        } catch (AtlasServiceException e) {
+            // Fix: the original call had one "{}" placeholder with two
+            // arguments and dropped the caught exception entirely. Log the
+            // failing entity and pass the exception as the last argument so
+            // SLF4J records the stack trace.
+            LOG.error("Entity {} failed for entity: {}", update ? "update" : "creation", atlasEntity, e);
+        }
+        return null;
+    }
+
+    /** Convenience wrapper: creates the entity via {@link #modifyEntity}. */
+    protected AtlasEntityHeader createEntity(AtlasEntity atlasEntity) {
+        return modifyEntity(atlasEntity, false);
+    }
+
+    /** Convenience wrapper: updates the entity via {@link #modifyEntity}. */
+    protected AtlasEntityHeader updateEntity(AtlasEntity atlasEntity) {
+        return modifyEntity(atlasEntity, true);
+    }
+
+    // Type names registered by createTypeDefinitionsV2() for V2 API tests.
+    protected static final String DATABASE_TYPE_V2 = "hive_db_v2";
+    protected static final String HIVE_TABLE_TYPE_V2 = "hive_table_v2";
+    protected static final String COLUMN_TYPE_V2 = "hive_column_v2";
+    protected static final String HIVE_PROCESS_TYPE_V2 = "hive_process_v2";
+
+    // Type names registered by createTypeDefinitionsV1() for V1 API tests.
+    protected static final String DATABASE_TYPE = "hive_db_v1";
+    protected static final String HIVE_TABLE_TYPE = "hive_table_v1";
+    protected static final String COLUMN_TYPE = "hive_column_v1";
+    protected static final String HIVE_PROCESS_TYPE = "hive_process_v1";
+
+    // Names of the built-in hive model types shipped with Atlas.
+    protected static final String DATABASE_TYPE_BUILTIN = "hive_db";
+    protected static final String HIVE_TABLE_TYPE_BUILTIN = "hive_table";
+    protected static final String COLUMN_TYPE_BUILTIN = "hive_column";
+    protected static final String HIVE_PROCESS_TYPE_BUILTIN = "hive_process";
+
+    /**
+     * Registers the V1 (typesystem) test model: hive_db_v1, hive_column_v1,
+     * hive_table_v1, hive_process_v1, the serdeType struct, the tableType
+     * enum, and a set of trait types. Already-registered names are skipped
+     * by {@link #createType(TypesDef)}.
+     */
+    protected void createTypeDefinitionsV1() throws Exception {
+        // db <-> table are linked via the reverse attribute pair "tables"/"db".
+        HierarchicalTypeDefinition<ClassType> dbClsDef = TypesUtil
+                .createClassTypeDef(DATABASE_TYPE, null,
+                        TypesUtil.createUniqueRequiredAttrDef(NAME, DataTypes.STRING_TYPE),
+                        TypesUtil.createRequiredAttrDef(DESCRIPTION, DataTypes.STRING_TYPE),
+                        attrDef("locationUri", DataTypes.STRING_TYPE),
+                        attrDef("owner", DataTypes.STRING_TYPE), attrDef("createTime", DataTypes.INT_TYPE),
+                        new AttributeDefinition("tables", DataTypes.arrayTypeName(HIVE_TABLE_TYPE),
+                                Multiplicity.OPTIONAL, false, "db")
+                );
+
+        HierarchicalTypeDefinition<ClassType> columnClsDef = TypesUtil
+                .createClassTypeDef(COLUMN_TYPE, null, attrDef(NAME, DataTypes.STRING_TYPE),
+                        attrDef("dataType", DataTypes.STRING_TYPE), attrDef("comment", DataTypes.STRING_TYPE));
+
+        StructTypeDefinition structTypeDefinition = new StructTypeDefinition("serdeType",
+                new AttributeDefinition[]{TypesUtil.createRequiredAttrDef(NAME, DataTypes.STRING_TYPE),
+                        TypesUtil.createRequiredAttrDef("serde", DataTypes.STRING_TYPE)});
+
+        EnumValue values[] = {new EnumValue("MANAGED", 1), new EnumValue("EXTERNAL", 2),};
+
+        EnumTypeDefinition enumTypeDefinition = new EnumTypeDefinition("tableType", values);
+
+        // Table extends the built-in DataSet supertype; columns are composite
+        // (owned) while db is a non-composite reverse reference.
+        HierarchicalTypeDefinition<ClassType> tblClsDef = TypesUtil
+                .createClassTypeDef(HIVE_TABLE_TYPE, ImmutableSet.of("DataSet"),
+                        attrDef("owner", DataTypes.STRING_TYPE), attrDef("createTime", DataTypes.LONG_TYPE),
+                        attrDef("lastAccessTime", DataTypes.DATE_TYPE),
+                        attrDef("temporary", DataTypes.BOOLEAN_TYPE),
+                        new AttributeDefinition("db", DATABASE_TYPE, Multiplicity.OPTIONAL, true, "tables"),
+                        new AttributeDefinition("columns", DataTypes.arrayTypeName(COLUMN_TYPE),
+                                Multiplicity.OPTIONAL, true, null),
+                        new AttributeDefinition("tableType", "tableType", Multiplicity.OPTIONAL, false, null),
+                        new AttributeDefinition("serde1", "serdeType", Multiplicity.OPTIONAL, false, null),
+                        new AttributeDefinition("serde2", "serdeType", Multiplicity.OPTIONAL, false, null));
+
+        HierarchicalTypeDefinition<ClassType> loadProcessClsDef = TypesUtil
+                .createClassTypeDef(HIVE_PROCESS_TYPE, ImmutableSet.of("Process"),
+                        attrDef("userName", DataTypes.STRING_TYPE), attrDef("startTime", DataTypes.INT_TYPE),
+                        attrDef("endTime", DataTypes.LONG_TYPE),
+                        attrDef("queryText", DataTypes.STRING_TYPE, Multiplicity.REQUIRED),
+                        attrDef("queryPlan", DataTypes.STRING_TYPE, Multiplicity.REQUIRED),
+                        attrDef("queryId", DataTypes.STRING_TYPE, Multiplicity.REQUIRED),
+                        attrDef("queryGraph", DataTypes.STRING_TYPE, Multiplicity.REQUIRED));
+
+        // Trait (classification) types used by the entity tests.
+        HierarchicalTypeDefinition<TraitType> classificationTrait = TypesUtil
+                .createTraitTypeDef("classification", ImmutableSet.<String>of(),
+                        TypesUtil.createRequiredAttrDef("tag", DataTypes.STRING_TYPE));
+        HierarchicalTypeDefinition<TraitType> piiTrait =
+                TypesUtil.createTraitTypeDef("pii", ImmutableSet.<String>of());
+        HierarchicalTypeDefinition<TraitType> phiTrait =
+                TypesUtil.createTraitTypeDef("phi", ImmutableSet.<String>of());
+        HierarchicalTypeDefinition<TraitType> pciTrait =
+                TypesUtil.createTraitTypeDef("pci", ImmutableSet.<String>of());
+        HierarchicalTypeDefinition<TraitType> soxTrait =
+                TypesUtil.createTraitTypeDef("sox", ImmutableSet.<String>of());
+        HierarchicalTypeDefinition<TraitType> secTrait =
+                TypesUtil.createTraitTypeDef("sec", ImmutableSet.<String>of());
+        HierarchicalTypeDefinition<TraitType> financeTrait =
+                TypesUtil.createTraitTypeDef("finance", ImmutableSet.<String>of());
+        HierarchicalTypeDefinition<TraitType> factTrait =
+                TypesUtil.createTraitTypeDef("Fact", ImmutableSet.<String>of());
+        HierarchicalTypeDefinition<TraitType> etlTrait =
+                TypesUtil.createTraitTypeDef("ETL", ImmutableSet.<String>of());
+        HierarchicalTypeDefinition<TraitType> dimensionTrait =
+                TypesUtil.createTraitTypeDef("Dimension", ImmutableSet.<String>of());
+        HierarchicalTypeDefinition<TraitType> metricTrait =
+                TypesUtil.createTraitTypeDef("Metric", ImmutableSet.<String>of());
+
+        createType(getTypesDef(ImmutableList.of(enumTypeDefinition), ImmutableList.of(structTypeDefinition),
+                ImmutableList.of(classificationTrait, piiTrait, phiTrait, pciTrait,
+                        soxTrait, secTrait, financeTrait, factTrait, etlTrait, dimensionTrait, metricTrait),
+                ImmutableList.of(dbClsDef, columnClsDef, tblClsDef, loadProcessClsDef)));
+    }
+
+    /**
+     * Registers the V2 test model: hive_db_v2, hive_column_v2, hive_table_v2,
+     * hive_process_v2, the serdeType struct, the tableType enum and a set of
+     * classification types. Already-registered names are skipped by
+     * {@link #batchCreateTypes}.
+     */
+    protected void createTypeDefinitionsV2() throws Exception {
+
+        // db owns its tables (ownedRef); the table's "db" attribute is the
+        // inverse reference back via the db's "randomTable" attribute.
+        AtlasConstraintDef isCompositeSourceConstraint = new AtlasConstraintDef(CONSTRAINT_TYPE_OWNED_REF);
+
+        AtlasConstraintDef isCompositeTargetConstraint = new AtlasConstraintDef(CONSTRAINT_TYPE_INVERSE_REF,
+                Collections.<String, Object>singletonMap(CONSTRAINT_PARAM_ATTRIBUTE, "randomTable"));
+
+        AtlasEntityDef dbClsTypeDef = AtlasTypeUtil.createClassTypeDef(
+                DATABASE_TYPE_V2,
+                null,
+                AtlasTypeUtil.createUniqueRequiredAttrDef(NAME, "string"),
+                AtlasTypeUtil.createRequiredAttrDef(DESCRIPTION, "string"),
+                AtlasTypeUtil.createOptionalAttrDef("locationUri", "string"),
+                AtlasTypeUtil.createOptionalAttrDef("owner", "string"),
+                // Fix: "createTime" was declared twice; the duplicate
+                // attribute definition has been removed.
+                AtlasTypeUtil.createOptionalAttrDef("createTime", "int"),
+                new AtlasAttributeDef("randomTable",
+                        DataTypes.arrayTypeName(HIVE_TABLE_TYPE_V2),
+                        true,
+                        Cardinality.SET,
+                        0, -1, false, true, Collections.singletonList(isCompositeSourceConstraint))
+        );
+
+        AtlasEntityDef columnClsDef = AtlasTypeUtil
+                .createClassTypeDef(COLUMN_TYPE_V2, null,
+                        AtlasTypeUtil.createOptionalAttrDef(NAME, "string"),
+                        AtlasTypeUtil.createOptionalAttrDef("dataType", "string"),
+                        AtlasTypeUtil.createOptionalAttrDef("comment", "string"));
+
+        AtlasStructDef structTypeDef = AtlasTypeUtil.createStructTypeDef("serdeType",
+                AtlasTypeUtil.createRequiredAttrDef(NAME, "string"),
+                AtlasTypeUtil.createRequiredAttrDef("serde", "string")
+        );
+
+        AtlasEnumDef enumDef = new AtlasEnumDef("tableType", DESCRIPTION, Arrays.asList(
+                new AtlasEnumDef.AtlasEnumElementDef("MANAGED", null, 1),
+                new AtlasEnumDef.AtlasEnumElementDef("EXTERNAL", null, 2)
+        ));
+
+        AtlasEntityDef tblClsDef = AtlasTypeUtil
+                .createClassTypeDef(HIVE_TABLE_TYPE_V2,
+                        ImmutableSet.of("DataSet"),
+                        AtlasTypeUtil.createOptionalAttrDef("owner", "string"),
+                        AtlasTypeUtil.createOptionalAttrDef("createTime", "long"),
+                        AtlasTypeUtil.createOptionalAttrDef("lastAccessTime", "date"),
+                        AtlasTypeUtil.createOptionalAttrDef("temporary", "boolean"),
+                        new AtlasAttributeDef("db",
+                                DATABASE_TYPE_V2,
+                                true,
+                                Cardinality.SINGLE,
+                                0, 1, false, true, Collections.singletonList(isCompositeTargetConstraint)),
+
+                        //some tests don't set the columns field or set it to null...
+                        AtlasTypeUtil.createOptionalAttrDef("columns", DataTypes.arrayTypeName(COLUMN_TYPE_V2)),
+                        AtlasTypeUtil.createOptionalAttrDef("tableType", "tableType"),
+                        AtlasTypeUtil.createOptionalAttrDef("serde1", "serdeType"),
+                        AtlasTypeUtil.createOptionalAttrDef("serde2", "serdeType"));
+
+        AtlasEntityDef loadProcessClsDef = AtlasTypeUtil
+                .createClassTypeDef(HIVE_PROCESS_TYPE_V2,
+                        ImmutableSet.of("Process"),
+                        AtlasTypeUtil.createOptionalAttrDef("userName", "string"),
+                        AtlasTypeUtil.createOptionalAttrDef("startTime", "int"),
+                        AtlasTypeUtil.createOptionalAttrDef("endTime", "long"),
+                        AtlasTypeUtil.createRequiredAttrDef("queryText", "string"),
+                        AtlasTypeUtil.createRequiredAttrDef("queryPlan", "string"),
+                        AtlasTypeUtil.createRequiredAttrDef("queryId", "string"),
+                        AtlasTypeUtil.createRequiredAttrDef("queryGraph", "string"));
+
+        // Classification (tag) types used by the entity tests.
+        AtlasClassificationDef classificationTrait = AtlasTypeUtil
+                .createTraitTypeDef("classification",ImmutableSet.<String>of(),
+                        AtlasTypeUtil.createRequiredAttrDef("tag", "string"));
+        AtlasClassificationDef piiTrait =
+                AtlasTypeUtil.createTraitTypeDef("pii", ImmutableSet.<String>of());
+        AtlasClassificationDef phiTrait =
+                AtlasTypeUtil.createTraitTypeDef("phi", ImmutableSet.<String>of());
+        AtlasClassificationDef pciTrait =
+                AtlasTypeUtil.createTraitTypeDef("pci", ImmutableSet.<String>of());
+        AtlasClassificationDef soxTrait =
+                AtlasTypeUtil.createTraitTypeDef("sox", ImmutableSet.<String>of());
+        AtlasClassificationDef secTrait =
+                AtlasTypeUtil.createTraitTypeDef("sec", ImmutableSet.<String>of());
+        AtlasClassificationDef financeTrait =
+                AtlasTypeUtil.createTraitTypeDef("finance", ImmutableSet.<String>of());
+
+        AtlasTypesDef typesDef = new AtlasTypesDef(ImmutableList.of(enumDef),
+                ImmutableList.of(structTypeDef),
+                ImmutableList.of(classificationTrait, piiTrait, phiTrait, pciTrait, soxTrait, secTrait, financeTrait),
+                ImmutableList.of(dbClsTypeDef, columnClsDef, tblClsDef, loadProcessClsDef));
+
+        batchCreateTypes(typesDef);
+    }
+
+    /** Optional, non-composite attribute with no reverse reference. */
+    AttributeDefinition attrDef(String name, IDataType dT) {
+        return attrDef(name, dT, Multiplicity.OPTIONAL, false, null);
+    }
+
+    /** Non-composite attribute with the given multiplicity. */
+    AttributeDefinition attrDef(String name, IDataType dT, Multiplicity m) {
+        return attrDef(name, dT, m, false, null);
+    }
+
+    /**
+     * Builds a V1 attribute definition; name and data type are mandatory.
+     *
+     * @param reverseAttributeName inverse attribute on the referenced type, or null
+     */
+    AttributeDefinition attrDef(String name, IDataType dT, Multiplicity m, boolean isComposite,
+                                String reverseAttributeName) {
+        Preconditions.checkNotNull(name);
+        Preconditions.checkNotNull(dT);
+        return new AttributeDefinition(name, dT.getName(), m, isComposite, reverseAttributeName);
+    }
+
+    /**
+     * Returns a random 10-character identifier. The first character is forced
+     * to be alphabetic because names cannot start with a digit.
+     */
+    protected String randomString() {
+        String head = RandomStringUtils.randomAlphabetic(1);
+        String tail = RandomStringUtils.randomAlphanumeric(9);
+        return head + tail;
+    }
+
+    /**
+     * Builds an (unsaved) hive_table Referenceable using the built-in hive
+     * model, attached to the database identified by {@code dbId} and carrying
+     * seven traits (classification, pii, phi, pci, sox, sec, finance).
+     *
+     * @param dbName    name for the referenced database instance
+     * @param tableName name for the table instance
+     * @param dbId      id of an already-created database entity
+     * @return the populated table instance (not persisted by this method)
+     */
+    protected Referenceable createHiveTableInstanceBuiltIn(String dbName, String tableName, Id dbId) throws Exception {
+        Map<String, Object> values = new HashMap<>();
+        values.put(NAME, dbName);
+        values.put(DESCRIPTION, "foo database");
+        values.put(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, dbName);
+        values.put("owner", "user1");
+        values.put(CLUSTER_NAME, "cl1");
+        values.put("parameters", Collections.EMPTY_MAP);
+        values.put("location", "/tmp");
+        // Reference the existing db entity by guid rather than creating a new one.
+        Referenceable databaseInstance = new Referenceable(dbId._getId(), dbId.getTypeName(), values);
+        Referenceable tableInstance =
+                new Referenceable(HIVE_TABLE_TYPE_BUILTIN, "classification", "pii", "phi", "pci", "sox", "sec", "finance");
+        tableInstance.set(NAME, tableName);
+        tableInstance.set(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, tableName);
+        tableInstance.set("db", databaseInstance);
+        tableInstance.set(DESCRIPTION, "bar table");
+        tableInstance.set("lastAccessTime", "2014-07-11T08:00:00.000Z");
+        tableInstance.set("type", "managed");
+        tableInstance.set("level", 2);
+        tableInstance.set("tableType", 1); // enum
+        tableInstance.set("compressed", false);
+
+        // The "classification" trait carries an attribute; set it directly.
+        Struct traitInstance = (Struct) tableInstance.getTrait("classification");
+        traitInstance.set("tag", "foundation_etl");
+
+        Struct serde1Instance = new Struct("serdeType");
+        serde1Instance.set(NAME, "serde1");
+        serde1Instance.set("serde", "serde1");
+        tableInstance.set("serde1", serde1Instance);
+
+        Struct serde2Instance = new Struct("serdeType");
+        serde2Instance.set(NAME, "serde2");
+        serde2Instance.set("serde", "serde2");
+        tableInstance.set("serde2", serde2Instance);
+
+        // Sanity check: all seven traits declared in the constructor are present.
+        List<String> traits = tableInstance.getTraits();
+        Assert.assertEquals(traits.size(), 7);
+
+        return tableInstance;
+    }
+
+    /**
+     * Builds a v2 {@link AtlasEntity} for a hive table referencing the given database entity,
+     * classified with the same seven traits as the v1 variant.
+     *
+     * @param databaseInstance database entity the table's "db" attribute points at.
+     * @param tableName value for both the table's name and qualified-name attributes.
+     * @return the populated table entity (not yet persisted).
+     */
+    protected AtlasEntity createHiveTableInstanceV2(AtlasEntity databaseInstance, String tableName) throws Exception {
+        AtlasEntity tableInstance = new AtlasEntity(HIVE_TABLE_TYPE_V2);
+        tableInstance.setClassifications(
+                Arrays.asList(new AtlasClassification("classification"),
+                        new AtlasClassification("pii"),
+                        new AtlasClassification("phi"),
+                        new AtlasClassification("pci"),
+                        new AtlasClassification("sox"),
+                        new AtlasClassification("sec"),
+                        new AtlasClassification("finance"))
+        );
+
+        tableInstance.setAttribute(NAME, tableName);
+        tableInstance.setAttribute(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, tableName);
+        // v2 entities reference other entities via object ids rather than nested instances.
+        tableInstance.setAttribute("db", AtlasTypeUtil.getAtlasObjectId(databaseInstance));
+        tableInstance.setAttribute(DESCRIPTION, "bar table");
+        tableInstance.setAttribute("lastAccessTime", "2014-07-11T08:00:00.000Z");
+        tableInstance.setAttribute("type", "managed");
+        tableInstance.setAttribute("level", 2);
+        tableInstance.setAttribute("tableType", "MANAGED"); // enum
+        tableInstance.setAttribute("compressed", false);
+
+        // First classification in the list above is "classification"; set its "tag" attribute.
+        AtlasClassification classification = tableInstance.getClassifications().get(0);
+        classification.setAttribute("tag", "foundation_etl");
+
+        AtlasStruct serde1Instance = new AtlasStruct("serdeType");
+        serde1Instance.setAttribute(NAME, "serde1");
+        serde1Instance.setAttribute("serde", "serde1");
+        tableInstance.setAttribute("serde1", serde1Instance);
+
+        AtlasStruct serde2Instance = new AtlasStruct("serdeType");
+        serde2Instance.setAttribute(NAME, "serde2");
+        serde2Instance.setAttribute("serde", "serde2");
+        tableInstance.setAttribute("serde2", serde2Instance);
+
+        // Sanity check: all seven classifications set above are present.
+        List<AtlasClassification> traits = tableInstance.getClassifications();
+        Assert.assertEquals(traits.size(), 7);
+
+        return tableInstance;
+    }
+    /**
+     * Builds a v1 {@link Referenceable} for a database of the built-in DB type, using a random
+     * cluster name so repeated runs do not collide.
+     */
+    protected Referenceable createHiveDBInstanceBuiltIn(String dbName) {
+        Referenceable db = new Referenceable(DATABASE_TYPE_BUILTIN);
+        db.set(NAME, dbName);
+        db.set(DESCRIPTION, "foo database");
+        db.set(QUALIFIED_NAME, dbName);
+        db.set(CLUSTER_NAME, randomString());
+        return db;
+    }
+
+
+    /** Builds a v1 {@link Referenceable} for a database with fixed description and cluster name. */
+    protected Referenceable createHiveDBInstanceV1(String dbName) {
+        Referenceable db = new Referenceable(DATABASE_TYPE);
+        db.set(NAME, dbName);
+        db.set(CLUSTER_NAME, "fooCluster");
+        db.set(DESCRIPTION, "foo database");
+        return db;
+    }
+
+    /** Builds a v2 {@link AtlasEntity} for a database with fixed owner, location and create time. */
+    protected AtlasEntity createHiveDBInstanceV2(String dbName) {
+        AtlasEntity dbEntity = new AtlasEntity(DATABASE_TYPE_V2);
+        dbEntity.setAttribute(NAME, dbName);
+        dbEntity.setAttribute(DESCRIPTION, "foo database");
+        dbEntity.setAttribute(CLUSTER_NAME, "fooCluster");
+        dbEntity.setAttribute("owner", "user1");
+        dbEntity.setAttribute("locationUri", "/tmp");
+        dbEntity.setAttribute("createTime", 1000);
+        return dbEntity;
+    }
+
+
+    /** A boolean condition that {@link #waitFor(int, Predicate)} polls until it holds. */
+    public interface Predicate {
+
+        /**
+         * Perform a predicate evaluation.
+         *
+         * @return the boolean result of the evaluation.
+         * @throws Exception thrown if the predicate evaluation could not evaluate.
+         */
+        boolean evaluate() throws Exception;
+    }
+
+    /** A boolean condition evaluated against a received {@link EntityNotification}. */
+    public interface NotificationPredicate {
+
+        /**
+         * Perform a predicate evaluation.
+         *
+         * @return the boolean result of the evaluation.
+         * @throws Exception thrown if the predicate evaluation could not evaluate.
+         */
+        boolean evaluate(EntityNotification notification) throws Exception;
+    }
+
+    /**
+     * Polls the given {@link Predicate} every 100 ms until it evaluates to true or the
+     * timeout expires.
+     *
+     * @param timeout maximum time in milliseconds to wait for the predicate to become true.
+     * @param predicate the condition being waited on; must not be null.
+     * @throws Exception if the predicate did not become true within the timeout, or if an
+     *                   evaluation itself failed.
+     */
+    protected void waitFor(int timeout, Predicate predicate) throws Exception {
+        ParamChecker.notNull(predicate, "predicate");
+        long deadline = System.currentTimeMillis() + timeout;
+
+        boolean satisfied = predicate.evaluate();
+        while (!satisfied && System.currentTimeMillis() < deadline) {
+            LOG.info("Waiting up to {} msec", deadline - System.currentTimeMillis());
+            Thread.sleep(100);
+            satisfied = predicate.evaluate();
+        }
+        if (!satisfied) {
+            throw new Exception("Waiting timed out after " + timeout + " msec");
+        }
+    }
+
+    /**
+     * Drains the given consumer until a notification matching the predicate arrives or
+     * maxWait milliseconds elapse.
+     *
+     * @param consumer source of entity notifications.
+     * @param maxWait maximum time in milliseconds to wait for a matching notification.
+     * @param predicate matcher applied to each received notification.
+     * @return the first matching notification, or null if none matched within maxWait.
+     * @throws Exception if waiting timed out or a predicate evaluation failed.
+     */
+    protected EntityNotification waitForNotification(final NotificationConsumer<EntityNotification> consumer, int maxWait,
+                                                     final NotificationPredicate predicate) throws Exception {
+        // Pair serves as a mutable holder so the anonymous Predicate below can hand the match back out.
+        final TypeUtils.Pair<EntityNotification, String> pair = TypeUtils.Pair.of(null, null);
+        final long maxCurrentTime = System.currentTimeMillis() + maxWait;
+        waitFor(maxWait, new Predicate() {
+            @Override
+            public boolean evaluate() throws Exception {
+                try {
+                    while (consumer.hasNext() && System.currentTimeMillis() < maxCurrentTime) {
+                        EntityNotification notification = consumer.next();
+                        if (predicate.evaluate(notification)) {
+                            pair.left = notification;
+                            return true;
+                        }
+                    }
+                } catch(ConsumerTimeoutException e) {
+                    // Consumer poll timeouts are expected while waiting; keep retrying until maxWait.
+                }
+                return false;
+            }
+        });
+        return pair.left;
+    }
+
+    /**
+     * Creates a {@link NotificationPredicate} that matches a notification with the given
+     * operation type, entity type name and entity guid.
+     */
+    protected NotificationPredicate newNotificationPredicate(final EntityNotification.OperationType operationType,
+                                                             final String typeName, final String guid) {
+        return new NotificationPredicate() {
+            @Override
+            public boolean evaluate(EntityNotification notification) throws Exception {
+                if (notification == null) {
+                    return false;
+                }
+                if (notification.getOperationType() != operationType) {
+                    return false;
+                }
+                if (!notification.getEntity().getTypeName().equals(typeName)) {
+                    return false;
+                }
+                return notification.getEntity().getId()._getId().equals(guid);
+            }
+        };
+    }
+
+    /** Runs a DSL search via the v1 client, returning at most 10 results starting at offset 0. */
+    protected JSONArray searchByDSL(String dslQuery) throws AtlasServiceException {
+        return atlasClientV1.searchByDSL(dslQuery, 10, 0);
+    }
+}

http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/35e5828f/webapp/src/test/java/org/apache/atlas/web/integration/DataSetLineageJerseyResourceIT.java
----------------------------------------------------------------------
diff --git a/webapp/src/test/java/org/apache/atlas/web/integration/DataSetLineageJerseyResourceIT.java b/webapp/src/test/java/org/apache/atlas/web/integration/DataSetLineageJerseyResourceIT.java
new file mode 100644
index 0000000..8c6ba77
--- /dev/null
+++ b/webapp/src/test/java/org/apache/atlas/web/integration/DataSetLineageJerseyResourceIT.java
@@ -0,0 +1,298 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.atlas.web.integration;
+
+import com.google.common.collect.ImmutableList;
+import com.google.common.collect.ImmutableSet;
+import org.apache.atlas.AtlasClient;
+import org.apache.atlas.AtlasServiceException;
+import org.apache.atlas.typesystem.Referenceable;
+import org.apache.atlas.typesystem.Struct;
+import org.apache.atlas.typesystem.json.InstanceSerialization;
+import org.apache.atlas.typesystem.persistence.Id;
+import org.apache.atlas.typesystem.types.HierarchicalTypeDefinition;
+import org.apache.atlas.typesystem.types.TraitType;
+import org.apache.atlas.typesystem.types.utils.TypesUtil;
+import org.codehaus.jettison.json.JSONArray;
+import org.codehaus.jettison.json.JSONObject;
+import org.testng.Assert;
+import org.testng.annotations.BeforeClass;
+import org.testng.annotations.Test;
+
+import java.util.List;
+import java.util.Map;
+
+import static org.testng.Assert.assertEquals;
+
+/**
+ * Hive Lineage Integration Tests.
+ */
+public class DataSetLineageJerseyResourceIT extends BaseResourceIT {
+
+    private String salesFactTable;
+    private String salesMonthlyTable;
+    private String salesDBName;
+
+    @BeforeClass
+    public void setUp() throws Exception {
+        super.setUp();
+
+        createTypeDefinitionsV1();
+        setupInstances();
+    }
+
+    @Test
+    public void testInputsGraph() throws Exception {
+        JSONObject response = atlasClientV1.callAPIWithBodyAndParams(AtlasClient.API.NAME_LINEAGE_INPUTS_GRAPH, null, salesMonthlyTable, "inputs", "graph");
+        Assert.assertNotNull(response);
+        LOG.info("inputs graph = {}", response);
+
+        Assert.assertNotNull(response.get(AtlasClient.REQUEST_ID));
+
+        JSONObject results = response.getJSONObject(AtlasClient.RESULTS);
+        Assert.assertNotNull(results);
+
+        Struct resultsInstance = InstanceSerialization.fromJsonStruct(results.toString(), true);
+        Map<String, Struct> vertices = (Map<String, Struct>) resultsInstance.get("vertices");
+        Assert.assertEquals(vertices.size(), 4);
+
+        Map<String, Struct> edges = (Map<String, Struct>) resultsInstance.get("edges");
+        Assert.assertEquals(edges.size(), 4);
+    }
+
+    @Test
+    public void testInputsGraphForEntity() throws Exception {
+        String tableId = atlasClientV1.getEntity(HIVE_TABLE_TYPE, AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME,
+                salesMonthlyTable).getId()._getId();
+        JSONObject results = atlasClientV1.getInputGraphForEntity(tableId);
+        Assert.assertNotNull(results);
+
+        Struct resultsInstance = InstanceSerialization.fromJsonStruct(results.toString(), true);
+        Map<String, Struct> vertices = (Map<String, Struct>) resultsInstance.get("vertices");
+        Assert.assertEquals(vertices.size(), 4);
+        Struct vertex = vertices.get(tableId);
+        assertEquals(((Struct) vertex.get("vertexId")).get("state"), Id.EntityState.ACTIVE.name());
+
+        Map<String, Struct> edges = (Map<String, Struct>) resultsInstance.get("edges");
+        Assert.assertEquals(edges.size(), 4);
+    }
+
+    @Test
+    public void testOutputsGraph() throws Exception {
+        JSONObject response = atlasClientV1.callAPIWithBodyAndParams(AtlasClient.API.NAME_LINEAGE_OUTPUTS_GRAPH, null, salesFactTable, "outputs", "graph");
+        Assert.assertNotNull(response);
+        LOG.info("outputs graph = {}", response);
+
+        Assert.assertNotNull(response.get(AtlasClient.REQUEST_ID));
+
+        JSONObject results = response.getJSONObject(AtlasClient.RESULTS);
+        Assert.assertNotNull(results);
+
+        Struct resultsInstance = InstanceSerialization.fromJsonStruct(results.toString(), true);
+        Map<String, Struct> vertices = (Map<String, Struct>) resultsInstance.get("vertices");
+        Assert.assertEquals(vertices.size(), 3);
+
+        Map<String, Struct> edges = (Map<String, Struct>) resultsInstance.get("edges");
+        Assert.assertEquals(edges.size(), 4);
+    }
+
+    @Test
+    public void testOutputsGraphForEntity() throws Exception {
+        String tableId = atlasClientV1.getEntity(HIVE_TABLE_TYPE, AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME,
+                salesFactTable).getId()._getId();
+        JSONObject results = atlasClientV1.getOutputGraphForEntity(tableId);
+        Assert.assertNotNull(results);
+
+        Struct resultsInstance = InstanceSerialization.fromJsonStruct(results.toString(), true);
+        Map<String, Struct> vertices = (Map<String, Struct>) resultsInstance.get("vertices");
+        Assert.assertEquals(vertices.size(), 3);
+        Struct vertex = vertices.get(tableId);
+        assertEquals(((Struct) vertex.get("vertexId")).get("state"), Id.EntityState.ACTIVE.name());
+
+        Map<String, Struct> edges = (Map<String, Struct>) resultsInstance.get("edges");
+        Assert.assertEquals(edges.size(), 4);
+    }
+
+    @Test
+    public void testSchema() throws Exception {
+        JSONObject response = atlasClientV1.callAPIWithBodyAndParams(AtlasClient.API.NAME_LINEAGE_SCHEMA, null, salesFactTable, "schema");
+
+        Assert.assertNotNull(response);
+        LOG.info("schema = {}", response);
+
+        Assert.assertNotNull(response.get(AtlasClient.REQUEST_ID));
+
+        JSONObject results = response.getJSONObject(AtlasClient.RESULTS);
+        Assert.assertNotNull(results);
+
+        JSONArray rows = results.getJSONArray("rows");
+        Assert.assertEquals(rows.length(), 4);
+
+        for (int index = 0; index < rows.length(); index++) {
+            final JSONObject row = rows.getJSONObject(index);
+            LOG.info("JsonRow - {}", row);
+            Assert.assertNotNull(row.getString("name"));
+            Assert.assertNotNull(row.getString("comment"));
+            Assert.assertEquals(row.getString("$typeName$"), "hive_column_v1");
+        }
+    }
+
+    @Test
+    public void testSchemaForEntity() throws Exception {
+        String tableId = atlasClientV1.getEntity(HIVE_TABLE_TYPE, AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, salesFactTable).getId()._getId();
+        JSONObject results = atlasClientV1.getSchemaForEntity(tableId);
+        Assert.assertNotNull(results);
+
+        JSONArray rows = results.getJSONArray("rows");
+        Assert.assertEquals(rows.length(), 4);
+
+        for (int index = 0; index < rows.length(); index++) {
+            final JSONObject row = rows.getJSONObject(index);
+            LOG.info("JsonRow - {}", row);
+            Assert.assertNotNull(row.getString("name"));
+            Assert.assertNotNull(row.getString("comment"));
+            Assert.assertEquals(row.getString("$typeName$"), "hive_column_v1");
+        }
+    }
+
+    @Test(expectedExceptions = AtlasServiceException.class)
+    public void testSchemaForInvalidTable() throws Exception {
+        // Result intentionally discarded: the call is expected to throw AtlasServiceException.
+        atlasClientV1.callAPIWithBodyAndParams(AtlasClient.API.NAME_LINEAGE_SCHEMA, null, "blah", "schema");
+    }
+
+    @Test(expectedExceptions = AtlasServiceException.class)
+    public void testSchemaForDB() throws Exception {
+        // Result intentionally discarded: schema lookup on a DB (not a table) is expected to fail.
+        atlasClientV1.callAPIWithBodyAndParams(AtlasClient.API.NAME_LINEAGE_SCHEMA, null, salesDBName, "schema");
+    }
+
+    /**
+     * Creates the trait types and the sales/reporting lineage graph the tests query:
+     * sales_fact + time_dim -> (loadSalesDaily) -> sales_fact_daily_mv -> (loadSalesMonthly)
+     * -> sales_fact_monthly_mv.
+     */
+    private void setupInstances() throws Exception {
+        HierarchicalTypeDefinition<TraitType> factTrait =
+                TypesUtil.createTraitTypeDef("Fact", ImmutableSet.<String>of());
+        HierarchicalTypeDefinition<TraitType> etlTrait =
+                TypesUtil.createTraitTypeDef("ETL", ImmutableSet.<String>of());
+        HierarchicalTypeDefinition<TraitType> dimensionTrait =
+                TypesUtil.createTraitTypeDef("Dimension", ImmutableSet.<String>of());
+        HierarchicalTypeDefinition<TraitType> metricTrait =
+                TypesUtil.createTraitTypeDef("Metric", ImmutableSet.<String>of());
+        createType(getTypesDef(null, null,
+                        ImmutableList.of(factTrait, etlTrait, dimensionTrait, metricTrait), null));
+
+        salesDBName = "Sales" + randomString();
+        Id salesDB = database(salesDBName, "Sales Database", "John ETL",
+                "hdfs://host:8000/apps/warehouse/sales");
+
+        List<Referenceable> salesFactColumns = ImmutableList
+                .of(column("time_id", "int", "time id"), column("product_id", "int", "product id"),
+                        column("customer_id", "int", "customer id", "pii"),
+                        column("sales", "double", "product id", "Metric"));
+
+        salesFactTable = "sales_fact" + randomString();
+        Id salesFact = table(salesFactTable, "sales fact table", salesDB, "Joe", "MANAGED", salesFactColumns, "Fact");
+
+        List<Referenceable> timeDimColumns = ImmutableList
+                .of(column("time_id", "int", "time id"), column("dayOfYear", "int", "day Of Year"),
+                        column("weekDay", "int", "week Day"));
+
+        Id timeDim =
+                table("time_dim" + randomString(), "time dimension table", salesDB, "John Doe", "EXTERNAL",
+                        timeDimColumns, "Dimension");
+
+        Id reportingDB =
+                database("Reporting" + randomString(), "reporting database", "Jane BI",
+                        "hdfs://host:8000/apps/warehouse/reporting");
+
+        Id salesFactDaily =
+                table("sales_fact_daily_mv" + randomString(), "sales fact daily materialized view", reportingDB,
+                        "Joe BI", "MANAGED", salesFactColumns, "Metric");
+
+        loadProcess("loadSalesDaily" + randomString(), "John ETL", ImmutableList.of(salesFact, timeDim),
+                ImmutableList.of(salesFactDaily), "create table as select ", "plan", "id", "graph", "ETL");
+
+        salesMonthlyTable = "sales_fact_monthly_mv" + randomString();
+        Id salesFactMonthly =
+                table(salesMonthlyTable, "sales fact monthly materialized view", reportingDB, "Jane BI",
+                        "MANAGED", salesFactColumns, "Metric");
+
+        loadProcess("loadSalesMonthly" + randomString(), "John ETL", ImmutableList.of(salesFactDaily),
+                ImmutableList.of(salesFactMonthly), "create table as select ", "plan", "id", "graph", "ETL");
+    }
+
+    /** Creates and persists a database instance, returning its id. */
+    Id database(String name, String description, String owner, String locationUri, String... traitNames)
+    throws Exception {
+        Referenceable referenceable = new Referenceable(DATABASE_TYPE, traitNames);
+        referenceable.set(NAME, name);
+        referenceable.set(QUALIFIED_NAME, name);
+        referenceable.set(CLUSTER_NAME, locationUri + name);
+        referenceable.set("description", description);
+        referenceable.set("owner", owner);
+        referenceable.set("locationUri", locationUri);
+        referenceable.set("createTime", System.currentTimeMillis());
+
+        return createInstance(referenceable);
+    }
+
+    /** Builds (without persisting) a column instance with the given traits. */
+    Referenceable column(String name, String type, String comment, String... traitNames) throws Exception {
+        Referenceable referenceable = new Referenceable(COLUMN_TYPE, traitNames);
+        referenceable.set(NAME, name);
+        referenceable.set(QUALIFIED_NAME, name);
+        referenceable.set("type", type);
+        referenceable.set("comment", comment);
+
+        return referenceable;
+    }
+
+    /** Creates and persists a table instance owned by the given db, returning its id. */
+    Id table(String name, String description, Id dbId, String owner, String tableType, List<Referenceable> columns,
+            String... traitNames) throws Exception {
+        Referenceable referenceable = new Referenceable(HIVE_TABLE_TYPE, traitNames);
+        referenceable.set("name", name);
+        referenceable.set(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, name);
+        referenceable.set("description", description);
+        referenceable.set("owner", owner);
+        referenceable.set("tableType", tableType);
+        referenceable.set("createTime", System.currentTimeMillis());
+        referenceable.set("lastAccessTime", System.currentTimeMillis());
+        referenceable.set("retention", System.currentTimeMillis());
+
+        referenceable.set("db", dbId);
+        referenceable.set("columns", columns);
+
+        return createInstance(referenceable);
+    }
+
+    /** Creates and persists a load process linking input tables to output tables, returning its id. */
+    Id loadProcess(String name, String user, List<Id> inputTables, List<Id> outputTables, String queryText,
+            String queryPlan, String queryId, String queryGraph, String... traitNames) throws Exception {
+        Referenceable referenceable = new Referenceable(HIVE_PROCESS_TYPE, traitNames);
+        referenceable.set("name", name);
+        referenceable.set(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, name);
+        referenceable.set("userName", user);
+        referenceable.set("startTime", System.currentTimeMillis());
+        referenceable.set("endTime", System.currentTimeMillis() + 10000);
+
+        referenceable.set("inputs", inputTables);
+        referenceable.set("outputs", outputTables);
+
+        referenceable.set("operationType", "testOperation");
+        referenceable.set("queryText", queryText);
+        referenceable.set("queryPlan", queryPlan);
+        referenceable.set("queryId", queryId);
+        referenceable.set("queryGraph", queryGraph);
+
+        return createInstance(referenceable);
+    }
+}