You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@atlas.apache.org by su...@apache.org on 2016/03/14 18:53:11 UTC

incubator-atlas git commit: ATLAS-523 Support alter view (sumasai via shwethags)

Repository: incubator-atlas
Updated Branches:
  refs/heads/master 44dbfe576 -> ad7604fcf


ATLAS-523 Support alter view (sumasai via shwethags)


Project: http://git-wip-us.apache.org/repos/asf/incubator-atlas/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-atlas/commit/ad7604fc
Tree: http://git-wip-us.apache.org/repos/asf/incubator-atlas/tree/ad7604fc
Diff: http://git-wip-us.apache.org/repos/asf/incubator-atlas/diff/ad7604fc

Branch: refs/heads/master
Commit: ad7604fcf4bf4f84a66b339b8ea1f4473cd9f84a
Parents: 44dbfe5
Author: Suma Shivaprasad <su...@gmail.com>
Authored: Mon Mar 14 10:52:28 2016 -0700
Committer: Suma Shivaprasad <su...@gmail.com>
Committed: Mon Mar 14 10:53:00 2016 -0700

----------------------------------------------------------------------
 .../org/apache/atlas/hive/hook/HiveHook.java    |   7 +-
 .../org/apache/atlas/hive/hook/HiveHookIT.java  | 123 +++++++++++++++----
 release-log.txt                                 |   1 +
 3 files changed, 99 insertions(+), 32 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/ad7604fc/addons/hive-bridge/src/main/java/org/apache/atlas/hive/hook/HiveHook.java
----------------------------------------------------------------------
diff --git a/addons/hive-bridge/src/main/java/org/apache/atlas/hive/hook/HiveHook.java b/addons/hive-bridge/src/main/java/org/apache/atlas/hive/hook/HiveHook.java
index b5a4ce9..fc2a57a 100755
--- a/addons/hive-bridge/src/main/java/org/apache/atlas/hive/hook/HiveHook.java
+++ b/addons/hive-bridge/src/main/java/org/apache/atlas/hive/hook/HiveHook.java
@@ -86,7 +86,6 @@ public class HiveHook extends AtlasHook implements ExecuteWithHookContext {
     private static final long keepAliveTimeDefault = 10;
     private static final int queueSizeDefault = 10000;
 
-    private static boolean typesRegistered = false;
     private static Configuration atlasProperties;
 
     class HiveEvent {
@@ -212,6 +211,7 @@ public class HiveHook extends AtlasHook implements ExecuteWithHookContext {
 
         case CREATETABLE_AS_SELECT:
         case CREATEVIEW:
+        case ALTERVIEW_AS:
         case LOAD:
         case EXPORT:
         case IMPORT:
@@ -229,15 +229,12 @@ public class HiveHook extends AtlasHook implements ExecuteWithHookContext {
         case ALTERTABLE_CLUSTER_SORT:
         case ALTERTABLE_BUCKETNUM:
         case ALTERTABLE_PROPERTIES:
+        case ALTERVIEW_PROPERTIES:
         case ALTERTABLE_SERDEPROPERTIES:
         case ALTERTABLE_SERIALIZER:
             alterTable(dgiBridge, event);
             break;
 
-        case ALTERVIEW_AS:
-            //update inputs/outputs?
-            break;
-
         case ALTERTABLE_ADDCOLS:
         case ALTERTABLE_REPLACECOLS:
         case ALTERTABLE_RENAMECOL:

http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/ad7604fc/addons/hive-bridge/src/test/java/org/apache/atlas/hive/hook/HiveHookIT.java
----------------------------------------------------------------------
diff --git a/addons/hive-bridge/src/test/java/org/apache/atlas/hive/hook/HiveHookIT.java b/addons/hive-bridge/src/test/java/org/apache/atlas/hive/hook/HiveHookIT.java
index cb215db..f2084b0 100755
--- a/addons/hive-bridge/src/test/java/org/apache/atlas/hive/hook/HiveHookIT.java
+++ b/addons/hive-bridge/src/test/java/org/apache/atlas/hive/hook/HiveHookIT.java
@@ -18,8 +18,8 @@
 
 package org.apache.atlas.hive.hook;
 
+import com.google.common.base.Joiner;
 import com.google.common.collect.ImmutableList;
-import groovy.transform.Immutable;
 import org.apache.atlas.ApplicationProperties;
 import org.apache.atlas.AtlasClient;
 import org.apache.atlas.AtlasServiceException;
@@ -38,13 +38,6 @@ import org.apache.hadoop.hive.metastore.TableType;
 import org.apache.hadoop.hive.ql.Driver;
 import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse;
 import org.apache.hadoop.hive.ql.session.SessionState;
-import org.apache.hadoop.io.NullWritable;
-import org.apache.hadoop.io.Writable;
-import org.apache.hadoop.mapreduce.InputFormat;
-import org.apache.hadoop.mapreduce.InputSplit;
-import org.apache.hadoop.mapreduce.JobContext;
-import org.apache.hadoop.mapreduce.RecordReader;
-import org.apache.hadoop.mapreduce.TaskAttemptContext;
 import org.codehaus.jettison.json.JSONArray;
 import org.codehaus.jettison.json.JSONObject;
 import org.slf4j.Logger;
@@ -52,14 +45,7 @@ import org.testng.Assert;
 import org.testng.annotations.BeforeClass;
 import org.testng.annotations.Test;
 
-import java.io.DataInput;
-import java.io.DataOutput;
 import java.io.File;
-import java.io.IOException;
-import java.net.URLClassLoader;
-import java.nio.file.Path;
-import java.nio.file.Paths;
-import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
@@ -221,6 +207,55 @@ public class HiveHookIT {
     }
 
     @Test
+    public void testAlterViewAsSelect() throws Exception {
+
+        //Create the view from table1
+        String table1Name = createTable();
+        String viewName = tableName();
+        String query = "create view " + viewName + " as select * from " + table1Name;
+        runCommand(query);
+
+        String table1Id = assertTableIsRegistered(DEFAULT_DB, table1Name);
+        assertProcessIsRegistered(query);
+        String viewId = assertTableIsRegistered(DEFAULT_DB, viewName);
+
+        //Check lineage which includes table1
+        String datasetName = HiveMetaStoreBridge.getTableQualifiedName(CLUSTER_NAME, DEFAULT_DB, viewName);
+        JSONObject response = dgiCLient.getInputGraph(datasetName);
+        JSONObject vertices = response.getJSONObject("values").getJSONObject("vertices");
+        Assert.assertTrue(vertices.has(viewId));
+        Assert.assertTrue(vertices.has(table1Id));
+
+        //Alter the view from table2
+        String table2Name = createTable();
+        query = "alter view " + viewName + " as select * from " + table2Name;
+        runCommand(query);
+
+        //Check if alter view process is registered
+        assertProcessIsRegistered(query);
+        String table2Id = assertTableIsRegistered(DEFAULT_DB, table2Name);
+        Assert.assertEquals(assertTableIsRegistered(DEFAULT_DB, viewName), viewId);
+
+        //Check lineage which includes both table1 and table2
+        datasetName = HiveMetaStoreBridge.getTableQualifiedName(CLUSTER_NAME, DEFAULT_DB, viewName);
+        response = dgiCLient.getInputGraph(datasetName);
+        vertices = response.getJSONObject("values").getJSONObject("vertices");
+        Assert.assertTrue(vertices.has(viewId));
+
+        //This is through the alter view process
+        Assert.assertTrue(vertices.has(table2Id));
+
+        //This is through the Create view process
+        Assert.assertTrue(vertices.has(table1Id));
+
+        //Outputs don't exist
+        response = dgiCLient.getOutputGraph(datasetName);
+        vertices = response.getJSONObject("values").getJSONObject("vertices");
+        Assert.assertEquals(vertices.length(), 0);
+    }
+
+
+    @Test
     public void testLoadData() throws Exception {
         String tableName = createTable(false);
 
@@ -520,7 +555,7 @@ public class HiveHookIT {
     }
 
     private String getSerializedProps(Map<String, String> expectedProps) {
-        StringBuffer sb = new StringBuffer();
+        StringBuilder sb = new StringBuilder();
         for(String expectedPropKey : expectedProps.keySet()) {
             if(sb.length() > 0) {
                 sb.append(",");
@@ -535,34 +570,68 @@ public class HiveHookIT {
     @Test
     public void testAlterTableProperties() throws Exception {
         String tableName = createTable();
+        final String fmtQuery = "alter table %s %s TBLPROPERTIES (%s)";
+        testAlterProperties(tableName, fmtQuery);
+    }
+
+    private void testAlterProperties(String tableName, String fmtQuery) throws Exception {
+
+        final String SET_OP = "set";
+        final String UNSET_OP = "unset";
+
         final Map<String, String> expectedProps = new HashMap<String, String>() {{
             put("testPropKey1", "testPropValue1");
             put("comment", "test comment");
         }};
 
-        final String fmtQuery = "alter table %s set TBLPROPERTIES (%s)";
-        String query = String.format(fmtQuery, tableName, getSerializedProps(expectedProps));
+        String query = String.format(fmtQuery, tableName, SET_OP, getSerializedProps(expectedProps));
         runCommand(query);
 
-        verifyTableProperties(tableName, expectedProps);
-
+        verifyTableProperties(tableName, expectedProps, false);
 
         expectedProps.put("testPropKey2", "testPropValue2");
         //Add another property
-        query = String.format(fmtQuery, tableName, getSerializedProps(expectedProps));
+        query = String.format(fmtQuery, tableName, SET_OP, getSerializedProps(expectedProps));
+        runCommand(query);
+
+        verifyTableProperties(tableName, expectedProps, false);
+
+        //Unset all the props
+        StringBuilder sb = new StringBuilder("'");
+        query = String.format(fmtQuery, tableName, UNSET_OP, Joiner.on("','").skipNulls().appendTo(sb, expectedProps.keySet()).append('\''));
+        runCommand(query);
+
+        verifyTableProperties(tableName, expectedProps, true);
+    }
+
+    @Test
+    public void testAlterViewProperties() throws Exception {
+        String tableName = createTable();
+        String viewName = tableName();
+        String query = "create view " + viewName + " as select * from " + tableName;
         runCommand(query);
 
-        verifyTableProperties(tableName, expectedProps);
+        final String fmtQuery = "alter view %s %s TBLPROPERTIES (%s)";
+        testAlterProperties(viewName, fmtQuery);
     }
 
-    private void verifyTableProperties(String tableName, Map<String, String> expectedProps) throws Exception {
+    private void verifyTableProperties(String tableName, Map<String, String> expectedProps, boolean checkIfNotExists) throws Exception {
         String tableId = assertTableIsRegistered(DEFAULT_DB, tableName);
         Referenceable tableRef = dgiCLient.getEntity(tableId);
         Map<String, String> parameters = (Map<String, String>) tableRef.get(HiveDataModelGenerator.PARAMETERS);
-        Assert.assertNotNull(parameters);
-        //Comment should exist since SET TBLPOPERTIES only adds properties. Doe not remove existing ones
-        for (String propKey : expectedProps.keySet()) {
-            Assert.assertEquals(parameters.get(propKey), expectedProps.get(propKey));
+        if (checkIfNotExists == false) {
+            //Check if properties exist
+            Assert.assertNotNull(parameters);
+            for (String propKey : expectedProps.keySet()) {
+                Assert.assertEquals(parameters.get(propKey), expectedProps.get(propKey));
+            }
+        } else {
+            //Check if properties don't exist
+            if (expectedProps != null && parameters != null) {
+                for (String propKey : expectedProps.keySet()) {
+                    Assert.assertFalse(parameters.containsKey(propKey));
+                }
+            }
         }
     }
 

http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/ad7604fc/release-log.txt
----------------------------------------------------------------------
diff --git a/release-log.txt b/release-log.txt
index a4cd781..9c73da2 100644
--- a/release-log.txt
+++ b/release-log.txt
@@ -13,6 +13,7 @@ ATLAS-409 Atlas will not import avro tables with schema read from a file (dosset
 ATLAS-379 Create sqoop and falcon metadata addons (venkatnrangan,bvellanki,sowmyaramesh via shwethags)
 
 ALL CHANGES:
+ATLAS-523 Support alter view (sumasai via shwethags)
 ATLAS-555 Tag creation from UI fails due to missing description attribute (guptaneeru via shwethags)
 ATLAS-522 Support Alter table commands (sumasai via shwethags)
 ATLAS-512 Decouple currently integrating components from availability of Atlas service for raising metadata events ( yhemanth via sumasai)