Posted to commits@atlas.apache.org by ve...@apache.org on 2015/05/13 23:28:23 UTC

[42/50] [abbrv] incubator-atlas git commit: import hive fixes for hdp sandbox 2.2.4

import hive fixes for hdp sandbox 2.2.4


Project: http://git-wip-us.apache.org/repos/asf/incubator-atlas/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-atlas/commit/01ee72a3
Tree: http://git-wip-us.apache.org/repos/asf/incubator-atlas/tree/01ee72a3
Diff: http://git-wip-us.apache.org/repos/asf/incubator-atlas/diff/01ee72a3

Branch: refs/remotes/origin/master
Commit: 01ee72a3ccef99323c73d1796087f84080995a66
Parents: ab91112
Author: Shwetha GS <ss...@hortonworks.com>
Authored: Wed May 6 16:44:29 2015 +0530
Committer: Shwetha GS <ss...@hortonworks.com>
Committed: Wed May 6 16:44:39 2015 +0530

----------------------------------------------------------------------
 addons/hive-bridge/src/bin/import-hive.sh       |  11 +-
 .../hive/bridge/HiveMetaStoreBridge.java        | 221 +++++++++++++------
 .../hadoop/metadata/hive/hook/HiveHook.java     |  65 +-----
 .../hive/model/HiveDataModelGenerator.java      |   7 +-
 .../src/site/twiki/Bridge-Hive.twiki            |   5 +-
 .../hadoop/metadata/hive/hook/HiveHookIT.java   |   5 +-
 .../hadoop/metadata/MetadataServiceClient.java  |  39 +++-
 src/bin/metadata-config.sh                      |   2 +-
 src/conf/log4j.xml                              |   2 +-
 9 files changed, 208 insertions(+), 149 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/01ee72a3/addons/hive-bridge/src/bin/import-hive.sh
----------------------------------------------------------------------
diff --git a/addons/hive-bridge/src/bin/import-hive.sh b/addons/hive-bridge/src/bin/import-hive.sh
index e95bf6a..7517e76 100755
--- a/addons/hive-bridge/src/bin/import-hive.sh
+++ b/addons/hive-bridge/src/bin/import-hive.sh
@@ -59,9 +59,11 @@ for i in "${BASEDIR}/bridge/hive/"*.jar; do
   METADATACPPATH="${METADATACPPATH}:$i"
 done
 
-echo $METADATACPPATH
+# log dir for applications
+METADATA_LOG_DIR="${METADATA_LOG_DIR:-$BASEDIR/logs}"
+export METADATA_LOG_DIR
 
-JAVA_PROPERTIES="$METADATA_OPTS"
+JAVA_PROPERTIES="$METADATA_OPTS -Dmetadata.log.dir=$METADATA_LOG_DIR -Dmetadata.log.file=import-hive.log"
 shift
 
 while [[ ${1} =~ ^\-D ]]; do
@@ -70,6 +72,7 @@ while [[ ${1} =~ ^\-D ]]; do
 done
 TIME=`date +%Y%m%d%H%M%s`
 
+# Add hive conf dir to the classpath
 if [ ! -z "$HIVE_CONF_DIR" ]; then
     HIVE_CP=$HIVE_CONF_DIR
 elif [ ! -z "$HIVE_HOME" ]; then
@@ -86,5 +89,5 @@ echo Using Hive configuration directory [$HIVE_CP]
 ${JAVA_BIN} ${JAVA_PROPERTIES} -cp ${HIVE_CP}:${METADATACPPATH} org.apache.hadoop.metadata.hive.bridge.HiveMetaStoreBridge
 
 RETVAL=$?
-[ $RETVAL -eq 0 ] && echo Hive Data Model Imported!!!
-[ $RETVAL -ne 0 ] && echo Failure in Hive Data Model import!!!
+[ $RETVAL -eq 0 ] && echo Hive Data Model imported successfully!!!
+[ $RETVAL -ne 0 ] && echo Failed to import Hive Data Model!!!
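
The two -D flags added above become JVM system properties; the log4j.xml change
at the end of this commit substitutes them into the FILE appender's path, so
import-hive runs log to import-hive.log instead of application.log. A minimal
sketch of reading them back on the Java side, using only standard JDK APIs
(the class name is illustrative):

    // Prints the log location the wrapper script passed in via the -D flags.
    public class LogPropsCheck {
        public static void main(String[] args) {
            String dir = System.getProperty("metadata.log.dir", ".");
            String file = System.getProperty("metadata.log.file", "application.log");
            System.out.println("Logging to " + dir + "/" + file);
        }
    }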

http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/01ee72a3/addons/hive-bridge/src/main/java/org/apache/hadoop/metadata/hive/bridge/HiveMetaStoreBridge.java
----------------------------------------------------------------------
diff --git a/addons/hive-bridge/src/main/java/org/apache/hadoop/metadata/hive/bridge/HiveMetaStoreBridge.java b/addons/hive-bridge/src/main/java/org/apache/hadoop/metadata/hive/bridge/HiveMetaStoreBridge.java
index 0a36c36..a7fd77e 100755
--- a/addons/hive-bridge/src/main/java/org/apache/hadoop/metadata/hive/bridge/HiveMetaStoreBridge.java
+++ b/addons/hive-bridge/src/main/java/org/apache/hadoop/metadata/hive/bridge/HiveMetaStoreBridge.java
@@ -18,6 +18,7 @@
 
 package org.apache.hadoop.metadata.hive.bridge;
 
+import org.apache.commons.lang.StringUtils;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.api.Database;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
@@ -31,9 +32,14 @@ import org.apache.hadoop.hive.ql.metadata.Table;
 import org.apache.hadoop.metadata.MetadataServiceClient;
 import org.apache.hadoop.metadata.hive.model.HiveDataModelGenerator;
 import org.apache.hadoop.metadata.hive.model.HiveDataTypes;
+import org.apache.hadoop.metadata.typesystem.ITypedReferenceableInstance;
 import org.apache.hadoop.metadata.typesystem.Referenceable;
 import org.apache.hadoop.metadata.typesystem.Struct;
 import org.apache.hadoop.metadata.typesystem.json.InstanceSerialization;
+import org.apache.hadoop.metadata.typesystem.json.Serialization;
+import org.apache.hadoop.metadata.typesystem.persistence.Id;
+import org.apache.hadoop.metadata.typesystem.types.TypeSystem;
+import org.codehaus.jettison.json.JSONArray;
 import org.codehaus.jettison.json.JSONObject;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -97,22 +103,48 @@ public class HiveMetaStoreBridge {
         }
     }
 
-    public Referenceable registerDatabase(String databaseName) throws Exception {
-        LOG.info("Importing objects from databaseName : " + databaseName);
-
-        Database hiveDB = hiveClient.getDatabase(databaseName);
-
-        Referenceable dbRef = new Referenceable(HiveDataTypes.HIVE_DB.getName());
-        dbRef.set("name", hiveDB.getName());
-        dbRef.set("description", hiveDB.getDescription());
-        dbRef.set("locationUri", hiveDB.getLocationUri());
-        dbRef.set("parameters", hiveDB.getParameters());
-        dbRef.set("ownerName", hiveDB.getOwnerName());
-        if (hiveDB.getOwnerType() != null) {
-            dbRef.set("ownerType", hiveDB.getOwnerType().getValue());
+    /**
+     * Gets reference for the database
+     *
+     * @param dbName    database name
+     * @return Reference for database if exists, else null
+     * @throws Exception
+     */
+    private Referenceable getDatabaseReference(String dbName) throws Exception {
+        LOG.debug("Getting reference for database {}", dbName);
+        String typeName = HiveDataTypes.HIVE_DB.getName();
+        MetadataServiceClient dgiClient = getMetadataServiceClient();
+
+        JSONArray results = dgiClient.rawSearch(typeName, "name", dbName);
+        if (results.length() == 0) {
+            return null;
+        } else {
+            ITypedReferenceableInstance reference = Serialization.fromJson(results.get(0).toString());
+            return new Referenceable(reference.getId().id, typeName, null);
         }
+    }
 
-        return createInstance(dbRef);
+    public Referenceable registerDatabase(String databaseName) throws Exception {
+        Referenceable dbRef = getDatabaseReference(databaseName);
+        if (dbRef == null) {
+            LOG.info("Importing objects from databaseName : " + databaseName);
+            Database hiveDB = hiveClient.getDatabase(databaseName);
+
+            dbRef = new Referenceable(HiveDataTypes.HIVE_DB.getName());
+            dbRef.set("name", hiveDB.getName());
+            dbRef.set("description", hiveDB.getDescription());
+            dbRef.set("locationUri", hiveDB.getLocationUri());
+            dbRef.set("parameters", hiveDB.getParameters());
+            dbRef.set("ownerName", hiveDB.getOwnerName());
+            if (hiveDB.getOwnerType() != null) {
+                dbRef.set("ownerType", hiveDB.getOwnerType().getValue());
+            }
+
+            dbRef = createInstance(dbRef);
+        } else {
+            LOG.info("Database {} is already registered with id {}", databaseName, dbRef.getId().id);
+        }
+        return dbRef;
     }
 
     public Referenceable createInstance(Referenceable referenceable) throws Exception {
@@ -132,71 +164,124 @@ public class HiveMetaStoreBridge {
         List<String> hiveTables = hiveClient.getAllTables(databaseName);
 
         for (String tableName : hiveTables) {
-            Pair<Referenceable, Referenceable> tableReferenceable = registerTable(databaseReferenceable, databaseName, tableName);
+            Referenceable tableReferenceable = registerTable(databaseReferenceable, databaseName, tableName);
 
             // Import Partitions
-            importPartitions(databaseName, tableName, databaseReferenceable, tableReferenceable.first, tableReferenceable.second);
+            Referenceable sdReferenceable = getSDForTable(databaseReferenceable, tableName);
+            importPartitions(databaseName, tableName, databaseReferenceable, tableReferenceable, sdReferenceable);
 
             // Import Indexes
-            importIndexes(databaseName, tableName, databaseReferenceable, tableReferenceable.first);
+            importIndexes(databaseName, tableName, databaseReferenceable, tableReferenceable);
         }
     }
 
-    public Pair<Referenceable, Referenceable> registerTable(Referenceable dbReference, String dbName, String tableName) throws Exception {
-        LOG.info("Importing objects from " + dbName + "." + tableName);
+    /**
+     * Gets reference for the table
+     *
+     * @param dbRef
+     * @param tableName table name
+     * @return table reference if exists, else null
+     * @throws Exception
+     */
+    private Referenceable getTableReference(Referenceable dbRef, String tableName) throws Exception {
+        LOG.debug("Getting reference for table {}.{}", dbRef, tableName);
 
-        Table hiveTable = hiveClient.getTable(dbName, tableName);
+        String typeName = HiveDataTypes.HIVE_TABLE.getName();
+        MetadataServiceClient dgiClient = getMetadataServiceClient();
 
-        Referenceable tableRef = new Referenceable(HiveDataTypes.HIVE_TABLE.getName());
-        tableRef.set("tableName", hiveTable.getTableName());
-        tableRef.set("owner", hiveTable.getOwner());
-        //todo fix
-        tableRef.set("createTime", hiveTable.getLastAccessTime());
-        tableRef.set("lastAccessTime", hiveTable.getLastAccessTime());
-        tableRef.set("retention", hiveTable.getRetention());
-
-        // add reference to the database
-        tableRef.set("dbName", dbReference);
-
-        // add reference to the StorageDescriptor
-        StorageDescriptor storageDesc = hiveTable.getSd();
-        Referenceable sdReferenceable = fillStorageDescStruct(storageDesc);
-        tableRef.set("sd", sdReferenceable);
-
-        // add reference to the Partition Keys
-        List<Referenceable> partKeys = new ArrayList<>();
-        Referenceable colRef;
-        if (hiveTable.getPartitionKeys().size() > 0) {
-            for (FieldSchema fs : hiveTable.getPartitionKeys()) {
-                colRef = new Referenceable(HiveDataTypes.HIVE_COLUMN.getName());
-                colRef.set("name", fs.getName());
-                colRef.set("type", fs.getType());
-                colRef.set("comment", fs.getComment());
-                Referenceable colRefTyped = createInstance(colRef);
-                partKeys.add(colRefTyped);
-            }
+        //todo DSL support for reference doesn't work. is the usage right?
+//        String query = String.format("%s where dbName = \"%s\" and tableName = \"%s\"", typeName, dbRef.getId().id,
+//                tableName);
+        String query = String.format("%s where tableName = \"%s\"", typeName, tableName);
+        JSONArray results = dgiClient.searchByDSL(query);
+        if (results.length() == 0) {
+            return null;
+        } else {
+            //There should be just one instance with the given name
+            ITypedReferenceableInstance reference = Serialization.fromJson(results.get(0).toString());
+            String guid = reference.getId().id;
+            LOG.debug("Got reference for table {}.{} = {}", dbRef, tableName, guid);
+            return new Referenceable(guid, typeName, null);
+        }
+    }
 
-            tableRef.set("partitionKeys", partKeys);
+    private Referenceable getSDForTable(Referenceable dbRef, String tableName) throws Exception {
+        Referenceable tableRef = getTableReference(dbRef, tableName);
+        if (tableRef == null) {
+            throw new IllegalArgumentException("Table " + dbRef + "." + tableName + " doesn't exist");
         }
 
-        tableRef.set("parameters", hiveTable.getParameters());
+        MetadataServiceClient dgiClient = getMetadataServiceClient();
+        ITypedReferenceableInstance tableInstance = dgiClient.getEntity(tableRef.getId().id);
+        Id sdId = (Id) tableInstance.get("sd");
+        return new Referenceable(sdId.id, sdId.getTypeName(), null);
+    }
 
-        if (hiveTable.getViewOriginalText() != null) {
-            tableRef.set("viewOriginalText", hiveTable.getViewOriginalText());
-        }
+    public Referenceable registerTable(String dbName, String tableName) throws Exception {
+        Referenceable dbReferenceable = registerDatabase(dbName);
+        return registerTable(dbReferenceable, dbName, tableName);
+    }
 
-        if (hiveTable.getViewExpandedText() != null) {
-            tableRef.set("viewExpandedText", hiveTable.getViewExpandedText());
-        }
+    public Referenceable registerTable(Referenceable dbReference, String dbName, String tableName) throws Exception {
+        Referenceable tableRef = getTableReference(dbReference, tableName);
+        if (tableRef == null) {
+            LOG.info("Importing objects from " + dbName + "." + tableName);
+
+            Table hiveTable = hiveClient.getTable(dbName, tableName);
+
+            tableRef = new Referenceable(HiveDataTypes.HIVE_TABLE.getName());
+            tableRef.set("tableName", hiveTable.getTableName());
+            tableRef.set("owner", hiveTable.getOwner());
+            //todo fix
+            tableRef.set("createTime", hiveTable.getLastAccessTime());
+            tableRef.set("lastAccessTime", hiveTable.getLastAccessTime());
+            tableRef.set("retention", hiveTable.getRetention());
+
+            // add reference to the database
+            tableRef.set("dbName", dbReference);
+
+            // add reference to the StorageDescriptor
+            StorageDescriptor storageDesc = hiveTable.getSd();
+            Referenceable sdReferenceable = fillStorageDescStruct(storageDesc);
+            tableRef.set("sd", sdReferenceable);
+
+            // add reference to the Partition Keys
+            List<Referenceable> partKeys = new ArrayList<>();
+            Referenceable colRef;
+            if (hiveTable.getPartitionKeys().size() > 0) {
+                for (FieldSchema fs : hiveTable.getPartitionKeys()) {
+                    colRef = new Referenceable(HiveDataTypes.HIVE_COLUMN.getName());
+                    colRef.set("name", fs.getName());
+                    colRef.set("type", fs.getType());
+                    colRef.set("comment", fs.getComment());
+                    Referenceable colRefTyped = createInstance(colRef);
+                    partKeys.add(colRefTyped);
+                }
+
+                tableRef.set("partitionKeys", partKeys);
+            }
 
-        tableRef.set("tableType", hiveTable.getTableType());
-        tableRef.set("temporary", hiveTable.isTemporary());
+            tableRef.set("parameters", hiveTable.getParameters());
 
-        // List<Referenceable> fieldsList = getColumns(storageDesc);
-        // tableRef.set("columns", fieldsList);
+            if (hiveTable.getViewOriginalText() != null) {
+                tableRef.set("viewOriginalText", hiveTable.getViewOriginalText());
+            }
+
+            if (hiveTable.getViewExpandedText() != null) {
+                tableRef.set("viewExpandedText", hiveTable.getViewExpandedText());
+            }
+
+            tableRef.set("tableType", hiveTable.getTableType());
+            tableRef.set("temporary", hiveTable.isTemporary());
 
-        Referenceable tableReferenceable = createInstance(tableRef);
-        return Pair.of(tableReferenceable, sdReferenceable);
+            // List<Referenceable> fieldsList = getColumns(storageDesc);
+            // tableRef.set("columns", fieldsList);
+
+            tableRef = createInstance(tableRef);
+        } else {
+            LOG.info("Table {}.{} is already registered with id {}", dbName, tableName, tableRef.getId().id);
+        }
+        return tableRef;
     }
 
     private void importPartitions(String db, String tableName,
@@ -212,10 +297,13 @@ public class HiveMetaStoreBridge {
         }
     }
 
+    //todo should be idempotent
     private Referenceable importPartition(Partition hivePart,
                                           Referenceable dbReferenceable,
                                           Referenceable tableReferenceable,
                                           Referenceable sdReferenceable) throws Exception {
+        LOG.info("Importing partition for {}.{} with values {}", dbReferenceable, tableReferenceable,
+                StringUtils.join(hivePart.getValues(), ","));
         Referenceable partRef = new Referenceable(HiveDataTypes.HIVE_PARTITION.getName());
         partRef.set("values", hivePart.getValues());
 
@@ -247,9 +335,11 @@ public class HiveMetaStoreBridge {
         }
     }
 
+    //todo should be idempotent
     private void importIndex(Index index,
                              Referenceable dbReferenceable,
                              Referenceable tableReferenceable) throws Exception {
+        LOG.info("Importing index {} for {}.{}", index.getIndexName(), dbReferenceable, tableReferenceable);
         Referenceable indexRef = new Referenceable(HiveDataTypes.HIVE_INDEX.getName());
 
         indexRef.set("indexName", index.getIndexName());
@@ -359,10 +449,15 @@ public class HiveMetaStoreBridge {
 
         //Register hive data model if its not already registered
         if (dgiClient.getType(HiveDataTypes.HIVE_PROCESS.getName()) == null ) {
+            LOG.info("Registering Hive data model");
             dgiClient.createType(dataModelGenerator.getModelAsJson());
         } else {
-            LOG.debug("Hive data model is already registered!");
+            LOG.info("Hive data model is already registered!");
         }
+
+        //todo remove when fromJson(entityJson) is supported on client
+        dataModelGenerator.createDataModel();
+        TypeSystem.getInstance().defineTypes(dataModelGenerator.getTypesDef());
     }
 
     public static void main(String[] argv) throws Exception {
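
The rework above makes registerDatabase and registerTable lookup-then-create,
so re-running the import against the same metastore no longer creates duplicate
entities. A condensed sketch of the pattern, reusing the real Referenceable
type; findByName is a hypothetical stand-in for the DSL/attribute search:

    import org.apache.hadoop.metadata.typesystem.Referenceable;

    // Sketch of the get-or-create pattern behind registerDatabase/registerTable.
    public class GetOrCreateSketch {
        Referenceable getOrCreate(String typeName, String name) throws Exception {
            Referenceable existing = findByName(typeName, name); // null if not yet registered
            if (existing != null) {
                return existing;              // idempotent: reuse the stored instance
            }
            Referenceable fresh = new Referenceable(typeName);
            fresh.set("name", name);
            return createInstance(fresh);     // register and return the typed reference
        }

        // Hypothetical stand-ins for the bridge's search and entity-create calls.
        Referenceable findByName(String typeName, String name) throws Exception { return null; }
        Referenceable createInstance(Referenceable ref) throws Exception { return ref; }
    }

Note that importPartition and importIndex are still marked "todo should be
idempotent" above, so only databases and tables are deduplicated by this commit.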

http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/01ee72a3/addons/hive-bridge/src/main/java/org/apache/hadoop/metadata/hive/hook/HiveHook.java
----------------------------------------------------------------------
diff --git a/addons/hive-bridge/src/main/java/org/apache/hadoop/metadata/hive/hook/HiveHook.java b/addons/hive-bridge/src/main/java/org/apache/hadoop/metadata/hive/hook/HiveHook.java
index 4af7178..6019405 100755
--- a/addons/hive-bridge/src/main/java/org/apache/hadoop/metadata/hive/hook/HiveHook.java
+++ b/addons/hive-bridge/src/main/java/org/apache/hadoop/metadata/hive/hook/HiveHook.java
@@ -207,7 +207,7 @@ public class HiveHook implements ExecuteWithHookContext, HiveSemanticAnalyzerHoo
                         Table table = entity.getTable();
                         //TODO table.getDbName().toLowerCase() is required as hive stores in lowercase,
                         // but table.getDbName() is not lowercase
-                        Referenceable dbReferenceable = getDatabaseReference(dgiBridge, table.getDbName().toLowerCase());
+                        Referenceable dbReferenceable = dgiBridge.registerDatabase(table.getDbName().toLowerCase());
                         dgiBridge.registerTable(dbReferenceable, table.getDbName(), table.getTableName());
                     }
                 }
@@ -230,7 +230,8 @@ public class HiveHook implements ExecuteWithHookContext, HiveSemanticAnalyzerHoo
             LOG.info("Explain statement. Skipping...");
         }
 
-        String user = hookContext.getUserName();
+        //todo hookContext.getUserName() is null in hdp sandbox 2.2.4
+        String user = hookContext.getUserName() == null ? System.getProperty("user.name") : hookContext.getUserName();
         HiveOperation operation = HiveOperation.valueOf(hookContext.getOperationName());
         String queryId = null;
         String queryStr = null;
@@ -253,19 +254,19 @@ public class HiveHook implements ExecuteWithHookContext, HiveSemanticAnalyzerHoo
             if (readEntity.getTyp() == Entity.Type.TABLE) {
                 Table table = readEntity.getTable();
                 String dbName = table.getDbName().toLowerCase();
-                source.add(getTableReference(dgiBridge, dbName, table.getTableName()));
+                source.add(dgiBridge.registerTable(dbName, table.getTableName()));
             }
         }
-        processReferenceable.set("sourceTableNames", source);
+        processReferenceable.set("inputTables", source);
         List<Referenceable> target = new ArrayList<>();
         for (WriteEntity writeEntity : outputs) {
             if (writeEntity.getTyp() == Entity.Type.TABLE) {
                 Table table = writeEntity.getTable();
                 String dbName = table.getDbName().toLowerCase();
-                target.add(getTableReference(dgiBridge, dbName, table.getTableName()));
+                target.add(dgiBridge.registerTable(dbName, table.getTableName()));
             }
         }
-        processReferenceable.set("targetTableNames", target);
+        processReferenceable.set("outputTables", target);
         processReferenceable.set("queryText", queryStr);
         processReferenceable.set("queryId", queryId);
         processReferenceable.set("queryPlan", getQueryPlan(hookContext, conf));
@@ -276,58 +277,6 @@ public class HiveHook implements ExecuteWithHookContext, HiveSemanticAnalyzerHoo
         dgiBridge.createInstance(processReferenceable);
     }
 
-    /**
-     * Gets reference for the database. Creates new instance if it doesn't exist
-     *
-     * @param dgiBridge
-     * @param dbName    database name
-     * @return Reference for database
-     * @throws Exception
-     */
-    private Referenceable getDatabaseReference(HiveMetaStoreBridge dgiBridge, String dbName) throws Exception {
-        String typeName = HiveDataTypes.HIVE_DB.getName();
-        MetadataServiceClient dgiClient = dgiBridge.getMetadataServiceClient();
-
-        JSONObject result = dgiClient.rawSearch(typeName, "name", dbName);
-        JSONArray results = (JSONArray) result.get("results");
-
-        if (results.length() == 0) {
-            //Create new instance
-            return dgiBridge.registerDatabase(dbName);
-
-        } else {
-            String guid = (String) ((JSONObject) results.get(0)).get("guid");
-            return new Referenceable(guid, typeName, null);
-        }
-    }
-
-    /**
-     * Gets reference for the table. Creates new instance if it doesn't exist
-     *
-     * @param dgiBridge
-     * @param dbName
-     * @param tableName table name
-     * @return table reference
-     * @throws Exception
-     */
-    private Referenceable getTableReference(HiveMetaStoreBridge dgiBridge, String dbName, String tableName) throws Exception {
-        String typeName = HiveDataTypes.HIVE_TABLE.getName();
-        MetadataServiceClient dgiClient = dgiBridge.getMetadataServiceClient();
-
-        JSONObject result = dgiClient.rawSearch(typeName, "tableName", tableName);
-        JSONArray results = (JSONArray) result.get("results");
-
-        if (results.length() == 0) {
-            Referenceable dbRererence = getDatabaseReference(dgiBridge, dbName);
-            return dgiBridge.registerTable(dbRererence, dbName, tableName).first;
-
-        } else {
-            //There should be just one instance with the given name
-            String guid = (String) ((JSONObject) results.get(0)).get("guid");
-            return new Referenceable(guid, typeName, null);
-        }
-    }
-
 
     private String getQueryPlan(HookContext hookContext, HiveConf conf) throws Exception {
         //We need to somehow get the sem associated with the plan and use it here.
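
Besides dropping the now-duplicated lookup helpers in favour of the bridge's
registerDatabase/registerTable, the hook also guards against the null user name
seen on hdp sandbox 2.2.4. The fallback, extracted into a runnable sketch
(class and method names are illustrative):

    // hookContext.getUserName() can come back null on hdp sandbox 2.2.4;
    // fall back to the JVM-level user.name property in that case.
    public class UserFallbackSketch {
        static String resolveUser(String hookUser) {
            return hookUser != null ? hookUser : System.getProperty("user.name");
        }

        public static void main(String[] args) {
            System.out.println(resolveUser(null)); // prints the OS-level user
        }
    }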

http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/01ee72a3/addons/hive-bridge/src/main/java/org/apache/hadoop/metadata/hive/model/HiveDataModelGenerator.java
----------------------------------------------------------------------
diff --git a/addons/hive-bridge/src/main/java/org/apache/hadoop/metadata/hive/model/HiveDataModelGenerator.java b/addons/hive-bridge/src/main/java/org/apache/hadoop/metadata/hive/model/HiveDataModelGenerator.java
index 6e1dfa1..58d2aa6 100755
--- a/addons/hive-bridge/src/main/java/org/apache/hadoop/metadata/hive/model/HiveDataModelGenerator.java
+++ b/addons/hive-bridge/src/main/java/org/apache/hadoop/metadata/hive/model/HiveDataModelGenerator.java
@@ -340,8 +340,8 @@ public class HiveDataModelGenerator {
     private void createPartitionClass() throws MetadataException {
 
         AttributeDefinition[] attributeDefinitions = new AttributeDefinition[]{
-                new AttributeDefinition("values", DataTypes.STRING_TYPE.getName(),
-                        Multiplicity.COLLECTION, false, null),
+                new AttributeDefinition("values", DataTypes.arrayTypeName(DataTypes.STRING_TYPE.getName()),
+                        Multiplicity.OPTIONAL, false, null),
                 new AttributeDefinition("dbName", HiveDataTypes.HIVE_DB.getName(),
                         Multiplicity.REQUIRED, false, null),
                 new AttributeDefinition("tableName", HiveDataTypes.HIVE_TABLE.getName(),
@@ -354,10 +354,9 @@ public class HiveDataModelGenerator {
                         Multiplicity.REQUIRED, false, null),
                 new AttributeDefinition("columns",
                         DataTypes.arrayTypeName(HiveDataTypes.HIVE_COLUMN.getName()),
-                        Multiplicity.COLLECTION, true, null),
+                        Multiplicity.OPTIONAL, true, null),
                 new AttributeDefinition("parameters", STRING_MAP_TYPE.getName(),
                         Multiplicity.OPTIONAL, false, null),
-
         };
         HierarchicalTypeDefinition<ClassType> definition =
                 new HierarchicalTypeDefinition<>(ClassType.class,
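
The partition model fix above types "values" and "columns" as proper array
types with OPTIONAL multiplicity, instead of scalar type names with COLLECTION
multiplicity. A small demo of the helper involved, assuming DataTypes sits in
the same typesystem.types package as TypeSystem and that arrayTypeName wraps
the element type name:

    import org.apache.hadoop.metadata.typesystem.types.DataTypes;

    public class ArrayTypeNameDemo {
        public static void main(String[] args) {
            String elem = DataTypes.STRING_TYPE.getName();     // expected: "string"
            System.out.println(DataTypes.arrayTypeName(elem)); // expected: "array<string>"
        }
    }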

http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/01ee72a3/addons/hive-bridge/src/site/twiki/Bridge-Hive.twiki
----------------------------------------------------------------------
diff --git a/addons/hive-bridge/src/site/twiki/Bridge-Hive.twiki b/addons/hive-bridge/src/site/twiki/Bridge-Hive.twiki
index 1e8bd0d..5782b86 100644
--- a/addons/hive-bridge/src/site/twiki/Bridge-Hive.twiki
+++ b/addons/hive-bridge/src/site/twiki/Bridge-Hive.twiki
@@ -21,8 +21,8 @@ Hive metadata can be modelled in DGI using its Type System. The default modellin
 
 ---++ Importing Hive Metadata
 org.apache.hadoop.metadata.hive.bridge.HiveMetaStoreBridge imports the hive metadata into DGI using the typesystem defined in org.apache.hadoop.metadata.hive.model.HiveDataModelGenerator. import-hive.sh command can be used to facilitate this.
-Set-up the following configs in <dgi package>/conf/hive-site.xml:
-   * Hive metastore configuration - Refer [[https://cwiki.apache.org/confluence/display/Hive/AdminManual+MetastoreAdmin][Hive Metastore Configuration documentation]]
+Set up the following configs in the hive-site.xml of your hive set-up and set the environment variable HIVE_CONF_DIR
+to the hive conf directory:
    * DGI endpoint - Add the following property with the DGI endpoint for your set-up
 <verbatim>
 <property>
@@ -57,4 +57,5 @@ The following properties in hive-site.xml control the thread pool details:
    * hive.hook.dgi.minThreads - core number of threads. default 5
    * hive.hook.dgi.maxThreads - maximum number of threads. default 5
    * hive.hook.dgi.keepAliveTime - keep alive time in msecs. default 10
+   * hive.hook.dgi.synchronous - boolean, true to run the hook synchronously. default false
 

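The hive.hook.dgi.* properties documented above size the hook's executor; the
new synchronous flag bypasses it entirely. An illustrative wiring of such a
pool from those settings, using only java.util.concurrent (the diff does not
show the hook's actual executor construction, so treat this as a sketch):

    import java.util.concurrent.LinkedBlockingQueue;
    import java.util.concurrent.ThreadPoolExecutor;
    import java.util.concurrent.TimeUnit;

    public class HookPoolSketch {
        // Defaults mirror the twiki: 5 core threads, 5 max, 10ms keep-alive.
        static ThreadPoolExecutor buildPool(int minThreads, int maxThreads, long keepAliveMs) {
            return new ThreadPoolExecutor(minThreads, maxThreads,
                    keepAliveMs, TimeUnit.MILLISECONDS,
                    new LinkedBlockingQueue<Runnable>());
        }

        public static void main(String[] args) {
            ThreadPoolExecutor pool = buildPool(5, 5, 10);
            pool.submit(() -> System.out.println("hook event submitted"));
            pool.shutdown();
        }
    }
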
http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/01ee72a3/addons/hive-bridge/src/test/java/org/apache/hadoop/metadata/hive/hook/HiveHookIT.java
----------------------------------------------------------------------
diff --git a/addons/hive-bridge/src/test/java/org/apache/hadoop/metadata/hive/hook/HiveHookIT.java b/addons/hive-bridge/src/test/java/org/apache/hadoop/metadata/hive/hook/HiveHookIT.java
index 7b6ba1b..231fd53 100755
--- a/addons/hive-bridge/src/test/java/org/apache/hadoop/metadata/hive/hook/HiveHookIT.java
+++ b/addons/hive-bridge/src/test/java/org/apache/hadoop/metadata/hive/hook/HiveHookIT.java
@@ -114,10 +114,7 @@ public class HiveHookIT {
     }
 
     private void assertInstanceIsRegistered(String typeName, String colName, String colValue) throws Exception{
-        JSONObject result = dgiCLient.rawSearch(typeName, colName, colValue);
-        JSONArray results = (JSONArray) result.get("results");
+        JSONArray results = dgiCLient.rawSearch(typeName, colName, colValue);
         Assert.assertEquals(results.length(), 1);
-        JSONObject resultRow = (JSONObject) results.get(0);
-        Assert.assertEquals(resultRow.get(typeName + "." + colName), colValue);
     }
 }

http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/01ee72a3/client/src/main/java/org/apache/hadoop/metadata/MetadataServiceClient.java
----------------------------------------------------------------------
diff --git a/client/src/main/java/org/apache/hadoop/metadata/MetadataServiceClient.java b/client/src/main/java/org/apache/hadoop/metadata/MetadataServiceClient.java
index 5487281..9379aa5 100755
--- a/client/src/main/java/org/apache/hadoop/metadata/MetadataServiceClient.java
+++ b/client/src/main/java/org/apache/hadoop/metadata/MetadataServiceClient.java
@@ -22,6 +22,10 @@ import com.sun.jersey.api.client.Client;
 import com.sun.jersey.api.client.ClientResponse;
 import com.sun.jersey.api.client.WebResource;
 import com.sun.jersey.api.client.config.DefaultClientConfig;
+import org.apache.hadoop.metadata.typesystem.ITypedReferenceableInstance;
+import org.apache.hadoop.metadata.typesystem.Referenceable;
+import org.apache.hadoop.metadata.typesystem.json.InstanceSerialization;
+import org.apache.hadoop.metadata.typesystem.json.Serialization;
 import org.codehaus.jettison.json.JSONArray;
 import org.codehaus.jettison.json.JSONException;
 import org.codehaus.jettison.json.JSONObject;
@@ -149,8 +153,14 @@ public class MetadataServiceClient {
      * @return result json object
      * @throws MetadataServiceException
      */
-    public JSONObject getEntity(String guid) throws MetadataServiceException {
-        return callAPI(API.GET_ENTITY, null, guid);
+    public ITypedReferenceableInstance getEntity(String guid) throws MetadataServiceException {
+        JSONObject jsonResponse = callAPI(API.GET_ENTITY, null, guid);
+        try {
+            String entityInstanceDefinition = jsonResponse.getString(MetadataServiceClient.RESULTS);
+            return Serialization.fromJson(entityInstanceDefinition);
+        } catch (JSONException e) {
+            throw new MetadataServiceException(e);
+        }
     }
 
     public JSONObject searchEntity(String searchQuery) throws MetadataServiceException {
@@ -167,14 +177,14 @@ public class MetadataServiceClient {
      * @return result json object
      * @throws MetadataServiceException
      */
-    public JSONObject rawSearch(String typeName, String attributeName,
-                                Object attributeValue) throws MetadataServiceException {
-        String gremlinQuery = String.format(
-                "g.V.has(\"typeName\",\"%s\").and(_().has(\"%s.%s\", T.eq, \"%s\")).toList()",
-                typeName, typeName, attributeName, attributeValue);
-        return searchByGremlin(gremlinQuery);
-//        String dslQuery = String.format("%s where %s = \"%s\"", typeName, attributeName, attributeValue);
-//        return searchByDSL(dslQuery);
+    public JSONArray rawSearch(String typeName, String attributeName, Object attributeValue) throws
+            MetadataServiceException {
+//        String gremlinQuery = String.format(
+//                "g.V.has(\"typeName\",\"%s\").and(_().has(\"%s.%s\", T.eq, \"%s\")).toList()",
+//                typeName, typeName, attributeName, attributeValue);
+//        return searchByGremlin(gremlinQuery);
+        String dslQuery = String.format("%s where %s = \"%s\"", typeName, attributeName, attributeValue);
+        return searchByDSL(dslQuery);
     }
 
     /**
@@ -183,10 +193,15 @@ public class MetadataServiceClient {
      * @return result json object
      * @throws MetadataServiceException
      */
-    public JSONObject searchByDSL(String query) throws MetadataServiceException {
+    public JSONArray searchByDSL(String query) throws MetadataServiceException {
         WebResource resource = getResource(API.SEARCH_DSL);
         resource = resource.queryParam("query", query);
-        return callAPIWithResource(API.SEARCH_DSL, resource);
+        JSONObject result = callAPIWithResource(API.SEARCH_DSL, resource);
+        try {
+            return result.getJSONObject("results").getJSONArray("rows");
+        } catch (JSONException e) {
+            throw new MetadataServiceException(e);
+        }
     }
 
     /**
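
With the changes above, getEntity deserializes the response into a typed
instance, while rawSearch and searchByDSL hand back the result rows directly
as a JSONArray. A usage sketch against the new signatures (endpoint, type,
attribute, and guid values are placeholders, and the single-argument client
constructor is assumed):

    import org.apache.hadoop.metadata.MetadataServiceClient;
    import org.apache.hadoop.metadata.typesystem.ITypedReferenceableInstance;
    import org.codehaus.jettison.json.JSONArray;

    public class ClientUsageSketch {
        public static void main(String[] args) throws Exception {
            MetadataServiceClient client = new MetadataServiceClient("http://localhost:21000");

            // DSL-backed attribute search now returns the rows array directly.
            JSONArray rows = client.rawSearch("hive_table", "tableName", "sales");
            System.out.println(rows.length() + " matching tables");

            // Entity fetch now returns a typed instance instead of raw JSON.
            ITypedReferenceableInstance entity = client.getEntity("<some-guid>");
            System.out.println(entity.getTypeName());
        }
    }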

http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/01ee72a3/src/bin/metadata-config.sh
----------------------------------------------------------------------
diff --git a/src/bin/metadata-config.sh b/src/bin/metadata-config.sh
index f2dcec5..e36e059 100755
--- a/src/bin/metadata-config.sh
+++ b/src/bin/metadata-config.sh
@@ -99,7 +99,7 @@ mkdir -p $METADATA_LOG_DIR
 
 pushd ${BASEDIR} > /dev/null
 
-JAVA_PROPERTIES="$METADATA_OPTS $METADATA_PROPERTIES -Dmetadata.log.dir=$METADATA_LOG_DIR -Dmetadata.home=${METADATA_HOME_DIR} -Dmetadata.conf=${METADATA_CONF}"
+JAVA_PROPERTIES="$METADATA_OPTS $METADATA_PROPERTIES -Dmetadata.log.dir=$METADATA_LOG_DIR -Dmetadata.home=${METADATA_HOME_DIR} -Dmetadata.conf=${METADATA_CONF} -Dmetadata.log.file=application.log"
 shift
 
 while [[ ${1} =~ ^\-D ]]; do

http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/01ee72a3/src/conf/log4j.xml
----------------------------------------------------------------------
diff --git a/src/conf/log4j.xml b/src/conf/log4j.xml
index 441a5f8..d6525a3 100755
--- a/src/conf/log4j.xml
+++ b/src/conf/log4j.xml
@@ -28,7 +28,7 @@
     </appender>
 
     <appender name="FILE" class="org.apache.log4j.DailyRollingFileAppender">
-        <param name="File" value="${metadata.log.dir}/application.log"/>
+        <param name="File" value="${metadata.log.dir}/${metadata.log.file}"/>
         <param name="Append" value="true"/>
         <param name="Threshold" value="debug"/>
         <layout class="org.apache.log4j.PatternLayout">