Posted to commits@trafodion.apache.org by se...@apache.org on 2018/12/05 21:54:45 UTC

[2/8] trafodion git commit: [TRAFODION-3234] Add support for hive partitioned tables

[TRAFODION-3234] Add support for hive partitioned tables

Refactored the interaction with the Hive metastore to use efficient APIs to obtain the
information needed to construct the Hive table descriptors. This refactoring is expected to
reduce memory requirements and to remain efficient and performant even when a table has thousands of partitions.
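
As a rough caller-side sketch of the new flow (not code from this patch; the variable heap,
an NAHeap *, and the sample schema/table names are assumptions for illustration): create a
HiveClient_JNI instance, fetch the table metadata in a single round trip, then turn the
cached JNI arrays into the descriptor chain.

    HVC_RetCode rc;
    HiveClient_JNI *hc = HiveClient_JNI::newInstance(heap, rc);  // connects to the metastore
    if (hc != NULL &&
        hc->getHiveTableInfo("default", "t1", TRUE /*readPartnInfo*/) == HVC_OK)
    {
       // the Java side has called back into setTableInfo(); now build the descriptors
       hive_tbl_desc *tblDesc = NULL;
       rc = hc->getHiveTableDesc(heap, tblDesc);
       hc->cleanupTableInfo();  // release the cached JNI global references
    }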


Project: http://git-wip-us.apache.org/repos/asf/trafodion/repo
Commit: http://git-wip-us.apache.org/repos/asf/trafodion/commit/923ca573
Tree: http://git-wip-us.apache.org/repos/asf/trafodion/tree/923ca573
Diff: http://git-wip-us.apache.org/repos/asf/trafodion/diff/923ca573

Branch: refs/heads/master
Commit: 923ca5738a24ab2fc60f6afda69eef6ad9a480cb
Parents: 571611a
Author: selvaganesang <se...@esgyn.com>
Authored: Tue Nov 13 16:42:37 2018 +0000
Committer: selvaganesang <se...@esgyn.com>
Committed: Thu Nov 15 01:49:01 2018 +0000

----------------------------------------------------------------------
 core/sql/common/ComSmallDefs.h                  |  15 +
 core/sql/executor/ExExeUtilGet.cpp              |   9 +-
 core/sql/executor/HiveClient_JNI.cpp            | 876 ++++++++++++++++---
 core/sql/executor/HiveClient_JNI.h              |  59 +-
 core/sql/executor/hiveHook.cpp                  | 626 +++----------
 .../sql/executor/org_trafodion_sql_HiveClient.h |  61 ++
 core/sql/optimizer/NATable.cpp                  |  16 +-
 core/sql/optimizer/hiveHook.h                   | 133 ++-
 core/sql/regress/hive/EXPECTED003               |  46 +-
 core/sql/regress/hive/EXPECTED005               | 126 +--
 core/sql/sqlcomp/CmpDescribe.cpp                | 126 ++-
 core/sql/sqlcomp/DefaultConstants.h             |   4 +
 core/sql/sqlcomp/nadefaults.cpp                 |   1 +
 .../main/java/org/trafodion/sql/HiveClient.java | 199 ++++-
 core/sql/ustat/hs_la.cpp                        |  10 +-
 15 files changed, 1489 insertions(+), 818 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/trafodion/blob/923ca573/core/sql/common/ComSmallDefs.h
----------------------------------------------------------------------
diff --git a/core/sql/common/ComSmallDefs.h b/core/sql/common/ComSmallDefs.h
index c18662d..b288e82 100644
--- a/core/sql/common/ComSmallDefs.h
+++ b/core/sql/common/ComSmallDefs.h
@@ -1957,6 +1957,21 @@ enum ComAuthenticationType{
 #define COM_DBS_YES_LIT        "Y"
 #define COM_DBS_NO_LIT         "N"
 
+// values used during ORC file writes if not specified as part
+// of table creation.
+#define ORC_DEFAULT_STRIPE_SIZE         67108864
+#define ORC_DEFAULT_ROW_INDEX_STRIDE    10000
+#define ORC_DEFAULT_COMPRESSION         "ZLIB"
+#define ORC_DEFAULT_BLOOM_FILTER_FPP    0.05
+
+// values used during Parquet file writes if not specified as part
+// of table creation.
+#define PARQUET_DEFAULT_BLOCK_SIZE      134217728
+#define PARQUET_DEFAULT_PAGE_SIZE       1048576
+#define PARQUET_DEFAULT_COMPRESSION     "UNCOMPRESSED"
+#define PARQUET_DEFAULT_DICTIONARY_PAGE_SIZE 1048576
+
+
 // used with removeNATable for QI support
 enum ComQiScope 
   {
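
These constants are consumed by HiveClient_JNI::populateTableParams() later in this patch:
each one is the fallback used when the matching "orc.*" / "parquet.*" key is absent from the
Hive table parameters. A representative fallback, in the shape used by that function (the
pParamsValue slot numbering follows the key arrays defined there):

    Int64 orcStripeSize = ORC_DEFAULT_STRIPE_SIZE;   // 64 MB unless overridden
    if (pParamsValue[1] != NULL)                     // value of "orc.stripe.size", if set
       orcStripeSize = atol(pParamsValue[1]);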

http://git-wip-us.apache.org/repos/asf/trafodion/blob/923ca573/core/sql/executor/ExExeUtilGet.cpp
----------------------------------------------------------------------
diff --git a/core/sql/executor/ExExeUtilGet.cpp b/core/sql/executor/ExExeUtilGet.cpp
index 86e6a7b..080072e 100644
--- a/core/sql/executor/ExExeUtilGet.cpp
+++ b/core/sql/executor/ExExeUtilGet.cpp
@@ -6097,7 +6097,7 @@ short ExExeUtilHiveMDaccessTcb::work()
 	    if (hiveMD_)
 	      NADELETEBASIC(hiveMD_, getHeap());
 
-            hiveMD_ = new (getHeap()) HiveMetaData();
+            hiveMD_ = new (getHeap()) HiveMetaData((NAHeap *)getHeap());
 
             if (hiveMDtdb().getCatalog())
               strcpy(hiveCat_, hiveMDtdb().getCatalog());
@@ -6216,7 +6216,8 @@ short ExExeUtilHiveMDaccessTcb::work()
             int i = 0;
             while (i < tblNames_.entries())
               {
-                hiveMD_->getTableDesc(schForHive_, tblNames_[i]->c_str());
+                hiveMD_->getTableDesc(schForHive_, tblNames_[i]->c_str(), 
+                      0, FALSE, FALSE, FALSE /*dont read partn info*/);
                 i++;
               }
 
@@ -6341,8 +6342,8 @@ short ExExeUtilHiveMDaccessTcb::work()
             memset(s->nullFormat, ' ', 8);
             if (htd->getSDs()->nullFormat_)
               str_cpy(s->nullFormat, htd->getSDs()->nullFormat_, 8, ' ');
-
-            str_cpy(s->location, htd->getSDs()->location_, 1024, ' ');
+            if (htd->getSDs()->location_ != NULL)
+               str_cpy(s->location, htd->getSDs()->location_, 1024, ' ');
 
             str_cpy(s->hiveTableType, htd->tableType_, 128, ' ');
 

http://git-wip-us.apache.org/repos/asf/trafodion/blob/923ca573/core/sql/executor/HiveClient_JNI.cpp
----------------------------------------------------------------------
diff --git a/core/sql/executor/HiveClient_JNI.cpp b/core/sql/executor/HiveClient_JNI.cpp
index 80248a8..26dfc87 100644
--- a/core/sql/executor/HiveClient_JNI.cpp
+++ b/core/sql/executor/HiveClient_JNI.cpp
@@ -26,6 +26,7 @@
 #include "Context.h"
 #include "jni.h"
 #include "HiveClient_JNI.h"
+#include "org_trafodion_sql_HiveClient.h"
 
 // ===========================================================================
 // ===== Class HiveClient_JNI
@@ -42,10 +43,6 @@ static const char* const hvcErrorEnumStr[] =
  ,"Java exception in close()."
  ,"Preparing parameters for exists()."
  ,"Java exception in exists()."
- ,"Preparing parameters for getHiveTableStr()."
- ,"Java exception in getHiveTableStr()."
- ,"Preparing parameters for getHiveTableParameters()."
- ,"Java exception in getHiveTableParameters()."
  ,"Preparing parameters for getRedefTime()."
  ,"Java exception in getRedefTime()."
  ,"Java exception in getAllSchemas()."
@@ -53,6 +50,9 @@ static const char* const hvcErrorEnumStr[] =
  ,"Java exception in getAllTables()."
  ,"Preparing parameters for executeHiveSQL()."
  ,"Java exception in executeHiveSQL()."
+ ,"Preparing parameters for getHiveTableInfo()."
+ ,"Java exception in getHiveTableInfo()."
+ ,"Error in getHiveTableInfoDetails()."
 };
 
 
@@ -71,6 +71,27 @@ char* HiveClient_JNI::getErrorText(HVC_RetCode errEnum)
 //////////////////////////////////////////////////////////////////////////////
 // 
 //////////////////////////////////////////////////////////////////////////////
+HiveClient_JNI* HiveClient_JNI::newInstance(NAHeap *heap, HVC_RetCode &retCode)
+{
+   QRLogger::log(CAT_SQL_HDFS, LL_DEBUG, "HiveClient_JNI::newInstance() called.");
+
+   if (initJNIEnv() != JOI_OK)
+     return NULL;
+   retCode = HVC_OK;
+   HiveClient_JNI *hiveClient_JNI = new (heap) HiveClient_JNI(heap);
+   if (hiveClient_JNI != NULL) {
+       retCode = hiveClient_JNI->initConnection();
+       if (retCode != HVC_OK) {
+          NADELETE(hiveClient_JNI, HiveClient_JNI, heap);
+          hiveClient_JNI = NULL;
+       }
+   }
+   return hiveClient_JNI;
+}
+
+//////////////////////////////////////////////////////////////////////////////
+// 
+//////////////////////////////////////////////////////////////////////////////
 HiveClient_JNI* HiveClient_JNI::getInstance()
 {
    HVC_RetCode hvcRetcode = HVC_OK;
@@ -108,6 +129,7 @@ void HiveClient_JNI::deleteInstance()
 //////////////////////////////////////////////////////////////////////////////
 HiveClient_JNI::~HiveClient_JNI()
 {
+   cleanupTableInfo();
    if (isInitialized())	
       close(); // error handling
 }
@@ -137,14 +159,12 @@ HVC_RetCode HiveClient_JNI::init()
     
     JavaMethods_[JM_CTOR       ].jm_name      = "<init>";
     JavaMethods_[JM_CTOR       ].jm_signature = "()V";
+    JavaMethods_[JM_INIT       ].jm_name      = "init";
+    JavaMethods_[JM_INIT       ].jm_signature = "()Z";
     JavaMethods_[JM_CLOSE      ].jm_name      = "close";	
     JavaMethods_[JM_CLOSE      ].jm_signature = "()Z";
     JavaMethods_[JM_EXISTS     ].jm_name      = "exists";
     JavaMethods_[JM_EXISTS     ].jm_signature = "(Ljava/lang/String;Ljava/lang/String;)Z";
-    JavaMethods_[JM_GET_HVT    ].jm_name      = "getHiveTableString";
-    JavaMethods_[JM_GET_HVT    ].jm_signature = "(Ljava/lang/String;Ljava/lang/String;)Ljava/lang/String;";
-    JavaMethods_[JM_GET_HVP    ].jm_name      = "getHiveTableParameters";
-    JavaMethods_[JM_GET_HVP    ].jm_signature = "(Ljava/lang/String;Ljava/lang/String;)Ljava/lang/String;";
     JavaMethods_[JM_GET_RDT    ].jm_name      = "getRedefTime";
     JavaMethods_[JM_GET_RDT    ].jm_signature = "(Ljava/lang/String;Ljava/lang/String;)J";
     JavaMethods_[JM_GET_ASH     ].jm_name      = "getAllSchemas";
@@ -153,6 +173,8 @@ HVC_RetCode HiveClient_JNI::init()
     JavaMethods_[JM_GET_ATL    ].jm_signature = "(Ljava/lang/String;)[Ljava/lang/Object;";
     JavaMethods_[JM_EXEC_HIVE_SQL].jm_name = "executeHiveSQL";
     JavaMethods_[JM_EXEC_HIVE_SQL].jm_signature = "(Ljava/lang/String;)V";
+    JavaMethods_[JM_GET_HVT_INFO].jm_name      = "getHiveTableInfo";
+    JavaMethods_[JM_GET_HVT_INFO].jm_signature = "(JLjava/lang/String;Ljava/lang/String;Z)Z";
 
     rc = (HVC_RetCode)JavaObjectInterface::init(className, javaClass_, JavaMethods_, (Int32)JM_LAST, javaMethodsInitialized_);
     if (rc == HVC_OK)
@@ -165,113 +187,45 @@ HVC_RetCode HiveClient_JNI::init()
 //////////////////////////////////////////////////////////////////////////////
 // 
 //////////////////////////////////////////////////////////////////////////////
-HVC_RetCode HiveClient_JNI::exists(const char* schName, const char* tabName)
-{
-  QRLogger::log(CAT_SQL_HBASE, LL_DEBUG, "HiveClient_JNI::exists(%s, %s) called.", schName, tabName);
+HVC_RetCode HiveClient_JNI::initConnection()
+{ 
+  QRLogger::log(CAT_SQL_HBASE, LL_DEBUG, "HiveClient_JNI::initConnection() called.");
+
+
   if (initJNIEnv() != JOI_OK)
      return HVC_ERROR_INIT_PARAM;
-  if (getInstance() == NULL)
+  if (init() != HVC_OK)
      return HVC_ERROR_INIT_PARAM;
-  jstring js_schName = jenv_->NewStringUTF(schName);
-  if (js_schName == NULL) 
-  {
-    GetCliGlobals()->setJniErrorStr(getErrorText(HVC_ERROR_EXISTS_PARAM));
-    jenv_->PopLocalFrame(NULL);
-    return HVC_ERROR_EXISTS_PARAM;
-  }
-  jstring js_tabName = jenv_->NewStringUTF(tabName);
-  if (js_tabName == NULL) 
-  {
-    GetCliGlobals()->setJniErrorStr(getErrorText(HVC_ERROR_EXISTS_PARAM));
-    jenv_->PopLocalFrame(NULL);
-    return HVC_ERROR_EXISTS_PARAM;
-  }
 
-  // boolean exists(java.lang.String, java.lang.String);
-  tsRecentJMFromJNI = JavaMethods_[JM_EXISTS].jm_full_name;
-  jboolean jresult = jenv_->CallStaticBooleanMethod(javaClass_, JavaMethods_[JM_EXISTS].methodID, js_schName, js_tabName);
+  tsRecentJMFromJNI = JavaMethods_[JM_INIT].jm_full_name;
+  jboolean jresult = jenv_->CallBooleanMethod(javaObj_, JavaMethods_[JM_INIT].methodID);
 
   if (jenv_->ExceptionCheck())
   {
-    getExceptionDetails(__FILE__, __LINE__, "HiveClient_JNI::exists()");
-    jenv_->PopLocalFrame(NULL);
-    return HVC_ERROR_EXISTS_EXCEPTION;
-  }
-
-  if (jresult == false) {
-     jenv_->PopLocalFrame(NULL);
-     return HVC_DONE;  // Table does not exist
-  }
-
-  jenv_->PopLocalFrame(NULL);
-  return HVC_OK;  // Table exists.
-}
-
-//////////////////////////////////////////////////////////////////////////////
-// 
-//////////////////////////////////////////////////////////////////////////////
-HVC_RetCode HiveClient_JNI::getHiveTableStr(const char* schName, 
-                                            const char* tabName, 
-                                            Text& hiveTblStr)
-{
-  QRLogger::log(CAT_SQL_HBASE, LL_DEBUG, "Enter HiveClient_JNI::getHiveTableStr(%s, %s, %s).", schName, tabName, hiveTblStr.data());
-  if (initJNIEnv() != JOI_OK)
-     return HVC_ERROR_INIT_PARAM;
-  if (getInstance() == NULL)
-     return HVC_ERROR_INIT_PARAM;
-  jstring js_schName = jenv_->NewStringUTF(schName);
-  if (js_schName == NULL) 
-  {
-    GetCliGlobals()->setJniErrorStr(getErrorText(HVC_ERROR_GET_HVT_PARAM));
-    jenv_->PopLocalFrame(NULL);
-    return HVC_ERROR_GET_HVT_PARAM;
-  }
-  jstring js_tabName = jenv_->NewStringUTF(tabName);
-  if (js_tabName == NULL) 
-  {
-    GetCliGlobals()->setJniErrorStr(getErrorText(HVC_ERROR_GET_HVT_PARAM));
+    getExceptionDetails(__FILE__, __LINE__, "HiveClient_JNI::initConnection()");
     jenv_->PopLocalFrame(NULL);
-    return HVC_ERROR_GET_HVT_PARAM;
+    return HVC_ERROR_INIT_EXCEPTION;
   }
 
-  // java.lang.String getHiveTableString(java.lang.String, java.lang.String);
-  tsRecentJMFromJNI = JavaMethods_[JM_GET_HVT].jm_full_name;
-  jstring jresult = (jstring)jenv_->CallStaticObjectMethod(javaClass_, 
-                                            JavaMethods_[JM_GET_HVT].methodID, 
-                                            js_schName, js_tabName);
-  if (jenv_->ExceptionCheck())
+  if (jresult == false) 
   {
-    getExceptionDetails(__FILE__, __LINE__, "HiveClient_JNI::getHiveTableStr()");
+    logError(CAT_SQL_HBASE, "HiveClient_JNI::initConnection()", getLastError());
     jenv_->PopLocalFrame(NULL);
-    return HVC_ERROR_GET_HVT_EXCEPTION;
-  }
- 
-  if (jresult == NULL) {
-     jenv_->PopLocalFrame(NULL);
-     return HVC_DONE;
+    return HVC_ERROR_INIT_EXCEPTION;
   }
-  if (jenv_->GetStringLength(jresult) <= 0)
-  { 
-     jenv_->PopLocalFrame(NULL);
-     return HVC_DONE; // Table does not exist
-  }
-    
-  // Not using UFTchars and NAWString for now.
-  const char* char_result = jenv_->GetStringUTFChars(jresult, 0);
-  hiveTblStr += char_result ; // deep copy. hiveTblStr is assumed to be empty.
-  jenv_->ReleaseStringUTFChars(jresult, char_result);
 
-  QRLogger::log(CAT_SQL_HBASE, LL_DEBUG, "Exit HiveClient_JNI::getHiveTableStr(%s, %s, %s).", schName, tabName, hiveTblStr.data());
+  isConnected_ = TRUE;
   jenv_->PopLocalFrame(NULL);
-  return HVC_OK;  // Table exists.
+  return HVC_OK;
 }
 
+
 //////////////////////////////////////////////////////////////////////////////
 // 
 //////////////////////////////////////////////////////////////////////////////
-HVC_RetCode HiveClient_JNI::getHiveTableParameters(const char *schName, const char *tabName, Text& hiveParamsStr)
+HVC_RetCode HiveClient_JNI::exists(const char* schName, const char* tabName)
 {
-  QRLogger::log(CAT_SQL_HBASE, LL_DEBUG, "Enter HiveClient_JNI::getHiveTableParameters(%s).", hiveParamsStr.data());
+  QRLogger::log(CAT_SQL_HBASE, LL_DEBUG, "HiveClient_JNI::exists(%s, %s) called.", schName, tabName);
   if (initJNIEnv() != JOI_OK)
      return HVC_ERROR_INIT_PARAM;
   if (getInstance() == NULL)
@@ -279,51 +233,38 @@ HVC_RetCode HiveClient_JNI::getHiveTableParameters(const char *schName, const ch
   jstring js_schName = jenv_->NewStringUTF(schName);
   if (js_schName == NULL) 
   {
-    GetCliGlobals()->setJniErrorStr(getErrorText(HVC_ERROR_GET_HVT_PARAM));
+    GetCliGlobals()->setJniErrorStr(getErrorText(HVC_ERROR_EXISTS_PARAM));
     jenv_->PopLocalFrame(NULL);
-    return HVC_ERROR_GET_HVP_PARAM;
+    return HVC_ERROR_EXISTS_PARAM;
   }
   jstring js_tabName = jenv_->NewStringUTF(tabName);
   if (js_tabName == NULL) 
   {
-    GetCliGlobals()->setJniErrorStr(getErrorText(HVC_ERROR_GET_HVT_PARAM));
+    GetCliGlobals()->setJniErrorStr(getErrorText(HVC_ERROR_EXISTS_PARAM));
     jenv_->PopLocalFrame(NULL);
-    return HVC_ERROR_GET_HVP_PARAM;
+    return HVC_ERROR_EXISTS_PARAM;
   }
 
-  // java.lang.String getHiveTableParameters();
-  tsRecentJMFromJNI = JavaMethods_[JM_GET_HVP].jm_full_name;
-  jstring jresult = (jstring)jenv_->CallStaticObjectMethod(javaClass_, 
-                                                     JavaMethods_[JM_GET_HVP].methodID, js_schName, js_tabName);
+  // boolean exists(java.lang.String, java.lang.String);
+  tsRecentJMFromJNI = JavaMethods_[JM_EXISTS].jm_full_name;
+  jboolean jresult = jenv_->CallStaticBooleanMethod(javaClass_, JavaMethods_[JM_EXISTS].methodID, js_schName, js_tabName);
 
   if (jenv_->ExceptionCheck())
   {
-    getExceptionDetails(__FILE__, __LINE__, "HiveClient_JNI::getHiveTableParameters()");
+    getExceptionDetails(__FILE__, __LINE__, "HiveClient_JNI::exists()");
     jenv_->PopLocalFrame(NULL);
-    return HVC_ERROR_GET_HVT_EXCEPTION;
-  }
- 
-  if (jresult == NULL) {
-     jenv_->PopLocalFrame(NULL);
-     return HVC_DONE;
+    return HVC_ERROR_EXISTS_EXCEPTION;
   }
-  if (jenv_->GetStringLength(jresult) <= 0)
-  { 
+
+  if (jresult == false) {
      jenv_->PopLocalFrame(NULL);
-     return HVC_DONE; // Table does not exist
+     return HVC_DONE;  // Table does not exist
   }
-    
-  // Not using UFTchars and NAWString for now.
-  const char* char_result = jenv_->GetStringUTFChars(jresult, 0);
-  hiveParamsStr += char_result ; // deep copy. hiveParamsStr is assumed to be empty.
-  jenv_->ReleaseStringUTFChars(jresult, char_result);
 
-  QRLogger::log(CAT_SQL_HBASE, LL_DEBUG, "Exit HiveClient_JNI::getHiveTableParameters(%s).", hiveParamsStr.data());
   jenv_->PopLocalFrame(NULL);
   return HVC_OK;  // Table exists.
 }
 
-
 //////////////////////////////////////////////////////////////////////////////
 // 
 //////////////////////////////////////////////////////////////////////////////  
@@ -531,6 +472,703 @@ HVC_RetCode HiveClient_JNI::close()
 //////////////////////////////////////////////////////////////////////////////
 // 
 //////////////////////////////////////////////////////////////////////////////  
+//
+//////////////////////////////////////////////////////////////////////////////
+HVC_RetCode HiveClient_JNI::getHiveTableInfo(const char* schName, 
+                                            const char* tabName,
+                                            NABoolean readPartnInfo)
+{
+  QRLogger::log(CAT_SQL_HBASE, LL_DEBUG, "Enter HiveClient_JNI::getHiveTableInfo(%s, %s)", schName, tabName);
+  if (initJNIEnv() != JOI_OK)
+     return HVC_ERROR_INIT_PARAM;
+  jstring js_schName = jenv_->NewStringUTF(schName);
+  if (js_schName == NULL) 
+  {
+    GetCliGlobals()->setJniErrorStr(getErrorText(HVC_ERROR_GET_HVT_INFO_PARAM));
+    jenv_->PopLocalFrame(NULL);
+    return HVC_ERROR_GET_HVT_INFO_PARAM;
+  }
+  jstring js_tabName = jenv_->NewStringUTF(tabName);
+  if (js_tabName == NULL) 
+  {
+    GetCliGlobals()->setJniErrorStr(getErrorText(HVC_ERROR_GET_HVT_INFO_PARAM));
+    jenv_->PopLocalFrame(NULL);
+    return HVC_ERROR_GET_HVT_INFO_PARAM;
+  }
+  jboolean jReadPartn = readPartnInfo;
+  jlong jniObject = (jlong)this;
+  tsRecentJMFromJNI = JavaMethods_[JM_GET_HVT_INFO].jm_full_name;
+  jboolean jresult = jenv_->CallBooleanMethod(javaObj_, 
+                                            JavaMethods_[JM_GET_HVT_INFO].methodID, 
+                                            jniObject,
+                                            js_schName, js_tabName, jReadPartn);
+  if (jenv_->ExceptionCheck())
+  {
+    getExceptionDetails(__FILE__, __LINE__, "HiveClient_JNI::getHiveTableInfo()");
+    jenv_->PopLocalFrame(NULL);
+    return HVC_ERROR_GET_HVT_INFO_EXCEPTION;
+  }
+  QRLogger::log(CAT_SQL_HBASE, LL_DEBUG, "Exit HiveClient_JNI::getHiveTableInfo(%s, %s).", schName, tabName);
+  jenv_->PopLocalFrame(NULL);
+  if (jresult)
+     return HVC_OK;
+  else
+     return HVC_DONE;
+}
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+/*
+ * Class:     org_trafodion_sql_HiveClient
+ * Method:    setTableInfo
+ * Signature: (J[Ljava/lang/String;[[Ljava/lang/String;[[Ljava/lang/String;[Ljava/lang/String;[Ljava/lang/String;[I[Ljava/lang/String;[Ljava/lang/String;[Ljava/lang/String;[[Ljava/lang/String;)V
+ */
+JNIEXPORT void JNICALL Java_org_trafodion_sql_HiveClient_setTableInfo
+  (JNIEnv *jniEnv, jobject jobj, jlong jniObject, jobjectArray tableInfo, jobjectArray colInfo, jobjectArray partKeyInfo, 
+                jobjectArray bucketCols, jobjectArray sortCols, jintArray sortColsOrder, jobjectArray paramsKeys, jobjectArray paramsValues,
+                jobjectArray partNames, jobjectArray partKeyValues)
+{
+   HiveClient_JNI *hiveClient = (HiveClient_JNI *)jniObject; 
+   hiveClient->setTableInfo(tableInfo, colInfo, partKeyInfo, bucketCols, sortCols, sortColsOrder, paramsKeys, paramsValues, 
+                                partNames, partKeyValues);
+}
+#ifdef __cplusplus
+}
+#endif
+
+void HiveClient_JNI::setTableInfo(jobjectArray tableInfo, jobjectArray colInfo, jobjectArray partKeyInfo, 
+                jobjectArray bucketCols, jobjectArray sortCols, jintArray sortColsOrder,jobjectArray paramsKeys, jobjectArray paramsValues,
+                jobjectArray partNames, jobjectArray partKeyValues)
+{
+   NABoolean exceptionFound = FALSE;
+   tableInfo_ = (jobjectArray)jenv_->NewGlobalRef(tableInfo);
+   if (jenv_->ExceptionCheck())
+      exceptionFound = TRUE; 
+   if (! exceptionFound) {
+      colInfo_ = (jobjectArray)jenv_->NewGlobalRef(colInfo);
+      if (jenv_->ExceptionCheck())
+          exceptionFound = TRUE; 
+   }
+   if (! exceptionFound) {
+      if (partKeyInfo != NULL) {
+         partKeyInfo_ = (jobjectArray)jenv_->NewGlobalRef(partKeyInfo);
+         if (jenv_->ExceptionCheck())
+             exceptionFound = TRUE; 
+      }
+   }
+   if (! exceptionFound) {
+      if (bucketCols != NULL) {
+         bucketCols_ = (jobjectArray)jenv_->NewGlobalRef(bucketCols);
+         if (jenv_->ExceptionCheck())
+             exceptionFound = TRUE; 
+      }
+   }
+   if (! exceptionFound) {
+      if (sortCols != NULL) {
+         sortCols_ = (jobjectArray)jenv_->NewGlobalRef(sortCols);
+         if (jenv_->ExceptionCheck())
+             exceptionFound = TRUE; 
+      }
+   }
+   if (! exceptionFound) {
+      if (sortColsOrder != NULL) {
+         sortColsOrder_ = (jintArray)jenv_->NewGlobalRef(sortColsOrder);
+         if (jenv_->ExceptionCheck())
+             exceptionFound = TRUE; 
+      }
+   }
+   if (! exceptionFound) {
+      if (paramsKeys != NULL) {
+         paramsKeys_ = (jobjectArray)jenv_->NewGlobalRef(paramsKeys);
+         if (jenv_->ExceptionCheck())
+             exceptionFound = TRUE; 
+      }
+   }
+   if (! exceptionFound) {
+      if (paramsValues != NULL) {
+         paramsValues_ = (jobjectArray)jenv_->NewGlobalRef(paramsValues);
+         if (jenv_->ExceptionCheck())
+             exceptionFound = TRUE; 
+      }
+   }
+   if (! exceptionFound) {
+      if (partNames != NULL) {
+         partNames_ = (jobjectArray)jenv_->NewGlobalRef(partNames);
+         if (jenv_->ExceptionCheck())
+             exceptionFound = TRUE; 
+      }
+   }
+   if (! exceptionFound) {
+      if (partKeyValues != NULL) {
+         partKeyValues_ = (jobjectArray)jenv_->NewGlobalRef(partKeyValues);
+         if (jenv_->ExceptionCheck())
+             exceptionFound = TRUE; 
+      }
+   }
+   ex_assert(! exceptionFound, "Exception in HiveClient_JNI::setTableInfo");
+}
+
+void HiveClient_JNI::cleanupTableInfo() 
+{
+   if (tableInfo_ != NULL) {
+      jenv_->DeleteGlobalRef(tableInfo_);
+      tableInfo_ = NULL;
+   }
+   if (colInfo_ != NULL) {
+      jenv_->DeleteGlobalRef(colInfo_);
+      colInfo_ = NULL;
+   }
+   if (partKeyInfo_ != NULL) {
+      jenv_->DeleteGlobalRef(partKeyInfo_);
+      partKeyInfo_ = NULL;
+   }
+   if (bucketCols_ != NULL) {
+      jenv_->DeleteGlobalRef(bucketCols_);
+      bucketCols_ = NULL;
+   }
+   if (sortCols_ != NULL) {
+      jenv_->DeleteGlobalRef(sortCols_);
+      sortCols_ = NULL;
+   }
+   if (sortColsOrder_ != NULL) {
+      jenv_->DeleteGlobalRef(sortColsOrder_);
+      sortColsOrder_ = NULL;
+   }
+   if (paramsKeys_ != NULL) {
+      jenv_->DeleteGlobalRef(paramsKeys_);
+      paramsKeys_ = NULL;
+   }
+   if (paramsValues_ != NULL) {
+      jenv_->DeleteGlobalRef(paramsValues_);
+      paramsValues_ = NULL;
+   }
+   if (partNames_ != NULL) {
+      jenv_->DeleteGlobalRef(partNames_);
+      partNames_ = NULL;
+   }
+   if (partKeyValues_ != NULL) {
+      jenv_->DeleteGlobalRef(partKeyValues_);
+      partKeyValues_ = NULL;
+   }
+}
+
+HVC_RetCode HiveClient_JNI::getHiveTableDesc(NAHeap *heap, hive_tbl_desc *&hiveTableDesc)
+{
+   HVC_RetCode hvcRetcode;
+   jstring jTableInfo[7];
+   const char *pTableInfo[7];
+   Int64 creationTs;
+
+   jTableInfo[0] = (jstring)jenv_->GetObjectArrayElement(tableInfo_, org_trafodion_sql_HiveClient_Table_TABLE_NAME);
+   pTableInfo[0] = jenv_->GetStringUTFChars(jTableInfo[0], NULL);
+
+   jTableInfo[1] = (jstring)jenv_->GetObjectArrayElement(tableInfo_, org_trafodion_sql_HiveClient_Table_DB_NAME);
+   pTableInfo[1] = jenv_->GetStringUTFChars(jTableInfo[1], NULL);
+
+   jTableInfo[2] = (jstring)jenv_->GetObjectArrayElement(tableInfo_, org_trafodion_sql_HiveClient_Table_OWNER);
+   pTableInfo[2] = jenv_->GetStringUTFChars(jTableInfo[2], NULL);
+
+   jTableInfo[3] = (jstring)jenv_->GetObjectArrayElement(tableInfo_, org_trafodion_sql_HiveClient_Table_TABLE_TYPE);
+   pTableInfo[3] = jenv_->GetStringUTFChars(jTableInfo[3], NULL);
+
+   jTableInfo[4] = (jstring)jenv_->GetObjectArrayElement(tableInfo_, org_trafodion_sql_HiveClient_Table_VIEW_ORIGINAL_TEXT);
+   if (jTableInfo[4] != NULL)
+      pTableInfo[4] = jenv_->GetStringUTFChars(jTableInfo[4], NULL);
+   else
+      pTableInfo[4] = NULL;
+    
+   jTableInfo[5] = (jstring)jenv_->GetObjectArrayElement(tableInfo_, org_trafodion_sql_HiveClient_Table_VIEW_EXPANDED_TEXT);
+   if (jTableInfo[5] != NULL)
+       pTableInfo[5] = jenv_->GetStringUTFChars(jTableInfo[5], NULL);
+   else
+      pTableInfo[5] = NULL;
+
+   jTableInfo[6] = (jstring)jenv_->GetObjectArrayElement(tableInfo_, org_trafodion_sql_HiveClient_Table_CREATE_TIME);
+   pTableInfo[6] = jenv_->GetStringUTFChars(jTableInfo[6], NULL);
+   creationTs = atol(pTableInfo[6]);
+   hive_sd_desc *hiveSdDesc;
+   if ((hvcRetcode = populateSD(heap, creationTs, hiveSdDesc)) != HVC_OK)
+      return hvcRetcode;
+
+   struct hive_pkey_desc* partKeyDesc; 
+   if ((hvcRetcode = populatePartKeyColumns(heap, partKeyDesc)) != HVC_OK)
+       return hvcRetcode;
+   
+   struct hive_tblparams_desc *tblParamsDesc;
+   if ((hvcRetcode = populateTableParams(heap, hiveSdDesc, tblParamsDesc)) != HVC_OK)
+       return hvcRetcode;
+
+   hiveTableDesc =  new (heap)
+      struct hive_tbl_desc(heap, 0, // no tblID with JNI 
+                          pTableInfo[0], // Table Name
+                          pTableInfo[1], // schema Name
+                          pTableInfo[2], // owner
+                          pTableInfo[3], // table type
+                          creationTs,
+                          pTableInfo[4], // view original str
+                          pTableInfo[5], // view expanded  str
+                          hiveSdDesc, partKeyDesc, tblParamsDesc); 
+
+   for (int i = 0; i < 7 ; i++) {
+      if (jTableInfo[i] != NULL) {
+         jenv_->ReleaseStringUTFChars(jTableInfo[i], pTableInfo[i]);
+         jenv_->DeleteLocalRef(jTableInfo[i]);
+      }
+   }
+   return HVC_OK; 
+} 
+
+HVC_RetCode HiveClient_JNI::populateSD(NAHeap *heap, Int64 creationTs, hive_sd_desc* &sdDesc)
+{
+   HVC_RetCode hvcRetcode;
+
+   if (tableInfo_ == NULL)
+      return HVC_ERROR_POPULATE_SDS_ERROR;
+
+   struct hive_sd_desc* lastSd = NULL;
+   char fieldTerminator  = '\001';  // this is the Hive default ^A or ascii code 1
+   char recordTerminator = '\n';    // this is the Hive default
+
+
+   jstring jSdEntries[8]; 
+   const char *pSdEntries[8];
+
+   NABoolean isCompressed = FALSE;
+   Int32 numBuckets = 0;
+
+   jSdEntries[0] = (jstring)jenv_->GetObjectArrayElement(tableInfo_, org_trafodion_sql_HiveClient_Table_SD_LOCATION);
+   if (jSdEntries[0] != NULL)
+      pSdEntries[0] = jenv_->GetStringUTFChars(jSdEntries[0], NULL);
+   else
+      pSdEntries[0] = NULL;
+
+   jSdEntries[1] = (jstring)jenv_->GetObjectArrayElement(tableInfo_, org_trafodion_sql_HiveClient_Table_SD_INPUT_FORMAT);
+   pSdEntries[1] = jenv_->GetStringUTFChars(jSdEntries[1], NULL);
+
+   jSdEntries[2] = (jstring)jenv_->GetObjectArrayElement(tableInfo_, org_trafodion_sql_HiveClient_Table_SD_OUTPUT_FORMAT);
+   pSdEntries[2] = jenv_->GetStringUTFChars(jSdEntries[2], NULL);
+
+   jSdEntries[3] = (jstring)jenv_->GetObjectArrayElement(tableInfo_, org_trafodion_sql_HiveClient_Table_SD_COMPRESSED);
+   pSdEntries[3] = jenv_->GetStringUTFChars(jSdEntries[3], NULL);
+   if (strcmp(pSdEntries[3], "true") == 0)
+      isCompressed = TRUE;
+
+   jSdEntries[4] = (jstring)jenv_->GetObjectArrayElement(tableInfo_, org_trafodion_sql_HiveClient_Table_SD_NUM_BUCKETS);
+   pSdEntries[4] = jenv_->GetStringUTFChars(jSdEntries[4], NULL);
+   numBuckets = atoi(pSdEntries[4]); 
+
+   jSdEntries[5] = (jstring)jenv_->GetObjectArrayElement(tableInfo_, org_trafodion_sql_HiveClient_Table_NULL_FORMAT);
+   if (jSdEntries[5] != NULL) 
+      pSdEntries[5] = jenv_->GetStringUTFChars(jSdEntries[5], NULL);
+   else
+      pSdEntries[5] = NULL;
+   
+   jSdEntries[6] = (jstring)jenv_->GetObjectArrayElement(tableInfo_, org_trafodion_sql_HiveClient_Table_FIELD_DELIM);
+   if (jSdEntries[6] != NULL)  {
+      pSdEntries[6] = jenv_->GetStringUTFChars(jSdEntries[6], NULL);
+      fieldTerminator = *pSdEntries[6];
+   }
+   else
+      pSdEntries[6] = NULL;
+
+   jSdEntries[7] = (jstring)jenv_->GetObjectArrayElement(tableInfo_, org_trafodion_sql_HiveClient_Table_LINE_DELIM);
+   if (jSdEntries[7] != NULL) {
+      pSdEntries[7] = jenv_->GetStringUTFChars(jSdEntries[7], NULL);
+      recordTerminator = *pSdEntries[7];
+   }
+   else
+      pSdEntries[7] = NULL;
+  
+   struct hive_column_desc* colsDesc; 
+   if ((hvcRetcode = populateColumns(heap, colsDesc)) != HVC_OK)
+       return hvcRetcode;
+
+   struct hive_skey_desc *sortColsDesc;
+   if ((hvcRetcode = populateSortColumns(heap, sortColsDesc)) != HVC_OK)
+       return hvcRetcode;
+
+   struct hive_bkey_desc *bucketColsDesc;
+   if ((hvcRetcode = populateBucketColumns(heap, bucketColsDesc)) != HVC_OK)
+       return hvcRetcode;
+
+   jint numPartns = 0;
+    
+   struct hive_sd_desc* tableSdDesc = new (heap)
+        struct hive_sd_desc(heap, 0, //SdID
+                            pSdEntries[0], // Location
+                            creationTs,
+                            numBuckets, // numBuckets
+                            pSdEntries[1], // input format
+                            pSdEntries[2], // output format
+                            pSdEntries[5],  // null format
+                            hive_sd_desc::TABLE_SD,
+                            colsDesc,
+                            sortColsDesc,
+                            bucketColsDesc,
+                            fieldTerminator,
+                            recordTerminator,
+                            isCompressed,
+                            NULL);
+   lastSd = tableSdDesc;
+   jobjectArray partValuesArray;
+   if (partKeyValues_ != NULL) {
+      numPartns = jenv_->GetArrayLength(partKeyValues_);     
+      for (int partNum = 0 ; partNum < numPartns ; partNum++) {
+          jstring jPartName = (jstring)jenv_->GetObjectArrayElement(partNames_, partNum);
+          const char *pPartName = jenv_->GetStringUTFChars(jPartName, NULL);
+          jobjectArray jPartKeyValues = (jobjectArray)jenv_->GetObjectArrayElement(partKeyValues_, partNum);
+          int numPartKeyValues = jenv_->GetArrayLength(jPartKeyValues);       
+          NAString partKeyValue(heap);
+          for (int partKeyValueIdx = 0 ; partKeyValueIdx < numPartKeyValues; partKeyValueIdx++) {
+             jstring jPartKeyValue = (jstring)jenv_->GetObjectArrayElement(jPartKeyValues, partKeyValueIdx);
+             const char *pPartKeyValue = jenv_->GetStringUTFChars(jPartKeyValue, NULL);
+             if (partKeyValueIdx != 0)
+                partKeyValue += ", ";
+             partKeyValue += pPartKeyValue;
+             jenv_->ReleaseStringUTFChars(jPartKeyValue, pPartKeyValue);
+             jenv_->DeleteLocalRef(jPartKeyValue);
+          }
+          NAString location(pSdEntries[0], heap);
+          location += "/";
+          location += pPartName;
+          struct hive_sd_desc* partSdDesc = new (heap)
+                struct hive_sd_desc(heap, 0, //SdID
+                            location.data(),
+                            creationTs,
+                            numBuckets, // numBuckets
+                            pSdEntries[1], // input format
+                            pSdEntries[2], // output format
+                            pSdEntries[5],  // null format
+                            hive_sd_desc::PARTN_SD,
+                            colsDesc,
+                            sortColsDesc,
+                            bucketColsDesc,
+                            fieldTerminator,
+                            recordTerminator,
+                            isCompressed,
+                            partKeyValue.data());
+          lastSd->next_ = partSdDesc;
+          lastSd = partSdDesc;                  
+          jenv_->DeleteLocalRef(jPartKeyValues);
+          jenv_->ReleaseStringUTFChars(jPartName, pPartName);
+          jenv_->DeleteLocalRef(jPartName);
+      }
+   }    
+   
+   for (int i = 0; i < 8 ; i++) {
+      if (jSdEntries[i] != NULL) {
+         jenv_->ReleaseStringUTFChars(jSdEntries[i], pSdEntries[i]);
+         jenv_->DeleteLocalRef(jSdEntries[i]);
+      }
+   }
+   sdDesc = tableSdDesc;  
+   return HVC_OK; 
+}
+
+HVC_RetCode HiveClient_JNI::populateColumns(NAHeap *heap, hive_column_desc* &columns)
+{
+   HVC_RetCode hvcRetcode;
+
+   if (colInfo_ == NULL) {
+      columns = NULL;
+      return HVC_OK;
+   }
+   struct hive_column_desc* result = NULL;
+   struct hive_column_desc* last = result;
+
+   jint numCols = jenv_->GetArrayLength(colInfo_);
+   jstring jColDetails[2];
+   const char *pColDetails[2];
+   for (int colIdx = 0 ; colIdx < numCols ; colIdx++) {
+      jobjectArray jCol = (jobjectArray)jenv_->GetObjectArrayElement(colInfo_, colIdx);
+      jColDetails[0] = (jstring)jenv_->GetObjectArrayElement(jCol, org_trafodion_sql_HiveClient_Col_NAME);
+      jColDetails[1] = (jstring)jenv_->GetObjectArrayElement(jCol, org_trafodion_sql_HiveClient_Col_TYPE);
+      pColDetails[0] = jenv_->GetStringUTFChars(jColDetails[0], NULL); 
+      pColDetails[1] = jenv_->GetStringUTFChars(jColDetails[1], NULL);
+      struct hive_column_desc* newCol = new (heap)
+           struct hive_column_desc(heap, 0,
+                                pColDetails[0],
+                                pColDetails[1],
+                                colIdx);
+       if ( result == NULL ) {
+        last = result = newCol;
+      } else {
+        last->next_ = newCol;
+        last = newCol;
+      }
+      for (int i = 0; i < 2 ; i++) {
+         jenv_->ReleaseStringUTFChars(jColDetails[i], pColDetails[i]);
+         jenv_->DeleteLocalRef(jColDetails[i]);
+      }
+      jenv_->DeleteLocalRef(jCol);
+   } 
+   columns = result;
+   return HVC_OK;
+}
+
+HVC_RetCode HiveClient_JNI::populatePartKeyColumns(NAHeap *heap, hive_pkey_desc* &columns)
+{
+   HVC_RetCode hvcRetcode;
+   
+   if (partKeyInfo_ == NULL) {
+      columns = NULL;
+      return HVC_OK;
+   }
+   struct hive_pkey_desc* result = NULL;
+   struct hive_pkey_desc* last = result;
+
+   jint numCols = jenv_->GetArrayLength(partKeyInfo_);
+   jstring jColDetails[2];
+   const char *pColDetails[2];
+   for (int colIdx = 0 ; colIdx < numCols ; colIdx++) {
+      jobjectArray jCol = (jobjectArray)jenv_->GetObjectArrayElement(partKeyInfo_, colIdx);
+      jColDetails[0] = (jstring)jenv_->GetObjectArrayElement(jCol, org_trafodion_sql_HiveClient_Col_NAME);
+      jColDetails[1] = (jstring)jenv_->GetObjectArrayElement(jCol, org_trafodion_sql_HiveClient_Col_TYPE);
+      pColDetails[0] = jenv_->GetStringUTFChars(jColDetails[0], NULL); 
+      pColDetails[1] = jenv_->GetStringUTFChars(jColDetails[1], NULL);
+      struct hive_pkey_desc* newCol = new (heap)
+           struct hive_pkey_desc(heap, pColDetails[0],
+                                pColDetails[1],
+                                colIdx);
+       if ( result == NULL ) {
+        last = result = newCol;
+      } else {
+        last->next_ = newCol;
+        last = newCol;
+      }
+      for (int i = 0; i < 2 ; i++) {
+         jenv_->ReleaseStringUTFChars(jColDetails[i], pColDetails[i]);
+         jenv_->DeleteLocalRef(jColDetails[i]);
+      }
+      jenv_->DeleteLocalRef(jCol);
+   } 
+   columns = result;
+   return HVC_OK;
+}
+
+HVC_RetCode HiveClient_JNI::populateSortColumns(NAHeap *heap, hive_skey_desc* &sortColsDesc)
+{
+   HVC_RetCode hvcRetcode;
+   
+   if (sortCols_ == NULL) {
+      sortColsDesc = NULL;
+      return HVC_OK;
+   }
+   struct hive_skey_desc* result = NULL;
+   struct hive_skey_desc* last = result;
+
+   jint numCols = jenv_->GetArrayLength(sortCols_);
+   jstring jColName;
+   const char *pColName;
+   jint *pSortColsOrder = jenv_->GetIntArrayElements(sortColsOrder_, NULL);
+   for (int colIdx = 0 ; colIdx < numCols ; colIdx++) {
+      jColName = (jstring)jenv_->GetObjectArrayElement(sortCols_, colIdx);
+      pColName = jenv_->GetStringUTFChars(jColName, NULL); 
+      struct hive_skey_desc* newCol = new (heap)
+           struct hive_skey_desc(heap, pColName,
+                                pSortColsOrder[colIdx],
+                                colIdx);
+       if ( result == NULL ) {
+        last = result = newCol;
+      } else {
+        last->next_ = newCol;
+        last = newCol;
+      }
+      jenv_->ReleaseStringUTFChars(jColName, pColName);
+      jenv_->DeleteLocalRef(jColName);
+   } 
+   jenv_->ReleaseIntArrayElements(sortColsOrder_, pSortColsOrder, JNI_ABORT);
+   sortColsDesc = result;
+   return HVC_OK;
+}
+
+HVC_RetCode HiveClient_JNI::populateBucketColumns(NAHeap *heap, hive_bkey_desc* &bucketColsDesc)
+{
+   HVC_RetCode hvcRetcode;
+   
+   if (bucketCols_ == NULL) {
+      bucketColsDesc = NULL;
+      return HVC_OK;
+   }
+   struct hive_bkey_desc* result = NULL;
+   struct hive_bkey_desc* last = result;
+
+   jint numCols = jenv_->GetArrayLength(bucketCols_);
+   jstring jColName;
+   const char *pColName;
+   for (int colIdx = 0 ; colIdx < numCols ; colIdx++) {
+      jColName = (jstring)jenv_->GetObjectArrayElement(bucketCols_, colIdx);
+      pColName = jenv_->GetStringUTFChars(jColName, NULL); 
+      struct hive_bkey_desc* newCol = new (heap)
+           struct hive_bkey_desc(heap, pColName,
+                                colIdx);
+       if ( result == NULL ) {
+        last = result = newCol;
+      } else {
+        last->next_ = newCol;
+        last = newCol;
+      }
+      jenv_->ReleaseStringUTFChars(jColName, pColName);
+      jenv_->DeleteLocalRef(jColName);
+   } 
+   bucketColsDesc = result;
+   return HVC_OK;
+}
+
+HVC_RetCode HiveClient_JNI::populateTableParams(NAHeap *heap, hive_sd_desc *sd, hive_tblparams_desc* &tblParamsDesc)
+{
+   if (paramsKeys_ == NULL) {
+      tblParamsDesc = NULL;
+      return HVC_OK;
+   }
+  
+   const char **paramsKey;
+   const char *orcParamsKey[] = {"orc.block.padding", "orc.stripe.size", "orc.compress", 
+                            "orc.row.index.stride", "orc.bloom.filter.columns", "orc.bloom.filter.fpp",
+                            "orc.create.index", NULL}; 
+   const char *parquetParamsKey[] = {"parquet.block.size", "parquet.page.size", "parquet.compression",  
+                            "parquet.enable.dictionary", "parquet.dictionary.page.size", "parquet.writer.max-padding", 
+                            NULL};
+   if (sd->isOrcFile())
+      paramsKey = orcParamsKey;
+   else if (sd->isParquetFile())
+      paramsKey = parquetParamsKey;
+   else {
+      tblParamsDesc = NULL;
+      return HVC_OK;
+   }
+   Int32 orcBlockPadding = 1;
+   Int64 orcStripeSize = ORC_DEFAULT_STRIPE_SIZE;
+   Int32 orcRowIndexStride = ORC_DEFAULT_ROW_INDEX_STRIDE;
+   const char *orcCompression = ORC_DEFAULT_COMPRESSION;
+   const char *orcBloomFilterColumns = NULL;
+   double orcBloomFilterFPP = ORC_DEFAULT_BLOOM_FILTER_FPP;
+   NABoolean orcCreateIndex = TRUE;
+
+   NABoolean parquetEnableDictionary = FALSE;
+   Int64 parquetBlockSize = PARQUET_DEFAULT_BLOCK_SIZE;
+   Int32 parquetPageSize = PARQUET_DEFAULT_PAGE_SIZE;
+   const char *parquetCompression = PARQUET_DEFAULT_COMPRESSION;
+   Int64 parquetDictionaryPageSize = PARQUET_DEFAULT_DICTIONARY_PAGE_SIZE;
+   Int32 parquetWriterMaxPadding = 0;
+
+   NAString tblParamsStr(heap);
+   const char *pParamsValue[7]; 
+   jstring jParamsValue[7];
+   for (int i = 0 ; i < 7 ; i++) {
+      pParamsValue[i] = NULL;
+      jParamsValue[i] = NULL;
+   } 
+   int numParams = jenv_->GetArrayLength(paramsKeys_);
+   for (int paramNo = 0 ; paramNo < numParams; paramNo++) {
+      jstring jTmpParamsKey = (jstring)jenv_->GetObjectArrayElement(paramsKeys_, paramNo);
+      jstring jTmpParamsValue = (jstring)jenv_->GetObjectArrayElement(paramsValues_, paramNo);
+      const char *pTmpParamsKey = jenv_->GetStringUTFChars(jTmpParamsKey, NULL); 
+      const char *pTmpParamsValue = jenv_->GetStringUTFChars(jTmpParamsValue, NULL); 
+      bool paramFound = false;
+      int  paramFoundKeyIdx;
+      for (int paramsKeyIdx = 0; paramsKey[paramsKeyIdx] != NULL ; paramsKeyIdx++) {
+         if (strcmp(paramsKey[paramsKeyIdx], pTmpParamsKey) == 0) {
+            paramFound = true;
+            paramFoundKeyIdx = paramsKeyIdx;
+            break;
+         }
+      } 
+      if (paramFound) {
+         jParamsValue[paramFoundKeyIdx] = jTmpParamsValue; 
+         pParamsValue[paramFoundKeyIdx] = pTmpParamsValue; 
+         tblParamsStr += pTmpParamsKey;
+         tblParamsStr += "=";  
+         tblParamsStr += pTmpParamsValue;
+         tblParamsStr += "|";  
+      } else {
+         jenv_->ReleaseStringUTFChars(jTmpParamsValue, pTmpParamsValue);
+         jenv_->DeleteLocalRef(jTmpParamsValue);
+      }      
+      jenv_->ReleaseStringUTFChars(jTmpParamsKey, pTmpParamsKey);
+      jenv_->DeleteLocalRef(jTmpParamsKey);
+   }
+  
+   if (sd->isOrcFile()) {
+      if (pParamsValue[0] != NULL) {
+         if (strcmp(pParamsValue[0], "true") == 0)
+            orcBlockPadding = TRUE;
+         else
+            orcBlockPadding = FALSE;
+      }
+      if (pParamsValue[1] != NULL) 
+         orcStripeSize = atol(pParamsValue[1]);
+      if (pParamsValue[3] != NULL) 
+         orcRowIndexStride = atoi(pParamsValue[3]);
+      if (pParamsValue[2] != NULL) 
+         orcCompression = pParamsValue[2];
+      orcBloomFilterColumns = pParamsValue[4];
+      if (pParamsValue[5] != NULL) 
+         orcBloomFilterFPP = atof(pParamsValue[5]);
+      if (pParamsValue[6] != NULL) {
+         if (strcmp(pParamsValue[6], "true") == 0) 
+            orcCreateIndex = TRUE;
+         else
+            orcCreateIndex = FALSE;
+      }
+      tblParamsDesc  = new (heap)
+         struct hive_tblparams_desc(heap, 
+         tblParamsStr.data(),
+         orcBlockPadding,
+         orcStripeSize, 
+         orcRowIndexStride,
+         orcCompression,
+         orcBloomFilterColumns,
+         orcBloomFilterFPP,
+         orcCreateIndex);
+
+   }
+   else
+   if (sd->isParquetFile()) {
+      if (pParamsValue[0] != NULL) 
+         parquetBlockSize = atol(pParamsValue[0]);
+      if (pParamsValue[1] != NULL) 
+         parquetPageSize = atoi(pParamsValue[1]);
+      if (pParamsValue[2] != NULL) 
+         parquetCompression = pParamsValue[2];
+      if (pParamsValue[3] != NULL) {
+         if (strcmp(pParamsValue[3], "true") == 0) 
+            parquetEnableDictionary = TRUE;
+         else
+            parquetEnableDictionary = FALSE;
+      }
+      if (pParamsValue[4] != NULL) 
+         parquetDictionaryPageSize = atol(pParamsValue[4]);
+      if (pParamsValue[5] != NULL) 
+         parquetWriterMaxPadding = atol(pParamsValue[5]);
+      tblParamsDesc  = new (heap)
+           struct hive_tblparams_desc(heap, 
+         tblParamsStr,
+         parquetWriterMaxPadding,
+         parquetBlockSize,
+         parquetPageSize,
+         parquetCompression,
+         NULL,
+         parquetDictionaryPageSize,
+         parquetEnableDictionary);
+   } 
+   else
+      tblParamsDesc = NULL;
+
+   for (int i = 0; i < 7 ; i++) {
+      if (jParamsValue[i] != NULL) {
+         jenv_->ReleaseStringUTFChars(jParamsValue[i], pParamsValue[i]);
+         jenv_->DeleteLocalRef(jParamsValue[i]);
+      }
+   }
+   return HVC_OK;
+}
+
+//////////////////////////////////////////////////////////////////////////////
+// 
+////////////////////////////////////////////////////////////////////////////  
 void HiveClient_JNI::logIt(const char* str)
 {
   QRLogger::log(CAT_SQL_HBASE, LL_DEBUG, str);
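
The new path replaces the old "serialize the Thrift Table object to a string and re-parse it"
approach: getHiveTableInfo() hands the Java layer a jlong handle to the HiveClient_JNI object,
the Java side calls back into Java_org_trafodion_sql_HiveClient_setTableInfo() with the
metadata arrays (cached here as JNI global references), and getHiveTableDesc() then builds one
TABLE_SD plus one PARTN_SD per partition, chained through next_. A minimal consumer-side
sketch (the caller, heap, and the loop body are assumptions, not code from the patch):

    hive_tbl_desc *tblDesc = NULL;
    if (hiveClient->getHiveTableDesc(heap, tblDesc) == HVC_OK)
    {
       // the first SD describes the table directory; each remaining SD is one Hive partition
       for (hive_sd_desc *sd = tblDesc->getSDs(); sd != NULL; sd = sd->next_)
       {
          const char *loc = sd->location_;  // "<table dir>/<partName>" for PARTN_SD entries
       }
    }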

http://git-wip-us.apache.org/repos/asf/trafodion/blob/923ca573/core/sql/executor/HiveClient_JNI.h
----------------------------------------------------------------------
diff --git a/core/sql/executor/HiveClient_JNI.h b/core/sql/executor/HiveClient_JNI.h
index 3d6dfe8..1a3c187 100644
--- a/core/sql/executor/HiveClient_JNI.h
+++ b/core/sql/executor/HiveClient_JNI.h
@@ -36,13 +36,10 @@ typedef enum {
  ,HVC_FIRST  = JOI_LAST
  ,HVC_DONE   = HVC_FIRST
  ,HVC_ERROR_INIT_PARAM
+ ,HVC_ERROR_INIT_EXCEPTION
  ,HVC_ERROR_CLOSE_EXCEPTION
  ,HVC_ERROR_EXISTS_PARAM
  ,HVC_ERROR_EXISTS_EXCEPTION
- ,HVC_ERROR_GET_HVT_PARAM
- ,HVC_ERROR_GET_HVT_EXCEPTION
- ,HVC_ERROR_GET_HVP_PARAM
- ,HVC_ERROR_GET_HVP_EXCEPTION
  ,HVC_ERROR_GET_REDEFTIME_PARAM
  ,HVC_ERROR_GET_REDEFTIME_EXCEPTION
  ,HVC_ERROR_GET_ALLSCH_EXCEPTION
@@ -50,12 +47,17 @@ typedef enum {
  ,HVC_ERROR_GET_ALLTBL_EXCEPTION
  ,HVC_ERROR_EXECUTE_HIVE_SQL_PARAM
  ,HVC_ERROR_EXECUTE_HIVE_SQL_EXCEPTION
+ ,HVC_ERROR_GET_HVT_INFO_PARAM
+ ,HVC_ERROR_GET_HVT_INFO_EXCEPTION
+ ,HVC_ERROR_GET_HIVE_TABLE_INFO_ERROR
+ ,HVC_ERROR_POPULATE_SDS_ERROR
  ,HVC_LAST
 } HVC_RetCode;
 
 class HiveClient_JNI : public JavaObjectInterface
 {
 public:
+  static HiveClient_JNI* newInstance(NAHeap *heap, HVC_RetCode &retCode);
   static HiveClient_JNI* getInstance();
   static void deleteInstance();
 
@@ -66,10 +68,15 @@ public:
   // Must be called.
   HVC_RetCode init();
 
+  HVC_RetCode initConnection();
+  bool isConnected() 
+  {
+    return isConnected_;
+  }
+
   HVC_RetCode close();
   static HVC_RetCode exists(const char* schName, const char* tabName);
-  static HVC_RetCode getHiveTableStr(const char* schName, const char* tabName, 
-                              Text& hiveTblStr);
+  HVC_RetCode getHiveTableInfo(const char* schName, const char* tabName, NABoolean readPartnInfo);
   static HVC_RetCode getHiveTableParameters(const char *schName, const char *tabName, 
                               Text& hiveParamsStr);
   static HVC_RetCode getRedefTime(const char* schName, const char* tabName, 
@@ -82,25 +89,47 @@ public:
   static char* getErrorText(HVC_RetCode errEnum);
   
   static void logIt(const char* str);
-
+  void setTableInfo(jobjectArray tableInfo, jobjectArray colInfo, jobjectArray partKeyInfo, 
+                jobjectArray bucketCols, jobjectArray sortCols, jintArray sortColsOrder, jobjectArray paramsKeys, jobjectArray paramsValue,
+                jobjectArray partNames, jobjectArray partKeyValues);
+  void cleanupTableInfo();
+  HVC_RetCode getHiveTableDesc(NAHeap *heap, hive_tbl_desc *&hiveTableDesc);
 private:   
   // Private Default constructor		
   HiveClient_JNI(NAHeap *heap)
   :  JavaObjectInterface(heap)
   , isConnected_(FALSE)
-  {}
+  {
+     tableInfo_ = NULL;
+     colInfo_ = NULL;
+     partKeyInfo_ = NULL;
+     bucketCols_ = NULL;
+     sortCols_ = NULL;
+     sortColsOrder_ = NULL;
+     paramsKeys_ = NULL;
+     paramsValues_ = NULL;
+     partNames_ = NULL;
+     partKeyValues_ = NULL;
+  }
 
 private:  
+  HVC_RetCode populateSD(NAHeap *heap, Int64 creationTs, hive_sd_desc* &sd);
+  HVC_RetCode populateColumns(NAHeap *heap, hive_column_desc* &columns);
+  HVC_RetCode populatePartKeyColumns(NAHeap *heap, hive_pkey_desc* &partKeyDesc);
+  HVC_RetCode populateSortColumns(NAHeap *heap, hive_skey_desc* &sortKeyDesc);
+  HVC_RetCode populateBucketColumns(NAHeap *heap, hive_bkey_desc* &bucketKeyDesc);
+  HVC_RetCode populateTableParams(NAHeap *heap, hive_sd_desc *sd, hive_tblparams_desc* &tblparamsDesc);
+
   enum JAVA_METHODS {
     JM_CTOR = 0
+   ,JM_INIT
    ,JM_CLOSE
    ,JM_EXISTS     
-   ,JM_GET_HVT
-   ,JM_GET_HVP
    ,JM_GET_RDT
    ,JM_GET_ASH
    ,JM_GET_ATL
    ,JM_EXEC_HIVE_SQL
+   ,JM_GET_HVT_INFO
    ,JM_LAST
   };
   static jclass          javaClass_; 
@@ -109,5 +138,15 @@ private:
   // this mutex protects both JaveMethods_ and javaClass_ initialization
   static pthread_mutex_t javaMethodsInitMutex_;
   bool isConnected_;
+  jobjectArray tableInfo_;
+  jobjectArray colInfo_;
+  jobjectArray partKeyInfo_;
+  jobjectArray bucketCols_;
+  jobjectArray sortCols_;
+  jintArray    sortColsOrder_;
+  jobjectArray paramsKeys_;
+  jobjectArray paramsValues_;
+  jobjectArray partNames_;
+  jobjectArray partKeyValues_;
 };
 #endif
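
The return-code convention carries over from exists(): HVC_DONE means the table was not found,
HVC_OK means the metadata was fetched and cached, and anything else maps to one of the new
HVC_ERROR_GET_HVT_INFO_* codes. A hedged caller sketch (variable names assumed):

    HVC_RetCode rc = hiveClient->getHiveTableInfo(schName, tblName, TRUE /*readPartnInfo*/);
    if (rc == HVC_DONE)
    {
       // table does not exist in the Hive metastore
    }
    else if (rc != HVC_OK)
    {
       const char *msg = HiveClient_JNI::getErrorText(rc);  // e.g. "Java exception in getHiveTableInfo()."
    }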

http://git-wip-us.apache.org/repos/asf/trafodion/blob/923ca573/core/sql/executor/hiveHook.cpp
----------------------------------------------------------------------
diff --git a/core/sql/executor/hiveHook.cpp b/core/sql/executor/hiveHook.cpp
index bc3ef1b..5922599 100644
--- a/core/sql/executor/hiveHook.cpp
+++ b/core/sql/executor/hiveHook.cpp
@@ -35,20 +35,6 @@
 #include "HiveClient_JNI.h"
 #include "Globals.h"
 
-struct hive_sd_desc* populateSD(HiveMetaData *md, Int32 mainSdID, 
-                                Int32 tblID, NAText* tblStr, size_t& pos);
-struct hive_column_desc* populateColumns(HiveMetaData *md, Int32 cdID,  
-                                         NAText* tblStr, size_t& pos);
-struct hive_pkey_desc* populatePartitionKey(HiveMetaData *md, Int32 tblID,  
-                                            NAText* tblStr, size_t& pos);
-struct hive_skey_desc* populateSortCols(HiveMetaData *md, Int32 sdID,  
-                                        NAText* tblStr, size_t& pos);
-struct hive_bkey_desc* populateBucketingCols(HiveMetaData *md, Int32 sdID,  
-                                             NAText* tblStr, size_t& pos);
-NABoolean populateSerDeParams(HiveMetaData *md, Int32 serdeID, 
-                              char& fieldSep, char& recordSep,  
-                              NABoolean &nullFormatSpec, NAString &nullFormat,
-                              NAText* tblStr, size_t& pos);
 
 NABoolean findAToken (HiveMetaData *md, NAText* tblStr, size_t& pos, 
                       const char* tok, const char* errStr,
@@ -60,7 +46,7 @@ NABoolean extractValueStr (HiveMetaData *md, NAText* tblStr, size_t& pos,
                            NABoolean raiseError = TRUE);
 
 
-HiveMetaData::HiveMetaData() : tbl_(NULL),
+HiveMetaData::HiveMetaData(NAHeap *heap) : heap_(heap), tbl_(NULL),
                                currDesc_(NULL),
                                errCode_(0) ,
                                errDetail_(NULL),
@@ -152,12 +138,10 @@ NABoolean HiveMetaData::atEnd()
 
 void HiveMetaData::clear()
 {
-  CollHeap *h = CmpCommon::contextHeap();
-
   hive_tbl_desc* ptr ;
   while (tbl_) {
     ptr = tbl_->next_;       
-    NADELETEBASIC(tbl_, h);
+    NADELETEBASIC(tbl_, heap_);
     tbl_ = ptr;
   }
 
@@ -195,96 +179,6 @@ void HiveMetaData::resetErrorInfo()
   errCodeStr_ = NULL;
 }
 
-struct hive_sd_desc* populateSD(HiveMetaData *md, Int32 mainSdID, 
-                                Int32 tblID,  NAText* tblStr, size_t& pos)
-{
-  struct hive_sd_desc* result = NULL;
-  struct hive_sd_desc* mainSD = NULL;
-  struct hive_sd_desc* last = NULL;
-  char fieldTerminator, recordTerminator;
-
-  size_t foundB;
-  
-  if (!findAToken(md, tblStr, pos, "sd:StorageDescriptor(", 
-                  "getTableDesc::sd:StorageDescriptor(###"))
-    return NULL;
-  struct hive_column_desc* newColumns = populateColumns(md, 0, 
-                                                        tblStr, pos);
-  if (!newColumns)
-    return NULL;
-
-  NAText locationStr;
-  if(!extractValueStr(md, tblStr, pos, "location:", ",", 
-                      locationStr, "populateSD::location:###"))
-    return NULL;
-    
-  NAText inputStr;
-  if(!extractValueStr(md, tblStr, pos, "inputFormat:", ",", 
-                      inputStr, "populateSD:inputFormat:###"))
-    return NULL;
-  
-  NAText outputStr;
-  if(!extractValueStr(md, tblStr, pos, "outputFormat:", ",", 
-                      outputStr, "populateSD:outputFormat:###"))
-    return NULL;
-  
-  NAText compressedStr;
-  NABoolean isCompressed = FALSE;
-  if(!extractValueStr(md, tblStr, pos, "compressed:", ",", 
-                      compressedStr, "populateSD:compressed:###"))
-    return NULL;
-  if (compressedStr == "true")
-    isCompressed = TRUE;
-  
-  NAText numBucketsStr;
-  if(!extractValueStr(md, tblStr, pos, "numBuckets:", ",", 
-                      numBucketsStr, "populateSD:numBuckets:###"))
-    return NULL;
-  Int32 numBuckets = atoi(numBucketsStr.c_str());
-  
-  NABoolean nullFormatSpec = FALSE;
-  NAString nullFormat;
-  NABoolean success = populateSerDeParams(md, 0, fieldTerminator, 
-                                          recordTerminator, 
-                                          nullFormatSpec, nullFormat,
-                                          tblStr, pos);
-  if (!success)
-    return NULL;
-
-  struct hive_bkey_desc* newBucketingCols = 
-    populateBucketingCols(md, 0, tblStr, pos);
-
-  struct hive_skey_desc* newSortCols = populateSortCols(md, 0, 
-                                                        tblStr, pos);
-
-  struct hive_sd_desc* newSD = new (CmpCommon::contextHeap()) 
-    struct hive_sd_desc(0, //SdID
-                        locationStr.c_str(),
-                        0, // creation time
-                        numBuckets,
-                        inputStr.c_str(),
-                        outputStr.c_str(),
-                        (nullFormatSpec ? nullFormat.data() : NULL),
-                        hive_sd_desc::TABLE_SD, 
-                        // TODO : no support for hive_sd_desc::PARTN_SD
-                        newColumns, 
-                        newSortCols, 
-                        newBucketingCols,
-                        fieldTerminator,
-                        recordTerminator,
-                        isCompressed
-                        );
-  
-  result = newSD;
-  
-  // TODO : loop over SDs
-  if (findAToken(md, tblStr, pos, "sd:StorageDescriptor(", 
-                 "getTableDesc::sd:StorageDescriptor(###)",FALSE))
-    return NULL;
-
-  return result;
-}
-
    
 NABoolean hive_sd_desc::isOrcFile() const
 {
@@ -292,6 +186,12 @@ NABoolean hive_sd_desc::isOrcFile() const
     strstr(outputFormat_, "Orc");
 }
 
+NABoolean hive_sd_desc::isParquetFile() const
+{
+  return strstr(inputFormat_, "Parquet") &&
+    strstr(outputFormat_, "Parquet");
+}
+
 NABoolean hive_sd_desc::isSequenceFile() const
 {
   return strstr(inputFormat_, "Sequence") && 
@@ -304,181 +204,6 @@ NABoolean hive_sd_desc::isTextFile() const
     strstr(outputFormat_, "Text");
 }
 
-struct hive_column_desc* populateColumns(HiveMetaData *md, Int32 cdID,  
-                                         NAText* tblStr, size_t& pos)
-{
-  struct hive_column_desc* result = NULL;
-  struct hive_column_desc* last = result;
-
-  std::size_t foundB ;
-  if (!findAToken(md, tblStr, pos, "cols:", 
-                  "populateColumns::cols:###"))
-    return NULL;
-  
-  std::size_t foundE = pos;
-  if (!findAToken(md, tblStr, foundE, ")],", 
-                  "populateColumns::cols:],###"))
-    return NULL;
-  
-  Int32 colIdx = 0;
-  while (pos < foundE)
-    {
-      NAText nameStr;
-      if(!extractValueStr(md, tblStr, pos, "FieldSchema(name:", ",", 
-                          nameStr, "populateColumns::FieldSchema(name:###"))
-        return NULL;
-      
-      NAText typeStr;
-      if(!extractValueStr(md, tblStr, pos, "type:", ", comment", 
-                          typeStr, "populateColumns::type:###"))
-        return NULL;
-      
-      pos++;
-      if (!findAToken(md, tblStr, pos, ",", 
-                      "populateColumns::comment:,###"))
-        return NULL;
-      
-      struct hive_column_desc* newCol = new (CmpCommon::contextHeap())
-        struct hive_column_desc(0, 
-                                nameStr.c_str(),
-                                typeStr.c_str(),
-                                colIdx);
-      
-      if ( result == NULL ) {
-        last = result = newCol;
-      } else {
-        last->next_ = newCol;
-        last = newCol;
-      }
-      
-      colIdx++;
-    } // end of while
-  
-  return result;
-}
-
-struct hive_pkey_desc* populatePartitionKey(HiveMetaData *md, Int32 tblID,  
-                                            NAText* tblStr, size_t& pos)
-{
-  hive_pkey_desc* result = NULL;
-  hive_pkey_desc* last = NULL;
-
-  std::size_t foundB ;
-  if (!findAToken(md, tblStr, pos, "partitionKeys:",
-                  "populatePartitionKeys::partitionKeys:###"))
-    return NULL;
-  
-  std::size_t foundE = pos ;
-  if (!findAToken(md, tblStr, foundE, "],",
-                  "populatePartitionKeys::partitionKeys:],###"))
-    return NULL;
-  
-  Int32 colIdx = 0;
-  while (pos < foundE)
-    {
-      foundB = tblStr->find("FieldSchema(name:", pos);
-      if ((foundB == std::string::npos)||(foundB > foundE)) {
-        return NULL; // no part Key
-      }
-      
-      foundB = foundB + strlen("FieldSchema(name:");
-      pos = foundB ;
-      if (!findAToken(md, tblStr, pos, ",",
-                      "populatePartitionKeys::comment:,###"))
-        return NULL;
-      
-      NAText nameStr = tblStr->substr(foundB, pos-foundB);
-      
-      NAText typeStr;
-      if(!extractValueStr(md, tblStr, pos, "type:", ", comment", 
-                          typeStr, "populatePartitionKeys::type:###"))
-        return NULL;
-      
-      pos++;
-      if (!findAToken(md, tblStr, pos, ",",
-                      "populateColumns::comment:,###"))
-        return NULL;
-      
-      
-      hive_pkey_desc* newPkey = new (CmpCommon::contextHeap())
-        struct hive_pkey_desc(nameStr.c_str(),
-                              typeStr.c_str(),
-                              colIdx);
-      
-      if ( result == NULL ) {
-        last = result = newPkey;
-      } else {
-        last->next_ = newPkey;
-        last = newPkey;
-      }
-      
-      colIdx++;
-    } // end of while
-
-  return result;
-}
-
-struct hive_skey_desc* populateSortCols(HiveMetaData *md, Int32 sdID,  
-                                        NAText* tblStr, size_t& pos)
-{
-  hive_skey_desc* result = NULL;
-  hive_skey_desc* last = NULL;
-
-  std::size_t foundB ;
-  if (!findAToken(md, tblStr, pos, "sortCols:",
-                  "populateSortCols::sortCols:###"))
-    return NULL;
-  
-  std::size_t foundE = pos ;
-  if (!findAToken(md, tblStr, foundE, "],",
-                  "populateSortCols::sortCols:],###"))
-    return NULL;
-  
-  if ((foundE - pos)<=10) //this is important to avoid major performance impact when looking for non existent Order(col over and over, parsing to the end of string. hot spot flagged using gprof
-    return NULL;
-  Int32 colIdx = 0;
-  while (pos < foundE)
-    {
-      foundB = tblStr->find("Order(col:", pos);
-      if ((foundB == std::string::npos)||(foundB > foundE)) {
-        return NULL;
-      }
-      
-      foundB = foundB + strlen("Order(col:");
-      pos = foundB ;
-      if (!findAToken(md, tblStr, pos, ",",
-                      "populateSortCols::name:,###"))
-        return NULL;
-      NAText nameStr = tblStr->substr(foundB, pos-foundB);
-      
-      NAText orderStr;
-      if(!extractValueStr(md, tblStr, pos, "order:", ",", 
-                          orderStr, "populateSortCols::order:###"))
-        return NULL;
-      
-      pos++;
-      if (!findAToken(md, tblStr, pos, ",",
-                      "populateSortColumns::comment:,###"))
-        return NULL;
-      
-      hive_skey_desc* newSkey  = new (CmpCommon::contextHeap())
-        struct hive_skey_desc(nameStr.c_str(),
-                              colIdx,
-                              atoi(orderStr.c_str()));
-      
-      if ( result == NULL ) {
-        last = result = newSkey;
-      } else {
-        last->next_ = newSkey;
-        last = newSkey;
-      }
-      
-      colIdx++;
-    } // end of while
-
-  return result;
-}
-
 static int getAsciiDecimalValue(const char * valPtr)
 {
   if (str_len(valPtr) <= 0) return 0;
@@ -486,109 +211,6 @@ static int getAsciiDecimalValue(const char * valPtr)
   return atoi(valPtr);
 }
 
-NABoolean populateSerDeParams(HiveMetaData *md, Int32 serdeID, 
-                              char& fieldTerminator, char& recordTerminator,
-                              NABoolean &nullFormatSpec, NAString &nullFormat,
-                              NAText* tblStr, size_t& pos)
-{
-
-  fieldTerminator  = '\001';  // this the Hive default ^A or ascii code 1
-  recordTerminator = '\n';    // this is the Hive default
-
-  if (!findAToken(md, tblStr, pos, "serdeInfo:",
-                  "populateSerDeParams::serdeInfo:###"))
-    return FALSE;
-
-  std::size_t foundB = pos;
-  std::size_t foundE = pos;
-
-  if (!findAToken(md, tblStr, foundE, "}),",
-                  "populateSerDeParams::serDeInfo:)},###"))
-    return FALSE;
-  
-  NAText serdeStr = tblStr->substr(foundB, foundE-foundB);
-
-  const char * nullStr = "serialization.null.format=";
-  const char * fieldStr = "field.delim=" ;
-  const char * lineStr = "line.delim=" ;
-
-  nullFormatSpec = FALSE;
-  foundB = serdeStr.find(nullStr);
-  if (foundB != std::string::npos)
-    {
-      nullFormatSpec = TRUE;
-      std::size_t foundNB = foundB + strlen(nullStr);
-      std::size_t foundNE = serdeStr.find(", ", foundNB);
-      if (foundNE == std::string::npos)
-        {
-          foundNE = serdeStr.length();
-        }
-      nullFormat = NAString(serdeStr.substr(foundNB, (foundNE-foundNB)));
-    }
-
-  std::size_t foundDelim = serdeStr.find(fieldStr);
-  if ((foundDelim != std::string::npos))
-    fieldTerminator = serdeStr.at(foundDelim+strlen(fieldStr));
-
-  foundDelim = serdeStr.find(lineStr);
-  if ((foundDelim != std::string::npos))
-    recordTerminator = serdeStr.at(foundDelim+strlen(lineStr));
-  
-  pos = foundE;
-  
-  return TRUE;
-}
-
-struct hive_bkey_desc* populateBucketingCols(HiveMetaData *md, Int32 sdID,  
-                                             NAText* tblStr, size_t& pos)
-{
-  hive_bkey_desc* result = NULL;
-  hive_bkey_desc* last = NULL;
-
-  std::size_t foundB ;
-  if (!findAToken(md, tblStr, pos, "bucketCols:",
-                  "populateBucketingCols::bucketCols:###"))
-    return NULL;
-
-  std::size_t foundE = pos ;
-  if (!findAToken(md, tblStr, foundE, "],",
-                  "populateBucketingCols::bucketCols:],###"))
-    return NULL;
-  
-  
-  pos = pos + strlen("bucketCols:[");
-  if (pos == foundE)
-    return NULL ; // empty bucket cols list. This line is code is for 
-  // clarity alone, the while condition alone is sufficient.
-  
-  Int32 colIdx = 0;
-  while (pos < foundE)
-    {
-      foundB = tblStr->find(",", pos);
-      if ((foundB == std::string::npos)||(foundB > foundE)) {
-        foundB = foundE; // we have only one bucketing col or
-        // this is the last bucket col
-      }
-      NAText nameStr = tblStr->substr(pos, foundB-pos);
-      pos = foundB + 1;
-      
-      hive_bkey_desc* newBkey  = new (CmpCommon::contextHeap())
-        struct hive_bkey_desc(nameStr.c_str(),
-                              colIdx);
-      
-      if ( result == NULL ) {
-        last = result = newBkey;
-      } else {
-        last->next_ = newBkey;
-        last = newBkey;
-      }
-      
-      colIdx++;
-    } // end of while
-
-  return result;
-}
-
 NABoolean findAToken (HiveMetaData *md, NAText* tblStr, size_t& pos, 
                       const char* tok, const char* errStr, NABoolean raiseError)
 {
@@ -623,37 +245,64 @@ NABoolean extractValueStr (HiveMetaData *md, NAText* tblStr, size_t& pos,
   return TRUE;
 }
 
+hive_tblparams_desc::hive_tblparams_desc(NAHeap *heap, const char* tblParamsStr,
+                                         NABoolean obp, Int64 oss, Int32 oris,
+                                         const char * oc, const char* bfc,
+                                         double bff, NABoolean oci) :
+
+     heap_(heap),
+     tblParamsStr_(NULL),
+     orcBlockPadding_(obp),
+     orcStripeSize_(oss), orcRowIndexStride_(oris),
+     orcBloomFilterColumns_(NULL),
+     orcBloomFilterFPP_(bff),
+     orcCreateIndex_(oci)
+{
+  if (tblParamsStr)
+    tblParamsStr_ = strduph(tblParamsStr, heap_);
+  
+  orcCompression_[0] = 0;
+  if (oc)
+    strcpy(orcCompression_, oc);
+  if (bfc)
+    orcBloomFilterColumns_ = strduph(bfc, heap_);
+}
+
 struct hive_tbl_desc* HiveMetaData::getFakedTableDesc(const char* tblName)
 {
-  CollHeap *h = CmpCommon::contextHeap();
-  hive_column_desc* c1 = new (h) hive_column_desc(1, "C1", "int", 0);
-  hive_column_desc* c2 = new (h) hive_column_desc(2, "C2", "string", 1);
-  hive_column_desc* c3 = new (h) hive_column_desc(3, "C3", "float", 2);
+  NAHeap *h = heap_;
+  hive_column_desc* c1 = new (h) hive_column_desc(h, 1, "C1", "int", 0);
+  hive_column_desc* c2 = new (h) hive_column_desc(h, 2, "C2", "string", 1);
+  hive_column_desc* c3 = new (h) hive_column_desc(h, 3, "C3", "float", 2);
 
    c1->next_ = c2;
    c2->next_ = c3;
 
    // sort key c1
-   hive_skey_desc* sk1 = new (h) hive_skey_desc("C1", 1, 1);
+   hive_skey_desc* sk1 = new (h) hive_skey_desc(h, "C1", 1, 1);
 
    // bucket key c2
-   hive_bkey_desc* bk1 = new (h) hive_bkey_desc("C2", 1);
+   hive_bkey_desc* bk1 = new (h) hive_bkey_desc(h, "C2", 1);
 
 
-   hive_sd_desc* sd1 = new (h)hive_sd_desc(1, "loc", 0, 1, "ift", "oft", NULL,
+   hive_sd_desc* sd1 = new (h) hive_sd_desc(h, 1, "loc", 0, 1, "ift", "oft", NULL,
                                            hive_sd_desc::TABLE_SD, c1, 
                                            sk1, bk1, '\010', '\n',
-                                           FALSE);
+                                           FALSE, NULL);
 
-   hive_tbl_desc* tbl1 = new (h) hive_tbl_desc(1, "myHive", "default", "me",
+   hive_tbl_desc* tbl1 = new (h) hive_tbl_desc(h, 1, "myHive", "default", "me",
                                                "MANAGED",
-                                               0, NULL, NULL, sd1, 0);
+                                               0, NULL, NULL, sd1, NULL, NULL);
 
    return tbl1;
 }
 
-struct hive_tbl_desc* HiveMetaData::getTableDesc(const char* schemaName,
-                                                 const char* tblName)
+struct hive_tbl_desc* HiveMetaData::getTableDesc(const char* schemaName,
+                                                 const char* tblName,
+                                                 Int64 expirationTS,
+                                                 NABoolean validateOnly,
+                                                 NABoolean rereadFromMD,
+                                                 NABoolean readPartnInfo)
 {
     struct hive_tbl_desc *ptr = tbl_;
 
@@ -676,108 +325,46 @@ struct hive_tbl_desc* HiveMetaData::getTableDesc(const char* schemaName,
               ptr2 = ptr2->next_;
             }
           }        
-          NADELETEBASIC(ptr, CmpCommon::contextHeap());
+          NADELETEBASIC(ptr, heap_);
           ptr = NULL;
           break;
         }
       }
 
       ptr = ptr->next_;
-
    }
 
-   // table not found in cache, try to read it from metadata
-   hive_tbl_desc * result = NULL;
-   Int64 creationTS;
-
-   NAText* tblStr = new (CmpCommon::statementHeap()) string();
-   if (!tblStr)
-     return NULL;
-
-   HVC_RetCode retCode = HiveClient_JNI::getHiveTableStr(schemaName, 
-                                                  tblName, *tblStr);
-   if ((retCode != HVC_OK) && (retCode != HVC_DONE)) {
-     recordError((Int32)retCode, "HiveClient_JNI::getTableStr()");
-     return NULL;
+   HVC_RetCode hvcRetcode;
+   HiveClient_JNI *hiveClient = HiveClient_JNI::newInstance(heap_, hvcRetcode);
+   if (hvcRetcode != HVC_OK) {
+      recordError((Int32)hvcRetcode, "HiveClient_JNI::newInstance()");
+      return NULL;
+   }
+ 
+   hvcRetcode = hiveClient->getHiveTableInfo(schemaName, tblName, readPartnInfo);
+   if (hvcRetcode != HVC_OK && hvcRetcode != HVC_DONE) {
+      recordError((Int32)hvcRetcode, "HiveClient_JNI::getHiveTableInfo()");
+      NADELETE(hiveClient, HiveClient_JNI, heap_);
+      return NULL;
+   }
+   if (hvcRetcode == HVC_DONE) {
+      NADELETE(hiveClient, HiveClient_JNI, heap_);
+      return NULL;
    }
-   if (retCode == HVC_DONE) // table not found.
-     return NULL;
-
-   NAText tblNameStr;
-   size_t pos = 0;
-   if(!extractValueStr(this, tblStr, pos, "tableName:", ",", 
-                       tblNameStr, "getTableDesc::tableName:###"))
-     return NULL;
-   
-   NAText schNameStr;
-   pos = 0;
-   if(!extractValueStr(this, tblStr, pos, "dbName:", ",", 
-                       schNameStr, "getTableDesc::dbName:###"))
-     return NULL;
-   
-   NAText ownerStr;
-   pos = 0;
-   if(!extractValueStr(this, tblStr, pos, "owner:", ",", 
-                       ownerStr, "getTableDesc:owner:###"))
-     return NULL;
-
-   NAText createTimeStr;
-   pos = 0;
-   if(!extractValueStr(this, tblStr, pos, "createTime:", ",", 
-                       createTimeStr, "getTableDesc::createTime:###"))
-     return NULL;
-
-   creationTS = atol(createTimeStr.c_str());
 
-   
-   // TODO: need to handle multiple SDs
-   struct hive_sd_desc* sd = populateSD(this, 0,0, tblStr, pos);
-   if (!sd)
-     return NULL;
-   struct hive_pkey_desc* pkey = populatePartitionKey(this, 0, 
-                                                      tblStr, pos);
-   
-   NAText tableTypeStr;
-   pos = 0;
-   if(!extractValueStr(this, tblStr, pos, "tableType:", ")", 
-                       tableTypeStr, "getTableDesc:tableType:###"))
-     return NULL;
-   
-   NAText viewOriginalStr;
-   NAText viewExpandedStr;
-   if ((NOT tableTypeStr.empty()) && (tableTypeStr == "VIRTUAL_VIEW"))
-     {
-       pos = 0;
-       if(!extractValueStr(this, tblStr, pos, " viewOriginalText:", ", viewExpandedText:", 
-                           viewOriginalStr, "getTableDesc:viewOriginalText:###"))
-         return NULL;
-
-       pos = 0;
-       if(!extractValueStr(this, tblStr, pos, "viewExpandedText:", ", tableType:", 
-                           viewExpandedStr, "getTableDesc:viewExpandedText:###"))
-         return NULL;
-     }
-
-   result = 
-     new (CmpCommon::contextHeap()) 
-     struct hive_tbl_desc(0, // no tblID with JNI 
-                          tblNameStr.c_str(), 
-                          schNameStr.c_str(),
-                          ownerStr.c_str(),
-                          tableTypeStr.c_str(),
-                          creationTS,
-                          viewOriginalStr.c_str(),
-                          viewExpandedStr.c_str(),
-                          sd, pkey);
-   
+   // not found in cache (or the cached entry was stale): build the
+   // descriptor from the info just read from the metastore
+   hive_tbl_desc * hiveTableDesc;
+   hvcRetcode = hiveClient->getHiveTableDesc(heap_, hiveTableDesc);
+   if (hvcRetcode != HVC_OK) {
+      recordError((Int32)hvcRetcode, "HiveClient_JNI::getHiveTableDesc()");
+      NADELETE(hiveClient, HiveClient_JNI, heap_);
+      return NULL;
+   } 
    // add the new table to the cache
-   result->next_ = tbl_;
-   tbl_ = result;
-   
-   
-   //delete tblStr ;
-
-   return result;
+   hiveTableDesc->next_ = tbl_;
+   tbl_ = hiveTableDesc;
+   NADELETE(hiveClient, HiveClient_JNI, heap_);
+   return hiveTableDesc;
 }
 
 NABoolean HiveMetaData::validate(Int32 tableId, Int64 redefTS, 
@@ -801,40 +388,43 @@ NABoolean HiveMetaData::validate(Int32 tableId, Int64 redefTS,
   return result;
 }
 
-hive_tbl_desc::hive_tbl_desc(Int32 tblID, const char* name, const char* schName,
+hive_tbl_desc::hive_tbl_desc(NAHeap *heap, Int32 tblID, const char* name, const char* schName, 
                              const char * owner,
                              const char * tableType,
                              Int64 creationTS, 
                              const char * viewOriginalText,
                              const char * viewExpandedText,
                              struct hive_sd_desc* sd,
-                             struct hive_pkey_desc* pk)
-     : tblID_(tblID), 
+                             struct hive_pkey_desc* pk,
+                             struct hive_tblparams_desc* tp)
+     : heap_(heap), tblID_(tblID), 
        viewOriginalText_(NULL), viewExpandedText_(NULL),
-       sd_(sd), creationTS_(creationTS), pkey_(pk), next_(NULL)
+       sd_(sd), tblParams_(tp),
+       creationTS_(creationTS), pkey_(pk), next_(NULL)
 {  
-  tblName_ = strduph(name, CmpCommon::contextHeap());
-  schName_ = strduph(schName, CmpCommon::contextHeap()); 
+  tblName_ = strduph(name, heap_);
+  schName_ = strduph(schName, heap_);
+  validationTS_ = JULIANTIMESTAMP();
 
   if (owner)
-    owner_ = strduph(owner, CmpCommon::contextHeap());
+    owner_ = strduph(owner, heap_);
   else
     owner_ = NULL;
 
   if (tableType)
-    tableType_ = strduph(tableType, CmpCommon::contextHeap());
+    tableType_ = strduph(tableType, heap_);
   else
     tableType_ = NULL;
 
   if (isView())
     {
       if (viewOriginalText)
-        viewOriginalText_ = strduph(viewOriginalText, CmpCommon::contextHeap());
+        viewOriginalText_ = strduph(viewOriginalText, heap_);
       else
         viewOriginalText_ = NULL;
 
       if (viewExpandedText)
-        viewExpandedText_ = strduph(viewExpandedText, CmpCommon::contextHeap());
+        viewExpandedText_ = strduph(viewExpandedText, heap_);
       else
         viewExpandedText_ = NULL;
     }
@@ -998,55 +588,53 @@ Int64 hive_tbl_desc::redeftime()
 
 hive_tbl_desc::~hive_tbl_desc()
 {
-  CollHeap *h = CmpCommon::contextHeap();
   if (tblName_)
-    NADELETEBASIC(tblName_, h);
+    NADELETEBASIC(tblName_, heap_);
   if (schName_)
-    NADELETEBASIC(schName_, h);
+    NADELETEBASIC(schName_, heap_);
 
    hive_sd_desc* ptr ;
    while (sd_) {
     ptr = sd_->next_;       
-    NADELETEBASIC(sd_, h);
+    NADELETEBASIC(sd_, heap_);
     sd_ = ptr;
   }
  
    hive_pkey_desc* ptr1 ;
    while (pkey_) {
     ptr1 = pkey_->next_;       
-    NADELETEBASIC(pkey_, h);
+    NADELETEBASIC(pkey_, heap_);
     pkey_ = ptr1;
   }
 }
 
 hive_sd_desc::~hive_sd_desc()
 {
-  CollHeap *h = CmpCommon::contextHeap();
   if (location_)
-    NADELETEBASIC(location_, h);
+    NADELETEBASIC(location_, heap_);
   if (inputFormat_)
-    NADELETEBASIC(inputFormat_, h);
+    NADELETEBASIC(inputFormat_, heap_);
   if (outputFormat_)
-    NADELETEBASIC(outputFormat_, h);
+    NADELETEBASIC(outputFormat_, heap_);
 
   hive_column_desc* ptr ;
   while (column_) {
     ptr = column_->next_;       
-    NADELETEBASIC(column_, h);
+    NADELETEBASIC(column_, heap_);
     column_ = ptr;
   }
  
   hive_skey_desc* ptr1 ;
   while (skey_) {
     ptr1 = skey_->next_;       
-    NADELETEBASIC(skey_, h);
+    NADELETEBASIC(skey_, heap_);
     skey_ = ptr1;
   }
 
   hive_bkey_desc* ptr2 ;
   while (bkey_) {
     ptr2 = bkey_->next_;       
-    NADELETEBASIC(bkey_, h);
+    NADELETEBASIC(bkey_, heap_);
     bkey_ = ptr2;
   }
 }
@@ -1054,32 +642,28 @@ hive_sd_desc::~hive_sd_desc()
 
 hive_pkey_desc::~hive_pkey_desc()
 {
-  CollHeap *h = CmpCommon::contextHeap();
   if (name_)
-    NADELETEBASIC(name_, h);
+    NADELETEBASIC(name_, heap_);
   if (type_)
-    NADELETEBASIC(type_, h);
+    NADELETEBASIC(type_, heap_);
 }
 
 hive_skey_desc::~hive_skey_desc()
 {
-  CollHeap *h = CmpCommon::contextHeap();
   if (name_)
-    NADELETEBASIC(name_, h);
+    NADELETEBASIC(name_, heap_);
 }
 
 hive_bkey_desc::~hive_bkey_desc()
 {
-  CollHeap *h = CmpCommon::contextHeap();
   if (name_)
-    NADELETEBASIC(name_, h);
+    NADELETEBASIC(name_, heap_);
 }
 
 hive_column_desc::~hive_column_desc()
 {
-  CollHeap *h = CmpCommon::contextHeap();
   if (name_)
-    NADELETEBASIC(name_, h);
+    NADELETEBASIC(name_, heap_);
   if (type_)
-    NADELETEBASIC(type_, h);
+    NADELETEBASIC(type_, heap_);
 }

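For reference, a minimal caller-side sketch (not part of this patch) of how the
refactored HiveMetaData::getTableDesc() shown above can be driven. The include,
heap handling and error handling are simplified assumptions; only signatures that
appear in this diff are relied upon.

    // Illustrative sketch only -- assumes the declarations in hiveHook.h
    // (HiveMetaData, hive_tbl_desc, hive_sd_desc) and a valid NAHeap.
    #include "hiveHook.h"

    hive_tbl_desc *lookupHiveTable(NAHeap *heap,
                                   const char *schName,
                                   const char *tblName)
    {
      HiveMetaData *md = new (heap) HiveMetaData(heap);
      if (!md->init())
        return NULL;                       // could not reach the Hive metastore

      // Same argument order as the new signature above:
      // expirationTS = 0, validateOnly = FALSE,
      // rereadFromMD = FALSE, readPartnInfo = TRUE.
      hive_tbl_desc *htbl = md->getTableDesc(schName, tblName,
                                             0, FALSE, FALSE, TRUE);
      if (htbl && htbl->sd_)
        {
          const hive_sd_desc *sd = htbl->sd_;
          if (sd->isParquetFile())
            { /* Parquet-specific handling */ }
          else if (sd->isOrcFile())
            { /* ORC-specific handling */ }
          else if (sd->isTextFile())
            { /* delimited-text handling */ }
        }
      return htbl;
    }
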
http://git-wip-us.apache.org/repos/asf/trafodion/blob/923ca573/core/sql/executor/org_trafodion_sql_HiveClient.h
----------------------------------------------------------------------
diff --git a/core/sql/executor/org_trafodion_sql_HiveClient.h b/core/sql/executor/org_trafodion_sql_HiveClient.h
new file mode 100644
index 0000000..9e36b7b
--- /dev/null
+++ b/core/sql/executor/org_trafodion_sql_HiveClient.h
@@ -0,0 +1,61 @@
+/* DO NOT EDIT THIS FILE - it is machine generated */
+#include <jni.h>
+/* Header for class org_trafodion_sql_HiveClient */
+
+#ifndef _Included_org_trafodion_sql_HiveClient
+#define _Included_org_trafodion_sql_HiveClient
+#ifdef __cplusplus
+extern "C" {
+#endif
+#undef org_trafodion_sql_HiveClient_Table_TABLE_NAME
+#define org_trafodion_sql_HiveClient_Table_TABLE_NAME 0L
+#undef org_trafodion_sql_HiveClient_Table_DB_NAME
+#define org_trafodion_sql_HiveClient_Table_DB_NAME 1L
+#undef org_trafodion_sql_HiveClient_Table_OWNER
+#define org_trafodion_sql_HiveClient_Table_OWNER 2L
+#undef org_trafodion_sql_HiveClient_Table_CREATE_TIME
+#define org_trafodion_sql_HiveClient_Table_CREATE_TIME 3L
+#undef org_trafodion_sql_HiveClient_Table_TABLE_TYPE
+#define org_trafodion_sql_HiveClient_Table_TABLE_TYPE 4L
+#undef org_trafodion_sql_HiveClient_Table_VIEW_ORIGINAL_TEXT
+#define org_trafodion_sql_HiveClient_Table_VIEW_ORIGINAL_TEXT 5L
+#undef org_trafodion_sql_HiveClient_Table_VIEW_EXPANDED_TEXT
+#define org_trafodion_sql_HiveClient_Table_VIEW_EXPANDED_TEXT 6L
+#undef org_trafodion_sql_HiveClient_Table_SD_COMPRESSED
+#define org_trafodion_sql_HiveClient_Table_SD_COMPRESSED 7L
+#undef org_trafodion_sql_HiveClient_Table_SD_LOCATION
+#define org_trafodion_sql_HiveClient_Table_SD_LOCATION 8L
+#undef org_trafodion_sql_HiveClient_Table_SD_INPUT_FORMAT
+#define org_trafodion_sql_HiveClient_Table_SD_INPUT_FORMAT 9L
+#undef org_trafodion_sql_HiveClient_Table_SD_OUTPUT_FORMAT
+#define org_trafodion_sql_HiveClient_Table_SD_OUTPUT_FORMAT 10L
+#undef org_trafodion_sql_HiveClient_Table_SD_NUM_BUCKETS
+#define org_trafodion_sql_HiveClient_Table_SD_NUM_BUCKETS 11L
+#undef org_trafodion_sql_HiveClient_Table_NULL_FORMAT
+#define org_trafodion_sql_HiveClient_Table_NULL_FORMAT 12L
+#undef org_trafodion_sql_HiveClient_Table_FIELD_DELIM
+#define org_trafodion_sql_HiveClient_Table_FIELD_DELIM 13L
+#undef org_trafodion_sql_HiveClient_Table_LINE_DELIM
+#define org_trafodion_sql_HiveClient_Table_LINE_DELIM 14L
+#undef org_trafodion_sql_HiveClient_Table_FIELD_COUNT
+#define org_trafodion_sql_HiveClient_Table_FIELD_COUNT 12L
+#undef org_trafodion_sql_HiveClient_Col_NAME
+#define org_trafodion_sql_HiveClient_Col_NAME 0L
+#undef org_trafodion_sql_HiveClient_Col_TYPE
+#define org_trafodion_sql_HiveClient_Col_TYPE 1L
+#undef org_trafodion_sql_HiveClient_Col_COMMENT
+#define org_trafodion_sql_HiveClient_Col_COMMENT 2L
+#undef org_trafodion_sql_HiveClient_Col_FIELD_COUNT
+#define org_trafodion_sql_HiveClient_Col_FIELD_COUNT 3L
+/*
+ * Class:     org_trafodion_sql_HiveClient
+ * Method:    setTableInfo
+ * Signature: (J[Ljava/lang/String;[[Ljava/lang/String;[[Ljava/lang/String;[Ljava/lang/String;[Ljava/lang/String;[I[Ljava/lang/String;[Ljava/lang/String;[Ljava/lang/String;[[Ljava/lang/String;)V
+ */
+JNIEXPORT void JNICALL Java_org_trafodion_sql_HiveClient_setTableInfo
+  (JNIEnv *, jobject, jlong, jobjectArray, jobjectArray, jobjectArray, jobjectArray, jobjectArray, jintArray, jobjectArray, jobjectArray, jobjectArray, jobjectArray);
+
+#ifdef __cplusplus
+}
+#endif
+#endif

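The constants above suggest that the Java side hands each metastore object back to
native code as parallel String[] arrays indexed by these Table_*/Col_* positions. The
commit's actual native implementation lives in HiveClient_JNI.cpp (not shown in this
part of the patch); the following is only a generic, hypothetical skeleton of how a
jobjectArray of strings received by a native such as setTableInfo() is typically
unpacked on the C++ side.

    // Hypothetical helper, not the project's code: copy a Java String[]
    // argument into std::string values.
    #include <jni.h>
    #include <string>
    #include <vector>

    static std::vector<std::string> javaStringArrayToVector(JNIEnv *env,
                                                            jobjectArray arr)
    {
      std::vector<std::string> out;
      if (arr == NULL)
        return out;
      jsize len = env->GetArrayLength(arr);
      for (jsize i = 0; i < len; i++) {
        jstring js = (jstring)env->GetObjectArrayElement(arr, i);
        if (js == NULL) {
          // tolerate null entries (e.g. view text fields for a base table)
          out.push_back("");
          continue;
        }
        const char *utf = env->GetStringUTFChars(js, NULL);
        out.push_back(utf ? utf : "");
        if (utf)
          env->ReleaseStringUTFChars(js, utf);
        env->DeleteLocalRef(js);           // keep the local reference table small
      }
      return out;
    }
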
http://git-wip-us.apache.org/repos/asf/trafodion/blob/923ca573/core/sql/optimizer/NATable.cpp
----------------------------------------------------------------------
diff --git a/core/sql/optimizer/NATable.cpp b/core/sql/optimizer/NATable.cpp
index 0f282f6..2149f31 100644
--- a/core/sql/optimizer/NATable.cpp
+++ b/core/sql/optimizer/NATable.cpp
@@ -8409,7 +8409,7 @@ NATable * NATableDB::get(CorrName& corrName, BindWA * bindWA,
       if ( hiveMetaDB_ == NULL ) {
 	if (CmpCommon::getDefault(HIVE_USE_FAKE_TABLE_DESC) != DF_ON)
 	  {
-	    hiveMetaDB_ = new (CmpCommon::contextHeap()) HiveMetaData();
+	    hiveMetaDB_ = new (CmpCommon::contextHeap()) HiveMetaData((NAHeap *)CmpCommon::contextHeap());
 	    
 	    if ( !hiveMetaDB_->init() ) {
 	      *CmpCommon::diags() << DgSqlCode(-1190)
@@ -8427,7 +8427,7 @@ NATable * NATableDB::get(CorrName& corrName, BindWA * bindWA,
 	  }
 	else
 	  hiveMetaDB_ = new (CmpCommon::contextHeap()) 
-            HiveMetaData(); // fake metadata
+            HiveMetaData((NAHeap *)CmpCommon::contextHeap()); // fake metadata
       }
       
       // this default schema name is what the Hive default schema is called in SeaHive
@@ -8447,7 +8447,11 @@ NATable * NATableDB::get(CorrName& corrName, BindWA * bindWA,
        if (CmpCommon::getDefault(HIVE_USE_FAKE_TABLE_DESC) == DF_ON)
          htbl = hiveMetaDB_->getFakedTableDesc(tableNameInt);
        else
-         htbl = hiveMetaDB_->getTableDesc(schemaNameInt, tableNameInt);
+         htbl = hiveMetaDB_->getTableDesc(schemaNameInt, tableNameInt,
+                0, FALSE,
+                // reread Hive Table Desc from MD.
+                (CmpCommon::getDefault(TRAF_RELOAD_NATABLE_CACHE) == DF_ON),
+                TRUE);
 
        NAString extName = ComConvertNativeNameToTrafName(
             corrName.getQualifiedNameObj().getCatalogName(),
@@ -8595,7 +8599,7 @@ NATable * NATableDB::get(CorrName& corrName, BindWA * bindWA,
         // ------------------------------------------------------------------
         if ( hiveMetaDB_ == NULL ) 
           {
-            hiveMetaDB_ = new (CmpCommon::contextHeap()) HiveMetaData();
+            hiveMetaDB_ = new (CmpCommon::contextHeap()) HiveMetaData((NAHeap *)CmpCommon::contextHeap());
             
             if ( !hiveMetaDB_->init() ) 
               {
@@ -8645,11 +8649,11 @@ NATable * NATableDB::get(CorrName& corrName, BindWA * bindWA,
           }
 
         htbl = new(naTableHeap) hive_tbl_desc
-          (0, 
+          ((NAHeap *)naTableHeap, 0, 
            corrName.getQualifiedNameObj().getObjectName(),
            corrName.getQualifiedNameObj().getSchemaName(),
            NULL, NULL,
-           0, NULL, NULL, NULL, NULL);
+           0, NULL, NULL, NULL, NULL, NULL);
         table = new (naTableHeap) NATable
           (bindWA, corrName, naTableHeap, htbl);