You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@metamodel.apache.org by ka...@apache.org on 2016/10/13 19:37:57 UTC

metamodel git commit: METAMODEL-1119: Fixed

Repository: metamodel
Updated Branches:
  refs/heads/master 423676852 -> 3949af857


METAMODEL-1119: Fixed

Fixes #133

Project: http://git-wip-us.apache.org/repos/asf/metamodel/repo
Commit: http://git-wip-us.apache.org/repos/asf/metamodel/commit/3949af85
Tree: http://git-wip-us.apache.org/repos/asf/metamodel/tree/3949af85
Diff: http://git-wip-us.apache.org/repos/asf/metamodel/diff/3949af85

Branch: refs/heads/master
Commit: 3949af857282f7c17a9be279e333348c5d3a0196
Parents: 4236768
Author: Dennis Du Krøger <lo...@apache.org>
Authored: Thu Oct 13 12:37:48 2016 -0700
Committer: Kasper Sørensen <i....@gmail.com>
Committed: Thu Oct 13 12:37:48 2016 -0700

----------------------------------------------------------------------
 CHANGES.md                                      |  1 +
 .../apache/metamodel/jdbc/JdbcDataContext.java  | 48 +++++++++-----
 .../jdbc/dialects/HiveQueryRewriter.java        | 10 +++
 .../integrationtests/HiveIntegrationTest.java   | 69 +++++++++++++++++---
 4 files changed, 101 insertions(+), 27 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/metamodel/blob/3949af85/CHANGES.md
----------------------------------------------------------------------
diff --git a/CHANGES.md b/CHANGES.md
index 5c0b893..bef1902 100644
--- a/CHANGES.md
+++ b/CHANGES.md
@@ -7,6 +7,7 @@
  * [METAMODEL-1115] - Added support for passing your own PartnerConnection object to the Salesforce.com connector.
  * [METAMODEL-1113] - Fixed support for ColumnNamingStrategy in CSV connector.
  * [METAMODEL-1114] - Added support for ColumnNamingStrategy in EBCDIC connector.
+ * [METAMODEL-1119] - Worked around Hive JDBC driver issues, avoiding non-compliant metadata calls.
 
 ### Apache MetaModel 4.5.4
 

http://git-wip-us.apache.org/repos/asf/metamodel/blob/3949af85/jdbc/src/main/java/org/apache/metamodel/jdbc/JdbcDataContext.java
----------------------------------------------------------------------
diff --git a/jdbc/src/main/java/org/apache/metamodel/jdbc/JdbcDataContext.java b/jdbc/src/main/java/org/apache/metamodel/jdbc/JdbcDataContext.java
index 63e42e5..8cd027b 100644
--- a/jdbc/src/main/java/org/apache/metamodel/jdbc/JdbcDataContext.java
+++ b/jdbc/src/main/java/org/apache/metamodel/jdbc/JdbcDataContext.java
@@ -654,7 +654,14 @@ public class JdbcDataContext extends AbstractDataContext implements UpdateableDa
                 // Second strategy: Find default schema name by examining the
                 // URL
                 if (!found) {
-                    String url = metaData.getURL();
+                    String url = null;
+                    try {
+                        url = metaData.getURL();
+                    } catch (SQLException e) {
+                        if (!DATABASE_PRODUCT_HIVE.equals(_databaseProductName)) {
+                            throw e;
+                        }
+                    }
                     if (url != null && url.length() > 0) {
                         if (schemaNames.length > 0) {
                             StringTokenizer st = new StringTokenizer(url, "/\\:");
@@ -679,7 +686,14 @@ public class JdbcDataContext extends AbstractDataContext implements UpdateableDa
 
                 // Third strategy: Check for schema equal to username
                 if (!found) {
-                    String username = metaData.getUserName();
+                    String username = null;
+                    try {
+                        username = metaData.getUserName();
+                    } catch (SQLException e) {
+                        if (!DATABASE_PRODUCT_HIVE.equals(_databaseProductName)) {
+                            throw e;
+                        }
+                    }
                     if (username != null) {
                         for (int i = 0; i < schemaNames.length && !found; i++) {
                             if (username.equalsIgnoreCase(schemaNames[i])) {
@@ -708,30 +722,28 @@ public class JdbcDataContext extends AbstractDataContext implements UpdateableDa
                     found = true;
                 }
                 if (DATABASE_PRODUCT_HSQLDB.equalsIgnoreCase(_databaseProductName)) {
-                    for (int i = 0; i < schemaNames.length && !found; i++) {
-                        String schemaName = schemaNames[i];
-                        if ("PUBLIC".equals(schemaName)) {
-                            result = schemaName;
-                            found = true;
-                            break;
-                        }
-                    }
+                    result = findDefaultSchema("PUBLIC", schemaNames);
                 }
                 if (DATABASE_PRODUCT_SQLSERVER.equals(_databaseProductName)) {
-                    for (int i = 0; i < schemaNames.length && !found; i++) {
-                        String schemaName = schemaNames[i];
-                        if ("dbo".equals(schemaName)) {
-                            result = schemaName;
-                            found = true;
-                            break;
-                        }
-                    }
+                    result = findDefaultSchema("dbo", schemaNames);
+                }
+                if (DATABASE_PRODUCT_HIVE.equals(_databaseProductName)) {
+                    result = findDefaultSchema("default", schemaNames);
                 }
             }
         }
         return result;
     }
 
+    private String findDefaultSchema(final String defaultName, final String[] schemaNames) {
+        for (String schemaName : schemaNames) {
+            if (defaultName.equals(schemaName)) {
+                return schemaName;
+            }
+        }
+        return null;
+    }
+
     /**
      * Microsoft SQL Server returns users instead of schemas when calling
      * metadata.getSchemas() This is a simple workaround.

http://git-wip-us.apache.org/repos/asf/metamodel/blob/3949af85/jdbc/src/main/java/org/apache/metamodel/jdbc/dialects/HiveQueryRewriter.java
----------------------------------------------------------------------
diff --git a/jdbc/src/main/java/org/apache/metamodel/jdbc/dialects/HiveQueryRewriter.java b/jdbc/src/main/java/org/apache/metamodel/jdbc/dialects/HiveQueryRewriter.java
index b18b9aa..59608d9 100644
--- a/jdbc/src/main/java/org/apache/metamodel/jdbc/dialects/HiveQueryRewriter.java
+++ b/jdbc/src/main/java/org/apache/metamodel/jdbc/dialects/HiveQueryRewriter.java
@@ -35,6 +35,16 @@ public class HiveQueryRewriter extends DefaultQueryRewriter {
         if (columnType == ColumnType.INTEGER) {
             return "INT";
         }
+
+        if(columnType == ColumnType.STRING) {
+            return "STRING";
+        }
+
+        // Hive does not support VARCHAR without a width, nor VARCHAR(MAX).
+        // Returning max allowable column size instead.
+        if (columnType == ColumnType.VARCHAR && columnSize == null) {
+            return super.rewriteColumnType(columnType, 65535);
+        }
         return super.rewriteColumnType(columnType, columnSize);
     }
     

http://git-wip-us.apache.org/repos/asf/metamodel/blob/3949af85/jdbc/src/test/java/org/apache/metamodel/jdbc/integrationtests/HiveIntegrationTest.java
----------------------------------------------------------------------
diff --git a/jdbc/src/test/java/org/apache/metamodel/jdbc/integrationtests/HiveIntegrationTest.java b/jdbc/src/test/java/org/apache/metamodel/jdbc/integrationtests/HiveIntegrationTest.java
index 6f6b9ec..b26bfe8 100644
--- a/jdbc/src/test/java/org/apache/metamodel/jdbc/integrationtests/HiveIntegrationTest.java
+++ b/jdbc/src/test/java/org/apache/metamodel/jdbc/integrationtests/HiveIntegrationTest.java
@@ -18,6 +18,10 @@
  */
 package org.apache.metamodel.jdbc.integrationtests;
 
+import java.sql.Connection;
+import java.sql.SQLException;
+import java.sql.Statement;
+
 import org.apache.metamodel.UpdateCallback;
 import org.apache.metamodel.UpdateScript;
 import org.apache.metamodel.create.CreateTable;
@@ -28,14 +32,61 @@ import org.apache.metamodel.jdbc.dialects.HiveQueryRewriter;
 import org.apache.metamodel.schema.ColumnType;
 import org.apache.metamodel.schema.Schema;
 import org.apache.metamodel.schema.Table;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 public class HiveIntegrationTest extends AbstractJdbIntegrationTest {
+    private static final Logger logger = LoggerFactory.getLogger(HiveIntegrationTest.class);
 
     @Override
     protected String getPropertyPrefix() {
         return "hive";
     }
 
+    private void createSchemas() throws SQLException {
+        Connection connection = getConnection();
+        final Statement st = connection.createStatement();
+        final String createFirstSql = "CREATE SCHEMA IF NOT EXISTS a_metamodel_test";
+        logger.info("SQL updated fired (return {}): {}", st.executeUpdate(createFirstSql), createFirstSql);
+        final String createLastSql = "CREATE SCHEMA IF NOT EXISTS z_metamodel_test";
+        logger.info("SQL updated fired (return {}): {}", st.executeUpdate(createLastSql), createLastSql);
+    }
+
+    private void deleteSchemas() throws SQLException {
+        Connection connection = getConnection();
+        final Statement st = connection.createStatement();
+        final String deleteFirstSql = "DROP SCHEMA IF EXISTS a_metamodel_test";
+        logger.info("SQL updated fired (return {}): {}", st.executeUpdate(deleteFirstSql), deleteFirstSql);
+        final String deleteLastSql = "DROP SCHEMA IF EXISTS z_metamodel_test";
+        logger.info("SQL updated fired (return {}): {}", st.executeUpdate(deleteLastSql), deleteLastSql);
+    }
+
+    public void testDefaultGetSchema() throws Exception {
+        if (!isConfigured()) {
+            return;
+        }
+
+        try {
+            try {
+                createSchemas();
+            } catch (SQLException e) {
+                fail("Schema creation failed");
+            }
+
+            final JdbcDataContext dataContext = getDataContext();
+            final Schema schema = dataContext.getDefaultSchema();
+
+            assertEquals("Schema[name=default]", schema.toString());
+
+        } finally {
+            try {
+                deleteSchemas();
+            } catch (SQLException e) {
+                logger.warn("Weird, couldn't delete test schemas");
+            }
+        }
+    }
+
     public void testGetSchema() throws Exception {
         if (!isConfigured()) {
             return;
@@ -45,7 +96,7 @@ public class HiveIntegrationTest extends AbstractJdbIntegrationTest {
         final Schema schema = dataContext.getSchemaByName("default");
         assertEquals("Schema[name=default]", schema.toString());
     }
-    
+
     public void testUseCorrectRewriter() throws Exception {
         if (!isConfigured()) {
             return;
@@ -65,19 +116,19 @@ public class HiveIntegrationTest extends AbstractJdbIntegrationTest {
         final Schema schema = dataContext.getDefaultSchema();
 
         dataContext.executeUpdate(new CreateTable(schema, tableName).withColumn("foo").ofType(ColumnType.STRING)
-                .withColumn("bar").ofType(ColumnType.INTEGER));
+                .withColumn("bar").ofType(ColumnType.INTEGER).withColumn("baz").ofType(ColumnType.VARCHAR));
         try {
             final Table table = dataContext.getTableByQualifiedLabel(tableName);
             assertNotNull(table);
             
             dataContext.executeUpdate(new UpdateScript() {
-                
+
                 @Override
                 public void run(UpdateCallback callback) {
-                    callback.insertInto(table).value("foo", "Hello world").value("bar", 42).execute();
-                    callback.insertInto(table).value("foo", "Lorem ipsum").value("bar", 42).execute();
-                    callback.insertInto(table).value("foo", "Apache").value("bar", 43).execute();
-                    callback.insertInto(table).value("foo", "MetaModel").value("bar", 44).execute();
+                    callback.insertInto(table).value("foo", "Hello world").value("bar", 42).value("baz", "Hive").execute();
+                    callback.insertInto(table).value("foo", "Lorem ipsum").value("bar", 42).value("baz", "Five").execute();
+                    callback.insertInto(table).value("foo", "Apache").value("bar", 43).value("baz", "Live").execute();
+                    callback.insertInto(table).value("foo", "MetaModel").value("bar", 44).value("baz", "Jive").execute();
                 }
             });
             
@@ -89,9 +140,9 @@ public class HiveIntegrationTest extends AbstractJdbIntegrationTest {
             
             final DataSet ds2 = dataContext.query().from(table).selectAll().where("bar").eq(42).execute();
             assertTrue(ds2.next());
-            assertEquals("Row[values=[Hello world, 42]]", ds2.getRow().toString());
+            assertEquals("Row[values=[Hello world, 42, Hive]]", ds2.getRow().toString());
             assertTrue(ds2.next());
-            assertEquals("Row[values=[Lorem ipsum, 42]]", ds2.getRow().toString());
+            assertEquals("Row[values=[Lorem ipsum, 42, Five]]", ds2.getRow().toString());
             assertFalse(ds2.next());
             ds2.close();
         } finally {