Posted to commits@drill.apache.org by vo...@apache.org on 2019/07/29 09:20:17 UTC

[drill] branch master updated (efbf4fc -> fb30eb9)

This is an automated email from the ASF dual-hosted git repository.

volodymyr pushed a change to branch master
in repository https://gitbox.apache.org/repos/asf/drill.git.


    from efbf4fc  DRILL-7272: Drill Metastore Read / Write API and Drill Iceberg Metastore implementation
     new 275fb27  DRILL-7205: Drill fails to start when authentication is disabled
     new fb30eb9  DRILL-7332: Allow parsing empty schema

The 2 revisions listed above as "new" are entirely new to this
repository and will be described in separate emails.  The revisions
listed as "add" were already present in the repository and have only
been added to this reference.


Summary of changes:
 .../rpc/security/AuthenticatorProviderImpl.java    |   5 +
 .../java/org/apache/drill/TestSchemaCommands.java  |  66 ++++++++----
 .../user/security/TestCustomUserAuthenticator.java | 115 +++++++++++----------
 .../record/metadata/schema/parser/SchemaParser.g4  |   2 +-
 .../metadata/schema/parser/TestSchemaParser.java   |  14 ++-
 5 files changed, 124 insertions(+), 78 deletions(-)


[drill] 02/02: DRILL-7332: Allow parsing empty schema

Posted by vo...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

volodymyr pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/drill.git

commit fb30eb9a9b1dfb69fe4082ac2d3738b355572b6a
Author: Arina Ielchiieva <ar...@gmail.com>
AuthorDate: Fri Jul 26 14:47:51 2019 +0300

    DRILL-7332: Allow parsing empty schema
    
    closes #1828
---
 .../java/org/apache/drill/TestSchemaCommands.java  | 66 +++++++++++++++-------
 .../record/metadata/schema/parser/SchemaParser.g4  |  2 +-
 .../metadata/schema/parser/TestSchemaParser.java   | 14 ++++-
 3 files changed, 57 insertions(+), 25 deletions(-)
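
In short, this change adds an empty alternative to the ANTLR schema rule so that blank
or whitespace-only input now parses to an empty schema instead of failing. A minimal
sketch of the resulting behavior, using the SchemaExprParser and TupleMetadata APIs
exercised in the tests below (the import package names and the "col int" column
definition are illustrative assumptions, not taken from this patch):

    import org.apache.drill.exec.record.metadata.TupleMetadata;
    import org.apache.drill.exec.record.metadata.schema.parser.SchemaExprParser;

    public class EmptySchemaSketch {
      public static void main(String[] args) throws Exception {
        TupleMetadata blank   = SchemaExprParser.parseSchema("");         // accepted after DRILL-7332
        TupleMetadata parens  = SchemaExprParser.parseSchema("()");       // was already accepted
        TupleMetadata columns = SchemaExprParser.parseSchema("col int");  // regular column definition
        System.out.println(blank.size());    // 0
        System.out.println(parens.size());   // 0
        System.out.println(columns.size());  // 1
      }
    }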

diff --git a/exec/java-exec/src/test/java/org/apache/drill/TestSchemaCommands.java b/exec/java-exec/src/test/java/org/apache/drill/TestSchemaCommands.java
index 3258e1c..f4965ca 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/TestSchemaCommands.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/TestSchemaCommands.java
@@ -17,6 +17,7 @@
  */
 package org.apache.drill;
 
+import org.apache.commons.io.FileUtils;
 import org.apache.drill.categories.SqlTest;
 import org.apache.drill.common.exceptions.UserException;
 import org.apache.drill.common.exceptions.UserRemoteException;
@@ -143,7 +144,7 @@ public class TestSchemaCommands extends ClusterTest {
     try {
       run("create schema (col1 int, col2 int) for table %s.`%s`", "dfs.tmp", table);
     } finally {
-      assertTrue(tablePath.delete());
+      FileUtils.deleteQuietly(tablePath);
     }
   }
 
@@ -159,7 +160,7 @@ public class TestSchemaCommands extends ClusterTest {
     try {
       run("create schema (col1 int, col2 int) path '%s'", schema.getPath());
     } finally {
-      assertTrue(schema.delete());
+      FileUtils.deleteQuietly(schema);
     }
   }
 
@@ -213,9 +214,7 @@ public class TestSchemaCommands extends ClusterTest {
       assertTrue(varcharColumn.isNullable());
       assertEquals(TypeProtos.MinorType.VARCHAR, varcharColumn.type());
     } finally {
-      if (schemaFile.exists()) {
-        assertTrue(schemaFile.delete());
-      }
+      FileUtils.deleteQuietly(schemaFile);
     }
   }
 
@@ -267,7 +266,6 @@ public class TestSchemaCommands extends ClusterTest {
       ColumnMetadata updatedColumn = updatedSchemaContainer.getSchema().metadata("c");
       assertTrue(updatedColumn.isNullable());
       assertEquals(TypeProtos.MinorType.VARCHAR, updatedColumn.type());
-
     } finally {
       run("drop table if exists %s", table);
     }
@@ -305,9 +303,7 @@ public class TestSchemaCommands extends ClusterTest {
       assertEquals(properties, schema.properties());
 
     } finally {
-      if (schemaFile.exists()) {
-        assertTrue(schemaFile.delete());
-      }
+      FileUtils.deleteQuietly(schemaFile);
     }
   }
 
@@ -335,9 +331,7 @@ public class TestSchemaCommands extends ClusterTest {
       assertNotNull(schema.properties());
       assertEquals(0, schema.properties().size());
     } finally {
-      if (schemaFile.exists()) {
-        assertTrue(schemaFile.delete());
-      }
+      FileUtils.deleteQuietly(schemaFile);
     }
   }
 
@@ -389,9 +383,7 @@ public class TestSchemaCommands extends ClusterTest {
 
       assertEquals(0, schema.properties().size());
     } finally {
-      if (schemaFile.exists()) {
-        assertTrue(schemaFile.delete());
-      }
+      FileUtils.deleteQuietly(schemaFile);
     }
   }
 
@@ -423,9 +415,7 @@ public class TestSchemaCommands extends ClusterTest {
       assertTrue(schema.isEmpty());
       assertEquals("val", schema.property("prop"));
     } finally {
-      if (schemaFile.exists()) {
-        assertTrue(schemaFile.delete());
-      }
+      FileUtils.deleteQuietly(schemaFile);
     }
   }
 
@@ -474,9 +464,43 @@ public class TestSchemaCommands extends ClusterTest {
 
       assertEquals(2, schema.properties().size());
     } finally {
-      if (rawSchema.exists()) {
-        assertTrue(rawSchema.delete());
-      }
+      FileUtils.deleteQuietly(rawSchema);
+      FileUtils.deleteQuietly(schemaFile);
+    }
+  }
+
+  @Test
+  public void testCreateUsingLoadEmptyFile() throws Exception {
+    File tmpDir = dirTestWatcher.getTmpDir();
+    File rawSchema = new File(tmpDir, "raw_empty.schema");
+    File schemaFile = new File(tmpDir, "schema_for_create_using_load_empty_file.schema");
+
+    try {
+      assertTrue(rawSchema.createNewFile());
+
+      testBuilder()
+        .sqlQuery("create schema load '%s' path '%s' properties ('k1'='v1', 'k2' = 'v2')",
+          rawSchema.getPath(), schemaFile.getPath())
+        .unOrdered()
+        .baselineColumns("ok", "summary")
+        .baselineValues(true, String.format("Created schema for [%s]", schemaFile.getPath()))
+        .go();
+
+      SchemaProvider schemaProvider = new PathSchemaProvider(new Path(schemaFile.getPath()));
+      assertTrue(schemaFile.exists());
+
+      SchemaContainer schemaContainer = schemaProvider.read();
+
+      assertNull(schemaContainer.getTable());
+
+      TupleMetadata schema = schemaContainer.getSchema();
+      assertNotNull(schema);
+
+      assertEquals(0, schema.size());
+      assertEquals(2, schema.properties().size());
+    } finally {
+      FileUtils.deleteQuietly(rawSchema);
+      FileUtils.deleteQuietly(schemaFile);
     }
   }
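
The cleanup changes above replace assertTrue(file.delete()) with commons-io
FileUtils.deleteQuietly(file), which never throws and treats a missing file as a no-op,
so a cleanup problem in a finally block can no longer hide the real test failure. A
minimal sketch of the idiom as used in these tests (tmpDir comes from
dirTestWatcher.getTmpDir(); the file name is illustrative):

    File schemaFile = new File(tmpDir, "example.schema");
    try {
      // ... exercise CREATE SCHEMA / schema parsing against schemaFile ...
    } finally {
      // Returns a boolean instead of throwing, so any assertion error or exception
      // raised in the try block propagates unchanged.
      FileUtils.deleteQuietly(schemaFile);
    }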
 
diff --git a/exec/vector/src/main/antlr4/org/apache/drill/exec/record/metadata/schema/parser/SchemaParser.g4 b/exec/vector/src/main/antlr4/org/apache/drill/exec/record/metadata/schema/parser/SchemaParser.g4
index 384daf8..3be11ca 100644
--- a/exec/vector/src/main/antlr4/org/apache/drill/exec/record/metadata/schema/parser/SchemaParser.g4
+++ b/exec/vector/src/main/antlr4/org/apache/drill/exec/record/metadata/schema/parser/SchemaParser.g4
@@ -25,7 +25,7 @@ options {
  */
 }
 
-schema: (columns | LEFT_PAREN columns? RIGHT_PAREN) property_values? EOF;
+schema: (columns | LEFT_PAREN columns? RIGHT_PAREN | /* empty input */) property_values? EOF;
 
 columns: column_def (COMMA column_def)*;
 
diff --git a/exec/vector/src/test/java/org/apache/drill/exec/record/metadata/schema/parser/TestSchemaParser.java b/exec/vector/src/test/java/org/apache/drill/exec/record/metadata/schema/parser/TestSchemaParser.java
index 2d0d1be..5bb6cfb 100644
--- a/exec/vector/src/test/java/org/apache/drill/exec/record/metadata/schema/parser/TestSchemaParser.java
+++ b/exec/vector/src/test/java/org/apache/drill/exec/record/metadata/schema/parser/TestSchemaParser.java
@@ -311,9 +311,17 @@ public class TestSchemaParser {
 
   @Test
   public void testEmptySchema() throws Exception {
-    String value = "()";
-    TupleMetadata schema = SchemaExprParser.parseSchema(value);
-    assertEquals(0, schema.size());
+    assertEquals(0, SchemaExprParser.parseSchema("").size());
+    assertEquals(0, SchemaExprParser.parseSchema("  ").size());
+    assertEquals(0, SchemaExprParser.parseSchema("\n").size());
+    assertEquals(0, SchemaExprParser.parseSchema("  \n").size());
+  }
+
+  @Test
+  public void testEmptySchemaWithParentheses() throws Exception {
+    assertEquals(0, SchemaExprParser.parseSchema("()").size());
+    assertEquals(0, SchemaExprParser.parseSchema("(  )").size());
+    assertEquals(0, SchemaExprParser.parseSchema("(\n)\n").size());
   }
 
   @Test


[drill] 01/02: DRILL-7205: Drill fails to start when authentication is disabled

Posted by vo...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

volodymyr pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/drill.git

commit 275fb27126b35fe3229f6ef6dd5296f4138670fc
Author: Anton Gozhiy <an...@gmail.com>
AuthorDate: Fri Jul 5 18:51:32 2019 +0300

    DRILL-7205: Drill fails to start when authentication is disabled
    
    closes #1824
---
 .../rpc/security/AuthenticatorProviderImpl.java    |   5 +
 .../user/security/TestCustomUserAuthenticator.java | 115 +++++++++++----------
 2 files changed, 67 insertions(+), 53 deletions(-)
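
The fix adds an early return in AuthenticatorProviderImpl so that no authentication
mechanisms are set up when user authentication is disabled; a Drillbit with
authentication turned off can then start even if the configured authenticator
implementation is absent or invalid. A minimal sketch mirroring the new
positiveAuthDisabled test below (the loopback and port settings used by the test are
omitted here for brevity):

    ClusterFixture cluster = ClusterFixture.bareBuilder(dirTestWatcher)
        .configProperty(ExecConstants.USER_AUTHENTICATION_ENABLED, false)
        .configProperty(ExecConstants.USER_AUTHENTICATOR_IMPL, "NonExistingImpl")
        .build();
    // build() now succeeds: the bogus authenticator implementation is never
    // looked up because authentication is disabled (DRILL-7205).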

diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/security/AuthenticatorProviderImpl.java b/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/security/AuthenticatorProviderImpl.java
index 4ed688b..60ffdec 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/security/AuthenticatorProviderImpl.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/security/AuthenticatorProviderImpl.java
@@ -45,6 +45,11 @@ public class AuthenticatorProviderImpl implements AuthenticatorProvider {
 
   @SuppressWarnings("unchecked")
   public AuthenticatorProviderImpl(final DrillConfig config, final ScanResult scan) throws DrillbitStartupException {
+    // Skip auth mechanisms setup if user authentication is disabled
+    if (!config.getBoolean(ExecConstants.USER_AUTHENTICATION_ENABLED)) {
+      return;
+    }
+
     List<String> configuredFactories = Lists.newArrayList();
     if (config.hasPath(ExecConstants.AUTHENTICATION_MECHANISMS)) {
       configuredFactories = config.getStringList(ExecConstants.AUTHENTICATION_MECHANISMS);
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/rpc/user/security/TestCustomUserAuthenticator.java b/exec/java-exec/src/test/java/org/apache/drill/exec/rpc/user/security/TestCustomUserAuthenticator.java
index b4d44ef..93d11cb 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/rpc/user/security/TestCustomUserAuthenticator.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/rpc/user/security/TestCustomUserAuthenticator.java
@@ -17,47 +17,46 @@
  */
 package org.apache.drill.exec.rpc.user.security;
 
-import com.typesafe.config.ConfigValueFactory;
-import org.apache.drill.test.BaseTestQuery;
 import org.apache.drill.categories.SecurityTest;
 import org.apache.drill.common.config.DrillProperties;
-import org.apache.drill.common.config.DrillConfig;
 import org.apache.drill.exec.ExecConstants;
 import org.apache.drill.exec.rpc.RpcException;
 import org.apache.drill.exec.rpc.user.security.testing.UserAuthenticatorTestImpl;
+import org.apache.drill.test.ClientFixture;
+import org.apache.drill.test.ClusterFixture;
+import org.apache.drill.test.ClusterFixtureBuilder;
+import org.apache.drill.test.ClusterTest;
+import org.apache.drill.test.QueryTestUtil;
 import org.junit.BeforeClass;
+import org.junit.Rule;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
+import org.junit.rules.ExpectedException;
 
-import java.util.Properties;
+import java.util.Arrays;
+import java.util.List;
 
 import static org.apache.drill.exec.rpc.user.security.testing.UserAuthenticatorTestImpl.TEST_USER_1;
 import static org.apache.drill.exec.rpc.user.security.testing.UserAuthenticatorTestImpl.TEST_USER_1_PASSWORD;
 import static org.apache.drill.exec.rpc.user.security.testing.UserAuthenticatorTestImpl.TEST_USER_2;
 import static org.apache.drill.exec.rpc.user.security.testing.UserAuthenticatorTestImpl.TEST_USER_2_PASSWORD;
+import static org.hamcrest.CoreMatchers.isA;
 import static org.hamcrest.core.StringContains.containsString;
-import static org.junit.Assert.assertNotNull;
-import static org.junit.Assert.assertThat;
-import static org.junit.Assert.fail;
 
 @Category(SecurityTest.class)
-public class TestCustomUserAuthenticator extends BaseTestQuery {
+public class TestCustomUserAuthenticator extends ClusterTest {
+
+  @Rule
+  public ExpectedException thrown = ExpectedException.none();
 
   @BeforeClass
   public static void setupCluster() {
-    // Create a new DrillConfig which has user authentication enabled and authenticator set to
-    // UserAuthenticatorTestImpl.
-    final Properties props = cloneDefaultTestConfigProperties();
-    final DrillConfig newConfig = new DrillConfig(DrillConfig.create(props)
-            .withValue(ExecConstants.USER_AUTHENTICATION_ENABLED,
-                ConfigValueFactory.fromAnyRef("true"))
-            .withValue(ExecConstants.USER_AUTHENTICATOR_IMPL,
-                ConfigValueFactory.fromAnyRef(UserAuthenticatorTestImpl.TYPE)));
-
-    final Properties connectionProps = new Properties();
-    connectionProps.setProperty(DrillProperties.USER, "anonymous");
-    connectionProps.setProperty(DrillProperties.PASSWORD, "anything works!");
-    updateTestCluster(3, newConfig, connectionProps);
+    cluster = ClusterFixture.bareBuilder(dirTestWatcher)
+        .clusterSize(3)
+        .configProperty(ExecConstants.ALLOW_LOOPBACK_ADDRESS_BINDING, true)
+        .configProperty(ExecConstants.USER_AUTHENTICATION_ENABLED, true)
+        .configProperty(ExecConstants.USER_AUTHENTICATOR_IMPL, UserAuthenticatorTestImpl.TYPE)
+        .build();
   }
 
   @Test
@@ -67,6 +66,22 @@ public class TestCustomUserAuthenticator extends BaseTestQuery {
   }
 
   @Test
+  public void positiveAuthDisabled() throws Exception {
+    // Setup a separate cluster
+    ClusterFixtureBuilder builder = ClusterFixture.bareBuilder(dirTestWatcher)
+        .configProperty(ExecConstants.ALLOW_LOOPBACK_ADDRESS_BINDING, true)
+        .configProperty(ExecConstants.INITIAL_USER_PORT, QueryTestUtil.getFreePortNumber(31170, 300))
+        .configProperty(ExecConstants.INITIAL_BIT_PORT, QueryTestUtil.getFreePortNumber(31180, 300));
+
+    // Setup specific auth settings
+    ClusterFixture cluster = builder.configProperty(ExecConstants.USER_AUTHENTICATION_ENABLED, false)
+        .configProperty(ExecConstants.USER_AUTHENTICATOR_IMPL, "NonExistingImpl")
+        .build();
+
+    runTest(TEST_USER_1, TEST_USER_1_PASSWORD, cluster);
+  }
+
+  @Test
   public void negativeUserAuth() throws Exception {
     negativeAuthHelper(TEST_USER_1, "blah.. blah..");
     negativeAuthHelper(TEST_USER_2, "blah.. blah..");
@@ -75,15 +90,9 @@ public class TestCustomUserAuthenticator extends BaseTestQuery {
 
   @Test
   public void emptyPassword() throws Exception {
-    try {
-      runTest(TEST_USER_2, "");
-      fail("Expected an exception.");
-    } catch (RpcException e) {
-      final String exMsg = e.getMessage();
-      assertThat(exMsg, containsString("Insufficient credentials"));
-    } catch (Exception e) {
-      fail("Expected an RpcException.");
-    }
+    thrown.expectCause(isA(RpcException.class));
+    thrown.expectMessage(containsString("Insufficient credentials"));
+    runTest(TEST_USER_2, "");
   }
 
   @Test
@@ -92,33 +101,33 @@ public class TestCustomUserAuthenticator extends BaseTestQuery {
     runTest(TEST_USER_2, TEST_USER_2_PASSWORD);
   }
 
-  private static void negativeAuthHelper(final String user, final String password) throws Exception {
-    RpcException negativeAuthEx = null;
-    try {
-      runTest(user, password);
-    } catch (RpcException e) {
-      negativeAuthEx = e;
-    }
-
-    assertNotNull("Expected RpcException.", negativeAuthEx);
-    final String exMsg = negativeAuthEx.getMessage();
-    assertThat(exMsg, containsString("Authentication failed"));
-    assertThat(exMsg, containsString("Incorrect credentials"));
+  private void negativeAuthHelper(final String user, final String password) throws Exception {
+    thrown.expectCause(isA(RpcException.class));
+    thrown.expectMessage(containsString("Authentication failed"));
+    thrown.expectMessage(containsString("Incorrect credentials"));
+    runTest(user, password);
   }
 
-  private static void runTest(final String user, final String password) throws Exception {
-    final Properties connectionProps = new Properties();
-
-    connectionProps.setProperty(DrillProperties.USER, user);
-    connectionProps.setProperty(DrillProperties.PASSWORD, password);
+  private static void runTest(String user, String password) throws Exception {
+    runTest(user, password, cluster);
+  }
 
-    updateClient(connectionProps);
+  private static void runTest(String user, String password, ClusterFixture cluster) throws Exception {
+    ClientFixture client = cluster.clientBuilder()
+        .property(DrillProperties.USER, user)
+        .property(DrillProperties.PASSWORD, password)
+        .build();
 
     // Run few queries using the new client
-    test("SHOW SCHEMAS");
-    test("USE INFORMATION_SCHEMA");
-    test("SHOW TABLES");
-    test("SELECT * FROM INFORMATION_SCHEMA.`TABLES` WHERE TABLE_NAME LIKE 'COLUMNS'");
-    test("SELECT * FROM cp.`region.json` LIMIT 5");
+    List<String> queries = Arrays.asList(
+        "SHOW SCHEMAS",
+        "USE INFORMATION_SCHEMA",
+        "SHOW TABLES",
+        "SELECT * FROM INFORMATION_SCHEMA.`TABLES` WHERE TABLE_NAME LIKE 'COLUMNS'",
+        "SELECT * FROM cp.`region.json` LIMIT 5");
+
+    for (String query : queries) {
+      client.queryBuilder().sql(query).run();
+    }
   }
 }
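
For reference, the refactored test combines the JUnit ExpectedException rule with the
ClusterTest and ClientFixture helpers; expectCause (rather than expect) reflects that
the RpcException arrives wrapped as the cause of the exception thrown via the client
fixture. A further negative case would follow the same pattern (the test name and
password below are hypothetical):

    @Test
    public void negativeWrongPassword() throws Exception {
      thrown.expectCause(isA(RpcException.class));
      thrown.expectMessage(containsString("Authentication failed"));
      runTest(TEST_USER_1, "not the real password");
    }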