Posted to commits@hudi.apache.org by na...@apache.org on 2019/11/21 11:11:36 UTC
[incubator-hudi] 01/01: fixing build issues due to javax servlet
This is an automated email from the ASF dual-hosted git repository.
nagarwal pushed a commit to branch hudi_test_suite_refactor
in repository https://gitbox.apache.org/repos/asf/incubator-hudi.git
commit 0c2ed5334559250cc30d4d4a80fa4d5b517bb9bd
Author: Nishith Agarwal <na...@uber.com>
AuthorDate: Tue Nov 19 23:56:03 2019 -0800
fixing build issues due to javax servlet
---
hudi-bench/pom.xml | 100 ++++++++++++++-------
.../hudi/bench/configuration/DeltaConfig.java | 1 -
.../java/org/apache/hudi/bench/dag/DagUtils.java | 12 ---
.../hudi/bench/generator/DeltaGenerator.java | 6 +-
.../hudi/bench/reader/DFSAvroDeltaInputReader.java | 1 -
.../bench/reader/DFSParquetDeltaInputReader.java | 4 +-
.../hudi/bench/writer/AvroDeltaInputWriter.java | 3 +-
.../TestGenericRecordPayloadEstimator.java | 1 -
.../test/resources/log4j-surefire-quiet.properties | 2 +-
.../src/test/resources/log4j-surefire.properties | 2 +-
.../org/apache/hudi/hive/HoodieHiveClient.java | 2 +-
11 files changed, 77 insertions(+), 57 deletions(-)
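The failures this commit addresses stem from duplicate copies of the javax.servlet API and Jetty arriving transitively through Spark, Hive, and Hadoop; the pom changes below exclude them from each offending dependency. When triaging this kind of clash, a small diagnostic like the one below can show which jar actually serves a given class. This is an illustrative sketch, not part of the commit, and WhichJar and its default class name are examples only.

import java.security.CodeSource;

// Illustrative diagnostic (not from this commit): prints the jar a class is
// loaded from, to spot duplicate servlet-api copies on a classpath.
public class WhichJar {
  public static void main(String[] args) throws ClassNotFoundException {
    // Defaults to the servlet API entry point; pass any class name to inspect.
    String className = args.length > 0 ? args[0] : "javax.servlet.Servlet";
    Class<?> clazz = Class.forName(className);
    CodeSource src = clazz.getProtectionDomain().getCodeSource();
    // Classes loaded by the bootstrap classloader report no code source.
    System.out.println(className + " -> "
        + (src != null ? src.getLocation() : "<bootstrap classloader>"));
  }
}

Run against the test classpath, this quickly reveals whether more than one artifact is supplying the servlet classes.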
diff --git a/hudi-bench/pom.xml b/hudi-bench/pom.xml
index a2534ec..69cc78b 100644
--- a/hudi-bench/pom.xml
+++ b/hudi-bench/pom.xml
@@ -94,6 +94,33 @@
</dependency>
<!-- the following order of dependencies are crucial -->
+
+ <!-- Need this for SparkSession sparkSql queries -->
+ <dependency>
+ <groupId>org.apache.spark</groupId>
+ <artifactId>spark-hive_2.11</artifactId>
+ <version>${spark.version}</version>
+ <exclusions>
+ <exclusion>
+ <groupId>org.mortbay.jetty</groupId>
+ <artifactId>*</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>javax.servlet.jsp</groupId>
+ <artifactId>*</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>javax.servlet</groupId>
+ <artifactId>*</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>org.eclipse.jetty</groupId>
+ <artifactId>*</artifactId>
+ </exclusion>
+ </exclusions>
+ <scope>provided</scope>
+ </dependency>
+
<dependency>
<groupId>org.apache.hudi</groupId>
<artifactId>hudi-common</artifactId>
@@ -107,6 +134,20 @@
<groupId>org.apache.hudi</groupId>
<artifactId>hudi-hive</artifactId>
<version>${project.version}</version>
+ <exclusions>
+ <exclusion>
+ <groupId>org.mortbay.jetty</groupId>
+ <artifactId>*</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>javax.servlet.jsp</groupId>
+ <artifactId>*</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>javax.servlet</groupId>
+ <artifactId>*</artifactId>
+ </exclusion>
+ </exclusions>
<classifier>tests</classifier>
<type>test-jar</type>
</dependency>
@@ -141,23 +182,13 @@
<groupId>${hive.groupid}</groupId>
<artifactId>hive-exec</artifactId>
<version>${hive.version}</version>
- <scope>test</scope>
- </dependency>
-
- <dependency>
- <groupId>${hive.groupid}</groupId>
- <artifactId>hive-jdbc</artifactId>
- <version>${hive.version}</version>
<exclusions>
<exclusion>
- <groupId>org.slf4j</groupId>
- <artifactId>slf4j-api</artifactId>
- </exclusion>
- <exclusion>
<groupId>javax.servlet</groupId>
<artifactId>servlet-api</artifactId>
</exclusion>
</exclusions>
+ <scope>test</scope>
</dependency>
<dependency>
@@ -225,12 +256,36 @@
<scope>provided</scope>
<exclusions>
<exclusion>
+ <groupId>org.mortbay.jetty</groupId>
+ <artifactId>*</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>javax.servlet.jsp</groupId>
+ <artifactId>*</artifactId>
+ </exclusion>
+ <exclusion>
<groupId>javax.servlet</groupId>
<artifactId>*</artifactId>
</exclusion>
</exclusions>
</dependency>
+ <dependency>
+ <groupId>${hive.groupid}</groupId>
+ <artifactId>hive-jdbc</artifactId>
+ <version>${hive.version}</version>
+ <exclusions>
+ <exclusion>
+ <groupId>org.slf4j</groupId>
+ <artifactId>slf4j-api</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>javax.servlet</groupId>
+ <artifactId>servlet-api</artifactId>
+ </exclusion>
+ </exclusions>
+ </dependency>
+
<!-- Used for SQL templating -->
<dependency>
<groupId>org.antlr</groupId>
@@ -275,37 +330,18 @@
<scope>test</scope>
<exclusions>
<exclusion>
- <groupId>javax.servlet</groupId>
+ <groupId>org.mortbay.jetty</groupId>
<artifactId>*</artifactId>
</exclusion>
- </exclusions>
- </dependency>
-
- <dependency>
- <groupId>org.apache.spark</groupId>
- <artifactId>spark-core_2.11</artifactId>
- <version>${spark.version}</version>
- <exclusions>
<exclusion>
- <groupId>javax.servlet</groupId>
+ <groupId>javax.servlet.jsp</groupId>
<artifactId>*</artifactId>
</exclusion>
- </exclusions>
- <scope>provided</scope>
- </dependency>
-
- <!-- Need this for SparkSession sparkSql queries -->
- <dependency>
- <groupId>org.apache.spark</groupId>
- <artifactId>spark-hive_2.11</artifactId>
- <version>${spark.version}</version>
- <exclusions>
<exclusion>
<groupId>javax.servlet</groupId>
<artifactId>*</artifactId>
</exclusion>
</exclusions>
- <scope>provided</scope>
</dependency>
</dependencies>
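The reordering above follows the in-file note that dependency order is crucial: Maven mediates conflicting transitive versions by nearest definition, breaking ties by declaration order, so declaring spark-hive_2.11 first, with the servlet and Jetty exclusions attached, controls which copy lands on the classpath. If the conflict resurfaces, the standard maven-dependency-plugin invocation below lists every path that still drags in a servlet artifact:

mvn dependency:tree -Dincludes=javax.servlet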
diff --git a/hudi-bench/src/main/java/org/apache/hudi/bench/configuration/DeltaConfig.java b/hudi-bench/src/main/java/org/apache/hudi/bench/configuration/DeltaConfig.java
index c5ab9b9..11aa072 100644
--- a/hudi-bench/src/main/java/org/apache/hudi/bench/configuration/DeltaConfig.java
+++ b/hudi-bench/src/main/java/org/apache/hudi/bench/configuration/DeltaConfig.java
@@ -224,7 +224,6 @@ public class DeltaConfig implements Serializable {
return this;
}
-
public Builder withConfig(String name, Object value) {
this.configsMap.put(name, value);
return this;
diff --git a/hudi-bench/src/main/java/org/apache/hudi/bench/dag/DagUtils.java b/hudi-bench/src/main/java/org/apache/hudi/bench/dag/DagUtils.java
index 961476b..93a04b3 100644
--- a/hudi-bench/src/main/java/org/apache/hudi/bench/dag/DagUtils.java
+++ b/hudi-bench/src/main/java/org/apache/hudi/bench/dag/DagUtils.java
@@ -39,21 +39,10 @@ import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hudi.bench.configuration.DeltaConfig;
import org.apache.hudi.bench.configuration.DeltaConfig.Config;
-import org.apache.hudi.bench.dag.nodes.BulkInsertNode;
-import org.apache.hudi.bench.dag.nodes.CompactNode;
import org.apache.hudi.bench.dag.nodes.DagNode;
-import org.apache.hudi.bench.dag.nodes.HiveQueryNode;
-import org.apache.hudi.bench.dag.nodes.HiveSyncNode;
-import org.apache.hudi.bench.dag.nodes.InsertNode;
-import org.apache.hudi.bench.dag.nodes.RollbackNode;
-import org.apache.hudi.bench.dag.nodes.ScheduleCompactNode;
-import org.apache.hudi.bench.dag.nodes.SparkSQLQueryNode;
-import org.apache.hudi.bench.dag.nodes.UpsertNode;
-import org.apache.hudi.bench.dag.nodes.ValidateNode;
import org.apache.hudi.common.util.ReflectionUtils;
import org.apache.hudi.common.util.StringUtils;
import org.apache.hudi.common.util.collection.Pair;
-import org.apache.hudi.exception.HoodieException;
/**
* Utility class to SerDe workflow dag
@@ -161,7 +150,6 @@ public class DagUtils {
default:
configsMap.put(entry.getKey(), getValue(entry.getValue()));
break;
- // add any new scope added under CONFIG
}
}
return configsMap;
diff --git a/hudi-bench/src/main/java/org/apache/hudi/bench/generator/DeltaGenerator.java b/hudi-bench/src/main/java/org/apache/hudi/bench/generator/DeltaGenerator.java
index 4979cea..335fecf 100644
--- a/hudi-bench/src/main/java/org/apache/hudi/bench/generator/DeltaGenerator.java
+++ b/hudi-bench/src/main/java/org/apache/hudi/bench/generator/DeltaGenerator.java
@@ -68,7 +68,8 @@ public class DeltaGenerator implements Serializable {
private List<String> partitionPathFieldNames;
private int batchId;
- public DeltaGenerator(DeltaConfig deltaOutputConfig, JavaSparkContext jsc, SparkSession sparkSession, String schemaStr,
+ public DeltaGenerator(DeltaConfig deltaOutputConfig, JavaSparkContext jsc, SparkSession sparkSession,
+ String schemaStr,
KeyGenerator keyGenerator) {
this.deltaOutputConfig = deltaOutputConfig;
this.jsc = jsc;
@@ -120,7 +121,8 @@ public class DeltaGenerator implements Serializable {
adjustedRDD = adjustRDDToGenerateExactNumUpdates(adjustedRDD, jsc, config.getNumRecordsUpsert());
} else {
deltaInputReader =
- new DFSHoodieDatasetInputReader(jsc, ((DFSDeltaConfig) deltaOutputConfig).getDatasetOutputPath(), schemaStr);
+ new DFSHoodieDatasetInputReader(jsc, ((DFSDeltaConfig) deltaOutputConfig).getDatasetOutputPath(),
+ schemaStr);
if (config.getFractionUpsertPerFile() > 0) {
adjustedRDD = deltaInputReader.read(config.getNumUpsertPartitions(), config.getNumUpsertFiles(),
config.getFractionUpsertPerFile());
diff --git a/hudi-bench/src/main/java/org/apache/hudi/bench/reader/DFSAvroDeltaInputReader.java b/hudi-bench/src/main/java/org/apache/hudi/bench/reader/DFSAvroDeltaInputReader.java
index 7929415..7d06c77 100644
--- a/hudi-bench/src/main/java/org/apache/hudi/bench/reader/DFSAvroDeltaInputReader.java
+++ b/hudi-bench/src/main/java/org/apache/hudi/bench/reader/DFSAvroDeltaInputReader.java
@@ -79,7 +79,6 @@ public class DFSAvroDeltaInputReader extends DFSDeltaInputReader {
throw new UnsupportedOperationException("cannot generate updates");
}
-
@Override
protected long analyzeSingleFile(String filePath) {
JavaRDD<GenericRecord> recordsFromOneFile = SparkBasedReader
diff --git a/hudi-bench/src/main/java/org/apache/hudi/bench/reader/DFSParquetDeltaInputReader.java b/hudi-bench/src/main/java/org/apache/hudi/bench/reader/DFSParquetDeltaInputReader.java
index c448ab6..c0016a2 100644
--- a/hudi-bench/src/main/java/org/apache/hudi/bench/reader/DFSParquetDeltaInputReader.java
+++ b/hudi-bench/src/main/java/org/apache/hudi/bench/reader/DFSParquetDeltaInputReader.java
@@ -34,11 +34,11 @@ import org.apache.spark.sql.SparkSession;
*/
public class DFSParquetDeltaInputReader extends DFSDeltaInputReader {
+ private static final String PARQUET_EXTENSION = ".parquet";
private final SparkSession sparkSession;
private final String basePath;
private final Option<String> structName;
private final Option<String> nameSpace;
- private final static String PARQUET_EXTENSION = ".parquet";
protected PathFilter filter = (path) -> {
if (path.toUri().toString().contains(PARQUET_EXTENSION)) {
return true;
@@ -50,7 +50,6 @@ public class DFSParquetDeltaInputReader extends DFSDeltaInputReader {
public DFSParquetDeltaInputReader(SparkSession sparkSession, String schemaStr, String basePath,
Option<String> structName, Option<String> nameSpace) {
this.sparkSession = sparkSession;
- this.schemaStr = schemaStr;
this.basePath = basePath;
this.structName = structName;
this.nameSpace = nameSpace;
@@ -81,7 +80,6 @@ public class DFSParquetDeltaInputReader extends DFSDeltaInputReader {
throw new UnsupportedOperationException("cannot generate updates");
}
-
@Override
protected long analyzeSingleFile(String filePath) {
JavaRDD<GenericRecord> recordsFromOneFile = SparkBasedReader.readParquet(sparkSession, Arrays.asList(filePath),
diff --git a/hudi-bench/src/main/java/org/apache/hudi/bench/writer/AvroDeltaInputWriter.java b/hudi-bench/src/main/java/org/apache/hudi/bench/writer/AvroDeltaInputWriter.java
index 7c8d1ae..234530e 100644
--- a/hudi-bench/src/main/java/org/apache/hudi/bench/writer/AvroDeltaInputWriter.java
+++ b/hudi-bench/src/main/java/org/apache/hudi/bench/writer/AvroDeltaInputWriter.java
@@ -39,9 +39,8 @@ import org.apache.log4j.Logger;
*/
public class AvroDeltaInputWriter implements FileDeltaInputWriter<GenericRecord> {
- private static Logger log = Logger.getLogger(AvroDeltaInputWriter.class);
-
public static final String AVRO_EXTENSION = ".avro";
+ private static Logger log = Logger.getLogger(AvroDeltaInputWriter.class);
// The maximum file size for an avro file before being rolled over to a new one
private final Long maxFileSize;
private final Configuration configuration;
diff --git a/hudi-bench/src/test/java/org/apache/hudi/bench/generator/TestGenericRecordPayloadEstimator.java b/hudi-bench/src/test/java/org/apache/hudi/bench/generator/TestGenericRecordPayloadEstimator.java
index a9f9cee..e616813 100644
--- a/hudi-bench/src/test/java/org/apache/hudi/bench/generator/TestGenericRecordPayloadEstimator.java
+++ b/hudi-bench/src/test/java/org/apache/hudi/bench/generator/TestGenericRecordPayloadEstimator.java
@@ -24,7 +24,6 @@ import org.apache.avro.Schema;
import org.apache.hudi.common.util.collection.Pair;
import org.junit.Test;
-
public class TestGenericRecordPayloadEstimator {
@Test
diff --git a/hudi-bench/src/test/resources/log4j-surefire-quiet.properties b/hudi-bench/src/test/resources/log4j-surefire-quiet.properties
index 8203bbd..cd7d2db 100644
--- a/hudi-bench/src/test/resources/log4j-surefire-quiet.properties
+++ b/hudi-bench/src/test/resources/log4j-surefire-quiet.properties
@@ -16,7 +16,7 @@
# limitations under the License.
#
-og4j.rootLogger=WARN, A1
+log4j.rootLogger=WARN, A1
# A1 is set to be a ConsoleAppender.
log4j.appender.A1=org.apache.log4j.ConsoleAppender
# A1 uses PatternLayout.
diff --git a/hudi-bench/src/test/resources/log4j-surefire.properties b/hudi-bench/src/test/resources/log4j-surefire.properties
index a94b075..1ba3680 100644
--- a/hudi-bench/src/test/resources/log4j-surefire.properties
+++ b/hudi-bench/src/test/resources/log4j-surefire.properties
@@ -16,7 +16,7 @@
# limitations under the License.
#
-og4j.rootLogger=WARN, A1
+log4j.rootLogger=WARN, A1
log4j.category.org.apache=INFO
log4j.category.org.apache.parquet.hadoop=WARN
# A1 is set to be a ConsoleAppender.
diff --git a/hudi-hive/src/main/java/org/apache/hudi/hive/HoodieHiveClient.java b/hudi-hive/src/main/java/org/apache/hudi/hive/HoodieHiveClient.java
index 26ca6f5..3223108 100644
--- a/hudi-hive/src/main/java/org/apache/hudi/hive/HoodieHiveClient.java
+++ b/hudi-hive/src/main/java/org/apache/hudi/hive/HoodieHiveClient.java
@@ -682,7 +682,7 @@ public class HoodieHiveClient {
}
}
- public HiveMetaStoreClient getClient() {
+ public IMetaStoreClient getClient() {
return client;
}
}
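The final hunk widens the accessor to the IMetaStoreClient interface instead of the concrete HiveMetaStoreClient, so callers can receive alternative implementations. A minimal sketch of the call-site pattern this enables, assuming a hypothetical caller (TableExistsCheck is illustrative, not from the commit):

import org.apache.hadoop.hive.metastore.IMetaStoreClient;
import org.apache.thrift.TException;

// Hypothetical caller: depends only on the metastore interface, so any
// implementation (real client, retrying proxy, or test double) can be passed.
public class TableExistsCheck {
  private final IMetaStoreClient client;

  public TableExistsCheck(IMetaStoreClient client) {
    this.client = client;
  }

  public boolean tableExists(String db, String table) throws TException {
    return client.tableExists(db, table);
  }
}

Because only the interface is referenced, a RetryingMetaStoreClient proxy or a mock can stand in for the real HiveMetaStoreClient without changing the caller.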