Posted to commits@hive.apache.org by ha...@apache.org on 2020/05/10 05:25:59 UTC
[hive] branch master updated: HIVE-22491 : Use Collections emptyList (David Mollitor via Ashutosh Chauhan)
This is an automated email from the ASF dual-hosted git repository.
hashutosh pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hive.git
The following commit(s) were added to refs/heads/master by this push:
new 0bf24c8 HIVE-22491 : Use Collections emptyList (David Mollitor via Ashutosh Chauhan)
0bf24c8 is described below
commit 0bf24c805467db2c04098fac5029914ce0b0985c
Author: David Mollitor <dm...@apache.org>
AuthorDate: Sat May 9 22:23:33 2020 -0700
HIVE-22491 : Use Collections emptyList (David Mollitor via Ashutosh Chauhan)
Signed-off-by: Ashutosh Chauhan <ha...@apache.org>
---
common/src/java/org/apache/hadoop/hive/common/HiveStatsUtils.java | 3 ++-
.../main/java/org/apache/hive/hcatalog/api/HCatClientHMSImpl.java | 3 ++-
.../org/apache/hive/hcatalog/templeton/tool/ZooKeeperCleanup.java | 3 +--
.../org/apache/hive/hcatalog/templeton/tool/ZooKeeperStorage.java | 3 ++-
.../java/org/apache/hadoop/hive/kudu/KuduPredicateHandler.java | 2 +-
.../org/apache/hadoop/hive/ql/exec/SerializationUtilities.java | 4 ++--
ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java | 2 +-
ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkPlan.java | 7 +++----
ql/src/java/org/apache/hadoop/hive/ql/io/HiveFileFormatUtils.java | 3 ++-
ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java | 8 +++-----
ql/src/java/org/apache/hadoop/hive/ql/metadata/Partition.java | 3 ++-
ql/src/java/org/apache/hadoop/hive/ql/metadata/Table.java | 3 ++-
.../hive/ql/optimizer/calcite/rules/HivePreFilteringRule.java | 7 ++++---
ql/src/java/org/apache/hadoop/hive/ql/plan/PlanUtils.java | 2 +-
ql/src/java/org/apache/hadoop/hive/ql/plan/ReduceSinkDesc.java | 5 +++--
ql/src/java/org/apache/hadoop/hive/ql/stats/StatsUtils.java | 4 ++--
serde/src/java/org/apache/hadoop/hive/serde2/NullStructSerDe.java | 6 +++---
.../java/org/apache/hadoop/hive/metastore/utils/FileUtils.java | 2 +-
.../main/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java | 6 ++----
.../apache/hadoop/hive/metastore/MetastoreDefaultTransformer.java | 7 ++++---
.../main/java/org/apache/hadoop/hive/metastore/ObjectStore.java | 6 +++---
.../apache/hadoop/hive/metastore/utils/MetaStoreServerUtils.java | 3 ++-
22 files changed, 48 insertions(+), 44 deletions(-)
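
Every hunk in this patch applies the same pattern: a method that returns an empty List<T> hands back the immutable java.util.Collections.emptyList() singleton instead of allocating a fresh ArrayList on each call. A minimal, self-contained sketch of the behavior; the class and lookup method here are illustrative, not taken from the patch:

    import java.util.Collections;
    import java.util.List;
    import java.util.Map;

    public class EmptyListDemo {

        // Hypothetical helper, for illustration only.
        static List<String> findAliases(Map<String, List<String>> index, String key) {
            List<String> hit = index.get(key);
            if (hit == null) {
                // Shared immutable singleton: no allocation, unlike new ArrayList<>(0).
                return Collections.emptyList();
            }
            return hit;
        }

        public static void main(String[] args) {
            List<String> a = Collections.emptyList();
            List<String> b = Collections.emptyList();
            System.out.println(a == b); // true: the same instance is reused

            try {
                a.add("x");
            } catch (UnsupportedOperationException e) {
                // The trade-off: the returned list cannot be mutated.
                System.out.println("immutable: " + e);
            }
        }
    }

The trade-off is that the returned list throws UnsupportedOperationException on mutation, so the swap is only safe where callers treat the result as read-only, which is presumably what the review verified for each site below.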
diff --git a/common/src/java/org/apache/hadoop/hive/common/HiveStatsUtils.java b/common/src/java/org/apache/hadoop/hive/common/HiveStatsUtils.java
index 7641610..10ff548 100644
--- a/common/src/java/org/apache/hadoop/hive/common/HiveStatsUtils.java
+++ b/common/src/java/org/apache/hadoop/hive/common/HiveStatsUtils.java
@@ -19,6 +19,7 @@ package org.apache.hadoop.hive.common;
import java.io.IOException;
import java.util.ArrayList;
+import java.util.Collections;
import java.util.List;
import org.apache.hadoop.conf.Configuration;
@@ -66,7 +67,7 @@ public class HiveStatsUtils {
// does not exist. But getFileStatus() throw IOException. To mimic the
// similar behavior we will return empty array on exception. For external
// tables, the path of the table will not exists during table creation
- return new ArrayList<>(0);
+ return Collections.emptyList();
}
return result;
}
diff --git a/hcatalog/webhcat/java-client/src/main/java/org/apache/hive/hcatalog/api/HCatClientHMSImpl.java b/hcatalog/webhcat/java-client/src/main/java/org/apache/hive/hcatalog/api/HCatClientHMSImpl.java
index b73b6fe..2f92733 100644
--- a/hcatalog/webhcat/java-client/src/main/java/org/apache/hive/hcatalog/api/HCatClientHMSImpl.java
+++ b/hcatalog/webhcat/java-client/src/main/java/org/apache/hive/hcatalog/api/HCatClientHMSImpl.java
@@ -21,6 +21,7 @@ package org.apache.hive.hcatalog.api;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
+import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
@@ -1007,7 +1008,7 @@ public class HCatClientHMSImpl extends HCatClient {
}
});
} else {
- return new ArrayList<HCatNotificationEvent>();
+ return Collections.emptyList();
}
} catch (TException e) {
throw new ConnectionFailureException("TException while getting notifications", e);
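
A side note on hunks like the one above, where the old code named the element type explicitly (new ArrayList<HCatNotificationEvent>()): no type witness is needed on the replacement, because the compiler infers the type parameter of Collections.emptyList() from the method's declared return type. A compilable illustration with a hypothetical method:

    import java.util.Collections;
    import java.util.List;

    class InferenceDemo {
        // Hypothetical method, for illustration only.
        static List<String> noValues() {
            return Collections.emptyList(); // T inferred as String from the return type
            // explicit equivalent: return Collections.<String>emptyList();
        }
    }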
diff --git a/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/ZooKeeperCleanup.java b/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/ZooKeeperCleanup.java
index 4ebc34b..ecf3ef5 100644
--- a/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/ZooKeeperCleanup.java
+++ b/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/ZooKeeperCleanup.java
@@ -19,7 +19,6 @@
package org.apache.hive.hcatalog.templeton.tool;
import java.io.IOException;
-import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Date;
@@ -138,7 +137,7 @@ public class ZooKeeperCleanup extends Thread {
} catch (IOException e) {
LOG.info("No jobs to check.");
}
- return new ArrayList<String>();
+ return Collections.emptyList();
}
/**
diff --git a/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/ZooKeeperStorage.java b/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/ZooKeeperStorage.java
index 02a8926..2919038 100644
--- a/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/ZooKeeperStorage.java
+++ b/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/ZooKeeperStorage.java
@@ -20,6 +20,7 @@ package org.apache.hive.hcatalog.templeton.tool;
import java.io.IOException;
import java.util.ArrayList;
+import java.util.Collections;
import java.util.List;
import org.apache.hadoop.hive.common.ZooKeeperHiveHelper;
@@ -280,7 +281,7 @@ public class ZooKeeperStorage implements TempletonStorage {
try {
return zk.getChildren().forPath(getPath(type));
} catch (Exception e) {
- return new ArrayList<String>();
+ return Collections.emptyList();
}
}
diff --git a/kudu-handler/src/java/org/apache/hadoop/hive/kudu/KuduPredicateHandler.java b/kudu-handler/src/java/org/apache/hadoop/hive/kudu/KuduPredicateHandler.java
index 4807f53..5168b45 100644
--- a/kudu-handler/src/java/org/apache/hadoop/hive/kudu/KuduPredicateHandler.java
+++ b/kudu-handler/src/java/org/apache/hadoop/hive/kudu/KuduPredicateHandler.java
@@ -119,7 +119,7 @@ public final class KuduPredicateHandler {
public static List<KuduPredicate> getPredicates(Configuration conf, Schema schema) {
SearchArgument sarg = ConvertAstToSearchArg.createFromConf(conf);
if (sarg == null) {
- return new ArrayList<>();
+ return Collections.emptyList();
}
return toKuduPredicates(sarg, schema);
}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/SerializationUtilities.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/SerializationUtilities.java
index f19f394..3e3fa46 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/SerializationUtilities.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/SerializationUtilities.java
@@ -28,8 +28,8 @@ import java.net.URI;
import java.nio.charset.StandardCharsets;
import java.sql.Timestamp;
import java.time.ZoneId;
-import java.util.ArrayList;
import java.util.Arrays;
+import java.util.Collections;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
@@ -694,7 +694,7 @@ public class SerializationUtilities {
*/
public static List<Operator<?>> cloneOperatorTree(List<Operator<?>> roots) {
if (roots.isEmpty()) {
- return new ArrayList<>();
+ return Collections.emptyList();
}
ByteArrayOutputStream baos = new ByteArrayOutputStream(4096);
CompilationOpContext ctx = roots.get(0).getCompilationOpContext();
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java
index 5d244ec..ace24be 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java
@@ -4141,7 +4141,7 @@ public final class Utilities {
String[] classNames = org.apache.hadoop.util.StringUtils.getStrings(HiveConf.getVar(hiveConf,
confVar));
if (classNames == null) {
- return new ArrayList<>(0);
+ return Collections.emptyList();
}
Collection<Class<?>> classList = new ArrayList<Class<?>>(classNames.length);
for (String className : classNames) {
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkPlan.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkPlan.java
index 2f6dd4a..8cab3ef 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkPlan.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkPlan.java
@@ -20,7 +20,7 @@ package org.apache.hadoop.hive.ql.exec.spark;
import java.io.ByteArrayOutputStream;
import java.io.PrintStream;
-import java.util.ArrayList;
+import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedList;
@@ -31,7 +31,6 @@ import java.util.Set;
import org.apache.commons.lang3.StringUtils;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.exec.ExplainTask;
-import org.apache.hadoop.hive.ql.exec.Utilities;
import org.apache.hadoop.hive.ql.parse.ExplainConfiguration;
import org.apache.hadoop.hive.ql.plan.ExplainWork;
import org.apache.hadoop.mapred.JobConf;
@@ -226,7 +225,7 @@ public class SparkPlan {
public List<SparkTran> getParents(SparkTran tran) {
if (!invertedTransGraph.containsKey(tran)) {
- return new ArrayList<SparkTran>();
+ return Collections.emptyList();
}
return invertedTransGraph.get(tran);
@@ -234,7 +233,7 @@ public class SparkPlan {
public List<SparkTran> getChildren(SparkTran tran) {
if (!transGraph.containsKey(tran)) {
- return new ArrayList<SparkTran>();
+ return Collections.emptyList();
}
return transGraph.get(tran);
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/HiveFileFormatUtils.java b/ql/src/java/org/apache/hadoop/hive/ql/io/HiveFileFormatUtils.java
index 22099e0..ce8f418 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/HiveFileFormatUtils.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/HiveFileFormatUtils.java
@@ -23,6 +23,7 @@ import java.nio.file.FileSystemNotFoundException;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.ArrayList;
+import java.util.Collections;
import java.util.HashMap;
import java.util.Iterator;
import java.util.LinkedList;
@@ -481,7 +482,7 @@ public final class HiveFileFormatUtils {
**/
public static List<String> doGetAliasesFromPath(Map<Path, List<String>> pathToAliases, Path dir) {
if (pathToAliases == null) {
- return new ArrayList<String>();
+ return Collections.emptyList();
}
Path path = getMatchingPath(pathToAliases, dir);
return pathToAliases.get(path);
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java b/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
index c5901cd..8497ec5 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
@@ -1625,8 +1625,7 @@ public class Hive {
List<RelOptMaterialization> materializedViews =
HiveMaterializedViewsRegistry.get().getRewritingMaterializedViews();
if (materializedViews.isEmpty()) {
- // Bail out: empty list
- return new ArrayList<>();
+ return Collections.emptyList();
}
// Add to final result
return filterAugmentMaterializedViews(materializedViews, tablesUsed, txnMgr);
@@ -1772,8 +1771,7 @@ public class Hive {
List<Table> materializedViewTables =
getAllMaterializedViewObjectsForRewriting();
if (materializedViewTables.isEmpty()) {
- // Bail out: empty list
- return new ArrayList<>();
+ return Collections.emptyList();
}
// Return final result
return getValidMaterializedViews(materializedViewTables, tablesUsed, false, txnMgr);
@@ -3819,7 +3817,7 @@ private void constructOneLBLocationMap(FileStatus fSta,
private static List<Partition> convertFromMetastore(Table tbl,
List<org.apache.hadoop.hive.metastore.api.Partition> partitions) throws HiveException {
if (partitions == null) {
- return new ArrayList<Partition>();
+ return Collections.emptyList();
}
List<Partition> results = new ArrayList<Partition>(partitions.size());
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/metadata/Partition.java b/ql/src/java/org/apache/hadoop/hive/ql/metadata/Partition.java
index 136709c..9326ea3 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/metadata/Partition.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/metadata/Partition.java
@@ -21,6 +21,7 @@ package org.apache.hadoop.hive.ql.metadata;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Arrays;
+import java.util.Collections;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.List;
@@ -511,7 +512,7 @@ public class Partition implements Serializable {
tPartition.getSd().getSerdeInfo().getSerializationLib(), e);
}
- return new ArrayList<FieldSchema>();
+ return Collections.emptyList();
}
public String getLocation() {
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/metadata/Table.java b/ql/src/java/org/apache/hadoop/hive/ql/metadata/Table.java
index 6d56536..8805eee 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/metadata/Table.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/metadata/Table.java
@@ -22,6 +22,7 @@ import java.io.IOException;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Arrays;
+import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedHashMap;
@@ -714,7 +715,7 @@ public class Table implements Serializable {
} catch (Exception e) {
LOG.error("Unable to get field from serde: " + serializationLib, e);
}
- return new ArrayList<FieldSchema>();
+ return Collections.emptyList();
}
/**
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/rules/HivePreFilteringRule.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/rules/HivePreFilteringRule.java
index 33205a5..c78e7a4 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/rules/HivePreFilteringRule.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/rules/HivePreFilteringRule.java
@@ -18,6 +18,7 @@
package org.apache.hadoop.hive.ql.optimizer.calcite.rules;
import java.util.ArrayList;
+import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
@@ -204,13 +205,13 @@ public class HivePreFilteringRule extends RelOptRule {
for (RexNode conjunction : conjunctions) {
// We do not know what it is, we bail out for safety
if (!(conjunction instanceof RexCall) || !HiveCalciteUtil.isDeterministic(conjunction)) {
- return new ArrayList<>();
+ return Collections.emptyList();
}
RexCall conjCall = (RexCall) conjunction;
Set<Integer> refs = HiveCalciteUtil.getInputRefs(conjCall);
if (refs.size() != 1) {
// We do not know what it is, we bail out for safety
- return new ArrayList<>();
+ return Collections.emptyList();
}
RexNode ref = rexBuilder.makeInputRef(input, refs.iterator().next());
String stringRef = ref.toString();
@@ -227,7 +228,7 @@ public class HivePreFilteringRule extends RelOptRule {
// If we did not add any factor or there are no common factors, we can
// bail out
if (refsInAllOperands.isEmpty()) {
- return new ArrayList<>();
+ return Collections.emptyList();
}
}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/PlanUtils.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/PlanUtils.java
index 6282c8e..2fb452b 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/plan/PlanUtils.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/PlanUtils.java
@@ -660,7 +660,7 @@ public final class PlanUtils {
public static List<FieldSchema> getFieldSchemasFromColumnInfo(
List<ColumnInfo> cols, String fieldPrefix) {
if ((cols == null) || (cols.size() == 0)) {
- return new ArrayList<FieldSchema>();
+ return Collections.emptyList();
}
List<FieldSchema> schemas = new ArrayList<FieldSchema>(cols.size());
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/ReduceSinkDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/ReduceSinkDesc.java
index a807fa9..34be0b6 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/plan/ReduceSinkDesc.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/ReduceSinkDesc.java
@@ -20,6 +20,7 @@ package org.apache.hadoop.hive.ql.plan;
import java.util.ArrayList;
import java.util.Arrays;
+import java.util.Collections;
import java.util.EnumSet;
import java.util.LinkedHashSet;
import java.util.List;
@@ -575,7 +576,7 @@ public class ReduceSinkDesc extends AbstractOperatorDesc {
int[] keyColumnMap = vectorReduceSinkInfo.getReduceSinkKeyColumnMap();
if (keyColumnMap == null) {
// Always show an array.
- return new ArrayList<String>();
+ return Collections.emptyList();
}
return outputColumnsAndTypesToStringList(
vectorReduceSinkInfo.getReduceSinkKeyColumnMap(),
@@ -591,7 +592,7 @@ public class ReduceSinkDesc extends AbstractOperatorDesc {
int[] valueColumnMap = vectorReduceSinkInfo.getReduceSinkValueColumnMap();
if (valueColumnMap == null) {
// Always show an array.
- return new ArrayList<String>();
+ return Collections.emptyList();
}
return outputColumnsAndTypesToStringList(
vectorReduceSinkInfo.getReduceSinkValueColumnMap(),
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/stats/StatsUtils.java b/ql/src/java/org/apache/hadoop/hive/ql/stats/StatsUtils.java
index 0ad886e..9b160dd 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/stats/StatsUtils.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/stats/StatsUtils.java
@@ -1059,8 +1059,8 @@ public class StatsUtils {
}
private static List<ColStatistics> convertColStats(List<ColumnStatisticsObj> colStats, String tabName) {
- if (colStats==null) {
- return new ArrayList<ColStatistics>();
+ if (colStats == null) {
+ return Collections.emptyList();
}
List<ColStatistics> stats = new ArrayList<ColStatistics>(colStats.size());
for (ColumnStatisticsObj statObj : colStats) {
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/NullStructSerDe.java b/serde/src/java/org/apache/hadoop/hive/serde2/NullStructSerDe.java
index db08878..09b7630 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/NullStructSerDe.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/NullStructSerDe.java
@@ -17,7 +17,7 @@
*/
package org.apache.hadoop.hive.serde2;
-import java.util.ArrayList;
+import java.util.Collections;
import java.util.List;
import java.util.Properties;
@@ -107,7 +107,7 @@ public class NullStructSerDe extends AbstractSerDe {
@Override
public List<NullStructField> getAllStructFieldRefs() {
- return new ArrayList<NullStructField>();
+ return Collections.emptyList();
}
@Override
@@ -117,7 +117,7 @@ public class NullStructSerDe extends AbstractSerDe {
@Override
public List<Object> getStructFieldsDataAsList(Object data) {
- return new ArrayList<Object>();
+ return Collections.emptyList();
}
}
diff --git a/standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/utils/FileUtils.java b/standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/utils/FileUtils.java
index e9342e6..b731e38 100644
--- a/standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/utils/FileUtils.java
+++ b/standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/utils/FileUtils.java
@@ -332,7 +332,7 @@ public class FileUtils {
// does not exist. But getFileStatus() throw IOException. To mimic the
// similar behavior we will return empty array on exception. For external
// tables, the path of the table will not exists during table creation
- return new ArrayList<>(0);
+ return Collections.emptyList();
}
}
diff --git a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
index b13ad96..915618c 100644
--- a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
+++ b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
@@ -6224,12 +6224,10 @@ public class HiveMetaStore extends ThriftHiveMetastore {
@Override
public List<String> partition_name_to_vals(String part_name) throws TException {
if (part_name.length() == 0) {
- return new ArrayList<>();
+ return Collections.emptyList();
}
LinkedHashMap<String, String> map = Warehouse.makeSpecFromName(part_name);
- List<String> part_vals = new ArrayList<>();
- part_vals.addAll(map.values());
- return part_vals;
+ return new ArrayList<>(map.values());
}
@Override
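
The second change in the hunk above is a slightly different cleanup: building the result through the ArrayList copy constructor instead of creating an empty list and filling it with addAll(). The copy constructor sizes the backing array to the source collection up front, so no intermediate resize is needed. A small runnable sketch, with a made-up partition spec:

    import java.util.ArrayList;
    import java.util.LinkedHashMap;
    import java.util.List;

    class CopyCtorDemo {
        public static void main(String[] args) {
            LinkedHashMap<String, String> spec = new LinkedHashMap<>();
            spec.put("ds", "2020-05-09"); // hypothetical partition values
            spec.put("hr", "12");
            // One allocation, sized to spec.size():
            List<String> partVals = new ArrayList<>(spec.values());
            System.out.println(partVals); // [2020-05-09, 12]
        }
    }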
diff --git a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/MetastoreDefaultTransformer.java b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/MetastoreDefaultTransformer.java
index 3c483ee..eff19b2 100644
--- a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/MetastoreDefaultTransformer.java
+++ b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/MetastoreDefaultTransformer.java
@@ -26,6 +26,7 @@ import static org.apache.hadoop.hive.metastore.utils.MetaStoreUtils.EXTERNAL_TAB
import java.util.ArrayList;
import java.util.Arrays;
+import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
@@ -706,17 +707,17 @@ public class MetastoreDefaultTransformer implements IMetaStoreMetadataTransforme
return list1;
if (list1 == null || list1.size() == 0)
- return new ArrayList<String>();
+ return Collections.emptyList();
if (list2.containsAll(list1))
- return new ArrayList<String>();
+ return Collections.emptyList();
diffList.addAll(list2);
LOG.debug("diffList=" + Arrays.toString(diffList.toArray()) + ",master list=" + Arrays.toString(list1.toArray()));
if (diffList.retainAll(list1)) {
LOG.debug("diffList=" + Arrays.toString(diffList.toArray()));
if (diffList.size() == list1.size()) { // lists match
- return new ArrayList<String>(); // return empty list indicating no missing elements
+ return Collections.emptyList(); // return empty list indicating no missing elements
} else {
list1.removeAll(diffList);
LOG.debug("list1.size():" + list1.size());
diff --git a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/ObjectStore.java b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/ObjectStore.java
index b01428e..5748841 100644
--- a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/ObjectStore.java
+++ b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/ObjectStore.java
@@ -5943,7 +5943,7 @@ public class ObjectStore implements RawStore, Configurable {
return grantInfos;
}
}
- return new ArrayList<>(0);
+ return Collections.emptyList();
}
@@ -6178,7 +6178,7 @@ public class ObjectStore implements RawStore, Configurable {
return grantInfos;
}
}
- return new ArrayList<>(0);
+ return Collections.emptyList();
}
private List<PrivilegeGrantInfo> getColumnPrivilege(String catName, String dbName,
@@ -6221,7 +6221,7 @@ public class ObjectStore implements RawStore, Configurable {
return grantInfos;
}
}
- return new ArrayList<>(0);
+ return Collections.emptyList();
}
@Override
diff --git a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/utils/MetaStoreServerUtils.java b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/utils/MetaStoreServerUtils.java
index 7c4e129..92d10cd 100644
--- a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/utils/MetaStoreServerUtils.java
+++ b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/utils/MetaStoreServerUtils.java
@@ -30,6 +30,7 @@ import java.nio.charset.StandardCharsets;
import java.security.MessageDigest;
import java.util.ArrayList;
import java.util.Collection;
+import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
@@ -181,7 +182,7 @@ public class MetaStoreServerUtils {
if (colStatsMap.size() < 1) {
LOG.debug("No stats data found for: tblName= {}, partNames= {}, colNames= {}",
TableName.getQualified(catName, dbName, tableName), partNames, colNames);
- return new ArrayList<ColumnStatisticsObj>();
+ return Collections.emptyList();
}
return aggrPartitionStats(colStatsMap, partNames, areAllPartsFound,
useDensityFunctionForNDVEstimation, ndvTuner);