Posted to commits@hive.apache.org by ha...@apache.org on 2016/04/12 18:27:41 UTC
hive git commit: HIVE-13472 : Replace primitive wrapper's valueOf method with parse* method to avoid unnecessary boxing/unboxing (Kousuke Saruta via Ashutosh Chauhan)
Repository: hive
Updated Branches:
refs/heads/master 14bcbab13 -> 547b37dcb
HIVE-13472 : Replace primitive wrapper's valueOf method with parse* method to avoid unnecessary boxing/unboxing (Kousuke Saruta via Ashutosh Chauhan)
Signed-off-by: Ashutosh Chauhan <ha...@apache.org>
Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/547b37dc
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/547b37dc
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/547b37dc
Branch: refs/heads/master
Commit: 547b37dcb24ed745fa2a3389ba2cafdb0476a325
Parents: 14bcbab
Author: Kousuke Saruta <sa...@oss.nttdata.co.jp>
Authored: Sun Apr 10 03:03:00 2016 -0800
Committer: Ashutosh Chauhan <ha...@apache.org>
Committed: Tue Apr 12 09:26:27 2016 -0700
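For context on the pattern applied throughout this diff: the wrapper classes' valueOf(String) methods return boxed objects (java.lang.Integer, java.lang.Boolean, and so on), so assigning the result to a primitive costs a box followed by an immediate unbox, while the parse* methods (Integer.parseInt, Long.parseLong, Boolean.parseBoolean, ...) return the primitive directly. A minimal standalone sketch, not part of the commit:

public class BoxingDemo {
  public static void main(String[] args) {
    // Integer.valueOf(String) calls parseInt internally, then boxes the
    // result; assigning to an int unboxes it again.
    int viaValueOf = Integer.valueOf("42");   // parse + box + unbox
    // Integer.parseInt returns the primitive, skipping the round trip.
    int viaParse = Integer.parseInt("42");    // parse only
    System.out.println(viaValueOf + " " + viaParse);
  }
}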
----------------------------------------------------------------------
.../java/org/apache/hive/beeline/BeeLine.java | 4 ++--
.../beeline/SeparatedValuesOutputFormat.java | 2 +-
.../apache/hadoop/hive/common/FileUtils.java | 2 +-
.../org/apache/hadoop/hive/conf/HiveConf.java | 4 ++--
.../org/apache/hadoop/hive/conf/Validator.java | 2 +-
.../hadoop/hive/hbase/HBaseSerDeParameters.java | 6 ++---
.../hive/hbase/HiveHBaseInputFormatUtil.java | 6 ++---
.../mapreduce/FileOutputCommitterContainer.java | 4 ++--
.../mapreduce/FosterStorageHandler.java | 6 ++---
.../streaming/StreamingIntegrationTester.java | 12 +++++-----
.../hive/jdbc/miniHS2/StartMiniHS2Cluster.java | 2 +-
.../org/apache/hive/jdbc/HiveBaseResultSet.java | 10 ++++----
.../impl/LlapZookeeperRegistryImpl.java | 10 ++++----
.../llap/shufflehandler/ShuffleHandler.java | 2 +-
.../hive/metastore/MetaStoreSchemaInfo.java | 4 ++--
.../hive/metastore/hbase/HBaseImport.java | 4 ++--
.../hive/metastore/hbase/HBaseReadWrite.java | 10 ++++----
.../hadoop/hive/ql/exec/FileSinkOperator.java | 4 ++--
.../ql/io/parquet/convert/ETypeConverter.java | 3 ++-
.../io/parquet/convert/HiveStructConverter.java | 2 +-
.../write/ParquetRecordWriterWrapper.java | 4 ++--
.../apache/hadoop/hive/ql/metadata/Hive.java | 2 +-
.../SizeBasedBigTableSelectorForAutoSMJ.java | 2 +-
.../calcite/cost/HiveAlgorithmsUtil.java | 12 +++++-----
.../apache/hadoop/hive/ql/parse/ASTNode.java | 2 +-
.../hive/ql/parse/DDLSemanticAnalyzer.java | 4 ++--
.../apache/hadoop/hive/ql/parse/ParseUtils.java | 8 +++----
.../hadoop/hive/ql/parse/ReplicationSpec.java | 3 ++-
.../hadoop/hive/ql/parse/SemanticAnalyzer.java | 10 ++++----
.../hadoop/hive/ql/parse/TableSample.java | 4 ++--
.../hive/ql/parse/spark/GenSparkUtils.java | 2 +-
.../apache/hadoop/hive/ql/plan/PlanUtils.java | 2 +-
.../hive/ql/stats/fs/FSStatsAggregator.java | 2 +-
.../hive/ql/stats/fs/FSStatsPublisher.java | 3 ++-
.../hive/ql/txn/compactor/CompactorMR.java | 12 +++++-----
.../apache/hadoop/hive/ql/udf/UDFToDouble.java | 2 +-
.../apache/hadoop/hive/ql/udf/UDFToFloat.java | 2 +-
.../hive/ql/udf/generic/GenericUDFBetween.java | 2 +-
.../results/clientnegative/dyn_part_max.q.out | 2 +-
.../serde2/MetadataTypedColumnsetSerDe.java | 2 +-
.../serde2/dynamic_type/thrift_grammar.java | 2 +-
.../hive/serde2/lazy/LazySerDeParameters.java | 3 ++-
.../hadoop/hive/serde2/lazy/LazyUtils.java | 2 +-
.../serde2/thrift/TCTLSeparatedProtocol.java | 25 +++++++++-----------
.../hive/serde2/typeinfo/TypeInfoUtils.java | 6 ++---
.../hive/service/cli/HiveSQLException.java | 6 ++---
.../service/cli/thrift/ThriftCLIService.java | 4 ++--
.../junit/runners/ConcurrentTestRunner.java | 4 ++--
48 files changed, 116 insertions(+), 117 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/hive/blob/547b37dc/beeline/src/java/org/apache/hive/beeline/BeeLine.java
----------------------------------------------------------------------
diff --git a/beeline/src/java/org/apache/hive/beeline/BeeLine.java b/beeline/src/java/org/apache/hive/beeline/BeeLine.java
index 98d4e09..5e6e9ba 100644
--- a/beeline/src/java/org/apache/hive/beeline/BeeLine.java
+++ b/beeline/src/java/org/apache/hive/beeline/BeeLine.java
@@ -664,8 +664,8 @@ public class BeeLine implements Closeable {
}
dbName = commandLine.getOptionValue("database");
- getOpts().setVerbose(Boolean.valueOf(commandLine.getOptionValue("verbose")));
- getOpts().setSilent(Boolean.valueOf(commandLine.getOptionValue("slient")));
+ getOpts().setVerbose(Boolean.parseBoolean(commandLine.getOptionValue("verbose")));
+ getOpts().setSilent(Boolean.parseBoolean(commandLine.getOptionValue("slient")));
int code = 0;
if (commandLine.getOptionValues("e") != null) {
http://git-wip-us.apache.org/repos/asf/hive/blob/547b37dc/beeline/src/java/org/apache/hive/beeline/SeparatedValuesOutputFormat.java
----------------------------------------------------------------------
diff --git a/beeline/src/java/org/apache/hive/beeline/SeparatedValuesOutputFormat.java b/beeline/src/java/org/apache/hive/beeline/SeparatedValuesOutputFormat.java
index 61b84ef..66d9fd0 100644
--- a/beeline/src/java/org/apache/hive/beeline/SeparatedValuesOutputFormat.java
+++ b/beeline/src/java/org/apache/hive/beeline/SeparatedValuesOutputFormat.java
@@ -108,7 +108,7 @@ class SeparatedValuesOutputFormat implements OutputFormat {
}
String parsedOptionStr = quotingDisabledStr.toLowerCase();
if (parsedOptionStr.equals("false") || parsedOptionStr.equals("true")) {
- return Boolean.valueOf(parsedOptionStr);
+ return Boolean.parseBoolean(parsedOptionStr);
} else {
beeLine.error("System Property disable.quoting.for.sv is now " + parsedOptionStr
+ " which only accepts boolean value");
http://git-wip-us.apache.org/repos/asf/hive/blob/547b37dc/common/src/java/org/apache/hadoop/hive/common/FileUtils.java
----------------------------------------------------------------------
diff --git a/common/src/java/org/apache/hadoop/hive/common/FileUtils.java b/common/src/java/org/apache/hadoop/hive/common/FileUtils.java
index 51340d8..f7d41cd 100644
--- a/common/src/java/org/apache/hadoop/hive/common/FileUtils.java
+++ b/common/src/java/org/apache/hadoop/hive/common/FileUtils.java
@@ -295,7 +295,7 @@ public final class FileUtils {
if (c == '%' && i + 2 < path.length()) {
int code = -1;
try {
- code = Integer.valueOf(path.substring(i + 1, i + 3), 16);
+ code = Integer.parseInt(path.substring(i + 1, i + 3), 16);
} catch (Exception e) {
code = -1;
}
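The two-argument overload preserves the radix: Integer.parseInt(s, 16) parses the two hex digits following '%' exactly as Integer.valueOf(s, 16) did, just without boxing the result. Illustration only:

int code = Integer.parseInt("2F", 16);  // 47, i.e. '/'
// Integer.valueOf("2F", 16) computes the same value, then boxes it.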
http://git-wip-us.apache.org/repos/asf/hive/blob/547b37dc/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
----------------------------------------------------------------------
diff --git a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java b/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
index 1702eb1..c7e5b33 100644
--- a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
+++ b/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
@@ -3185,12 +3185,12 @@ public class HiveConf extends Configuration {
public static long toTime(String value, TimeUnit inputUnit, TimeUnit outUnit) {
String[] parsed = parseNumberFollowedByUnit(value.trim());
- return outUnit.convert(Long.valueOf(parsed[0].trim().trim()), unitFor(parsed[1].trim(), inputUnit));
+ return outUnit.convert(Long.parseLong(parsed[0].trim()), unitFor(parsed[1].trim(), inputUnit));
}
public static long toSizeBytes(String value) {
String[] parsed = parseNumberFollowedByUnit(value.trim());
- return Long.valueOf(parsed[0].trim()) * multiplierFor(parsed[1].trim());
+ return Long.parseLong(parsed[0].trim()) * multiplierFor(parsed[1].trim());
}
private static String[] parseNumberFollowedByUnit(String value) {
http://git-wip-us.apache.org/repos/asf/hive/blob/547b37dc/common/src/java/org/apache/hadoop/hive/conf/Validator.java
----------------------------------------------------------------------
diff --git a/common/src/java/org/apache/hadoop/hive/conf/Validator.java b/common/src/java/org/apache/hadoop/hive/conf/Validator.java
index 3fb09b9..bb8962a 100644
--- a/common/src/java/org/apache/hadoop/hive/conf/Validator.java
+++ b/common/src/java/org/apache/hadoop/hive/conf/Validator.java
@@ -199,7 +199,7 @@ public interface Validator {
@Override
public String validate(String value) {
try {
- float fvalue = Float.valueOf(value);
+ float fvalue = Float.parseFloat(value);
if (fvalue < 0 || fvalue > 1) {
return "Invalid ratio " + value + ", which should be in between 0 to 1";
}
http://git-wip-us.apache.org/repos/asf/hive/blob/547b37dc/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseSerDeParameters.java
----------------------------------------------------------------------
diff --git a/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseSerDeParameters.java b/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseSerDeParameters.java
index a11d3cd..617c293 100644
--- a/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseSerDeParameters.java
+++ b/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseSerDeParameters.java
@@ -69,8 +69,8 @@ public class HBaseSerDeParameters {
// Read configuration parameters
columnMappingString = tbl.getProperty(HBaseSerDe.HBASE_COLUMNS_MAPPING);
doColumnRegexMatching =
- Boolean.valueOf(tbl.getProperty(HBaseSerDe.HBASE_COLUMNS_REGEX_MATCHING, "true"));
- doColumnPrefixCut = Boolean.valueOf(tbl.getProperty(HBaseSerDe.HBASE_COLUMNS_PREFIX_HIDE, "false"));
+ Boolean.parseBoolean(tbl.getProperty(HBaseSerDe.HBASE_COLUMNS_REGEX_MATCHING, "true"));
+ doColumnPrefixCut = Boolean.parseBoolean(tbl.getProperty(HBaseSerDe.HBASE_COLUMNS_PREFIX_HIDE, "false"));
// Parse and initialize the HBase columns mapping
columnMappings = HBaseSerDe.parseColumnsMapping(columnMappingString, doColumnRegexMatching, doColumnPrefixCut);
@@ -95,7 +95,7 @@ public class HBaseSerDeParameters {
}
this.serdeParams = new LazySerDeParameters(job, tbl, serdeName);
- this.putTimestamp = Long.valueOf(tbl.getProperty(HBaseSerDe.HBASE_PUT_TIMESTAMP, "-1"));
+ this.putTimestamp = Long.parseLong(tbl.getProperty(HBaseSerDe.HBASE_PUT_TIMESTAMP, "-1"));
columnMappings.setHiveColumnDescription(serdeName, serdeParams.getColumnNames(),
serdeParams.getColumnTypes());
http://git-wip-us.apache.org/repos/asf/hive/blob/547b37dc/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HiveHBaseInputFormatUtil.java
----------------------------------------------------------------------
diff --git a/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HiveHBaseInputFormatUtil.java b/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HiveHBaseInputFormatUtil.java
index c002070..6054d53 100644
--- a/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HiveHBaseInputFormatUtil.java
+++ b/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HiveHBaseInputFormatUtil.java
@@ -124,15 +124,15 @@ class HiveHBaseInputFormatUtil {
String scanCache = jobConf.get(HBaseSerDe.HBASE_SCAN_CACHE);
if (scanCache != null) {
- scan.setCaching(Integer.valueOf(scanCache));
+ scan.setCaching(Integer.parseInt(scanCache));
}
String scanCacheBlocks = jobConf.get(HBaseSerDe.HBASE_SCAN_CACHEBLOCKS);
if (scanCacheBlocks != null) {
- scan.setCacheBlocks(Boolean.valueOf(scanCacheBlocks));
+ scan.setCacheBlocks(Boolean.parseBoolean(scanCacheBlocks));
}
String scanBatch = jobConf.get(HBaseSerDe.HBASE_SCAN_BATCH);
if (scanBatch != null) {
- scan.setBatch(Integer.valueOf(scanBatch));
+ scan.setBatch(Integer.parseInt(scanBatch));
}
return scan;
}
http://git-wip-us.apache.org/repos/asf/hive/blob/547b37dc/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/FileOutputCommitterContainer.java
----------------------------------------------------------------------
diff --git a/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/FileOutputCommitterContainer.java b/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/FileOutputCommitterContainer.java
index 367f4ea..9db3dc1 100644
--- a/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/FileOutputCommitterContainer.java
+++ b/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/FileOutputCommitterContainer.java
@@ -107,7 +107,7 @@ class FileOutputCommitterContainer extends OutputCommitterContainer {
this.partitionsDiscovered = !dynamicPartitioningUsed;
cachedStorageHandler = HCatUtil.getStorageHandler(context.getConfiguration(), jobInfo.getTableInfo().getStorerInfo());
Table table = new Table(jobInfo.getTableInfo().getTable());
- if (dynamicPartitioningUsed && Boolean.valueOf((String)table.getProperty("EXTERNAL"))
+ if (dynamicPartitioningUsed && Boolean.parseBoolean((String)table.getProperty("EXTERNAL"))
&& jobInfo.getCustomDynamicPath() != null
&& jobInfo.getCustomDynamicPath().length() > 0) {
customDynamicLocationUsed = true;
@@ -355,7 +355,7 @@ class FileOutputCommitterContainer extends OutputCommitterContainer {
if (customDynamicLocationUsed) {
partPath = new Path(dynPartPath);
} else if (!dynamicPartitioningUsed
- && Boolean.valueOf((String)table.getProperty("EXTERNAL"))
+ && Boolean.parseBoolean((String)table.getProperty("EXTERNAL"))
&& jobInfo.getLocation() != null && jobInfo.getLocation().length() > 0) {
// Now, we need to de-scratchify this location - i.e., get rid of any
// _SCRATCH[\d].?[\d]+ from the location.
http://git-wip-us.apache.org/repos/asf/hive/blob/547b37dc/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/FosterStorageHandler.java
----------------------------------------------------------------------
diff --git a/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/FosterStorageHandler.java b/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/FosterStorageHandler.java
index ef7aa48..14f7316 100644
--- a/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/FosterStorageHandler.java
+++ b/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/FosterStorageHandler.java
@@ -157,7 +157,7 @@ public class FosterStorageHandler extends DefaultStorageHandler {
// we create a temp dir for the associated write job
if (dynHash != null) {
// if external table and custom root specified, update the parent path
- if (Boolean.valueOf((String)tableDesc.getProperties().get("EXTERNAL"))
+ if (Boolean.parseBoolean((String)tableDesc.getProperties().get("EXTERNAL"))
&& jobInfo.getCustomDynamicRoot() != null
&& jobInfo.getCustomDynamicRoot().length() > 0) {
parentPath = new Path(parentPath, jobInfo.getCustomDynamicRoot()).toString();
@@ -170,14 +170,14 @@ public class FosterStorageHandler extends DefaultStorageHandler {
String outputLocation;
if ((dynHash != null)
- && Boolean.valueOf((String)tableDesc.getProperties().get("EXTERNAL"))
+ && Boolean.parseBoolean((String)tableDesc.getProperties().get("EXTERNAL"))
&& jobInfo.getCustomDynamicPath() != null
&& jobInfo.getCustomDynamicPath().length() > 0) {
// dynamic partitioning with custom path; resolve the custom path
// using partition column values
outputLocation = HCatFileUtil.resolveCustomPath(jobInfo, null, true);
} else if ((dynHash == null)
- && Boolean.valueOf((String)tableDesc.getProperties().get("EXTERNAL"))
+ && Boolean.parseBoolean((String)tableDesc.getProperties().get("EXTERNAL"))
&& jobInfo.getLocation() != null && jobInfo.getLocation().length() > 0) {
// honor custom location for external table apart from what metadata specifies
outputLocation = jobInfo.getLocation();
http://git-wip-us.apache.org/repos/asf/hive/blob/547b37dc/hcatalog/streaming/src/test/org/apache/hive/hcatalog/streaming/StreamingIntegrationTester.java
----------------------------------------------------------------------
diff --git a/hcatalog/streaming/src/test/org/apache/hive/hcatalog/streaming/StreamingIntegrationTester.java b/hcatalog/streaming/src/test/org/apache/hive/hcatalog/streaming/StreamingIntegrationTester.java
index 0fcc103..bf2cc63 100644
--- a/hcatalog/streaming/src/test/org/apache/hive/hcatalog/streaming/StreamingIntegrationTester.java
+++ b/hcatalog/streaming/src/test/org/apache/hive/hcatalog/streaming/StreamingIntegrationTester.java
@@ -166,12 +166,12 @@ public class StreamingIntegrationTester {
String db = cmdline.getOptionValue('d');
String table = cmdline.getOptionValue('t');
String uri = cmdline.getOptionValue('m');
- int txnsPerBatch = Integer.valueOf(cmdline.getOptionValue('n', "100"));
- int writers = Integer.valueOf(cmdline.getOptionValue('w', "2"));
- int batches = Integer.valueOf(cmdline.getOptionValue('i', "10"));
- int recordsPerTxn = Integer.valueOf(cmdline.getOptionValue('r', "100"));
- int frequency = Integer.valueOf(cmdline.getOptionValue('f', "1"));
- int ap = Integer.valueOf(cmdline.getOptionValue('a', "5"));
+ int txnsPerBatch = Integer.parseInt(cmdline.getOptionValue('n', "100"));
+ int writers = Integer.parseInt(cmdline.getOptionValue('w', "2"));
+ int batches = Integer.parseInt(cmdline.getOptionValue('i', "10"));
+ int recordsPerTxn = Integer.parseInt(cmdline.getOptionValue('r', "100"));
+ int frequency = Integer.parseInt(cmdline.getOptionValue('f', "1"));
+ int ap = Integer.parseInt(cmdline.getOptionValue('a', "5"));
float abortPct = ((float)ap) / 100.0f;
String[] partVals = cmdline.getOptionValues('p');
String[] cols = cmdline.getOptionValues('c');
http://git-wip-us.apache.org/repos/asf/hive/blob/547b37dc/itests/hive-unit/src/test/java/org/apache/hive/jdbc/miniHS2/StartMiniHS2Cluster.java
----------------------------------------------------------------------
diff --git a/itests/hive-unit/src/test/java/org/apache/hive/jdbc/miniHS2/StartMiniHS2Cluster.java b/itests/hive-unit/src/test/java/org/apache/hive/jdbc/miniHS2/StartMiniHS2Cluster.java
index 91cbd18..00527a1 100644
--- a/itests/hive-unit/src/test/java/org/apache/hive/jdbc/miniHS2/StartMiniHS2Cluster.java
+++ b/itests/hive-unit/src/test/java/org/apache/hive/jdbc/miniHS2/StartMiniHS2Cluster.java
@@ -30,7 +30,7 @@ public class StartMiniHS2Cluster {
MiniClusterType clusterType = MiniClusterType.valueOf(System.getProperty("miniHS2.clusterType", "MR").toUpperCase());
String confFilesProperty = System.getProperty("miniHS2.conf", "../../data/conf/hive-site.xml");
- boolean usePortsFromConf = Boolean.valueOf(System.getProperty("miniHS2.usePortsFromConf", "false"));
+ boolean usePortsFromConf = Boolean.parseBoolean(System.getProperty("miniHS2.usePortsFromConf", "false"));
// Load conf files
String[] confFiles = confFilesProperty.split(",");
http://git-wip-us.apache.org/repos/asf/hive/blob/547b37dc/jdbc/src/java/org/apache/hive/jdbc/HiveBaseResultSet.java
----------------------------------------------------------------------
diff --git a/jdbc/src/java/org/apache/hive/jdbc/HiveBaseResultSet.java b/jdbc/src/java/org/apache/hive/jdbc/HiveBaseResultSet.java
index 98d0370..88ba853 100644
--- a/jdbc/src/java/org/apache/hive/jdbc/HiveBaseResultSet.java
+++ b/jdbc/src/java/org/apache/hive/jdbc/HiveBaseResultSet.java
@@ -289,7 +289,7 @@ public abstract class HiveBaseResultSet implements ResultSet {
} else if (obj == null) {
return 0;
} else if (String.class.isInstance(obj)) {
- return Double.valueOf((String)obj);
+ return Double.parseDouble((String)obj);
}
throw new Exception("Illegal conversion");
} catch (Exception e) {
@@ -318,7 +318,7 @@ public abstract class HiveBaseResultSet implements ResultSet {
} else if (obj == null) {
return 0;
} else if (String.class.isInstance(obj)) {
- return Float.valueOf((String)obj);
+ return Float.parseFloat((String)obj);
}
throw new Exception("Illegal conversion");
} catch (Exception e) {
@@ -343,7 +343,7 @@ public abstract class HiveBaseResultSet implements ResultSet {
} else if (obj == null) {
return 0;
} else if (String.class.isInstance(obj)) {
- return Integer.valueOf((String)obj);
+ return Integer.parseInt((String)obj);
}
throw new Exception("Illegal conversion");
} catch (Exception e) {
@@ -365,7 +365,7 @@ public abstract class HiveBaseResultSet implements ResultSet {
} else if (obj == null) {
return 0;
} else if (String.class.isInstance(obj)) {
- return Long.valueOf((String)obj);
+ return Long.parseLong((String)obj);
}
throw new Exception("Illegal conversion");
} catch (Exception e) {
@@ -511,7 +511,7 @@ public abstract class HiveBaseResultSet implements ResultSet {
} else if (obj == null) {
return 0;
} else if (String.class.isInstance(obj)) {
- return Short.valueOf((String)obj);
+ return Short.parseShort((String)obj);
}
throw new Exception("Illegal conversion");
} catch (Exception e) {
http://git-wip-us.apache.org/repos/asf/hive/blob/547b37dc/llap-client/src/java/org/apache/hadoop/hive/llap/registry/impl/LlapZookeeperRegistryImpl.java
----------------------------------------------------------------------
diff --git a/llap-client/src/java/org/apache/hadoop/hive/llap/registry/impl/LlapZookeeperRegistryImpl.java b/llap-client/src/java/org/apache/hadoop/hive/llap/registry/impl/LlapZookeeperRegistryImpl.java
index 275cbc2..6af30d4 100644
--- a/llap-client/src/java/org/apache/hadoop/hive/llap/registry/impl/LlapZookeeperRegistryImpl.java
+++ b/llap-client/src/java/org/apache/hadoop/hive/llap/registry/impl/LlapZookeeperRegistryImpl.java
@@ -328,13 +328,13 @@ public class LlapZookeeperRegistryImpl implements ServiceRegistry {
RegistryTypeUtils.getAddressField(rpc.addresses.get(0),
AddressTypes.ADDRESS_HOSTNAME_FIELD);
this.rpcPort =
- Integer.valueOf(RegistryTypeUtils.getAddressField(rpc.addresses.get(0),
+ Integer.parseInt(RegistryTypeUtils.getAddressField(rpc.addresses.get(0),
AddressTypes.ADDRESS_PORT_FIELD));
this.mngPort =
- Integer.valueOf(RegistryTypeUtils.getAddressField(mng.addresses.get(0),
+ Integer.parseInt(RegistryTypeUtils.getAddressField(mng.addresses.get(0),
AddressTypes.ADDRESS_PORT_FIELD));
this.shufflePort =
- Integer.valueOf(RegistryTypeUtils.getAddressField(shuffle.addresses.get(0),
+ Integer.parseInt(RegistryTypeUtils.getAddressField(shuffle.addresses.get(0),
AddressTypes.ADDRESS_PORT_FIELD));
this.serviceAddress =
RegistryTypeUtils.getAddressField(services.addresses.get(0), AddressTypes.ADDRESS_URI);
@@ -383,8 +383,8 @@ public class LlapZookeeperRegistryImpl implements ServiceRegistry {
@Override
public Resource getResource() {
- int memory = Integer.valueOf(srv.get(ConfVars.LLAP_DAEMON_MEMORY_PER_INSTANCE_MB.varname));
- int vCores = Integer.valueOf(srv.get(ConfVars.LLAP_DAEMON_NUM_EXECUTORS.varname));
+ int memory = Integer.parseInt(srv.get(ConfVars.LLAP_DAEMON_MEMORY_PER_INSTANCE_MB.varname));
+ int vCores = Integer.parseInt(srv.get(ConfVars.LLAP_DAEMON_NUM_EXECUTORS.varname));
return Resource.newInstance(memory, vCores);
}
http://git-wip-us.apache.org/repos/asf/hive/blob/547b37dc/llap-server/src/java/org/apache/hadoop/hive/llap/shufflehandler/ShuffleHandler.java
----------------------------------------------------------------------
diff --git a/llap-server/src/java/org/apache/hadoop/hive/llap/shufflehandler/ShuffleHandler.java b/llap-server/src/java/org/apache/hadoop/hive/llap/shufflehandler/ShuffleHandler.java
index 39a1468..9a3e221 100644
--- a/llap-server/src/java/org/apache/hadoop/hive/llap/shufflehandler/ShuffleHandler.java
+++ b/llap-server/src/java/org/apache/hadoop/hive/llap/shufflehandler/ShuffleHandler.java
@@ -658,7 +658,7 @@ public class ShuffleHandler implements AttemptRegistrationListener {
final List<String> keepAliveList = q.get("keepAlive");
boolean keepAliveParam = false;
if (keepAliveList != null && keepAliveList.size() == 1) {
- keepAliveParam = Boolean.valueOf(keepAliveList.get(0));
+ keepAliveParam = Boolean.parseBoolean(keepAliveList.get(0));
if (LOG.isDebugEnabled()) {
LOG.debug("KeepAliveParam : " + keepAliveList
+ " : " + keepAliveParam);
http://git-wip-us.apache.org/repos/asf/hive/blob/547b37dc/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreSchemaInfo.java
----------------------------------------------------------------------
diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreSchemaInfo.java b/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreSchemaInfo.java
index 7c7f7ce..9c30ee7 100644
--- a/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreSchemaInfo.java
+++ b/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreSchemaInfo.java
@@ -193,8 +193,8 @@ public class MetaStoreSchemaInfo {
}
for (int i = 0; i < dbVerParts.length; i++) {
- Integer dbVerPart = Integer.valueOf(dbVerParts[i]);
- Integer hiveVerPart = Integer.valueOf(hiveVerParts[i]);
+ int dbVerPart = Integer.parseInt(dbVerParts[i]);
+ int hiveVerPart = Integer.parseInt(hiveVerParts[i]);
if (dbVerPart > hiveVerPart) {
return true;
} else if (dbVerPart < hiveVerPart) {
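This hunk also narrows the locals from Integer to int: with the boxed declarations, each > and < comparison auto-unboxed its operands, so parsing straight to primitives removes the round trip entirely. A sketch, illustration only:

Integer boxedPart = Integer.valueOf("10");   // parse, then box
int primPart      = Integer.parseInt("10");  // parse only
// (boxedPart > primPart) auto-unboxes boxedPart before comparing;
// with both declared int, no wrapper object is ever created.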
http://git-wip-us.apache.org/repos/asf/hive/blob/547b37dc/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseImport.java
----------------------------------------------------------------------
diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseImport.java b/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseImport.java
index ba5cb22..434bd9e 100644
--- a/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseImport.java
+++ b/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseImport.java
@@ -206,7 +206,7 @@ public class HBaseImport {
doAll = true;
}
if (cli.hasOption('b')) {
- batchSize = Integer.valueOf(cli.getOptionValue('b'));
+ batchSize = Integer.parseInt(cli.getOptionValue('b'));
}
if (cli.hasOption('d')) {
hasCmd = true;
@@ -217,7 +217,7 @@ public class HBaseImport {
functionsToImport = Arrays.asList(cli.getOptionValues('f'));
}
if (cli.hasOption('p')) {
- parallel = Integer.valueOf(cli.getOptionValue('p'));
+ parallel = Integer.parseInt(cli.getOptionValue('p'));
}
if (cli.hasOption('r')) {
hasCmd = true;
http://git-wip-us.apache.org/repos/asf/hive/blob/547b37dc/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseReadWrite.java
----------------------------------------------------------------------
diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseReadWrite.java b/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseReadWrite.java
index 7ed825f..2860875 100644
--- a/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseReadWrite.java
+++ b/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseReadWrite.java
@@ -1783,7 +1783,7 @@ public class HBaseReadWrite implements MetadataStore {
// Someone deleted it before we got to it, no worries
return;
}
- int refCnt = Integer.valueOf(new String(serializedRefCnt, HBaseUtils.ENCODING));
+ int refCnt = Integer.parseInt(new String(serializedRefCnt, HBaseUtils.ENCODING));
HTableInterface htab = conn.getHBaseTable(SD_TABLE);
if (--refCnt < 1) {
Delete d = new Delete(key);
@@ -1823,7 +1823,7 @@ public class HBaseReadWrite implements MetadataStore {
sdCache.put(new ByteArrayWrapper(key), storageDescriptor);
} else {
// Just increment the reference count
- int refCnt = Integer.valueOf(new String(serializedRefCnt, HBaseUtils.ENCODING)) + 1;
+ int refCnt = Integer.parseInt(new String(serializedRefCnt, HBaseUtils.ENCODING)) + 1;
Put p = new Put(key);
p.add(CATALOG_CF, REF_COUNT_COL, Integer.toString(refCnt).getBytes(HBaseUtils.ENCODING));
htab.put(p);
@@ -2377,7 +2377,7 @@ public class HBaseReadWrite implements MetadataStore {
Result result = iter.next();
byte[] val = result.getValue(CATALOG_CF, MASTER_KEY_COL);
if (val != null) {
- int seqNo = Integer.valueOf(new String(result.getRow(), HBaseUtils.ENCODING));
+ int seqNo = Integer.parseInt(new String(result.getRow(), HBaseUtils.ENCODING));
lines.add("Master key " + seqNo + ": " + HBaseUtils.deserializeMasterKey(val));
} else {
val = result.getValue(CATALOG_CF, DELEGATION_TOKEN_COL);
@@ -2395,14 +2395,14 @@ public class HBaseReadWrite implements MetadataStore {
long peekAtSequence(byte[] sequence) throws IOException {
byte[] serialized = read(SEQUENCES_TABLE, sequence, CATALOG_CF, CATALOG_COL);
- return serialized == null ? 0 : Long.valueOf(new String(serialized, HBaseUtils.ENCODING));
+ return serialized == null ? 0 : Long.parseLong(new String(serialized, HBaseUtils.ENCODING));
}
long getNextSequence(byte[] sequence) throws IOException {
byte[] serialized = read(SEQUENCES_TABLE, sequence, CATALOG_CF, CATALOG_COL);
long val = 0;
if (serialized != null) {
- val = Long.valueOf(new String(serialized, HBaseUtils.ENCODING));
+ val = Long.parseLong(new String(serialized, HBaseUtils.ENCODING));
}
byte[] incrSerialized = new Long(val + 1).toString().getBytes(HBaseUtils.ENCODING);
store(SEQUENCES_TABLE, sequence, CATALOG_CF, CATALOG_COL, incrSerialized);
http://git-wip-us.apache.org/repos/asf/hive/blob/547b37dc/ql/src/java/org/apache/hadoop/hive/ql/exec/FileSinkOperator.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/FileSinkOperator.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/FileSinkOperator.java
index 0899793..ec6381b 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/FileSinkOperator.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/FileSinkOperator.java
@@ -539,7 +539,7 @@ public class FileSinkOperator extends TerminalOperator<FileSinkDesc> implements
int numReducers = totalFiles / numFiles;
if (numReducers > 1) {
- int currReducer = Integer.valueOf(Utilities.getTaskIdFromFilename(Utilities
+ int currReducer = Integer.parseInt(Utilities.getTaskIdFromFilename(Utilities
.getTaskId(hconf)));
int reducerIdx = prtner.getPartition(key, null, numReducers);
@@ -623,7 +623,7 @@ public class FileSinkOperator extends TerminalOperator<FileSinkDesc> implements
// Only set up the updater for insert. For update and delete we don't know unitl we see
// the row.
ObjectInspector inspector = bDynParts ? subSetOI : outputObjInspector;
- int acidBucketNum = Integer.valueOf(Utilities.getTaskIdFromFilename(taskId));
+ int acidBucketNum = Integer.parseInt(Utilities.getTaskIdFromFilename(taskId));
fsp.updaters[filesIdx] = HiveFileFormatUtils.getAcidRecordUpdater(jc, conf.getTableInfo(),
acidBucketNum, conf, fsp.outPaths[filesIdx], inspector, reporter, -1);
}
http://git-wip-us.apache.org/repos/asf/hive/blob/547b37dc/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/convert/ETypeConverter.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/convert/ETypeConverter.java b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/convert/ETypeConverter.java
index ec0dd81..ca89640 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/convert/ETypeConverter.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/convert/ETypeConverter.java
@@ -165,7 +165,8 @@ public enum ETypeConverter {
Map<String, String> metadata = parent.getMetadata();
//Current Hive parquet timestamp implementation stores it in UTC, but other components do not do that.
//If this file written by current Hive implementation itself, we need to do the reverse conversion, else skip the conversion.
- boolean skipConversion = Boolean.valueOf(metadata.get(HiveConf.ConfVars.HIVE_PARQUET_TIMESTAMP_SKIP_CONVERSION.varname));
+ boolean skipConversion = Boolean.parseBoolean(
+ metadata.get(HiveConf.ConfVars.HIVE_PARQUET_TIMESTAMP_SKIP_CONVERSION.varname));
Timestamp ts = NanoTimeUtils.getTimestamp(nt, skipConversion);
return new TimestampWritable(ts);
}
http://git-wip-us.apache.org/repos/asf/hive/blob/547b37dc/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/convert/HiveStructConverter.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/convert/HiveStructConverter.java b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/convert/HiveStructConverter.java
index e4907d2..a89aa4d 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/convert/HiveStructConverter.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/convert/HiveStructConverter.java
@@ -118,7 +118,7 @@ public class HiveStructConverter extends HiveGroupConverter {
private TypeInfo getStructFieldTypeInfo(String field, int fieldIndex) {
String fieldLowerCase = field.toLowerCase();
- if (Boolean.valueOf(getMetadata().get(DataWritableReadSupport.PARQUET_COLUMN_INDEX_ACCESS))
+ if (Boolean.parseBoolean(getMetadata().get(DataWritableReadSupport.PARQUET_COLUMN_INDEX_ACCESS))
&& fieldIndex < hiveFieldNames.size()) {
return hiveFieldTypeInfos.get(fieldIndex);
}
http://git-wip-us.apache.org/repos/asf/hive/blob/547b37dc/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/write/ParquetRecordWriterWrapper.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/write/ParquetRecordWriterWrapper.java b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/write/ParquetRecordWriterWrapper.java
index 2f838fc..c021daf 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/write/ParquetRecordWriterWrapper.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/write/ParquetRecordWriterWrapper.java
@@ -77,7 +77,7 @@ public class ParquetRecordWriterWrapper implements RecordWriter<NullWritable, Pa
Configuration conf = ContextUtil.getConfiguration(job);
if (blockSize != null && !blockSize.isEmpty()) {
LOG.debug("get override parquet.block.size property via tblproperties");
- conf.setInt(ParquetOutputFormat.BLOCK_SIZE, Integer.valueOf(blockSize));
+ conf.setInt(ParquetOutputFormat.BLOCK_SIZE, Integer.parseInt(blockSize));
}
String enableDictionaryPage =
@@ -85,7 +85,7 @@ public class ParquetRecordWriterWrapper implements RecordWriter<NullWritable, Pa
if (enableDictionaryPage != null && !enableDictionaryPage.isEmpty()) {
LOG.debug("get override parquet.enable.dictionary property via tblproperties");
conf.setBoolean(ParquetOutputFormat.ENABLE_DICTIONARY,
- Boolean.valueOf(enableDictionaryPage));
+ Boolean.parseBoolean(enableDictionaryPage));
}
String compressionName = tableProperties.getProperty(ParquetOutputFormat.COMPRESSION);
http://git-wip-us.apache.org/repos/asf/hive/blob/547b37dc/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java b/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
index c27481f..4c9acce 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
@@ -2697,7 +2697,7 @@ private void constructOneLBLocationMap(FileStatus fSta,
String fullF1 = getQualifiedPathWithoutSchemeAndAuthority(srcf, srcFs);
String fullF2 = getQualifiedPathWithoutSchemeAndAuthority(destf, destFs);
- boolean isInTest = Boolean.valueOf(HiveConf.getBoolVar(srcFs.getConf(), ConfVars.HIVE_IN_TEST));
+ boolean isInTest = HiveConf.getBoolVar(srcFs.getConf(), ConfVars.HIVE_IN_TEST);
// In the automation, the data warehouse is the local file system based.
LOG.debug("The source path is " + fullF1 + " and the destination path is " + fullF2);
if (isInTest) {
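This one is a removal rather than a substitution: HiveConf.getBoolVar already returns a primitive boolean, so wrapping it in Boolean.valueOf boxed a value that the assignment immediately unboxed. A sketch, illustration only:

boolean isInTest = HiveConf.getBoolVar(conf, ConfVars.HIVE_IN_TEST);
// Boolean.valueOf(isInTest) allocates nothing new (it returns the cached
// Boolean.TRUE/FALSE), but it still forces a box/unbox pair for no benefit.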
http://git-wip-us.apache.org/repos/asf/hive/blob/547b37dc/ql/src/java/org/apache/hadoop/hive/ql/optimizer/SizeBasedBigTableSelectorForAutoSMJ.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/SizeBasedBigTableSelectorForAutoSMJ.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/SizeBasedBigTableSelectorForAutoSMJ.java
index f8aec84..9670daf 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/SizeBasedBigTableSelectorForAutoSMJ.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/SizeBasedBigTableSelectorForAutoSMJ.java
@@ -57,7 +57,7 @@ public abstract class SizeBasedBigTableSelectorForAutoSMJ {
// If the size is present in the metastore, use it
if (size != null) {
try {
- return Long.valueOf(size);
+ return Long.parseLong(size);
} catch (NumberFormatException e) {
return -1;
}
http://git-wip-us.apache.org/repos/asf/hive/blob/547b37dc/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/cost/HiveAlgorithmsUtil.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/cost/HiveAlgorithmsUtil.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/cost/HiveAlgorithmsUtil.java
index 6522714..0c13ee7 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/cost/HiveAlgorithmsUtil.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/cost/HiveAlgorithmsUtil.java
@@ -48,17 +48,17 @@ public class HiveAlgorithmsUtil {
private final double hdfsRead;
HiveAlgorithmsUtil(HiveConf conf) {
- cpuCost = Double.valueOf(HiveConf.getVar(conf, HiveConf.ConfVars.HIVE_CBO_COST_MODEL_CPU));
+ cpuCost = Double.parseDouble(HiveConf.getVar(conf, HiveConf.ConfVars.HIVE_CBO_COST_MODEL_CPU));
netCost = cpuCost
- * Double.valueOf(HiveConf.getVar(conf, HiveConf.ConfVars.HIVE_CBO_COST_MODEL_NET));
+ * Double.parseDouble(HiveConf.getVar(conf, HiveConf.ConfVars.HIVE_CBO_COST_MODEL_NET));
localFSWrite = netCost
- * Double.valueOf(HiveConf.getVar(conf, HiveConf.ConfVars.HIVE_CBO_COST_MODEL_LFS_WRITE));
+ * Double.parseDouble(HiveConf.getVar(conf, HiveConf.ConfVars.HIVE_CBO_COST_MODEL_LFS_WRITE));
localFSRead = netCost
- * Double.valueOf(HiveConf.getVar(conf, HiveConf.ConfVars.HIVE_CBO_COST_MODEL_LFS_READ));
+ * Double.parseDouble(HiveConf.getVar(conf, HiveConf.ConfVars.HIVE_CBO_COST_MODEL_LFS_READ));
hdfsWrite = localFSWrite
- * Double.valueOf(HiveConf.getVar(conf, HiveConf.ConfVars.HIVE_CBO_COST_MODEL_HDFS_WRITE));
+ * Double.parseDouble(HiveConf.getVar(conf, HiveConf.ConfVars.HIVE_CBO_COST_MODEL_HDFS_WRITE));
hdfsRead = localFSRead
- * Double.valueOf(HiveConf.getVar(conf, HiveConf.ConfVars.HIVE_CBO_COST_MODEL_HDFS_READ));
+ * Double.parseDouble(HiveConf.getVar(conf, HiveConf.ConfVars.HIVE_CBO_COST_MODEL_HDFS_READ));
}
public static RelOptCost computeCardinalityBasedCost(HiveRelNode hr) {
http://git-wip-us.apache.org/repos/asf/hive/blob/547b37dc/ql/src/java/org/apache/hadoop/hive/ql/parse/ASTNode.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/ASTNode.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/ASTNode.java
index 16b055b..62f9d14 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/ASTNode.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/ASTNode.java
@@ -92,7 +92,7 @@ public class ASTNode extends CommonTree implements Node,Serializable {
*/
@Override
public String getName() {
- return (Integer.valueOf(super.getToken().getType())).toString();
+ return String.valueOf(super.getToken().getType());
}
/**
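A related flavor of the same cleanup: instead of boxing an int only to call toString() on the wrapper, String.valueOf(int) formats the primitive directly (Integer.toString(int) is equivalent). Illustration only:

int tokenType = 42;  // stand-in for super.getToken().getType()
String viaBox = Integer.valueOf(tokenType).toString();  // box, then format
String direct = String.valueOf(tokenType);              // format directly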
http://git-wip-us.apache.org/repos/asf/hive/blob/547b37dc/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
index fe9b8cc..46d2342 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
@@ -1702,10 +1702,10 @@ public class DDLSemanticAnalyzer extends BaseSemanticAnalyzer {
List<Order> sortCols = new ArrayList<Order>();
int numBuckets = -1;
if (buckets.getChildCount() == 2) {
- numBuckets = (Integer.valueOf(buckets.getChild(1).getText())).intValue();
+ numBuckets = Integer.parseInt(buckets.getChild(1).getText());
} else {
sortCols = getColumnNamesOrder((ASTNode) buckets.getChild(1));
- numBuckets = (Integer.valueOf(buckets.getChild(2).getText())).intValue();
+ numBuckets = Integer.parseInt(buckets.getChild(2).getText());
}
if (numBuckets <= 0) {
throw new SemanticException(ErrorMsg.INVALID_BUCKET_NUMBER.getMsg());
http://git-wip-us.apache.org/repos/asf/hive/blob/547b37dc/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseUtils.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseUtils.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseUtils.java
index 5f13277..a9e503d 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseUtils.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseUtils.java
@@ -138,7 +138,7 @@ public final class ParseUtils {
}
String lengthStr = node.getChild(0).getText();
- return TypeInfoFactory.getVarcharTypeInfo(Integer.valueOf(lengthStr));
+ return TypeInfoFactory.getVarcharTypeInfo(Integer.parseInt(lengthStr));
}
public static CharTypeInfo getCharTypeInfo(ASTNode node)
@@ -148,7 +148,7 @@ public final class ParseUtils {
}
String lengthStr = node.getChild(0).getText();
- return TypeInfoFactory.getCharTypeInfo(Integer.valueOf(lengthStr));
+ return TypeInfoFactory.getCharTypeInfo(Integer.parseInt(lengthStr));
}
static int getIndex(String[] list, String elem) {
@@ -212,12 +212,12 @@ public final class ParseUtils {
if (node.getChildCount() >= 1) {
String precStr = node.getChild(0).getText();
- precision = Integer.valueOf(precStr);
+ precision = Integer.parseInt(precStr);
}
if (node.getChildCount() == 2) {
String scaleStr = node.getChild(1).getText();
- scale = Integer.valueOf(scaleStr);
+ scale = Integer.parseInt(scaleStr);
}
return TypeInfoFactory.getDecimalTypeInfo(precision, scale);
http://git-wip-us.apache.org/repos/asf/hive/blob/547b37dc/ql/src/java/org/apache/hadoop/hive/ql/parse/ReplicationSpec.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/ReplicationSpec.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/ReplicationSpec.java
index 5f80528..4668271 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/ReplicationSpec.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/ReplicationSpec.java
@@ -126,7 +126,8 @@ public class ReplicationSpec {
}
this.eventId = keyFetcher.apply(ReplicationSpec.KEY.EVENT_ID.toString());
this.currStateId = keyFetcher.apply(ReplicationSpec.KEY.CURR_STATE_ID.toString());
- this.isNoop = Boolean.valueOf(keyFetcher.apply(ReplicationSpec.KEY.NOOP.toString())).booleanValue();
+ this.isNoop = Boolean.parseBoolean(
+ keyFetcher.apply(ReplicationSpec.KEY.NOOP.toString()));
}
/**
http://git-wip-us.apache.org/repos/asf/hive/blob/547b37dc/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
index 987f25d..9b565c5 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
@@ -737,11 +737,11 @@ public class SemanticAnalyzer extends BaseSemanticAnalyzer {
int seedNum = conf.getIntVar(ConfVars.HIVESAMPLERANDOMNUM);
sample = new SplitSample(percent, seedNum);
} else if (type.getType() == HiveParser.TOK_ROWCOUNT) {
- sample = new SplitSample(Integer.valueOf(value));
+ sample = new SplitSample(Integer.parseInt(value));
} else {
assert type.getType() == HiveParser.TOK_LENGTH;
assertCombineInputFormat(numerator, "Total Length");
- long length = Integer.valueOf(value.substring(0, value.length() - 1));
+ long length = Integer.parseInt(value.substring(0, value.length() - 1));
char last = value.charAt(value.length() - 1);
if (last == 'k' || last == 'K') {
length <<= 10;
@@ -11384,12 +11384,10 @@ public class SemanticAnalyzer extends BaseSemanticAnalyzer {
case HiveParser.TOK_ALTERTABLE_BUCKETS:
bucketCols = getColumnNames((ASTNode) child.getChild(0));
if (child.getChildCount() == 2) {
- numBuckets = (Integer.valueOf(child.getChild(1).getText()))
- .intValue();
+ numBuckets = Integer.parseInt(child.getChild(1).getText());
} else {
sortCols = getColumnNamesOrder((ASTNode) child.getChild(1));
- numBuckets = (Integer.valueOf(child.getChild(2).getText()))
- .intValue();
+ numBuckets = Integer.parseInt(child.getChild(2).getText());
}
break;
case HiveParser.TOK_TABLEROWFORMAT:
http://git-wip-us.apache.org/repos/asf/hive/blob/547b37dc/ql/src/java/org/apache/hadoop/hive/ql/parse/TableSample.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/TableSample.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/TableSample.java
index c74b4c0..ac71565 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/TableSample.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/TableSample.java
@@ -68,8 +68,8 @@ public class TableSample {
* The list of expressions in the ON part of the TABLESAMPLE clause
*/
public TableSample(String num, String den, ArrayList<ASTNode> exprs) {
- numerator = Integer.valueOf(num).intValue();
- denominator = Integer.valueOf(den).intValue();
+ numerator = Integer.parseInt(num);
+ denominator = Integer.parseInt(den);
this.exprs = exprs;
}
http://git-wip-us.apache.org/repos/asf/hive/blob/547b37dc/ql/src/java/org/apache/hadoop/hive/ql/parse/spark/GenSparkUtils.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/spark/GenSparkUtils.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/spark/GenSparkUtils.java
index aa33103..8a85574 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/spark/GenSparkUtils.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/spark/GenSparkUtils.java
@@ -440,7 +440,7 @@ public class GenSparkUtils {
if (fso != null) {
String bucketCount = fso.getConf().getTableInfo().getProperties().getProperty(
hive_metastoreConstants.BUCKET_COUNT);
- if (bucketCount != null && Integer.valueOf(bucketCount) > 1) {
+ if (bucketCount != null && Integer.parseInt(bucketCount) > 1) {
edgeProperty.setMRShuffle();
}
}
http://git-wip-us.apache.org/repos/asf/hive/blob/547b37dc/ql/src/java/org/apache/hadoop/hive/ql/plan/PlanUtils.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/PlanUtils.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/PlanUtils.java
index ae8c77f..2992568 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/plan/PlanUtils.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/PlanUtils.java
@@ -567,7 +567,7 @@ public final class PlanUtils {
List<FieldSchema> schemas = new ArrayList<FieldSchema>(cols.size());
for (int i = 0; i < cols.size(); i++) {
String name = cols.get(i).getInternalName();
- if (name.equals(Integer.valueOf(i).toString())) {
+ if (name.equals(String.valueOf(i))) {
name = fieldPrefix + name;
}
schemas.add(MetaStoreUtils.getFieldSchemaFromTypeInfo(name, cols.get(i)
http://git-wip-us.apache.org/repos/asf/hive/blob/547b37dc/ql/src/java/org/apache/hadoop/hive/ql/stats/fs/FSStatsAggregator.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/stats/fs/FSStatsAggregator.java b/ql/src/java/org/apache/hadoop/hive/ql/stats/fs/FSStatsAggregator.java
index e2aaa70..07df15a 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/stats/fs/FSStatsAggregator.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/stats/fs/FSStatsAggregator.java
@@ -94,7 +94,7 @@ public class FSStatsAggregator implements StatsAggregator {
if (null == statVal) { // partition was found, but was empty.
continue;
}
- counter += Long.valueOf(statVal);
+ counter += Long.parseLong(statVal);
}
LOG.info("Read stats for : " + partID + "\t" + statType + "\t" + counter);
http://git-wip-us.apache.org/repos/asf/hive/blob/547b37dc/ql/src/java/org/apache/hadoop/hive/ql/stats/fs/FSStatsPublisher.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/stats/fs/FSStatsPublisher.java b/ql/src/java/org/apache/hadoop/hive/ql/stats/fs/FSStatsPublisher.java
index e5d89e8..3a49b30 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/stats/fs/FSStatsPublisher.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/stats/fs/FSStatsPublisher.java
@@ -83,7 +83,8 @@ public class FSStatsPublisher implements StatsPublisher {
if (null != statMap) {
// In case of LB, we might get called repeatedly.
for (Entry<String, String> e : statMap.entrySet()) {
- cpy.put(e.getKey(), String.valueOf(Long.valueOf(e.getValue()) + Long.valueOf(cpy.get(e.getKey()))));
+ cpy.put(e.getKey(),
+ String.valueOf(Long.parseLong(e.getValue()) + Long.parseLong(cpy.get(e.getKey()))));
}
}
statsMap.put(partKV, cpy);
http://git-wip-us.apache.org/repos/asf/hive/blob/547b37dc/ql/src/java/org/apache/hadoop/hive/ql/txn/compactor/CompactorMR.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/txn/compactor/CompactorMR.java b/ql/src/java/org/apache/hadoop/hive/ql/txn/compactor/CompactorMR.java
index f1f1db2..931be90 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/txn/compactor/CompactorMR.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/txn/compactor/CompactorMR.java
@@ -484,7 +484,7 @@ public class CompactorMR {
LOG.warn("Found a non-bucket file that we thought matched the bucket pattern! " +
file.toString() + " Matcher=" + matcher.toString());
}
- int bucketNum = Integer.valueOf(matcher.group());
+ int bucketNum = Integer.parseInt(matcher.group());
BucketTracker bt = splitToBucketMap.get(bucketNum);
if (bt == null) {
bt = new BucketTracker();
@@ -628,15 +628,15 @@ public class CompactorMR {
StringableMap(String s) {
String[] parts = s.split(":", 2);
// read that many chars
- int numElements = Integer.valueOf(parts[0]);
+ int numElements = Integer.parseInt(parts[0]);
s = parts[1];
for (int i = 0; i < numElements; i++) {
parts = s.split(":", 2);
- int len = Integer.valueOf(parts[0]);
+ int len = Integer.parseInt(parts[0]);
String key = null;
if (len > 0) key = parts[1].substring(0, len);
parts = parts[1].substring(len).split(":", 2);
- len = Integer.valueOf(parts[0]);
+ len = Integer.parseInt(parts[0]);
String value = null;
if (len > 0) value = parts[1].substring(0, len);
s = parts[1].substring(len);
@@ -683,11 +683,11 @@ public class CompactorMR {
StringableList(String s) {
String[] parts = s.split(":", 2);
// read that many chars
- int numElements = Integer.valueOf(parts[0]);
+ int numElements = Integer.parseInt(parts[0]);
s = parts[1];
for (int i = 0; i < numElements; i++) {
parts = s.split(":", 2);
- int len = Integer.valueOf(parts[0]);
+ int len = Integer.parseInt(parts[0]);
String val = parts[1].substring(0, len);
s = parts[1].substring(len);
add(new Path(val));
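For readers tracing the parseInt calls in StringableMap/StringableList above: the serialized form is length-prefixed text, an entry count first, then alternating "length:payload" runs. A worked example, illustration only (StringableMap is a private helper, so this mirrors its parsing by hand for one pair after the count is consumed):

String s = "3:key5:value";                 // one key/value pair
String[] parts = s.split(":", 2);
int len = Integer.parseInt(parts[0]);      // 3, parsed without boxing
String key = parts[1].substring(0, len);   // "key"
parts = parts[1].substring(len).split(":", 2);
len = Integer.parseInt(parts[0]);          // 5
String value = parts[1].substring(0, len); // "value"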
http://git-wip-us.apache.org/repos/asf/hive/blob/547b37dc/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToDouble.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToDouble.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToDouble.java
index e932f11..9cbc114 100755
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToDouble.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToDouble.java
@@ -169,7 +169,7 @@ public class UDFToDouble extends UDF {
return null;
}
try {
- doubleWritable.set(Double.valueOf(i.toString()));
+ doubleWritable.set(Double.parseDouble(i.toString()));
return doubleWritable;
} catch (NumberFormatException e) {
// MySQL returns 0 if the string is not a well-formed double value.
http://git-wip-us.apache.org/repos/asf/hive/blob/547b37dc/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToFloat.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToFloat.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToFloat.java
index 119eaca..c612307 100755
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToFloat.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToFloat.java
@@ -170,7 +170,7 @@ public class UDFToFloat extends UDF {
return null;
}
try {
- floatWritable.set(Float.valueOf(i.toString()));
+ floatWritable.set(Float.parseFloat(i.toString()));
return floatWritable;
} catch (NumberFormatException e) {
// MySQL returns 0 if the string is not a well-formed numeric value.
http://git-wip-us.apache.org/repos/asf/hive/blob/547b37dc/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFBetween.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFBetween.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFBetween.java
index 04f72a6..eb0f9e2 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFBetween.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFBetween.java
@@ -75,7 +75,7 @@ public class GenericUDFBetween extends GenericUDF {
public String getDisplayString(String[] children) {
StringBuilder sb = new StringBuilder();
sb.append(children[1]);
- if (Boolean.valueOf(children[0])) {
+ if (Boolean.parseBoolean(children[0])) {
sb.append(" NOT");
}
sb.append(" BETWEEN ");
http://git-wip-us.apache.org/repos/asf/hive/blob/547b37dc/ql/src/test/results/clientnegative/dyn_part_max.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientnegative/dyn_part_max.q.out b/ql/src/test/results/clientnegative/dyn_part_max.q.out
index dd5e8e9..4386720 100644
--- a/ql/src/test/results/clientnegative/dyn_part_max.q.out
+++ b/ql/src/test/results/clientnegative/dyn_part_max.q.out
@@ -26,4 +26,4 @@ PREHOOK: type: QUERY
PREHOOK: Input: default@src
PREHOOK: Output: default@max_parts
Failed with exception Number of dynamic partitions created is 49, which is more than 10. To solve this try to set hive.exec.max.dynamic.partitions to at least 49.
-FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.MoveTask
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.MoveTask. Number of dynamic partitions created is 49, which is more than 10. To solve this try to set hive.exec.max.dynamic.partitions to at least 49.
http://git-wip-us.apache.org/repos/asf/hive/blob/547b37dc/serde/src/java/org/apache/hadoop/hive/serde2/MetadataTypedColumnsetSerDe.java
----------------------------------------------------------------------
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/MetadataTypedColumnsetSerDe.java b/serde/src/java/org/apache/hadoop/hive/serde2/MetadataTypedColumnsetSerDe.java
index a0a790c..551a9da 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/MetadataTypedColumnsetSerDe.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/MetadataTypedColumnsetSerDe.java
@@ -77,7 +77,7 @@ public class MetadataTypedColumnsetSerDe extends AbstractSerDe {
if (altValue != null && altValue.length() > 0) {
try {
byte[] b = new byte[1];
- b[0] = Byte.valueOf(altValue).byteValue();
+ b[0] = Byte.parseByte(altValue);
return new String(b);
} catch (NumberFormatException e) {
return altValue;
http://git-wip-us.apache.org/repos/asf/hive/blob/547b37dc/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/thrift_grammar.java
----------------------------------------------------------------------
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/thrift_grammar.java b/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/thrift_grammar.java
index 7fca311..b275af0 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/thrift_grammar.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/thrift_grammar.java
@@ -2052,7 +2052,7 @@ public class thrift_grammar/* @bgen(jjtree) */implements
jjtree.closeNodeScope(jjtn000, true);
jjtc000 = false;
if (fidnum.length() > 0) {
- int fidInt = Integer.valueOf(fidnum);
+ int fidInt = Integer.parseInt(fidnum);
jjtn000.fieldid = fidInt;
} else {
jjtn000.fieldid = field_val--;
http://git-wip-us.apache.org/repos/asf/hive/blob/547b37dc/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazySerDeParameters.java
----------------------------------------------------------------------
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazySerDeParameters.java b/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazySerDeParameters.java
index 46dfaa7..7232d0b 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazySerDeParameters.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazySerDeParameters.java
@@ -104,7 +104,8 @@ public class LazySerDeParameters implements LazyObjectInspectorParameters {
needsEscape[b & 0xFF] = true; // Converts the negative byte into positive index
}
- boolean isEscapeCRLF = Boolean.valueOf(tbl.getProperty(serdeConstants.SERIALIZATION_ESCAPE_CRLF));
+ boolean isEscapeCRLF =
+ Boolean.parseBoolean(tbl.getProperty(serdeConstants.SERIALIZATION_ESCAPE_CRLF));
if (isEscapeCRLF) {
needsEscape['\r'] = true;
needsEscape['\n'] = true;
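One detail worth spelling out for this hunk (plain java.lang.Boolean behavior, nothing Hive-specific): Boolean.parseBoolean(s) is defined as "true".equalsIgnoreCase(s), so it is null-safe, never throws, and yields false for anything but "true". That matches what the old Boolean.valueOf(...) line returned when the table property was unset, just without the wrapper. A small standalone demonstration, illustrative only:

    public class ParseBooleanNullSafety {
      public static void main(String[] args) {
        System.out.println(Boolean.parseBoolean(null));   // false, no NPE
        System.out.println(Boolean.parseBoolean("TRUE")); // true (case-insensitive)
        System.out.println(Boolean.parseBoolean("yes"));  // false (only "true" matches)
      }
    }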
http://git-wip-us.apache.org/repos/asf/hive/blob/547b37dc/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyUtils.java
----------------------------------------------------------------------
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyUtils.java b/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyUtils.java
index ee39196..6d7369b 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyUtils.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyUtils.java
@@ -490,7 +490,7 @@ public final class LazyUtils {
public static byte getByte(String altValue, byte defaultVal) {
if (altValue != null && altValue.length() > 0) {
try {
- return Byte.valueOf(altValue).byteValue();
+ return Byte.parseByte(altValue);
} catch (NumberFormatException e) {
return (byte) altValue.charAt(0);
}
http://git-wip-us.apache.org/repos/asf/hive/blob/547b37dc/serde/src/java/org/apache/hadoop/hive/serde2/thrift/TCTLSeparatedProtocol.java
----------------------------------------------------------------------
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/thrift/TCTLSeparatedProtocol.java b/serde/src/java/org/apache/hadoop/hive/serde2/thrift/TCTLSeparatedProtocol.java
index 6144052..ad1f872 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/thrift/TCTLSeparatedProtocol.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/thrift/TCTLSeparatedProtocol.java
@@ -423,7 +423,7 @@ public class TCTLSeparatedProtocol extends TProtocol implements
if (altValue != null && altValue.length() > 0) {
try {
byte[] b = new byte[1];
- b[0] = Byte.valueOf(altValue).byteValue();
+ b[0] = Byte.parseByte(altValue);
return new String(b);
} catch (NumberFormatException e) {
return altValue;
@@ -451,11 +451,10 @@ public class TCTLSeparatedProtocol extends TProtocol implements
rowSeparator);
mapSeparator = getByteValue(tbl.getProperty(serdeConstants.MAPKEY_DELIM),
mapSeparator);
- returnNulls = Boolean.valueOf(
- tbl.getProperty(ReturnNullsKey, String.valueOf(returnNulls)))
- .booleanValue();
- bufferSize = Integer.valueOf(
- tbl.getProperty(BufferSizeKey, String.valueOf(bufferSize))).intValue();
+ returnNulls = Boolean.parseBoolean(
+ tbl.getProperty(ReturnNullsKey, String.valueOf(returnNulls)));
+ bufferSize = Integer.parseInt(
+ tbl.getProperty(BufferSizeKey, String.valueOf(bufferSize)));
nullString = tbl.getProperty(serdeConstants.SERIALIZATION_NULL_FORMAT, "\\N");
quote = tbl.getProperty(serdeConstants.QUOTE_CHAR, null);
@@ -766,8 +765,7 @@ public class TCTLSeparatedProtocol extends TProtocol implements
public boolean readBool() throws TException {
String val = readString();
lastPrimitiveWasNullFlag = val == null;
- return val == null || val.isEmpty() ? false : Boolean.valueOf(val)
- .booleanValue();
+ return val == null || val.isEmpty() ? false : Boolean.parseBoolean(val);
}
@Override
@@ -775,7 +773,7 @@ public class TCTLSeparatedProtocol extends TProtocol implements
String val = readString();
lastPrimitiveWasNullFlag = val == null;
try {
- return val == null || val.isEmpty() ? 0 : Byte.valueOf(val).byteValue();
+ return val == null || val.isEmpty() ? 0 : Byte.parseByte(val);
} catch (NumberFormatException e) {
lastPrimitiveWasNullFlag = true;
return 0;
@@ -787,7 +785,7 @@ public class TCTLSeparatedProtocol extends TProtocol implements
String val = readString();
lastPrimitiveWasNullFlag = val == null;
try {
- return val == null || val.isEmpty() ? 0 : Short.valueOf(val).shortValue();
+ return val == null || val.isEmpty() ? 0 : Short.parseShort(val);
} catch (NumberFormatException e) {
lastPrimitiveWasNullFlag = true;
return 0;
@@ -799,7 +797,7 @@ public class TCTLSeparatedProtocol extends TProtocol implements
String val = readString();
lastPrimitiveWasNullFlag = val == null;
try {
- return val == null || val.isEmpty() ? 0 : Integer.valueOf(val).intValue();
+ return val == null || val.isEmpty() ? 0 : Integer.parseInt(val);
} catch (NumberFormatException e) {
lastPrimitiveWasNullFlag = true;
return 0;
@@ -811,7 +809,7 @@ public class TCTLSeparatedProtocol extends TProtocol implements
String val = readString();
lastPrimitiveWasNullFlag = val == null;
try {
- return val == null || val.isEmpty() ? 0 : Long.valueOf(val).longValue();
+ return val == null || val.isEmpty() ? 0 : Long.parseLong(val);
} catch (NumberFormatException e) {
lastPrimitiveWasNullFlag = true;
return 0;
@@ -823,8 +821,7 @@ public class TCTLSeparatedProtocol extends TProtocol implements
String val = readString();
lastPrimitiveWasNullFlag = val == null;
try {
- return val == null || val.isEmpty() ? 0 : Double.valueOf(val)
- .doubleValue();
+ return val == null || val.isEmpty() ? 0 : Double.parseDouble(val);
} catch (NumberFormatException e) {
lastPrimitiveWasNullFlag = true;
return 0;
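All of the read* rewrites above are behavior-preserving because each parse* method throws the same NumberFormatException its valueOf counterpart did; only the wrapper round-trip goes away. A standalone sketch of the shared shape these methods follow (parseOrZero is a hypothetical name used here for illustration, not a method in the patch):

    public class NullFlaggedParse {
      static boolean lastPrimitiveWasNullFlag;

      // Same shape as readI32() above: null -> 0 with the null flag set,
      // empty -> 0, malformed -> 0 with the flag set, else the parsed value.
      static int parseOrZero(String val) {
        lastPrimitiveWasNullFlag = (val == null);
        try {
          return val == null || val.isEmpty() ? 0 : Integer.parseInt(val);
        } catch (NumberFormatException e) {
          lastPrimitiveWasNullFlag = true;
          return 0;
        }
      }

      public static void main(String[] args) {
        System.out.println(parseOrZero("42"));   // 42
        System.out.println(parseOrZero("oops")); // 0, flag raised
      }
    }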
http://git-wip-us.apache.org/repos/asf/hive/blob/547b37dc/serde/src/java/org/apache/hadoop/hive/serde2/typeinfo/TypeInfoUtils.java
----------------------------------------------------------------------
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/typeinfo/TypeInfoUtils.java b/serde/src/java/org/apache/hadoop/hive/serde2/typeinfo/TypeInfoUtils.java
index d3bb4e4..16daecf 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/typeinfo/TypeInfoUtils.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/typeinfo/TypeInfoUtils.java
@@ -430,7 +430,7 @@ public final class TypeInfoUtils {
int length = 1;
if (params.length == 1) {
- length = Integer.valueOf(params[0]);
+ length = Integer.parseInt(params[0]);
if (typeEntry.primitiveCategory == PrimitiveCategory.VARCHAR) {
BaseCharUtils.validateVarcharParameter(length);
return TypeInfoFactory.getVarcharTypeInfo(length);
@@ -451,8 +451,8 @@ public final class TypeInfoUtils {
// precision/scale. In this case, the default (10,0) is assumed. Thus, do nothing here.
} else if (params.length == 2) {
// New metadata always have two parameters.
- precision = Integer.valueOf(params[0]);
- scale = Integer.valueOf(params[1]);
+ precision = Integer.parseInt(params[0]);
+ scale = Integer.parseInt(params[1]);
HiveDecimalUtils.validateParameter(precision, scale);
} else if (params.length > 2) {
throw new IllegalArgumentException("Type decimal only takes two parameter, but " +
http://git-wip-us.apache.org/repos/asf/hive/blob/547b37dc/service/src/java/org/apache/hive/service/cli/HiveSQLException.java
----------------------------------------------------------------------
diff --git a/service/src/java/org/apache/hive/service/cli/HiveSQLException.java b/service/src/java/org/apache/hive/service/cli/HiveSQLException.java
index 42aaae0..a81fe0f 100644
--- a/service/src/java/org/apache/hive/service/cli/HiveSQLException.java
+++ b/service/src/java/org/apache/hive/service/cli/HiveSQLException.java
@@ -210,8 +210,8 @@ public class HiveSQLException extends SQLException {
String exceptionMessage = detail.substring(i1 + 1, i2);
Throwable ex = newInstance(exceptionClass, exceptionMessage);
- Integer length = Integer.valueOf(detail.substring(i2 + 1, i3));
- Integer unique = Integer.valueOf(detail.substring(i3 + 1));
+ int length = Integer.parseInt(detail.substring(i2 + 1, i3));
+ int unique = Integer.parseInt(detail.substring(i3 + 1));
int i = 0;
StackTraceElement[] trace = new StackTraceElement[length];
@@ -226,7 +226,7 @@ public class HiveSQLException extends SQLException {
if (fileName.isEmpty()) {
fileName = null;
}
- int lineNumber = Integer.valueOf(detail.substring(j3 + 1));
+ int lineNumber = Integer.parseInt(detail.substring(j3 + 1));
trace[i] = new StackTraceElement(className, methodName, fileName, lineNumber);
}
int common = trace.length - i;
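Note that this hunk does more than swap valueOf for parseInt: the locals length and unique change from Integer to int, so later uses such as new StackTraceElement[length] and the loop bounds no longer auto-unbox a wrapper on every evaluation. A minimal illustration of that hidden intValue() call (standard javac behavior, assumed here for illustration):

    public class BoxedLocal {
      public static void main(String[] args) {
        Integer boxed = Integer.valueOf("8");  // wrapper (cached for small values)
        int[] a = new int[boxed];              // compiles to boxed.intValue()
        for (int i = 0; i < boxed; i++) {      // unboxes on every comparison
          a[i] = i;
        }

        int plain = Integer.parseInt("8");     // primitive from the start
        int[] b = new int[plain];              // no unboxing anywhere
        System.out.println(a.length == b.length); // true
      }
    }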
http://git-wip-us.apache.org/repos/asf/hive/blob/547b37dc/service/src/java/org/apache/hive/service/cli/thrift/ThriftCLIService.java
----------------------------------------------------------------------
diff --git a/service/src/java/org/apache/hive/service/cli/thrift/ThriftCLIService.java b/service/src/java/org/apache/hive/service/cli/thrift/ThriftCLIService.java
index 0a2a761..be9833d 100644
--- a/service/src/java/org/apache/hive/service/cli/thrift/ThriftCLIService.java
+++ b/service/src/java/org/apache/hive/service/cli/thrift/ThriftCLIService.java
@@ -227,7 +227,7 @@ public abstract class ThriftCLIService extends AbstractService implements TCLISe
TimeUnit.SECONDS);
portString = System.getenv("HIVE_SERVER2_THRIFT_HTTP_PORT");
if (portString != null) {
- portNum = Integer.valueOf(portString);
+ portNum = Integer.parseInt(portString);
} else {
portNum = hiveConf.getIntVar(ConfVars.HIVE_SERVER2_THRIFT_HTTP_PORT);
}
@@ -238,7 +238,7 @@ public abstract class ThriftCLIService extends AbstractService implements TCLISe
hiveConf.getTimeVar(ConfVars.HIVE_SERVER2_THRIFT_WORKER_KEEPALIVE_TIME, TimeUnit.SECONDS);
portString = System.getenv("HIVE_SERVER2_THRIFT_PORT");
if (portString != null) {
- portNum = Integer.valueOf(portString);
+ portNum = Integer.parseInt(portString);
} else {
portNum = hiveConf.getIntVar(ConfVars.HIVE_SERVER2_THRIFT_PORT);
}
http://git-wip-us.apache.org/repos/asf/hive/blob/547b37dc/testutils/src/java/org/apache/hive/testutils/junit/runners/ConcurrentTestRunner.java
----------------------------------------------------------------------
diff --git a/testutils/src/java/org/apache/hive/testutils/junit/runners/ConcurrentTestRunner.java b/testutils/src/java/org/apache/hive/testutils/junit/runners/ConcurrentTestRunner.java
index 3f6cd6a..ed47481 100644
--- a/testutils/src/java/org/apache/hive/testutils/junit/runners/ConcurrentTestRunner.java
+++ b/testutils/src/java/org/apache/hive/testutils/junit/runners/ConcurrentTestRunner.java
@@ -40,7 +40,7 @@ public class ConcurrentTestRunner extends BlockJUnit4ClassRunner {
String numThreadsProp = System.getProperty("test.concurrency.num.threads");
if (numThreadsProp != null) {
- numThreads = Integer.valueOf(numThreadsProp);
+ numThreads = Integer.parseInt(numThreadsProp);
}
setScheduler(new ConcurrentScheduler(newFixedThreadPool(numThreads, new ConcurrentTestRunnerThreadFactory())));
@@ -59,4 +59,4 @@ public class ConcurrentTestRunner extends BlockJUnit4ClassRunner {
return new Thread(runnable, threadName);
}
}
-}
\ No newline at end of file
+}