You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@phoenix.apache.org by ja...@apache.org on 2014/10/21 20:26:31 UTC
[2/2] git commit: PHOENIX-1370 Allow query timeout to differ from RPC timeout
PHOENIX-1370 Allow query timeout to differ from RPC timeout
Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/29a3d44c
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/29a3d44c
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/29a3d44c
Branch: refs/heads/4.0
Commit: 29a3d44c05700fae4477ec4df65e99083702e92d
Parents: 3e89500
Author: James Taylor <jt...@salesforce.com>
Authored: Tue Oct 21 11:30:39 2014 -0700
Committer: James Taylor <jt...@salesforce.com>
Committed: Tue Oct 21 11:30:39 2014 -0700
----------------------------------------------------------------------
.../java/org/apache/phoenix/mapreduce/CsvBulkLoadTool.java | 7 +------
.../java/org/apache/phoenix/query/QueryServicesOptions.java | 7 -------
2 files changed, 1 insertion(+), 13 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/phoenix/blob/29a3d44c/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/CsvBulkLoadTool.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/CsvBulkLoadTool.java b/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/CsvBulkLoadTool.java
index 880cc13..e29405f 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/CsvBulkLoadTool.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/CsvBulkLoadTool.java
@@ -17,7 +17,6 @@
*/
package org.apache.phoenix.mapreduce;
-import java.io.IOException;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
@@ -49,21 +48,16 @@ import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.mapreduce.HFileOutputFormat;
import org.apache.hadoop.hbase.mapreduce.LoadIncrementalHFiles;
import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;
-import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;
-import org.apache.phoenix.coprocessor.MetaDataProtocol.MetaDataMutationResult;
-import org.apache.phoenix.jdbc.PhoenixConnection;
import org.apache.phoenix.jdbc.PhoenixDatabaseMetaData;
import org.apache.phoenix.job.JobManager;
import org.apache.phoenix.query.QueryConstants;
-import org.apache.phoenix.schema.MetaDataClient;
import org.apache.phoenix.schema.PTable;
-import org.apache.phoenix.schema.TableNotFoundException;
import org.apache.phoenix.util.CSVCommonsLoader;
import org.apache.phoenix.util.ColumnInfo;
import org.apache.phoenix.util.PhoenixRuntime;
@@ -438,6 +432,7 @@ public class CsvBulkLoadTool extends Configured implements Tool {
boolean success = job.waitForCompletion(true);
if (!success) {
LOG.error("Import job failed, check JobTracker for details");
+ htable.close();
return false;
}
http://git-wip-us.apache.org/repos/asf/phoenix/blob/29a3d44c/phoenix-core/src/main/java/org/apache/phoenix/query/QueryServicesOptions.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/query/QueryServicesOptions.java b/phoenix-core/src/main/java/org/apache/phoenix/query/QueryServicesOptions.java
index e890cd7..7ee225b 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/query/QueryServicesOptions.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/query/QueryServicesOptions.java
@@ -59,7 +59,6 @@ import static org.apache.phoenix.query.QueryServices.USE_INDEXES_ATTRIB;
import java.util.Map.Entry;
import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.regionserver.wal.WALCellCodec;
import org.apache.phoenix.schema.SaltingUtil;
import org.apache.phoenix.trace.util.Tracing;
@@ -163,12 +162,6 @@ public class QueryServicesOptions {
}
public ReadOnlyProps getProps(ReadOnlyProps defaultProps) {
- // Ensure that HBase RPC time out value is at least as large as our thread time out for query.
- int threadTimeOutMS = config.getInt(THREAD_TIMEOUT_MS_ATTRIB, DEFAULT_THREAD_TIMEOUT_MS);
- int hbaseRPCTimeOut = config.getInt(HConstants.HBASE_RPC_TIMEOUT_KEY, HConstants.DEFAULT_HBASE_RPC_TIMEOUT);
- if (threadTimeOutMS > hbaseRPCTimeOut) {
- config.setInt(HConstants.HBASE_RPC_TIMEOUT_KEY, threadTimeOutMS);
- }
return new ReadOnlyProps(defaultProps, config.iterator());
}