Posted to commits@hive.apache.org by xu...@apache.org on 2014/11/10 06:30:05 UTC
svn commit: r1637790 - in /hive/branches/spark: ./ ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/ ql/src/java/org/apache/hadoop/hive/ql/exec/spark/counter/ ql/src/test/org/apache/hadoop/hive/ql/exec/spark/ serde/ shims/0.20/src/main/java/org/a...
Author: xuefu
Date: Mon Nov 10 05:30:05 2014
New Revision: 1637790
URL: http://svn.apache.org/r1637790
Log:
HIVE-8758: Fix hadoop-1 build [Spark Branch] (Jimmy via Xuefu)
Modified:
hive/branches/spark/pom.xml
hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/MapJoinTableContainerSerDe.java
hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/counter/SparkCounterGroup.java
hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/counter/SparkCounters.java
hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/exec/spark/TestHiveKVResultCache.java
hive/branches/spark/serde/pom.xml
hive/branches/spark/shims/0.20/src/main/java/org/apache/hadoop/hive/shims/Hadoop20Shims.java
hive/branches/spark/shims/0.20S/src/main/java/org/apache/hadoop/hive/shims/Hadoop20SShims.java
hive/branches/spark/shims/0.23/src/main/java/org/apache/hadoop/hive/shims/Hadoop23Shims.java
hive/branches/spark/shims/common/src/main/java/org/apache/hadoop/hive/shims/HadoopShims.java
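The heart of this change is that org.apache.hadoop.mapreduce.util.ResourceBundles is not available in the Hadoop 1 line, so the counter group display-name lookup is routed through a new HadoopShims.getCounterGroupName() method instead of being called directly from ql. A minimal sketch of the resulting call pattern, assuming the shim jars are on the classpath (the group name is illustrative, not taken from this commit):

  import org.apache.hadoop.hive.shims.ShimLoader;

  public class CounterGroupNameExample {
    public static void main(String[] args) {
      // Illustrative group name only.
      String group = "HIVE";
      // Hadoop23Shims delegates to ResourceBundles.getCounterGroupName(group, group);
      // the Hadoop 1 shims (Hadoop20Shims, Hadoop20SShims) simply return the default value.
      String displayName = ShimLoader.getHadoopShims().getCounterGroupName(group, group);
      System.out.println(displayName);
    }
  }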
Modified: hive/branches/spark/pom.xml
URL: http://svn.apache.org/viewvc/hive/branches/spark/pom.xml?rev=1637790&r1=1637789&r2=1637790&view=diff
==============================================================================
--- hive/branches/spark/pom.xml (original)
+++ hive/branches/spark/pom.xml Mon Nov 10 05:30:05 2014
@@ -167,6 +167,7 @@
<jpam.version>1.1</jpam.version>
<felix.version>2.4.0</felix.version>
<curator.version>2.6.0</curator.version>
+ <jsr305.version>3.0.0</jsr305.version>
</properties>
<repositories>
Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/MapJoinTableContainerSerDe.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/MapJoinTableContainerSerDe.java?rev=1637790&r1=1637789&r2=1637790&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/MapJoinTableContainerSerDe.java (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/MapJoinTableContainerSerDe.java Mon Nov 10 05:30:05 2014
@@ -118,7 +118,7 @@ public class MapJoinTableContainerSerDe
for (FileStatus fileStatus: fs.listStatus(folder)) {
Path filePath = fileStatus.getPath();
- if (!fileStatus.isFile()) {
+ if (fileStatus.isDir()) {
throw new HiveException("Error, not a file: " + filePath);
}
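This hunk works around FileStatus.isFile(), which is not present in Hadoop 0.20; the older isDir() accessor exists in both Hadoop 1 and Hadoop 2, and for the plain files and directories returned by listStatus() the two checks agree. A small sketch of the portable pattern, with the class and method names chosen here purely for illustration:

  import java.io.IOException;
  import org.apache.hadoop.fs.FileStatus;
  import org.apache.hadoop.fs.FileSystem;
  import org.apache.hadoop.fs.Path;

  public class PortableFileCheck {
    // Reject any directory entry under 'folder' using only Hadoop 0.20-era FileStatus APIs.
    static void assertAllFiles(FileSystem fs, Path folder) throws IOException {
      for (FileStatus status : fs.listStatus(folder)) {
        // isDir() is available in Hadoop 0.20 and later; isFile() is a newer addition.
        if (status.isDir()) {
          throw new IOException("Error, not a file: " + status.getPath());
        }
      }
    }
  }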
InputStream is = null;
Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/counter/SparkCounterGroup.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/counter/SparkCounterGroup.java?rev=1637790&r1=1637789&r2=1637790&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/counter/SparkCounterGroup.java (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/counter/SparkCounterGroup.java Mon Nov 10 05:30:05 2014
@@ -21,14 +21,14 @@ import java.io.Serializable;
import java.util.HashMap;
import java.util.Map;
-import org.apache.hadoop.mapreduce.util.ResourceBundles;
+import org.apache.hadoop.hive.shims.ShimLoader;
import org.apache.spark.api.java.JavaSparkContext;
/**
* We use group to fold all the same kind of counters.
*/
public class SparkCounterGroup implements Serializable {
-
+ private static final long serialVersionUID = 1L;
private String groupName;
private String groupDisplayName;
private Map<String, SparkCounter> sparkCounters;
@@ -47,7 +47,7 @@ public class SparkCounterGroup implement
}
public void createCounter(String name, long initValue) {
- String displayName = ResourceBundles.getCounterGroupName(name, name);
+ String displayName = ShimLoader.getHadoopShims().getCounterGroupName(groupName, groupName);
SparkCounter counter = new SparkCounter(name, displayName, groupName, initValue, javaSparkContext);
sparkCounters.put(name, counter);
}
Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/counter/SparkCounters.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/counter/SparkCounters.java?rev=1637790&r1=1637789&r2=1637790&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/counter/SparkCounters.java (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/counter/SparkCounters.java Mon Nov 10 05:30:05 2014
@@ -26,13 +26,12 @@ import org.apache.commons.logging.LogFac
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.exec.FileSinkOperator;
-import org.apache.hadoop.hive.ql.exec.FilterOperator;
import org.apache.hadoop.hive.ql.exec.JoinOperator;
import org.apache.hadoop.hive.ql.exec.MapOperator;
import org.apache.hadoop.hive.ql.exec.ReduceSinkOperator;
import org.apache.hadoop.hive.ql.exec.Operator;
import org.apache.hadoop.hive.ql.exec.ScriptOperator;
-import org.apache.hadoop.mapreduce.util.ResourceBundles;
+import org.apache.hadoop.hive.shims.ShimLoader;
import org.apache.spark.api.java.JavaSparkContext;
/**
@@ -48,6 +47,8 @@ import org.apache.spark.api.java.JavaSpa
* 3. Hive could only get Counter value at driver side.
*/
public class SparkCounters implements Serializable {
+ private static final long serialVersionUID = 1L;
+
private static final Log LOG = LogFactory.getLog(SparkCounters.class);
private Map<String, SparkCounterGroup> sparkCounterGroups;
@@ -113,7 +114,8 @@ public class SparkCounters implements Se
private SparkCounterGroup getGroup(String groupName) {
SparkCounterGroup group = sparkCounterGroups.get(groupName);
if (group == null) {
- String groupDisplayName = ResourceBundles.getCounterGroupName(groupName, groupName);
+ String groupDisplayName =
+ ShimLoader.getHadoopShims().getCounterGroupName(groupName, groupName);
group = new SparkCounterGroup(groupName, groupDisplayName, javaSparkContext);
sparkCounterGroups.put(groupName, group);
}
Modified: hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/exec/spark/TestHiveKVResultCache.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/exec/spark/TestHiveKVResultCache.java?rev=1637790&r1=1637789&r2=1637790&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/exec/spark/TestHiveKVResultCache.java (original)
+++ hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/exec/spark/TestHiveKVResultCache.java Mon Nov 10 05:30:05 2014
@@ -121,8 +121,8 @@ public class TestHiveKVResultCache {
HashSet<Long> primaryRowKeys = new HashSet<Long>();
HashSet<Long> separateRowKeys = new HashSet<Long>();
for (Tuple2<HiveKey, BytesWritable> item: output) {
- String key = new String(item._1.copyBytes());
- String value = new String(item._2.copyBytes());
+ String key = new String(item._1.getBytes());
+ String value = new String(item._2.getBytes());
String prefix = key.substring(0, key.indexOf('_'));
Long id = Long.valueOf(key.substring(5 + prefix.length()));
if (prefix.equals(prefix1)) {
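The test change is needed because BytesWritable.copyBytes() is a Hadoop 2 addition. getBytes() is available on both lines, but it returns the backing buffer, which can be longer than the valid data, so a fully defensive conversion bounds the string by getLength(). A minimal sketch, with an illustrative payload:

  import org.apache.hadoop.io.BytesWritable;

  public class BytesWritableToString {
    public static void main(String[] args) {
      // Illustrative payload only.
      BytesWritable bw = new BytesWritable("key1_0".getBytes());
      // getBytes() exposes the backing array, which may be larger than getLength(),
      // so bound the conversion explicitly; copyBytes() would do this but is Hadoop 2 only.
      String key = new String(bw.getBytes(), 0, bw.getLength());
      System.out.println(key);
    }
  }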
Modified: hive/branches/spark/serde/pom.xml
URL: http://svn.apache.org/viewvc/hive/branches/spark/serde/pom.xml?rev=1637790&r1=1637789&r2=1637790&view=diff
==============================================================================
--- hive/branches/spark/serde/pom.xml (original)
+++ hive/branches/spark/serde/pom.xml Mon Nov 10 05:30:05 2014
@@ -46,6 +46,11 @@
</dependency>
<!-- inter-project -->
<dependency>
+ <groupId>com.google.code.findbugs</groupId>
+ <artifactId>jsr305</artifactId>
+ <version>${jsr305.version}</version>
+ </dependency>
+ <dependency>
<groupId>commons-codec</groupId>
<artifactId>commons-codec</artifactId>
<version>${commons-codec.version}</version>
Modified: hive/branches/spark/shims/0.20/src/main/java/org/apache/hadoop/hive/shims/Hadoop20Shims.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/shims/0.20/src/main/java/org/apache/hadoop/hive/shims/Hadoop20Shims.java?rev=1637790&r1=1637789&r2=1637790&view=diff
==============================================================================
--- hive/branches/spark/shims/0.20/src/main/java/org/apache/hadoop/hive/shims/Hadoop20Shims.java (original)
+++ hive/branches/spark/shims/0.20/src/main/java/org/apache/hadoop/hive/shims/Hadoop20Shims.java Mon Nov 10 05:30:05 2014
@@ -909,6 +909,11 @@ public class Hadoop20Shims implements Ha
throw new IOException("Merging of credentials not supported in this version of hadoop");
}
+ @Override
+ public String getCounterGroupName(String group, String defaultValue) {
+ return defaultValue;
+ }
+
protected void run(FsShell shell, String[] command) throws Exception {
LOG.debug(ArrayUtils.toString(command));
shell.run(command);
Modified: hive/branches/spark/shims/0.20S/src/main/java/org/apache/hadoop/hive/shims/Hadoop20SShims.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/shims/0.20S/src/main/java/org/apache/hadoop/hive/shims/Hadoop20SShims.java?rev=1637790&r1=1637789&r2=1637790&view=diff
==============================================================================
--- hive/branches/spark/shims/0.20S/src/main/java/org/apache/hadoop/hive/shims/Hadoop20SShims.java (original)
+++ hive/branches/spark/shims/0.20S/src/main/java/org/apache/hadoop/hive/shims/Hadoop20SShims.java Mon Nov 10 05:30:05 2014
@@ -533,6 +533,11 @@ public class Hadoop20SShims extends Hado
}
@Override
+ public String getCounterGroupName(String group, String defaultValue) {
+ return defaultValue;
+ }
+
+ @Override
public String getPassword(Configuration conf, String name) {
// No password API, just retrieve value from conf
return conf.get(name);
Modified: hive/branches/spark/shims/0.23/src/main/java/org/apache/hadoop/hive/shims/Hadoop23Shims.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/shims/0.23/src/main/java/org/apache/hadoop/hive/shims/Hadoop23Shims.java?rev=1637790&r1=1637789&r2=1637790&view=diff
==============================================================================
--- hive/branches/spark/shims/0.23/src/main/java/org/apache/hadoop/hive/shims/Hadoop23Shims.java (original)
+++ hive/branches/spark/shims/0.23/src/main/java/org/apache/hadoop/hive/shims/Hadoop23Shims.java Mon Nov 10 05:30:05 2014
@@ -71,6 +71,7 @@ import org.apache.hadoop.mapreduce.TaskI
import org.apache.hadoop.mapreduce.TaskType;
import org.apache.hadoop.mapreduce.task.JobContextImpl;
import org.apache.hadoop.mapreduce.task.TaskAttemptContextImpl;
+import org.apache.hadoop.mapreduce.util.ResourceBundles;
import org.apache.hadoop.net.NetUtils;
import org.apache.hadoop.security.authentication.util.KerberosName;
import org.apache.hadoop.security.UserGroupInformation;
@@ -776,6 +777,11 @@ public class Hadoop23Shims extends Hadoo
dest.getCredentials().mergeAll(src.getCredentials());
}
+ @Override
+ public String getCounterGroupName(String group, String defaultValue) {
+ return ResourceBundles.getCounterGroupName(group, defaultValue);
+ }
+
protected static final Method accessMethod;
protected static final Method getPasswordMethod;
Modified: hive/branches/spark/shims/common/src/main/java/org/apache/hadoop/hive/shims/HadoopShims.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/shims/common/src/main/java/org/apache/hadoop/hive/shims/HadoopShims.java?rev=1637790&r1=1637789&r2=1637790&view=diff
==============================================================================
--- hive/branches/spark/shims/common/src/main/java/org/apache/hadoop/hive/shims/HadoopShims.java (original)
+++ hive/branches/spark/shims/common/src/main/java/org/apache/hadoop/hive/shims/HadoopShims.java Mon Nov 10 05:30:05 2014
@@ -715,6 +715,14 @@ public interface HadoopShims {
public void mergeCredentials(JobConf dest, JobConf src) throws IOException;
/**
+ * Get the counter group display name
+ * @param group the group name to lookup
+ * @param defaultValue of the group
+ * @return the group display name
+ */
+ public String getCounterGroupName(String group, String defaultValue);
+
+ /**
* Check if the configured UGI has access to the path for the given file system action.
* Method will return successfully if action is permitted. AccessControlException will
* be thrown if user does not have access to perform the action. Other exceptions may