Posted to commits@hive.apache.org by sz...@apache.org on 2015/01/08 03:00:12 UTC

svn commit: r1650201 [3/3] - in /hive/branches/spark: itests/hive-unit/src/test/java/org/apache/hive/jdbc/ ql/src/java/org/apache/hadoop/hive/ql/exec/ ql/src/java/org/apache/hadoop/hive/ql/exec/spark/ ql/src/java/org/apache/hadoop/hive/ql/exec/spark/se...

Modified: hive/branches/spark/spark-client/src/main/java/org/apache/hive/spark/client/RemoteDriver.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/spark-client/src/main/java/org/apache/hive/spark/client/RemoteDriver.java?rev=1650201&r1=1650200&r2=1650201&view=diff
==============================================================================
--- hive/branches/spark/spark-client/src/main/java/org/apache/hive/spark/client/RemoteDriver.java (original)
+++ hive/branches/spark/spark-client/src/main/java/org/apache/hive/spark/client/RemoteDriver.java Thu Jan  8 02:00:11 2015
@@ -74,7 +74,7 @@ import com.google.common.util.concurrent
 @InterfaceAudience.Private
 public class RemoteDriver {
 
-  private final static Logger LOG = LoggerFactory.getLogger(RemoteDriver.class);
+  private static final Logger LOG = LoggerFactory.getLogger(RemoteDriver.class);
 
   private final Map<String, JobWrapper<?>> activeJobs;
   private final Object shutdownLock;
@@ -108,8 +108,8 @@ public class RemoteDriver {
         String[] val = getArg(args, idx).split("[=]", 2);
         conf.set(val[0], val[1]);
       } else {
-        throw new IllegalArgumentException("Invalid command line: " +
-            Joiner.on(" ").join(args));
+        throw new IllegalArgumentException("Invalid command line: "
+          + Joiner.on(" ").join(args));
       }
     }
 
@@ -158,7 +158,7 @@ public class RemoteDriver {
     }
 
     synchronized (jobQueue) {
-      for (Iterator<JobWrapper<?>> it = jobQueue.iterator(); it.hasNext(); ) {
+      for (Iterator<JobWrapper<?>> it = jobQueue.iterator(); it.hasNext();) {
         it.next().submit();
       }
     }
@@ -216,8 +216,8 @@ public class RemoteDriver {
   private String getArg(String[] args, int keyIdx) {
     int valIdx = keyIdx + 1;
     if (args.length <= valIdx) {
-      throw new IllegalArgumentException("Invalid command line: " +
-          Joiner.on(" ").join(args));
+      throw new IllegalArgumentException("Invalid command line: "
+        + Joiner.on(" ").join(args));
     }
     return args[valIdx];
   }
@@ -382,7 +382,7 @@ public class RemoteDriver {
     public void onJobEnd(SparkListenerJobEnd jobEnd) {
       synchronized (stageToJobId) {
         for (Iterator<Map.Entry<Integer, Integer>> it = stageToJobId.entrySet().iterator();
-            it.hasNext(); ) {
+            it.hasNext();) {
           Map.Entry<Integer, Integer> e = it.next();
           if (e.getValue() == jobEnd.jobId()) {
             it.remove();
@@ -398,8 +398,8 @@ public class RemoteDriver {
 
     @Override
     public void onTaskEnd(SparkListenerTaskEnd taskEnd) {
-      if (taskEnd.reason() instanceof org.apache.spark.Success$ &&
-          !taskEnd.taskInfo().speculative()) {
+      if (taskEnd.reason() instanceof org.apache.spark.Success$
+          && !taskEnd.taskInfo().speculative()) {
         Metrics metrics = new Metrics(taskEnd.taskMetrics());
         Integer jobId;
         synchronized (stageToJobId) {
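
The RemoteDriver changes are pure checkstyle fixes: "final static" becomes the
JLS-recommended "static final", and wrapped expressions now carry the operator
("+", "&&") at the start of the continuation line instead of the end of the
previous one. A minimal sketch of both conventions; the class and values are
illustrative only:

    import com.google.common.base.Joiner;

    public class WrapStyle {
      // Modifier order per the JLS: visibility, then static, then final.
      private static final int MAX_ARGS = 2;

      static void validate(String[] args) {
        // Continuation lines begin with the operator.
        if (args != null
            && args.length > MAX_ARGS) {
          throw new IllegalArgumentException("Invalid command line: "
              + Joiner.on(" ").join(args));
        }
      }
    }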

Modified: hive/branches/spark/spark-client/src/main/java/org/apache/hive/spark/client/SparkClient.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/spark-client/src/main/java/org/apache/hive/spark/client/SparkClient.java?rev=1650201&r1=1650200&r2=1650201&view=diff
==============================================================================
--- hive/branches/spark/spark-client/src/main/java/org/apache/hive/spark/client/SparkClient.java (original)
+++ hive/branches/spark/spark-client/src/main/java/org/apache/hive/spark/client/SparkClient.java Thu Jan  8 02:00:11 2015
@@ -17,8 +17,8 @@
 
 package org.apache.hive.spark.client;
 
-import java.net.URL;
 import java.io.Serializable;
+import java.net.URL;
 import java.util.concurrent.Future;
 
 import org.apache.hadoop.hive.common.classification.InterfaceAudience;
@@ -69,7 +69,7 @@ public interface SparkClient extends Ser
   Future<?> addFile(URL url);
 
   /**
-   * Get the count of executors
+   * Get the count of executors.
    */
   Future<Integer> getExecutorCount();
 }
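
Only the import order and a Javadoc period change here. For context, every
SparkClient call is asynchronous; a hypothetical caller blocks on the returned
Future (obtaining the client instance is elided):

    // Hypothetical usage; the SparkClient instance comes from elsewhere.
    Future<Integer> pending = client.getExecutorCount();
    Integer executors = pending.get();  // blocks until the remote driver replies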

Modified: hive/branches/spark/spark-client/src/main/java/org/apache/hive/spark/client/SparkClientFactory.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/spark-client/src/main/java/org/apache/hive/spark/client/SparkClientFactory.java?rev=1650201&r1=1650200&r2=1650201&view=diff
==============================================================================
--- hive/branches/spark/spark-client/src/main/java/org/apache/hive/spark/client/SparkClientFactory.java (original)
+++ hive/branches/spark/spark-client/src/main/java/org/apache/hive/spark/client/SparkClientFactory.java Thu Jan  8 02:00:11 2015
@@ -20,13 +20,12 @@ package org.apache.hive.spark.client;
 import java.io.IOException;
 import java.util.Map;
 
-import com.google.common.base.Preconditions;
-import com.google.common.base.Throwables;
-import com.google.common.collect.Maps;
-import org.apache.spark.SparkException;
-
 import org.apache.hadoop.hive.common.classification.InterfaceAudience;
 import org.apache.hive.spark.client.rpc.RpcServer;
+import org.apache.spark.SparkException;
+
+import com.google.common.base.Preconditions;
+import com.google.common.base.Throwables;
 
 /**
  * Factory for SparkClient instances.
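
The only change here is the import order. The grouping applied throughout this
commit sorts each block alphabetically and orders the blocks third-party
netty, JDK, org.*, then com.google.* last; schematically:

    import io.netty.channel.Channel;                  // third-party (netty)

    import java.io.IOException;                       // JDK

    import org.apache.hive.spark.client.rpc.RpcServer;
    import org.apache.spark.SparkException;           // org.* projects

    import com.google.common.base.Preconditions;      // Guava last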

Modified: hive/branches/spark/spark-client/src/main/java/org/apache/hive/spark/client/SparkClientImpl.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/spark-client/src/main/java/org/apache/hive/spark/client/SparkClientImpl.java?rev=1650201&r1=1650200&r2=1650201&view=diff
==============================================================================
--- hive/branches/spark/spark-client/src/main/java/org/apache/hive/spark/client/SparkClientImpl.java (original)
+++ hive/branches/spark/spark-client/src/main/java/org/apache/hive/spark/client/SparkClientImpl.java Thu Jan  8 02:00:11 2015
@@ -17,12 +17,16 @@
 
 package org.apache.hive.spark.client;
 
+import io.netty.channel.ChannelHandlerContext;
+import io.netty.util.concurrent.GenericFutureListener;
+import io.netty.util.concurrent.Promise;
+
 import java.io.BufferedReader;
 import java.io.File;
 import java.io.FileOutputStream;
+import java.io.IOException;
 import java.io.InputStream;
 import java.io.InputStreamReader;
-import java.io.IOException;
 import java.io.OutputStreamWriter;
 import java.io.Serializable;
 import java.io.Writer;
@@ -32,33 +36,29 @@ import java.util.Map;
 import java.util.Properties;
 import java.util.UUID;
 import java.util.concurrent.Future;
-import java.util.concurrent.TimeoutException;
 import java.util.concurrent.TimeUnit;
+import java.util.concurrent.TimeoutException;
 import java.util.concurrent.atomic.AtomicInteger;
 
+import org.apache.hive.spark.client.rpc.Rpc;
+import org.apache.hive.spark.client.rpc.RpcServer;
+import org.apache.spark.SparkContext;
+import org.apache.spark.SparkException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
 import com.google.common.base.Charsets;
 import com.google.common.base.Joiner;
 import com.google.common.base.Preconditions;
 import com.google.common.base.Throwables;
 import com.google.common.collect.Lists;
 import com.google.common.collect.Maps;
-import io.netty.channel.ChannelHandlerContext;
-import io.netty.util.concurrent.GenericFutureListener;
-import io.netty.util.concurrent.Promise;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import org.apache.hive.spark.client.rpc.Rpc;
-import org.apache.hive.spark.client.rpc.RpcServer;
-import org.apache.spark.SparkContext;
-import org.apache.spark.SparkException;
 
 class SparkClientImpl implements SparkClient {
   private static final long serialVersionUID = 1L;
 
   private static final Logger LOG = LoggerFactory.getLogger(SparkClientImpl.class);
 
-  private static final String DEFAULT_CONNECTION_TIMEOUT = "60"; // In seconds
   private static final long DEFAULT_SHUTDOWN_TIMEOUT = 10000; // In milliseconds
 
   private static final String DRIVER_OPTS_KEY = "spark.driver.extraJavaOptions";
@@ -206,7 +206,7 @@ class SparkClientImpl implements SparkCl
         }
       }
       String driverJavaOpts = Joiner.on(" ").skipNulls().join(
-          "-Dhive.spark.log.dir=" + sparkLogDir,conf.get(DRIVER_OPTS_KEY));
+          "-Dhive.spark.log.dir=" + sparkLogDir, conf.get(DRIVER_OPTS_KEY));
       String executorJavaOpts = Joiner.on(" ").skipNulls().join(
           "-Dhive.spark.log.dir=" + sparkLogDir, conf.get(EXECUTOR_OPTS_KEY));
 

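The visible fix is just a missing space after a comma, but the line deserves a
note: skipNulls() is what lets conf.get(DRIVER_OPTS_KEY) be absent without the
literal string "null" leaking into the driver options. A self-contained
illustration (the log directory is made up):

    import com.google.common.base.Joiner;

    // skipNulls() silently drops null arguments.
    String opts = Joiner.on(" ").skipNulls()
        .join("-Dhive.spark.log.dir=/tmp/spark-logs", null);
    // opts is "-Dhive.spark.log.dir=/tmp/spark-logs", with no trailing " null"
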
Modified: hive/branches/spark/spark-client/src/main/java/org/apache/hive/spark/client/metrics/InputMetrics.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/spark-client/src/main/java/org/apache/hive/spark/client/metrics/InputMetrics.java?rev=1650201&r1=1650200&r2=1650201&view=diff
==============================================================================
--- hive/branches/spark/spark-client/src/main/java/org/apache/hive/spark/client/metrics/InputMetrics.java (original)
+++ hive/branches/spark/spark-client/src/main/java/org/apache/hive/spark/client/metrics/InputMetrics.java Thu Jan  8 02:00:11 2015
@@ -36,6 +36,7 @@ public class InputMetrics implements Ser
     // For Serialization only.
     this(null, 0L);
   }
+
   public InputMetrics(
       DataReadMethod readMethod,
       long bytesRead) {

Modified: hive/branches/spark/spark-client/src/main/java/org/apache/hive/spark/client/metrics/Metrics.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/spark-client/src/main/java/org/apache/hive/spark/client/metrics/Metrics.java?rev=1650201&r1=1650200&r2=1650201&view=diff
==============================================================================
--- hive/branches/spark/spark-client/src/main/java/org/apache/hive/spark/client/metrics/Metrics.java (original)
+++ hive/branches/spark/spark-client/src/main/java/org/apache/hive/spark/client/metrics/Metrics.java Thu Jan  8 02:00:11 2015
@@ -98,15 +98,15 @@ public class Metrics implements Serializ
       optionalShuffleWriteMetrics(metrics));
   }
 
-  private static final InputMetrics optionalInputMetric(TaskMetrics metrics) {
+  private static InputMetrics optionalInputMetric(TaskMetrics metrics) {
     return metrics.inputMetrics().isDefined() ? new InputMetrics(metrics) : null;
   }
 
-  private static final ShuffleReadMetrics optionalShuffleReadMetric(TaskMetrics metrics) {
+  private static ShuffleReadMetrics optionalShuffleReadMetric(TaskMetrics metrics) {
     return metrics.shuffleReadMetrics().isDefined() ? new ShuffleReadMetrics(metrics) : null;
   }
 
-  private static final ShuffleWriteMetrics optionalShuffleWriteMetrics(TaskMetrics metrics) {
+  private static ShuffleWriteMetrics optionalShuffleWriteMetrics(TaskMetrics metrics) {
     return metrics.shuffleWriteMetrics().isDefined() ? new ShuffleWriteMetrics(metrics) : null;
   }
 

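The "final" dropped from these private static helpers was dead weight: a
private static method is not inherited, so nothing can override or hide it,
and the keyword has no effect. Illustration only (the helper is hypothetical):

    class Helpers {
      // 'final' would be redundant here: private static methods are
      // invisible to subclasses, so they cannot be overridden or hidden.
      private static int bitsToBytes(int bits) {
        return bits / 8;
      }
    }
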
Modified: hive/branches/spark/spark-client/src/main/java/org/apache/hive/spark/client/rpc/Rpc.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/spark-client/src/main/java/org/apache/hive/spark/client/rpc/Rpc.java?rev=1650201&r1=1650200&r2=1650201&view=diff
==============================================================================
--- hive/branches/spark/spark-client/src/main/java/org/apache/hive/spark/client/rpc/Rpc.java (original)
+++ hive/branches/spark/spark-client/src/main/java/org/apache/hive/spark/client/rpc/Rpc.java Thu Jan  8 02:00:11 2015
@@ -17,19 +17,6 @@
 
 package org.apache.hive.spark.client.rpc;
 
-import java.io.Closeable;
-import java.util.Collection;
-import java.util.Map;
-import java.util.concurrent.TimeUnit;
-import java.util.concurrent.TimeoutException;
-import java.util.concurrent.atomic.AtomicBoolean;
-import java.util.concurrent.atomic.AtomicLong;
-import java.util.concurrent.atomic.AtomicReference;
-
-import com.google.common.annotations.VisibleForTesting;
-import com.google.common.base.Preconditions;
-import com.google.common.base.Throwables;
-import com.google.common.collect.Lists;
 import io.netty.bootstrap.Bootstrap;
 import io.netty.channel.Channel;
 import io.netty.channel.ChannelFuture;
@@ -37,23 +24,36 @@ import io.netty.channel.ChannelFutureLis
 import io.netty.channel.ChannelHandlerContext;
 import io.netty.channel.ChannelInboundHandlerAdapter;
 import io.netty.channel.ChannelOption;
-import io.netty.channel.EventLoopGroup;
 import io.netty.channel.embedded.EmbeddedChannel;
 import io.netty.channel.nio.NioEventLoopGroup;
 import io.netty.channel.socket.SocketChannel;
 import io.netty.channel.socket.nio.NioSocketChannel;
-import io.netty.handler.logging.LoggingHandler;
 import io.netty.handler.logging.LogLevel;
+import io.netty.handler.logging.LoggingHandler;
 import io.netty.util.concurrent.EventExecutorGroup;
 import io.netty.util.concurrent.Future;
 import io.netty.util.concurrent.GenericFutureListener;
 import io.netty.util.concurrent.ImmediateEventExecutor;
 import io.netty.util.concurrent.Promise;
 import io.netty.util.concurrent.ScheduledFuture;
+
+import java.io.Closeable;
+import java.util.Collection;
+import java.util.Map;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.TimeoutException;
+import java.util.concurrent.atomic.AtomicBoolean;
+import java.util.concurrent.atomic.AtomicLong;
+import java.util.concurrent.atomic.AtomicReference;
+
+import org.apache.hadoop.hive.common.classification.InterfaceAudience;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import org.apache.hadoop.hive.common.classification.InterfaceAudience;
+import com.google.common.annotations.VisibleForTesting;
+import com.google.common.base.Preconditions;
+import com.google.common.base.Throwables;
+import com.google.common.collect.Lists;
 
 /**
  * Encapsulates the RPC functionality. Provides higher-level methods to talk to the remote
@@ -215,6 +215,7 @@ public class Rpc implements Closeable {
   private final EventExecutorGroup egroup;
   private final Object channelLock;
 
+  @SuppressWarnings({ "rawtypes", "unchecked" })
   private Rpc(Channel channel, RpcDispatcher dispatcher, EventExecutorGroup egroup) {
     Preconditions.checkArgument(channel != null);
     Preconditions.checkArgument(dispatcher != null);
@@ -331,7 +332,7 @@ public class Rpc implements Closeable {
     }
   }
 
-  public static interface Listener {
+  public interface Listener {
 
     void rpcClosed(Rpc rpc);
 

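Two Rpc changes stand out: the constructor gains @SuppressWarnings for its
raw/unchecked generics, and "public static interface Listener" loses the
"static" keyword, because a member interface is implicitly static. Sketch:

    public class Outer {
      // A nested interface is implicitly static; writing 'static' is legal
      // but redundant, which is why checkstyle flags it.
      public interface Listener {
        void closed(Outer outer);
      }
    }
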
Modified: hive/branches/spark/spark-client/src/main/java/org/apache/hive/spark/client/rpc/RpcConfiguration.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/spark-client/src/main/java/org/apache/hive/spark/client/rpc/RpcConfiguration.java?rev=1650201&r1=1650200&r2=1650201&view=diff
==============================================================================
--- hive/branches/spark/spark-client/src/main/java/org/apache/hive/spark/client/rpc/RpcConfiguration.java (original)
+++ hive/branches/spark/spark-client/src/main/java/org/apache/hive/spark/client/rpc/RpcConfiguration.java Thu Jan  8 02:00:11 2015
@@ -38,37 +38,37 @@ public final class RpcConfiguration {
   private static final Logger LOG = LoggerFactory.getLogger(RpcConfiguration.class);
 
   /** Connection timeout for RPC clients. */
-  public final static String CONNECT_TIMEOUT_MS_KEY = "hive.spark.client.connect.timeout.ms";
-  private final static int CONNECT_TIMEOUT_MS_DEFAULT = 1000;
+  public static final String CONNECT_TIMEOUT_MS_KEY = "hive.spark.client.connect.timeout.ms";
+  private static final int CONNECT_TIMEOUT_MS_DEFAULT = 1000;
 
   /**
-   * How long the server should wait for clients to connect back after they're registered. Also
-   * used to time out the client waiting for the server to reply to its "hello" message.
+   * How long the server should wait for clients to connect back after they're
+   * registered. Also used to time out the client waiting for the server to
+   * reply to its "hello" message.
    */
-  public final static String SERVER_CONNECT_TIMEOUT_MS_KEY =
-      "hive.spark.client.server.connect.timeout.ms";
-  private final static long SERVER_CONNECT_TIMEOUT_MS_DEFAULT = 10000L;
+  public static final String SERVER_CONNECT_TIMEOUT_MS_KEY = "hive.spark.client.server.connect.timeout.ms";
+  private static final long SERVER_CONNECT_TIMEOUT_MS_DEFAULT = 10000L;
 
   /**
-   * Number of bits of randomness in the generated client secrets. Rounded down to the nearest
-   * multiple of 8.
+   * Number of bits of randomness in the generated client secrets. Rounded down
+   * to the nearest multiple of 8.
    */
-  public final static String SECRET_RANDOM_BITS_KEY = "hive.spark.client.secret.bits";
-  private final static int SECRET_RANDOM_BITS_DEFAULT = 256;
+  public static final String SECRET_RANDOM_BITS_KEY = "hive.spark.client.secret.bits";
+  private static final int SECRET_RANDOM_BITS_DEFAULT = 256;
 
   /** Hostname or IP address to advertise for the server. */
-  public final static String SERVER_LISTEN_ADDRESS_KEY = "hive.spark.client.server.address";
+  public static final String SERVER_LISTEN_ADDRESS_KEY = "hive.spark.client.server.address";
 
   /** Maximum number of threads to use for the RPC event loop. */
-  public final static String RPC_MAX_THREADS_KEY = "hive.spark.client.rpc.threads";
-  public final static int RPC_MAX_THREADS_DEFAULT = 8;
+  public static final String RPC_MAX_THREADS_KEY = "hive.spark.client.rpc.threads";
+  public static final int RPC_MAX_THREADS_DEFAULT = 8;
 
   /** Maximum message size. Default = 10MB. */
-  public final static String RPC_MAX_MESSAGE_SIZE_KEY = "hive.spark.client.rpc.max.size";
-  public final static int RPC_MAX_MESSAGE_SIZE_DEFAULT = 50 * 1024 * 1024;
+  public static final String RPC_MAX_MESSAGE_SIZE_KEY = "hive.spark.client.rpc.max.size";
+  public static final int RPC_MAX_MESSAGE_SIZE_DEFAULT = 50 * 1024 * 1024;
 
   /** Channel logging level. */
-  public final static String RPC_CHANNEL_LOG_LEVEL_KEY = "hive.spark.client.channel.log.level";
+  public static final String RPC_CHANNEL_LOG_LEVEL_KEY = "hive.spark.client.channel.log.level";
 
   private final Map<String, String> config;
 
@@ -104,7 +104,8 @@ public final class RpcConfiguration {
 
     InetAddress address = InetAddress.getLocalHost();
     if (address.isLoopbackAddress()) {
-      // Address resolves to something like 127.0.1.1, which happens on Debian; try to find
+      // Address resolves to something like 127.0.1.1, which happens on Debian;
+      // try to find
       // a better address using the local network interfaces
       Enumeration<NetworkInterface> ifaces = NetworkInterface.getNetworkInterfaces();
       while (ifaces.hasMoreElements()) {
@@ -112,17 +113,13 @@ public final class RpcConfiguration {
         Enumeration<InetAddress> addrs = ni.getInetAddresses();
         while (addrs.hasMoreElements()) {
           InetAddress addr = addrs.nextElement();
-          if (!addr.isLinkLocalAddress() &&
-              !addr.isLoopbackAddress() &&
-              addr instanceof Inet4Address) {
+          if (!addr.isLinkLocalAddress() && !addr.isLoopbackAddress()
+              && addr instanceof Inet4Address) {
             // We've found an address that looks reasonable!
-            LOG.warn("Your hostname, {}, resolves to a loopback address; using {} " +
-              " instead (on interface {})",
-              address.getHostName(),
-              addr.getHostAddress(),
-              ni.getName());
-            LOG.warn("Set '{}' if you need to bind to another address.",
-                SERVER_LISTEN_ADDRESS_KEY);
+            LOG.warn("Your hostname, {}, resolves to a loopback address; using {} "
+                + " instead (on interface {})", address.getHostName(), addr.getHostAddress(),
+                ni.getName());
+            LOG.warn("Set '{}' if you need to bind to another address.", SERVER_LISTEN_ADDRESS_KEY);
             return addr.getHostAddress();
           }
         }
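
The reflowed warnings sit inside RpcConfiguration's server-address fallback:
when the local hostname resolves to a loopback address (Debian commonly maps
the hostname to 127.0.1.1), the code scans the network interfaces for a
routable IPv4 address instead. A self-contained sketch of the same pattern,
using the same JDK APIs; the class and method names are made up:

    import java.net.Inet4Address;
    import java.net.InetAddress;
    import java.net.NetworkInterface;
    import java.util.Enumeration;

    public class AddressProbe {
      /** Hypothetical helper: prefer a routable IPv4 address over loopback. */
      static String findListenAddress() throws Exception {
        InetAddress address = InetAddress.getLocalHost();
        if (!address.isLoopbackAddress()) {
          return address.getHostAddress();
        }
        Enumeration<NetworkInterface> ifaces = NetworkInterface.getNetworkInterfaces();
        while (ifaces.hasMoreElements()) {
          Enumeration<InetAddress> addrs = ifaces.nextElement().getInetAddresses();
          while (addrs.hasMoreElements()) {
            InetAddress addr = addrs.nextElement();
            // Skip link-local, loopback, and IPv6 candidates.
            if (!addr.isLinkLocalAddress() && !addr.isLoopbackAddress()
                && addr instanceof Inet4Address) {
              return addr.getHostAddress();
            }
          }
        }
        return address.getHostAddress();  // nothing better was found
      }
    }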

Modified: hive/branches/spark/spark-client/src/main/java/org/apache/hive/spark/client/rpc/RpcDispatcher.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/spark-client/src/main/java/org/apache/hive/spark/client/rpc/RpcDispatcher.java?rev=1650201&r1=1650200&r2=1650201&view=diff
==============================================================================
--- hive/branches/spark/spark-client/src/main/java/org/apache/hive/spark/client/rpc/RpcDispatcher.java (original)
+++ hive/branches/spark/spark-client/src/main/java/org/apache/hive/spark/client/rpc/RpcDispatcher.java Thu Jan  8 02:00:11 2015
@@ -94,7 +94,7 @@ public abstract class RpcDispatcher exte
   }
 
   private OutstandingRpc findRpc(long id) {
-    for (Iterator<OutstandingRpc> it = rpcs.iterator(); it.hasNext(); ) {
+    for (Iterator<OutstandingRpc> it = rpcs.iterator(); it.hasNext();) {
       OutstandingRpc rpc = it.next();
       if (rpc.id == id) {
         it.remove();
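
The whitespace fix lands on a loop whose shape matters: findRpc uses the
explicit Iterator form because it removes the matched element mid-iteration,
which a for-each loop cannot do safely. A generic, runnable illustration:

    import java.util.ArrayList;
    import java.util.Arrays;
    import java.util.Iterator;
    import java.util.List;

    public class RemoveDuringIteration {
      public static void main(String[] args) {
        List<Long> outstanding = new ArrayList<>(Arrays.asList(1L, 2L, 3L));
        for (Iterator<Long> it = outstanding.iterator(); it.hasNext();) {
          if (it.next() == 2L) {
            it.remove();  // safe: removal goes through the iterator itself
            break;
          }
        }
        // Prints [1, 3]; calling outstanding.remove(...) inside a for-each
        // loop would risk ConcurrentModificationException instead.
        System.out.println(outstanding);
      }
    }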

Modified: hive/branches/spark/spark-client/src/main/java/org/apache/hive/spark/client/rpc/RpcServer.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/spark-client/src/main/java/org/apache/hive/spark/client/rpc/RpcServer.java?rev=1650201&r1=1650200&r2=1650201&view=diff
==============================================================================
--- hive/branches/spark/spark-client/src/main/java/org/apache/hive/spark/client/rpc/RpcServer.java (original)
+++ hive/branches/spark/spark-client/src/main/java/org/apache/hive/spark/client/rpc/RpcServer.java Thu Jan  8 02:00:11 2015
@@ -17,19 +17,6 @@
 
 package org.apache.hive.spark.client.rpc;
 
-import java.io.Closeable;
-import java.io.IOException;
-import java.net.InetSocketAddress;
-import java.security.SecureRandom;
-import java.util.Collection;
-import java.util.Iterator;
-import java.util.Map;
-import java.util.concurrent.ConcurrentLinkedQueue;
-import java.util.concurrent.TimeoutException;
-import java.util.concurrent.TimeUnit;
-
-import com.google.common.base.Optional;
-import com.google.common.util.concurrent.ThreadFactoryBuilder;
 import io.netty.bootstrap.ServerBootstrap;
 import io.netty.channel.Channel;
 import io.netty.channel.ChannelHandlerContext;
@@ -44,10 +31,23 @@ import io.netty.util.concurrent.GenericF
 import io.netty.util.concurrent.Promise;
 import io.netty.util.concurrent.ScheduledFuture;
 
+import java.io.Closeable;
+import java.io.IOException;
+import java.net.InetSocketAddress;
+import java.security.SecureRandom;
+import java.util.Collection;
+import java.util.Iterator;
+import java.util.Map;
+import java.util.concurrent.ConcurrentLinkedQueue;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.TimeoutException;
+
 import org.apache.hadoop.hive.common.classification.InterfaceAudience;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import com.google.common.util.concurrent.ThreadFactoryBuilder;
+
 /**
  * An RPC server. The server matches remote clients based on a secret that is generated on
  * the server - the secret needs to be given to the client through some other mechanism for
@@ -173,7 +173,7 @@ public class RpcServer implements Closea
   public void close() {
     try {
       channel.close();
-      for (Iterator<ClientInfo> clients = pendingClients.iterator(); clients.hasNext(); ) {
+      for (Iterator<ClientInfo> clients = pendingClients.iterator(); clients.hasNext();) {
         ClientInfo client = clients.next();
         clients.remove();
         client.promise.cancel(true);
@@ -191,7 +191,7 @@ public class RpcServer implements Closea
     protected void handle(ChannelHandlerContext ctx, Rpc.Hello msg) {
       cancelTask.cancel(true);
 
-      for (Iterator<ClientInfo> clients = pendingClients.iterator(); clients.hasNext(); ) {
+      for (Iterator<ClientInfo> clients = pendingClients.iterator(); clients.hasNext();) {
         ClientInfo client = clients.next();
         if (client.secret.equals(msg.secret)) {
           rpc.replaceDispatcher(client.dispatcher);

Modified: hive/branches/spark/spark-client/src/main/java/org/apache/hive/spark/counter/SparkCounters.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/spark-client/src/main/java/org/apache/hive/spark/counter/SparkCounters.java?rev=1650201&r1=1650200&r2=1650201&view=diff
==============================================================================
--- hive/branches/spark/spark-client/src/main/java/org/apache/hive/spark/counter/SparkCounters.java (original)
+++ hive/branches/spark/spark-client/src/main/java/org/apache/hive/spark/counter/SparkCounters.java Thu Jan  8 02:00:11 2015
@@ -23,8 +23,6 @@ import java.util.Map;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hive.shims.ShimLoader;
 import org.apache.spark.api.java.JavaSparkContext;
 
 /**
@@ -124,7 +122,7 @@ public class SparkCounters implements Se
     StringBuilder sb = new StringBuilder();
     Map<String, SparkCounterGroup> groups = getSparkCounterGroups();
     if (groups != null) {
-      for(Map.Entry<String, SparkCounterGroup> groupEntry : groups.entrySet()) {
+      for (Map.Entry<String, SparkCounterGroup> groupEntry : groups.entrySet()) {
         String groupName = groupEntry.getKey();
         SparkCounterGroup group = groupEntry.getValue();
         sb.append(groupName).append("\n");

Modified: hive/branches/spark/spark-client/src/test/java/org/apache/hive/spark/client/TestMetricsCollection.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/spark-client/src/test/java/org/apache/hive/spark/client/TestMetricsCollection.java?rev=1650201&r1=1650200&r2=1650201&view=diff
==============================================================================
--- hive/branches/spark/spark-client/src/test/java/org/apache/hive/spark/client/TestMetricsCollection.java (original)
+++ hive/branches/spark/spark-client/src/test/java/org/apache/hive/spark/client/TestMetricsCollection.java Thu Jan  8 02:00:11 2015
@@ -17,14 +17,20 @@
 
 package org.apache.hive.spark.client;
 
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertNull;
+
 import java.util.Arrays;
 
-import com.google.common.collect.ImmutableSet;
-import com.google.common.collect.Sets;
+import org.apache.hive.spark.client.metrics.DataReadMethod;
+import org.apache.hive.spark.client.metrics.InputMetrics;
+import org.apache.hive.spark.client.metrics.Metrics;
+import org.apache.hive.spark.client.metrics.ShuffleReadMetrics;
+import org.apache.hive.spark.client.metrics.ShuffleWriteMetrics;
 import org.junit.Test;
-import static org.junit.Assert.*;
 
-import org.apache.hive.spark.client.metrics.*;
+import com.google.common.collect.ImmutableSet;
 
 public class TestMetricsCollection {
 

Modified: hive/branches/spark/spark-client/src/test/java/org/apache/hive/spark/client/TestSparkClient.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/spark-client/src/test/java/org/apache/hive/spark/client/TestSparkClient.java?rev=1650201&r1=1650200&r2=1650201&view=diff
==============================================================================
--- hive/branches/spark/spark-client/src/test/java/org/apache/hive/spark/client/TestSparkClient.java (original)
+++ hive/branches/spark/spark-client/src/test/java/org/apache/hive/spark/client/TestSparkClient.java Thu Jan  8 02:00:11 2015
@@ -268,15 +268,6 @@ public class TestSparkClient {
 
   }
 
-  private static class ErrorJob implements Job<String> {
-
-    @Override
-    public String call(JobContext jc) {
-      throw new IllegalStateException("This job does not work.");
-    }
-
-  }
-
   private static class JarJob implements Job<String>, Function<Integer, String> {
 
     @Override
@@ -299,10 +290,6 @@ public class TestSparkClient {
 
     private final String fileName;
 
-    FileJob() {
-      this(null);
-    }
-
     FileJob(String fileName) {
       this.fileName = fileName;
     }
@@ -346,7 +333,7 @@ public class TestSparkClient {
 
   }
 
-  private static abstract class TestFunction {
+  private abstract static class TestFunction {
     abstract void call(SparkClient client) throws Exception;
     void config(Map<String, String> conf) { }
   }

Modified: hive/branches/spark/spark-client/src/test/java/org/apache/hive/spark/client/rpc/TestKryoMessageCodec.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/spark-client/src/test/java/org/apache/hive/spark/client/rpc/TestKryoMessageCodec.java?rev=1650201&r1=1650200&r2=1650201&view=diff
==============================================================================
--- hive/branches/spark/spark-client/src/test/java/org/apache/hive/spark/client/rpc/TestKryoMessageCodec.java (original)
+++ hive/branches/spark/spark-client/src/test/java/org/apache/hive/spark/client/rpc/TestKryoMessageCodec.java Thu Jan  8 02:00:11 2015
@@ -17,16 +17,21 @@
 
 package org.apache.hive.spark.client.rpc;
 
-import java.util.List;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
 
-import com.google.common.collect.Lists;
 import io.netty.buffer.ByteBuf;
 import io.netty.buffer.UnpooledByteBufAllocator;
 import io.netty.channel.embedded.EmbeddedChannel;
 import io.netty.handler.logging.LoggingHandler;
 
+import java.util.List;
+
 import org.junit.Test;
-import static org.junit.Assert.*;
+
+import com.google.common.collect.Lists;
 
 public class TestKryoMessageCodec {
 

Modified: hive/branches/spark/spark-client/src/test/java/org/apache/hive/spark/client/rpc/TestRpc.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/spark-client/src/test/java/org/apache/hive/spark/client/rpc/TestRpc.java?rev=1650201&r1=1650200&r2=1650201&view=diff
==============================================================================
--- hive/branches/spark/spark-client/src/test/java/org/apache/hive/spark/client/rpc/TestRpc.java (original)
+++ hive/branches/spark/spark-client/src/test/java/org/apache/hive/spark/client/rpc/TestRpc.java Thu Jan  8 02:00:11 2015
@@ -38,7 +38,10 @@ import org.slf4j.LoggerFactory;
 import org.junit.After;
 import org.junit.Before;
 import org.junit.Test;
-import static org.junit.Assert.*;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
 
 public class TestRpc {
 
@@ -255,6 +258,5 @@ public class TestRpc {
       // No op. Shouldn't actually be called, if it is, the test will fail.
     }
 
-  };
-
+  }
 }