Posted to commits@hive.apache.org by vi...@apache.org on 2018/06/26 05:17:39 UTC

[3/8] hive git commit: Temp work related to docker execution phase. code may not compile

Temp work related to docker execution phase. code may not compile


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/ea53020e
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/ea53020e
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/ea53020e

Branch: refs/heads/HIVE-19429
Commit: ea53020e84c51f35810b0977e160c78b1376f07e
Parents: 4192812
Author: Vihang Karajgaonkar <vi...@cloudera.com>
Authored: Mon Jun 4 11:31:48 2018 -0700
Committer: Vihang Karajgaonkar <vi...@cloudera.com>
Committed: Mon Jun 4 11:34:21 2018 -0700

----------------------------------------------------------------------
 .../hive/ptest/execution/ContainerClient.java   |  11 ++
 .../hive/ptest/execution/ExecutionPhase.java    | 101 ++++++-----
 .../hive/ptest/execution/HostExecutor.java      |  30 ++--
 .../containers/AbortContainerException.java     |  23 +++
 .../execution/containers/DockerClient.java      |  38 ++++
 .../containers/DockerExecutionPhase.java        |  73 ++++++++
 .../containers/DockerHostExectutor.java         | 177 +++++++++++++++++++
 .../execution/containers/DockerPrepPhase.java   |   4 +
 .../src/main/resources/dockerfile-template.vm   |   8 +
 9 files changed, 402 insertions(+), 63 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hive/blob/ea53020e/testutils/ptest2/src/main/java/org/apache/hive/ptest/execution/ContainerClient.java
----------------------------------------------------------------------
diff --git a/testutils/ptest2/src/main/java/org/apache/hive/ptest/execution/ContainerClient.java b/testutils/ptest2/src/main/java/org/apache/hive/ptest/execution/ContainerClient.java
index a9a8ead..d4f887c 100644
--- a/testutils/ptest2/src/main/java/org/apache/hive/ptest/execution/ContainerClient.java
+++ b/testutils/ptest2/src/main/java/org/apache/hive/ptest/execution/ContainerClient.java
@@ -20,12 +20,23 @@
 package org.apache.hive.ptest.execution;
 
 import org.apache.hive.ptest.execution.ContainerClientFactory.ContainerClientContext;
+<<<<<<< HEAD
+=======
+import org.apache.hive.ptest.execution.conf.TestBatch;
+>>>>>>> a1fe94a... Temp work related to docker execution phase. code may not compile
 
 import java.io.IOException;
 import java.util.concurrent.TimeUnit;
 
 public interface ContainerClient {
+<<<<<<< HEAD
   void defineImage(String dir, String label) throws IOException;
   String getBuildCommand(String dir, long toWait, TimeUnit unit)
       throws Exception;
+=======
+  void defineImage(String dir) throws Exception;
+  String getBuildCommand(String dir, long toWait, TimeUnit unit)
+      throws Exception;
+  String getRunContainerCommand(String containerName, TestBatch testBatch);
+>>>>>>> a1fe94a... Temp work related to docker execution phase. code may not compile
 }
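
Note that this hunk commits unresolved merge-conflict markers into ContainerClient.java. Below is a minimal sketch of how the interface would read if the conflict were resolved in favour of the incoming (a1fe94a) side; this resolution is an assumption for illustration, not part of the commit:

    package org.apache.hive.ptest.execution;

    import org.apache.hive.ptest.execution.conf.TestBatch;

    import java.util.concurrent.TimeUnit;

    public interface ContainerClient {
      // Incoming side: the image label is taken from the template defaults
      // (buildTag) instead of being passed in by the caller.
      void defineImage(String dir) throws Exception;
      String getBuildCommand(String dir, long toWait, TimeUnit unit) throws Exception;
      // New for the docker execution phase: builds the "docker run" command line
      // used to execute a single test batch inside a container.
      String getRunContainerCommand(String containerName, TestBatch testBatch);
    }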

http://git-wip-us.apache.org/repos/asf/hive/blob/ea53020e/testutils/ptest2/src/main/java/org/apache/hive/ptest/execution/ExecutionPhase.java
----------------------------------------------------------------------
diff --git a/testutils/ptest2/src/main/java/org/apache/hive/ptest/execution/ExecutionPhase.java b/testutils/ptest2/src/main/java/org/apache/hive/ptest/execution/ExecutionPhase.java
index 7ab98f6..1f7d2fd 100644
--- a/testutils/ptest2/src/main/java/org/apache/hive/ptest/execution/ExecutionPhase.java
+++ b/testutils/ptest2/src/main/java/org/apache/hive/ptest/execution/ExecutionPhase.java
@@ -22,10 +22,8 @@ import java.io.File;
 import java.io.IOException;
 import java.util.Collection;
 import java.util.Collections;
-import java.util.HashMap;
 import java.util.HashSet;
 import java.util.List;
-import java.util.Map;
 import java.util.Set;
 import java.util.concurrent.BlockingQueue;
 import java.util.concurrent.LinkedBlockingQueue;
@@ -52,11 +50,11 @@ public class ExecutionPhase extends Phase {
   private final HostExecutorBuilder hostExecutorBuilder;
   private final File succeededLogDir;
   private final File failedLogDir;
-  private final BlockingQueue<TestBatch> parallelWorkQueue;
-  private final BlockingQueue<TestBatch> isolatedWorkQueue;
+  protected final BlockingQueue<TestBatch> parallelWorkQueue;
+  protected final BlockingQueue<TestBatch> isolatedWorkQueue;
   private final Set<String> executedTests;
   private final Set<String> failedTests;
-  private final Supplier<List<TestBatch>> testBatchSupplier;
+  protected final Supplier<List<TestBatch>> testBatchSupplier;
   private final Set<TestBatch> failedTestResults;
 
   public ExecutionPhase(List<HostExecutor> hostExecutors, ExecutionContext executionContext,
@@ -82,15 +80,7 @@ public class ExecutionPhase extends Phase {
   @Override
   public void execute() throws Throwable {
     long start = System.currentTimeMillis();
-    List<TestBatch> testBatches = Lists.newArrayList();
-    for(TestBatch batch : testBatchSupplier.get()) {
-      testBatches.add(batch);
-      if(batch.isParallel()) {
-        parallelWorkQueue.add(batch);
-      } else {
-        isolatedWorkQueue.add(batch);
-      }
-    }
+    List<TestBatch> testBatches = getTestBatches();
     logger.info("ParallelWorkQueueSize={}, IsolatedWorkQueueSize={}", parallelWorkQueue.size(),
         isolatedWorkQueue.size());
     if (logger.isDebugEnabled()) {
@@ -113,39 +103,7 @@ public class ExecutionPhase extends Phase {
         }
         Futures.allAsList(results).get();
       } while(!(parallelWorkQueue.isEmpty() && isolatedWorkQueue.isEmpty()));
-      for(TestBatch batch : testBatches) {
-        File batchLogDir;
-        if(failedTestResults.contains(batch)) {
-          batchLogDir = new File(failedLogDir, batch.getName());
-        } else {
-          batchLogDir = new File(succeededLogDir, batch.getName());
-        }
-        JUnitReportParser parser = new JUnitReportParser(logger, batchLogDir);
-        executedTests.addAll(parser.getAllExecutedTests());
-        for (String failedTest : parser.getAllFailedTests()) {
-          failedTests.add(failedTest + " (batchId=" + batch.getBatchId() + ")");
-        }
-
-        // if the TEST*.xml was not generated or was corrupt, let someone know
-        if (parser.getTestClassesWithReportAvailable().size() < batch.getTestClasses().size()) {
-          Set<String> expTestClasses = new HashSet<>(batch.getTestClasses());
-          expTestClasses.removeAll(parser.getTestClassesWithReportAvailable());
-          for (String testClass : expTestClasses) {
-            StringBuilder messageBuilder = new StringBuilder();
-            messageBuilder.append(testClass).append(" - did not produce a TEST-*.xml file (likely timed out)")
-                .append(" (batchId=").append(batch.getBatchId()).append(")");
-            if (batch instanceof QFileTestBatch) {
-              Collection<String> tests = ((QFileTestBatch)batch).getTests();
-              if (tests.size() != 0) {
-                messageBuilder.append("\n\t[");
-                messageBuilder.append(Joiner.on(",").join(tests));
-                messageBuilder.append("]");
-              }
-            }
-            failedTests.add(messageBuilder.toString());
-          }
-        }
-      }
+      checkResults(testBatches);
     } finally {
       long elapsed = System.currentTimeMillis() - start;
       addAggregatePerfMetrics();
@@ -154,6 +112,55 @@ public class ExecutionPhase extends Phase {
     }
   }
 
+  protected void checkResults(List<TestBatch> testBatches) throws Exception {
+    for(TestBatch batch : testBatches) {
+      File batchLogDir;
+      if(failedTestResults.contains(batch)) {
+        batchLogDir = new File(failedLogDir, batch.getName());
+      } else {
+        batchLogDir = new File(succeededLogDir, batch.getName());
+      }
+      JUnitReportParser parser = new JUnitReportParser(logger, batchLogDir);
+      executedTests.addAll(parser.getAllExecutedTests());
+      for (String failedTest : parser.getAllFailedTests()) {
+        failedTests.add(failedTest + " (batchId=" + batch.getBatchId() + ")");
+      }
+
+      // if the TEST*.xml was not generated or was corrupt, let someone know
+      if (parser.getTestClassesWithReportAvailable().size() < batch.getTestClasses().size()) {
+        Set<String> expTestClasses = new HashSet<>(batch.getTestClasses());
+        expTestClasses.removeAll(parser.getTestClassesWithReportAvailable());
+        for (String testClass : expTestClasses) {
+          StringBuilder messageBuilder = new StringBuilder();
+          messageBuilder.append(testClass).append(" - did not produce a TEST-*.xml file (likely timed out)")
+              .append(" (batchId=").append(batch.getBatchId()).append(")");
+          if (batch instanceof QFileTestBatch) {
+            Collection<String> tests = ((QFileTestBatch)batch).getTests();
+            if (tests.size() != 0) {
+              messageBuilder.append("\n\t[");
+              messageBuilder.append(Joiner.on(",").join(tests));
+              messageBuilder.append("]");
+            }
+          }
+          failedTests.add(messageBuilder.toString());
+        }
+      }
+    }
+  }
+
+  protected List<TestBatch> getTestBatches() {
+    List<TestBatch> testBatches = Lists.newArrayList();
+    for(TestBatch batch : testBatchSupplier.get()) {
+      testBatches.add(batch);
+      if(batch.isParallel()) {
+        parallelWorkQueue.add(batch);
+      } else {
+        isolatedWorkQueue.add(batch);
+      }
+    }
+    return testBatches;
+  }
+
   public static final String TOTAL_RSYNC_TIME = "TotalRsyncElapsedTime";
   private void addAggregatePerfMetrics() {
     long totalRsycTime = 0L;

http://git-wip-us.apache.org/repos/asf/hive/blob/ea53020e/testutils/ptest2/src/main/java/org/apache/hive/ptest/execution/HostExecutor.java
----------------------------------------------------------------------
diff --git a/testutils/ptest2/src/main/java/org/apache/hive/ptest/execution/HostExecutor.java b/testutils/ptest2/src/main/java/org/apache/hive/ptest/execution/HostExecutor.java
index 24f0bf8..74be1e0 100644
--- a/testutils/ptest2/src/main/java/org/apache/hive/ptest/execution/HostExecutor.java
+++ b/testutils/ptest2/src/main/java/org/apache/hive/ptest/execution/HostExecutor.java
@@ -56,29 +56,27 @@ import com.google.common.util.concurrent.ListenableFuture;
 import com.google.common.util.concurrent.ListeningExecutorService;
 
 public class HostExecutor {
-  private final Host mHost;
+  protected final Host mHost;
   private final List<Drone> mDrones;
-  private final ListeningExecutorService mExecutor;
+  protected final ListeningExecutorService mExecutor;
   private final SSHCommandExecutor mSSHCommandExecutor;
   private final RSyncCommandExecutor mRSyncCommandExecutor;
-  private final ImmutableMap<String, String> mTemplateDefaults;
-  private final Logger mLogger;
+  protected final ImmutableMap<String, String> mTemplateDefaults;
+  protected final Logger mLogger;
   private final File mLocalScratchDirectory;
   private final File mSuccessfulTestLogDir;
   private final File mFailedTestLogDir;
-  private final long mNumPollSeconds;
+  protected final long mNumPollSeconds;
   private final boolean fetchLogsForSuccessfulTests;
-  private volatile boolean mShutdown;
-  private int numParallelBatchesProcessed = 0;
-  private int numIsolatedBatchesProcessed = 0;
+  protected volatile boolean mShutdown;
+  protected int numParallelBatchesProcessed = 0;
+  protected int numIsolatedBatchesProcessed = 0;
   private AtomicLong totalElapsedTimeInRsync = new AtomicLong(0L);
   
-  HostExecutor(Host host, String privateKey, ListeningExecutorService executor,
-      SSHCommandExecutor sshCommandExecutor,
-      RSyncCommandExecutor rsyncCommandExecutor,
-      ImmutableMap<String, String> templateDefaults, File scratchDir,
-      File succeededLogDir, File failedLogDir, long numPollSeconds,
-      boolean fetchLogsForSuccessfulTests, Logger logger) {
+  protected HostExecutor(Host host, String privateKey, ListeningExecutorService executor,
+      SSHCommandExecutor sshCommandExecutor, RSyncCommandExecutor rsyncCommandExecutor,
+      ImmutableMap<String, String> templateDefaults, File scratchDir, File succeededLogDir,
+      File failedLogDir, long numPollSeconds, boolean fetchLogsForSuccessfulTests, Logger logger) {
     List<Drone> drones = Lists.newArrayList();
     String[] localDirs = host.getLocalDirectories();
     for (int index = 0; index < host.getThreads(); index++) {
@@ -103,7 +101,7 @@ public class HostExecutor {
   /**
    * @return failed tests
    */
-  ListenableFuture<Void> submitTests(final BlockingQueue<TestBatch> parallelWorkQueue,
+  protected ListenableFuture<Void> submitTests(final BlockingQueue<TestBatch> parallelWorkQueue,
       final BlockingQueue<TestBatch> isolatedWorkQueue, final Set<TestBatch> failedTestResults) {
     return mExecutor.submit(new Callable<Void>() {
       @Override
@@ -152,7 +150,7 @@ public class HostExecutor {
    * leaving this host with zero functioning drones. If all drones
    * are removed the host will be replaced before the next run.
    */
-  private void executeTests(final BlockingQueue<TestBatch> parallelWorkQueue,
+  protected void executeTests(final BlockingQueue<TestBatch> parallelWorkQueue,
       final BlockingQueue<TestBatch> isolatedWorkQueue, final Set<TestBatch> failedTestResults)
           throws Exception {
     if(mShutdown) {

http://git-wip-us.apache.org/repos/asf/hive/blob/ea53020e/testutils/ptest2/src/main/java/org/apache/hive/ptest/execution/containers/AbortContainerException.java
----------------------------------------------------------------------
diff --git a/testutils/ptest2/src/main/java/org/apache/hive/ptest/execution/containers/AbortContainerException.java b/testutils/ptest2/src/main/java/org/apache/hive/ptest/execution/containers/AbortContainerException.java
new file mode 100644
index 0000000..efc0b66
--- /dev/null
+++ b/testutils/ptest2/src/main/java/org/apache/hive/ptest/execution/containers/AbortContainerException.java
@@ -0,0 +1,23 @@
+/*
+ *
+ *  Licensed to the Apache Software Foundation (ASF) under one
+ *  or more contributor license agreements.  See the NOTICE file
+ *  distributed with this work for additional information
+ *  regarding copyright ownership.  The ASF licenses this file
+ *  to you under the Apache License, Version 2.0 (the
+ *  "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *       http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+
+package org.apache.hive.ptest.execution.containers;
+
+public class AbortContainerException extends Exception {
+}

http://git-wip-us.apache.org/repos/asf/hive/blob/ea53020e/testutils/ptest2/src/main/java/org/apache/hive/ptest/execution/containers/DockerClient.java
----------------------------------------------------------------------
diff --git a/testutils/ptest2/src/main/java/org/apache/hive/ptest/execution/containers/DockerClient.java b/testutils/ptest2/src/main/java/org/apache/hive/ptest/execution/containers/DockerClient.java
index d2428a9..767459c 100644
--- a/testutils/ptest2/src/main/java/org/apache/hive/ptest/execution/containers/DockerClient.java
+++ b/testutils/ptest2/src/main/java/org/apache/hive/ptest/execution/containers/DockerClient.java
@@ -23,6 +23,10 @@ import org.apache.hive.ptest.api.server.TestLogger;
 import org.apache.hive.ptest.execution.ContainerClient;
 import org.apache.hive.ptest.execution.ContainerClientFactory.ContainerClientContext;
 import org.apache.hive.ptest.execution.Templates;
+<<<<<<< HEAD
+=======
+import org.apache.hive.ptest.execution.conf.TestBatch;
+>>>>>>> a1fe94a... Temp work related to docker execution phase. code may not compile
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -47,9 +51,17 @@ public class DockerClient implements ContainerClient {
   }
 
   @Override
+<<<<<<< HEAD
   public void defineImage(String dir, String label) throws IOException {
     if (label == null)
       label = UUID.randomUUID().toString();
+=======
+  public void defineImage(String dir) throws Exception {
+    final String label = context.getTemplateDefaults().get("buildTag");
+    if (label == null) {
+      throw new Exception("buildTag not found");
+    }
+>>>>>>> a1fe94a... Temp work related to docker execution phase. code may not compile
     File dockerfile = new File(dir, "Dockerfile");
     logger.info("Writing {} from template", dockerfile);
     Map<String, String> templateDefaults = context.getTemplateDefaults();
@@ -68,8 +80,12 @@ public class DockerClient implements ContainerClient {
     logger.info("Building image");
     String dockerBuildCommand =
         new StringBuilder("docker build")
+<<<<<<< HEAD
             //TODO do we need --tag?
             //.append(" --tag " + imageName())
+=======
+            .append(" --tag " + imageName())
+>>>>>>> a1fe94a... Temp work related to docker execution phase. code may not compile
             .append(" --build-arg ")
             .append(" workingDir=$workingDir")
             .append(" --build-arg ")
@@ -87,8 +103,30 @@ public class DockerClient implements ContainerClient {
     return dockerBuildCommand;
   }
 
+<<<<<<< HEAD
   private String imageName() {
     //TODO add a proper image name using patch number
     return "Ptest-dummy-build";
+=======
+  @Override
+  public String getRunContainerCommand(String containerName, TestBatch batch) {
+    return new StringBuilder("docker run")
+        .append(" --name " + containerName)
+        .append(" " + imageName())
+        .append(" /bin/bash")
+        .append(" -c")
+        .append("( cd " + batch.getTestModuleRelativeDir() + "; ")
+        .append("/usr/bin/mvn")
+        .append(" -Dsurefire.timeout=40m")
+        .append(" -B test")
+        .append(" " + batch.getTestArguments())
+        .append(" 1>$workingDir/logs"  + File.separatorChar + "maven.txt")
+        .append(" 2>&1")
+        .toString();
+  }
+
+  private String imageName() {
+    return context.getTemplateDefaults().get("buildTag");
+>>>>>>> a1fe94a... Temp work related to docker execution phase. code may not compile
   }
 }
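
In getRunContainerCommand above, " -c" is appended with no space before "( cd ..." and the shell snippet is not quoted, so the assembled command would likely not parse as a single -c argument. The following is a minimal sketch of a spaced and quoted variant built from the same pieces; it reflects an assumption about the intent and is not a fix contained in this commit:

    @Override
    public String getRunContainerCommand(String containerName, TestBatch batch) {
      // Separate -c from the script and quote the script so the shell sees it
      // as one argument; paths and flags mirror the conflicted version above.
      return new StringBuilder("docker run")
          .append(" --name ").append(containerName)
          .append(" ").append(imageName())
          .append(" /bin/bash -c ")
          .append("\"cd ").append(batch.getTestModuleRelativeDir()).append(" && ")
          .append("/usr/bin/mvn -Dsurefire.timeout=40m -B test ")
          .append(batch.getTestArguments())
          .append(" 1>$workingDir/logs/maven.txt 2>&1\"")
          .toString();
    }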

http://git-wip-us.apache.org/repos/asf/hive/blob/ea53020e/testutils/ptest2/src/main/java/org/apache/hive/ptest/execution/containers/DockerExecutionPhase.java
----------------------------------------------------------------------
diff --git a/testutils/ptest2/src/main/java/org/apache/hive/ptest/execution/containers/DockerExecutionPhase.java b/testutils/ptest2/src/main/java/org/apache/hive/ptest/execution/containers/DockerExecutionPhase.java
new file mode 100644
index 0000000..4774933
--- /dev/null
+++ b/testutils/ptest2/src/main/java/org/apache/hive/ptest/execution/containers/DockerExecutionPhase.java
@@ -0,0 +1,73 @@
+/*
+ *
+ *  Licensed to the Apache Software Foundation (ASF) under one
+ *  or more contributor license agreements.  See the NOTICE file
+ *  distributed with this work for additional information
+ *  regarding copyright ownership.  The ASF licenses this file
+ *  to you under the Apache License, Version 2.0 (the
+ *  "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *       http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+
+package org.apache.hive.ptest.execution.containers;
+
+import com.google.common.base.Supplier;
+import com.google.common.collect.ImmutableMap;
+import com.google.common.collect.Lists;
+import org.apache.hive.ptest.execution.ContainerClient;
+import org.apache.hive.ptest.execution.ContainerClientFactory;
+import org.apache.hive.ptest.execution.ContainerClientFactory.ContainerClientContext;
+import org.apache.hive.ptest.execution.ContainerClientFactory.ContainerType;
+import org.apache.hive.ptest.execution.ExecutionPhase;
+import org.apache.hive.ptest.execution.HostExecutor;
+import org.apache.hive.ptest.execution.HostExecutorBuilder;
+import org.apache.hive.ptest.execution.LocalCommandFactory;
+import org.apache.hive.ptest.execution.conf.TestBatch;
+import org.apache.hive.ptest.execution.context.ExecutionContext;
+import org.apache.hive.ptest.execution.ssh.RemoteCommandResult;
+import org.slf4j.Logger;
+
+import java.io.File;
+import java.util.List;
+import java.util.Set;
+
+public class DockerExecutionPhase extends ExecutionPhase {
+  private final ContainerClient dockerClient;
+  public DockerExecutionPhase(List<HostExecutor> hostExecutors, ExecutionContext executionContext,
+      HostExecutorBuilder hostExecutorBuilder, LocalCommandFactory localCommandFactory,
+      ImmutableMap<String, String> templateDefaults, File succeededLogDir, File failedLogDir,
+      Supplier<List<TestBatch>> testBatchSupplier, Set<String> executedTests,
+      Set<String> failedTests, Logger logger) throws Exception {
+    super(hostExecutors, executionContext, hostExecutorBuilder, localCommandFactory,
+        templateDefaults, succeededLogDir, failedLogDir, testBatchSupplier, executedTests,
+        failedTests, logger);
+    ContainerClientContext context = new ContainerClientContext(logger, templateDefaults);
+    dockerClient = ContainerClientFactory.get(ContainerType.DOCKER, context);
+  }
+
+  @Override
+  public List<TestBatch> getTestBatches() {
+    List<TestBatch> testBatches = Lists.newArrayList();
+    //Docker containers should be able to run all the batches in parallel
+    for (TestBatch batch : testBatchSupplier.get()) {
+      testBatches.add(batch);
+      parallelWorkQueue.add(batch);
+    }
+    return testBatches;
+  }
+
+  @Override
+  protected List<RemoteCommandResult> initalizeHosts()
+      throws Exception {
+    //TODO kill docker containers in case they are running here
+    return Lists.newArrayList();
+  }
+}

http://git-wip-us.apache.org/repos/asf/hive/blob/ea53020e/testutils/ptest2/src/main/java/org/apache/hive/ptest/execution/containers/DockerHostExectutor.java
----------------------------------------------------------------------
diff --git a/testutils/ptest2/src/main/java/org/apache/hive/ptest/execution/containers/DockerHostExectutor.java b/testutils/ptest2/src/main/java/org/apache/hive/ptest/execution/containers/DockerHostExectutor.java
new file mode 100644
index 0000000..133d54a
--- /dev/null
+++ b/testutils/ptest2/src/main/java/org/apache/hive/ptest/execution/containers/DockerHostExectutor.java
@@ -0,0 +1,177 @@
+/*
+ *
+ *  Licensed to the Apache Software Foundation (ASF) under one
+ *  or more contributor license agreements.  See the NOTICE file
+ *  distributed with this work for additional information
+ *  regarding copyright ownership.  The ASF licenses this file
+ *  to you under the Apache License, Version 2.0 (the
+ *  "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *       http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+
+package org.apache.hive.ptest.execution.containers;
+
+import com.google.common.base.Preconditions;
+import com.google.common.base.Stopwatch;
+import com.google.common.collect.ImmutableMap;
+import com.google.common.collect.Lists;
+import com.google.common.collect.Maps;
+import com.google.common.util.concurrent.Futures;
+import com.google.common.util.concurrent.ListenableFuture;
+import com.google.common.util.concurrent.ListeningExecutorService;
+import org.apache.commons.lang.StringUtils;
+import org.apache.hive.ptest.execution.AbortDroneException;
+import org.apache.hive.ptest.execution.Constants;
+import org.apache.hive.ptest.execution.ContainerClient;
+import org.apache.hive.ptest.execution.ContainerClientFactory;
+import org.apache.hive.ptest.execution.ContainerClientFactory.ContainerClientContext;
+import org.apache.hive.ptest.execution.ContainerClientFactory.ContainerType;
+import org.apache.hive.ptest.execution.Dirs;
+import org.apache.hive.ptest.execution.HostExecutor;
+import org.apache.hive.ptest.execution.Templates;
+import org.apache.hive.ptest.execution.conf.Host;
+import org.apache.hive.ptest.execution.conf.TestBatch;
+import org.apache.hive.ptest.execution.ssh.RSyncCommandExecutor;
+import org.apache.hive.ptest.execution.ssh.RemoteCommandResult;
+import org.apache.hive.ptest.execution.ssh.SSHCommand;
+import org.apache.hive.ptest.execution.ssh.SSHCommandExecutor;
+import org.slf4j.Logger;
+
+import java.io.File;
+import java.io.PrintWriter;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.concurrent.BlockingQueue;
+import java.util.concurrent.Callable;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.atomic.AtomicLong;
+
+public class DockerHostExectutor extends HostExecutor {
+  private final ContainerClientContext containerClientContext;
+  private final ContainerClient dockerClient;
+  private final int numParallelContainersPerHost;
+  private String containerName;
+
+  DockerHostExectutor(Host host, String privateKey, ListeningExecutorService executor,
+      SSHCommandExecutor sshCommandExecutor, RSyncCommandExecutor rsyncCommandExecutor,
+      ImmutableMap<String, String> templateDefaults, File scratchDir, File succeededLogDir,
+      File failedLogDir, long numPollSeconds, boolean fetchLogsForSuccessfulTests, Logger logger)
+      throws Exception {
+    super(host, privateKey, executor, sshCommandExecutor, rsyncCommandExecutor, templateDefaults,
+        scratchDir, succeededLogDir, failedLogDir, numPollSeconds, fetchLogsForSuccessfulTests,
+        logger);
+    containerClientContext = new ContainerClientContext(logger, templateDefaults);
+    dockerClient = ContainerClientFactory.get(ContainerType.DOCKER, containerClientContext);
+    //TODO get this value from executionContext
+    numParallelContainersPerHost = 3;
+  }
+
+  @Override
+  protected void executeTests(final BlockingQueue<TestBatch> parallelWorkQueue,
+      final BlockingQueue<TestBatch> isolatedWorkQueue, final Set<TestBatch> failedTestResults)
+      throws Exception {
+    if(mShutdown) {
+      mLogger.warn("Shutting down host " + mHost.getName());
+      return;
+    }
+    mLogger.info("Starting parallel execution on " + mHost.getName() + " using dockers");
+    List<ListenableFuture<Void>> containerResults = Lists.newArrayList();
+    for(int containerId = 0; containerId < numParallelContainersPerHost; containerId++) {
+      containerResults.add(mExecutor.submit(new Callable<Void>() {
+        @Override
+        public Void call() throws Exception {
+          TestBatch batch = null;
+          Stopwatch sw = Stopwatch.createUnstarted();
+          try {
+            do {
+              batch = parallelWorkQueue.poll(mNumPollSeconds, TimeUnit.SECONDS);
+              if(mShutdown) {
+                mLogger.warn("Shutting down host " + mHost.getName());
+                return null;
+              }
+              if(batch != null) {
+                numParallelBatchesProcessed++;
+                sw.reset().start();
+                try {
+                  if (!executeTestBatch(batch, failedTestResults)) {
+                    failedTestResults.add(batch);
+                  }
+                } finally {
+                  sw.stop();
+                  mLogger.info("Finished processing parallel batch [{}] on host {}. ElapsedTime(ms)={}",
+                      new Object[]{batch.getName(),mHost.toShortString(), sw.elapsed(TimeUnit.MILLISECONDS)});
+                }
+              }
+            } while(!mShutdown && !parallelWorkQueue.isEmpty());
+          } catch(AbortContainerException ex) {
+            mLogger.error("Aborting container during parallel execution", ex);
+            if(batch != null) {
+              Preconditions.checkState(parallelWorkQueue.add(batch),
+                  "Could not add batch to parallel queue " + batch);
+            }
+          }
+          return null;
+        }
+      }));
+    }
+    if(mShutdown) {
+      mLogger.warn("Shutting down host " + mHost.getName());
+      return;
+    }
+    Futures.allAsList(containerResults).get();
+  }
+
+  private boolean executeTestBatch(TestBatch batch, Set<TestBatch> failedTestResults)
+      throws AbortContainerException {
+    String runCommand = dockerClient.getRunContainerCommand(getContainerName(), batch);
+    Stopwatch sw = Stopwatch.createStarted();
+    mLogger.info("Executing " + batch + " with " + runCommand);
+    sw.stop();
+    /*mLogger.info("Completed executing tests for batch [{}] on host {}. ElapsedTime(ms)={}",
+        new Object[] { batch.getName(), getHost().toShortString(),
+            sw.elapsed(TimeUnit.MILLISECONDS) });
+    File batchLogDir = null;
+    if (sshResult.getExitCode() == Constants.EXIT_CODE_UNKNOWN) {
+      throw new AbortDroneException(
+          "Drone " + drone.toString() + " exited with " + Constants.EXIT_CODE_UNKNOWN + ": " + sshResult);
+    }
+    if (mShutdown) {
+      mLogger.warn("Shutting down host " + mHost.getName());
+      return false;
+    }
+    boolean result;
+    if (sshResult.getExitCode() != 0 || sshResult.getException() != null) {
+      result = false;
+      batchLogDir = Dirs.create(new File(mFailedTestLogDir, batch.getName()));
+    } else {
+      result = true;
+      batchLogDir = Dirs.create(new File(mSuccessfulTestLogDir, batch.getName()));
+    }
+    copyFromDroneToLocal(drone, batchLogDir.getAbsolutePath(), drone.getLocalLogDirectory() + "/",
+        fetchLogsForSuccessfulTests || !result);
+    File logFile = new File(batchLogDir, String.format("%s.txt", batch.getName()));
+    PrintWriter writer = new PrintWriter(logFile);
+    writer.write(String.format("result = '%s'\n", sshResult.toString()));
+    writer.write(String.format("output = '%s'\n", sshResult.getOutput()));
+    if (sshResult.getException() != null) {
+      sshResult.getException().printStackTrace(writer);
+    }
+    writer.close();*/
+    return false;
+  }
+
+  AtomicLong containerNameId = new AtomicLong(1);
+  public String getContainerName() {
+    return mHost.getName() + "-" + mTemplateDefaults.get("buildTag") + "-" + String
+        .valueOf(containerNameId.getAndIncrement());
+  }
+}
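
In this temp state executeTestBatch only logs the run command and returns false (the execution and log-collection body is commented out), so every batch would currently be recorded as failed. The container naming, however, is already in place: getContainerName() combines the host name, the buildTag template default, and a per-host counter so names stay unique across batches. A minimal sketch with hypothetical values (none of these literals come from the commit):

    String hostName = "worker-1";                    // mHost.getName()
    String buildTag = "PreCommit-HIVE-Build-123";    // mTemplateDefaults.get("buildTag")
    long id = 1;                                     // containerNameId counter
    String containerName = hostName + "-" + buildTag + "-" + id;
    // containerName == "worker-1-PreCommit-HIVE-Build-123-1";
    // the next call increments the counter and yields "...-2"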

http://git-wip-us.apache.org/repos/asf/hive/blob/ea53020e/testutils/ptest2/src/main/java/org/apache/hive/ptest/execution/containers/DockerPrepPhase.java
----------------------------------------------------------------------
diff --git a/testutils/ptest2/src/main/java/org/apache/hive/ptest/execution/containers/DockerPrepPhase.java b/testutils/ptest2/src/main/java/org/apache/hive/ptest/execution/containers/DockerPrepPhase.java
index b1d9ad6..0e0a706 100644
--- a/testutils/ptest2/src/main/java/org/apache/hive/ptest/execution/containers/DockerPrepPhase.java
+++ b/testutils/ptest2/src/main/java/org/apache/hive/ptest/execution/containers/DockerPrepPhase.java
@@ -54,7 +54,11 @@ public class DockerPrepPhase extends PrepPhase {
     long elapsedTime;
     start = System.currentTimeMillis();
     //TODO give a proper label to the build
+<<<<<<< HEAD
     containerClient.defineImage(getLocalScratchDir(), null);
+=======
+    containerClient.defineImage(getLocalScratchDir());
+>>>>>>> a1fe94a... Temp work related to docker execution phase. code may not compile
     execLocally(getDockerBuildCommand());
     elapsedTime = TimeUnit.MINUTES.convert((System.currentTimeMillis() - start),
         TimeUnit.MILLISECONDS);

http://git-wip-us.apache.org/repos/asf/hive/blob/ea53020e/testutils/ptest2/src/main/resources/dockerfile-template.vm
----------------------------------------------------------------------
diff --git a/testutils/ptest2/src/main/resources/dockerfile-template.vm b/testutils/ptest2/src/main/resources/dockerfile-template.vm
index 3652720..9383e96 100644
--- a/testutils/ptest2/src/main/resources/dockerfile-template.vm
+++ b/testutils/ptest2/src/main/resources/dockerfile-template.vm
@@ -17,7 +17,11 @@ ARG workingDir
 ARG mavenEnvOpts
 ARG repository
 ARG branch
+<<<<<<< HEAD
 ARG label
+=======
+ARG buildTag
+>>>>>>> a1fe94a... Temp work related to docker execution phase. code may not compile
 
 RUN export MAVEN_OPTS="$mavenEnvOpts"
 RUN /usr/bin/git clone $repository
@@ -30,4 +34,8 @@ RUN /home/ptestuser/scratch/smart-apply-patch.sh /home/ptestuser/scratch/build.p
 RUN /usr/bin/mvn -B -T 4 -q install -Dtest=TestMetastoreConf
 RUN cd itests
 RUN /usr/bin/mvn -B -T 4 -q  install -DskipSparkTests -DskipTests
+<<<<<<< HEAD
 RUN echo This build is labeled $label
+=======
+RUN echo This build is labeled $buildTag
+>>>>>>> a1fe94a... Temp work related to docker execution phase. code may not compile