Posted to commits@pig.apache.org by zl...@apache.org on 2017/02/22 09:43:46 UTC

svn commit: r1783988 [5/24] - in /pig/branches/spark: ./ bin/ conf/ contrib/piggybank/java/ contrib/piggybank/java/src/main/java/org/apache/pig/piggybank/evaluation/ contrib/piggybank/java/src/main/java/org/apache/pig/piggybank/evaluation/util/apachelo...

Modified: pig/branches/spark/contrib/piggybank/java/src/test/java/org/apache/pig/piggybank/test/storage/TestMultiStorage.java
URL: http://svn.apache.org/viewvc/pig/branches/spark/contrib/piggybank/java/src/test/java/org/apache/pig/piggybank/test/storage/TestMultiStorage.java?rev=1783988&r1=1783987&r2=1783988&view=diff
==============================================================================
--- pig/branches/spark/contrib/piggybank/java/src/test/java/org/apache/pig/piggybank/test/storage/TestMultiStorage.java (original)
+++ pig/branches/spark/contrib/piggybank/java/src/test/java/org/apache/pig/piggybank/test/storage/TestMultiStorage.java Wed Feb 22 09:43:41 2017
@@ -18,34 +18,41 @@ import java.io.FileWriter;
 import java.io.IOException;
 import java.io.InputStreamReader;
 import java.io.PrintWriter;
+import java.util.List;
+import java.util.Map;
+
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.FileUtil;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.PathFilter;
-import org.apache.pig.ExecType;
 import org.apache.pig.PigServer;
-import org.apache.pig.backend.executionengine.ExecException;
-import org.apache.pig.test.MiniCluster;
+import org.apache.pig.backend.executionengine.ExecJob;
+import org.apache.pig.test.MiniGenericCluster;
 import org.apache.pig.test.Util;
+import org.apache.pig.tools.pigstats.JobStats;
+import org.apache.pig.tools.pigstats.OutputStats;
+import org.apache.pig.tools.pigstats.PigStats;
+import org.apache.pig.tools.pigstats.mapreduce.SimplePigStats;
 import org.junit.After;
+import org.junit.AfterClass;
 import org.junit.Before;
 import org.junit.Test;
 
-import junit.framework.Assert;
-import junit.framework.TestCase;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
 
-public class TestMultiStorage extends TestCase {
+public class TestMultiStorage {
   private static final String INPUT_FILE = "MultiStorageInput.txt";
 
   private PigServer pigServer;
   private PigServer pigServerLocal;
 
-  private MiniCluster cluster = MiniCluster.buildCluster();
+  private static MiniGenericCluster cluster = MiniGenericCluster.buildCluster();
 
-  public TestMultiStorage() throws ExecException, IOException {
-    pigServer = new PigServer(ExecType.MAPREDUCE, cluster.getProperties());
-    pigServerLocal = new PigServer(ExecType.LOCAL);
+  public TestMultiStorage() throws Exception {
+    pigServer = new PigServer(cluster.getExecType(), cluster.getProperties());
+    pigServerLocal = new PigServer(Util.getLocalTestMode());
   }
 
   public static final PathFilter hiddenPathFilter = new PathFilter() {
@@ -74,59 +81,83 @@ public class TestMultiStorage extends Te
     Util.copyFromLocalToCluster(cluster, INPUT_FILE, INPUT_FILE);
   }
 
-  @Override
   @Before
   public void setUp() throws Exception {
     createFile();
     FileSystem fs = FileSystem.getLocal(new Configuration());
     Path localOut = new Path("local-out");
-    Path dummy = new Path("dummy");
     if (fs.exists(localOut)) {
       fs.delete(localOut, true);
     }
-    if (fs.exists(dummy)) {
-      fs.delete(dummy, true);
-    }
   }
 
-  @Override
   @After
   public void tearDown() throws Exception {
     new File(INPUT_FILE).delete();
     Util.deleteFile(cluster, INPUT_FILE);
+  }
+
+  @AfterClass
+  public static void shutdown() {
     cluster.shutDown();
   }
 
   enum Mode {
     local, cluster
-  };
+  }
 
   @Test
   public void testMultiStorage() throws IOException {
     final String LOAD = "A = LOAD '" + INPUT_FILE + "' as (id, name, n);";
     final String MULTI_STORE_CLUSTER = "STORE A INTO 'mr-out' USING "
         + "org.apache.pig.piggybank.storage.MultiStorage('mr-out', '1');";
-    final String MULTI_STORE_LOCAL = "STORE A INTO 'dummy' USING "
+    final String MULTI_STORE_LOCAL = "STORE A INTO 'local-out' USING "
         + "org.apache.pig.piggybank.storage.MultiStorage('local-out', '1');";
 
     System.out.print("Testing in LOCAL mode: ...");
-    //testMultiStorage(Mode.local, "local-out", LOAD, MULTI_STORE_LOCAL);
+    testMultiStorage(Mode.local, "local-out", LOAD, MULTI_STORE_LOCAL);
     System.out.println("Succeeded!");
-    
+
     System.out.print("Testing in CLUSTER mode: ...");
     testMultiStorage( Mode.cluster, "mr-out", LOAD, MULTI_STORE_CLUSTER);
     System.out.println("Succeeded!");
-    
-    
   }
 
-  /**
-   * The actual method that run the test in local or cluster mode. 
-   * 
-   * @param pigServer
-   * @param mode
-   * @param queries
-   * @throws IOException
+  @Test
+  public void testOutputStats() throws IOException {
+    FileSystem fs = cluster.getFileSystem();
+
+    pigServer.setBatchOn();
+    pigServer.registerQuery("A = LOAD '" + INPUT_FILE + "' as (id, name, n);");
+    pigServer.registerQuery("B = FILTER A BY name == 'apple';");
+    pigServer.registerQuery("STORE A INTO 'out1' USING org.apache.pig.piggybank.storage.MultiStorage('out1', '1');"); //153 bytes
+    pigServer.registerQuery("STORE B INTO 'out2' USING org.apache.pig.piggybank.storage.MultiStorage('out2', '1');"); // 45 bytes
+
+    ExecJob job = pigServer.executeBatch().get(0);
+
+    PigStats stats = job.getStatistics();
+    PigStats.JobGraph jobGraph = stats.getJobGraph();
+    JobStats jobStats = (JobStats) jobGraph.getSinks().get(0);
+    Map<String, Long> multiStoreCounters = jobStats.getMultiStoreCounters();
+    List<OutputStats> outputStats = SimplePigStats.get().getOutputStats();
+    OutputStats outputStats1 = "out1".equals(outputStats.get(0).getName()) ? outputStats.get(0) : outputStats.get(1);
+    OutputStats outputStats2 = "out2".equals(outputStats.get(0).getName()) ? outputStats.get(0) : outputStats.get(1);
+
+    assertEquals(153 + 45, stats.getBytesWritten());
+    assertEquals(2, outputStats.size()); // 2 split conditions
+    assertEquals(153, outputStats1.getBytes());
+    assertEquals(45, outputStats2.getBytes());
+    assertEquals(9, outputStats1.getRecords());
+    assertEquals(3, outputStats2.getRecords());
+    assertEquals(3L, multiStoreCounters.get("Output records in _1_out2").longValue());
+    assertEquals(9L, multiStoreCounters.get("Output records in _0_out1").longValue());
+
+    fs.delete(new Path("out1"), true);
+    fs.delete(new Path("out2"), true);
+  }
+
+    /**
+   * The actual method that runs the test in local or cluster mode.
    */
   private void testMultiStorage( Mode mode, String outPath,
       String... queries) throws IOException {
@@ -142,42 +173,38 @@ public class TestMultiStorage extends Te
   /**
    * Test if records are split into directories corresponding to split field
    * values
-   * 
-   * @param mode
-   * @throws IOException
    */
   private void verifyResults(Mode mode, String outPath) throws IOException {
     FileSystem fs = (Mode.local == mode ? FileSystem
         .getLocal(new Configuration()) : cluster.getFileSystem());
     Path output = new Path(outPath);
-    Assert.assertTrue("Output dir does not exists!", fs.exists(output)
+    assertTrue("Output dir does not exists!", fs.exists(output)
         && fs.getFileStatus(output).isDir());
 
     Path[] paths = FileUtil.stat2Paths(fs.listStatus(output, hiddenPathFilter));
-    Assert.assertTrue("Split field dirs not found!", paths != null);
+    assertTrue("Split field dirs not found!", paths != null);
 
     for (Path path : paths) {
       String splitField = path.getName();
       Path[] files = FileUtil.stat2Paths(fs.listStatus(path, hiddenPathFilter));
-      Assert.assertTrue("No files found for path: " + path.toUri().getPath(),
+      assertTrue("No files found for path: " + path.toUri().getPath(),
           files != null);
       for (Path filePath : files) {
-        Assert.assertTrue("This shouldn't be a directory", fs.isFile(filePath));
-        
+        assertTrue("This shouldn't be a directory", fs.isFile(filePath));
         BufferedReader reader = new BufferedReader(new InputStreamReader(fs
                 .open(filePath)));
         String line = "";
         int count = 0;
         while ((line = reader.readLine()) != null) {
           String[] fields = line.split("\\t");
-          Assert.assertEquals(fields.length, 3);
-          Assert.assertEquals("Unexpected field value in the output record",
+          assertEquals(fields.length, 3);
+          assertEquals("Unexpected field value in the output record",
                 splitField, fields[1]);
           count++;
           System.out.println("field: " + fields[1]);
-        }        
+        }
         reader.close();
-        Assert.assertEquals(count, 3);
+        assertEquals(count, 3);
       }
     }
   }

Modified: pig/branches/spark/contrib/piggybank/java/src/test/java/org/apache/pig/piggybank/test/storage/TestMultiStorageCompression.java
URL: http://svn.apache.org/viewvc/pig/branches/spark/contrib/piggybank/java/src/test/java/org/apache/pig/piggybank/test/storage/TestMultiStorageCompression.java?rev=1783988&r1=1783987&r2=1783988&view=diff
==============================================================================
--- pig/branches/spark/contrib/piggybank/java/src/test/java/org/apache/pig/piggybank/test/storage/TestMultiStorageCompression.java (original)
+++ pig/branches/spark/contrib/piggybank/java/src/test/java/org/apache/pig/piggybank/test/storage/TestMultiStorageCompression.java Wed Feb 22 09:43:41 2017
@@ -21,11 +21,14 @@ import java.io.FileNotFoundException;
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Arrays;
+import java.util.Collection;
 import java.util.Collections;
 import java.util.List;
+import java.util.Set;
 
 import junit.framework.TestCase;
 
+import org.apache.commons.io.FileUtils;
 import org.apache.hadoop.conf.Configurable;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.io.compress.BZip2Codec;
@@ -37,6 +40,10 @@ import org.apache.pig.backend.executione
 import org.apache.pig.impl.logicalLayer.FrontendException;
 import org.apache.pig.test.Util;
 
+import com.google.common.collect.Sets;
+
+import org.junit.Assert;
+
 public class TestMultiStorageCompression extends TestCase {
 
    private static String patternString = "(\\d+)!+(\\w+)~+(\\w+)";
@@ -59,8 +66,8 @@ public class TestMultiStorageCompression
       filesToDelete.add(outputPath);
 
       try {
-         runQuery(outputPath, type);
-         verifyResults(type, filesToDelete, outputPath);
+         runQuery(outputPath, "0", type);
+         verifyResults(type, outputPath);
       } finally {
          cleanUpDirs(filesToDelete);
       }
@@ -77,22 +84,22 @@ public class TestMultiStorageCompression
       filesToDelete.add(outputPath);
 
       try {
-         runQuery(outputPath, type);
-         verifyResults(type, filesToDelete, outputPath);
+         runQuery(outputPath, "0", type);
+         verifyResults(type, outputPath);
       } finally {
          cleanUpDirs(filesToDelete);
       }
    }
 
-   private void cleanUpDirs(List<String> filesToDelete) {
+   private void cleanUpDirs(List<String> filesToDelete) throws IOException {
       // Delete files recursively
       Collections.reverse(filesToDelete);
       for (String string : filesToDelete)
-         new File(string).delete();
+         FileUtils.deleteDirectory(new File(string));
    }
 
 
-   private void verifyResults(String type, List<String> filesToDelete,
+   private void verifyResults(String type,
          String outputPath) throws IOException, FileNotFoundException {
       // Verify the output
       File outputDir = new File(outputPath);
@@ -114,12 +121,10 @@ public class TestMultiStorageCompression
              continue;
          String topFolder = outputPath + File.separator + indexFolder;
          File indexFolderFile = new File(topFolder);
-         filesToDelete.add(topFolder);
          String[] list = indexFolderFile.list();
          for (String outputFile : list) {
 
             String file = topFolder + File.separator + outputFile;
-            filesToDelete.add(file);
 
             // Skip off any file starting with .
             if (outputFile.startsWith("."))
@@ -159,7 +164,7 @@ public class TestMultiStorageCompression
       }
    }
 
-   private void runQuery(String outputPath, String compressionType)
+   private void runQuery(String outputPath, String keyColIndices, String compressionType)
          throws Exception, ExecException, IOException, FrontendException {
 
       // create a data file
@@ -172,7 +177,7 @@ public class TestMultiStorageCompression
 
       String query2 = "STORE A INTO '" + Util.encodeEscape(outputPath)
             + "' USING org.apache.pig.piggybank.storage.MultiStorage" + "('"
-            + Util.encodeEscape(outputPath) + "','0', '" + compressionType + "', '\\t');";
+            + Util.encodeEscape(outputPath) + "','"+keyColIndices+"', '" + compressionType + "', '\\t');";
 
       // Run Pig
       pig.setBatchOn();
@@ -182,5 +187,32 @@ public class TestMultiStorageCompression
       pig.executeBatch();
    }
 
+   public void testMultiStorageShouldSupportMultiLevelAndGz() throws Exception {
+      String type = "gz";
+      String outputDir = "output001.multi." + type;
+      List<String> filesToDelete = new ArrayList<String>();
+
+      String tmpDir = System.getProperty("java.io.tmpdir");
+      String outputPath = tmpDir + File.separator + outputDir;
+
+      filesToDelete.add(outputPath);
+      try {
+         runQuery(outputPath, "1,0", type);
+         Collection<File> fileList = FileUtils.listFiles(new File(outputPath),null,true);
+         Set<String> expectedPaths = Sets.newHashSet( "output001.multi.gz/a.gz/f1.gz/a-f1-0,000.gz",
+                                                      "output001.multi.gz/b.gz/f2.gz/b-f2-0,000.gz",
+                                                      "output001.multi.gz/c.gz/f3.gz/c-f3-0,000.gz",
+                                                      "output001.multi.gz/d.gz/f4.gz/d-f4-0,000.gz");
+         for (File file : fileList){
+            String foundPath = file.getAbsolutePath().substring(file.getAbsolutePath().indexOf(outputDir));
+            if (expectedPaths.contains(foundPath)){
+               expectedPaths.remove(foundPath);
+            }
+         }
+         Assert.assertTrue(expectedPaths.isEmpty());
+      } finally {
+         cleanUpDirs(filesToDelete);
+      }
+   }
 
 }
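
For reference, a minimal Java sketch of the PigServer/MultiStorage usage pattern that both test classes above exercise. It is not part of this patch; the input file, output path, and field names are illustrative.

    import org.apache.pig.ExecType;
    import org.apache.pig.PigServer;

    public class MultiStorageExample {
        public static void main(String[] args) throws Exception {
            PigServer pig = new PigServer(ExecType.LOCAL);
            pig.setBatchOn();
            pig.registerQuery("A = LOAD 'input.txt' AS (id, name, n);");
            // The second constructor argument is the index of the split field:
            // records are written into one sub-directory of 'out' per distinct
            // value of field 1 (name).
            pig.registerQuery("STORE A INTO 'out' USING "
                    + "org.apache.pig.piggybank.storage.MultiStorage('out', '1');");
            // A compression type and field delimiter can also be passed, as in
            // TestMultiStorageCompression: MultiStorage('out', '0', 'gz', '\\t').
            pig.executeBatch();
        }
    }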

Added: pig/branches/spark/dev-support/docker/Dockerfile
URL: http://svn.apache.org/viewvc/pig/branches/spark/dev-support/docker/Dockerfile?rev=1783988&view=auto
==============================================================================
--- pig/branches/spark/dev-support/docker/Dockerfile (added)
+++ pig/branches/spark/dev-support/docker/Dockerfile Wed Feb 22 09:43:41 2017
@@ -0,0 +1,94 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Dockerfile for installing the necessary dependencies for building Apache Pig.
+# See BUILDING.md.
+
+FROM ubuntu:trusty
+
+# Define working directory.
+WORKDIR /root
+
+RUN apt-get update
+
+# Install dependencies from packages
+RUN sed -i 's/# \(.*multiverse$\)/\1/g' /etc/apt/sources.list && \
+    apt-get install -y build-essential && \
+    apt-get install -y software-properties-common && \
+    apt-get install --no-install-recommends -y \
+            git subversion \
+            byobu htop man unzip vim \
+            cabal-install \
+            curl wget \
+            openjdk-7-jdk \
+            ant ant-contrib ant-optional make maven \
+            cmake gcc g++ protobuf-compiler \
+            build-essential libtool \
+            zlib1g-dev pkg-config libssl-dev \
+            snappy libsnappy-dev \
+            bzip2 libbz2-dev \
+            libjansson-dev \
+            fuse libfuse-dev \
+            libcurl4-openssl-dev \
+            python python2.7 && \
+    rm -rf /var/lib/apt/lists/*
+
+# Define commonly used JAVA_HOME variable
+ENV JAVA_HOME /usr/lib/jvm/java-7-openjdk-amd64
+
+# Fixing the Apache commons / Maven dependency problem under Ubuntu:
+# See http://wiki.apache.org/commons/VfsProblems
+RUN cd /usr/share/maven/lib && ln -s ../../java/commons-lang.jar .
+
+# Avoid out of memory errors in builds
+ENV MAVEN_OPTS -Xms256m -Xmx512m
+
+# Install findbugs
+RUN mkdir -p /opt/findbugs && \
+    wget http://sourceforge.net/projects/findbugs/files/findbugs/3.0.1/findbugs-noUpdateChecks-3.0.1.tar.gz/download \
+         -O /opt/findbugs.tar.gz && \
+    tar xzf /opt/findbugs.tar.gz --strip-components 1 -C /opt/findbugs
+ENV FINDBUGS_HOME /opt/findbugs
+
+# Install Forrest in /usr/local/apache-forrest
+# Screenscrape the download page for a local mirror URL
+RUN cd /usr/local/ && \
+    curl https://forrest.apache.org/mirrors.cgi | \
+    fgrep href | fgrep apache-forrest-0.9 | \
+    sed 's@^.*"\(http[^"]*apache-forrest-[^"]*.tar.gz\)".*@\1@' | \
+    xargs -n1 -r wget
+
+# Unpack Apache Forrest
+RUN cd /usr/local/ && \
+    tar xzf apache-forrest-0.9-sources.tar.gz && \
+    tar xzf apache-forrest-0.9-dependencies.tar.gz && \
+    mv apache-forrest-0.9 apache-forrest
+RUN cd /usr/local/apache-forrest/main && ./build.sh
+
+# The solution for https://issues.apache.org/jira/browse/PIG-3906
+RUN mkdir -p /usr/local/apache-forrest/plugins       && chmod a+rwX -R /usr/local/apache-forrest/plugins
+RUN mkdir -p /usr/local/apache-forrest/build/plugins && chmod a+rwX -R /usr/local/apache-forrest/build/plugins
+
+# Configure where forrest can be found
+RUN echo 'forrest.home=/usr/local/apache-forrest' > build.properties
+ENV FORREST_HOME /usr/local/apache-forrest
+
+# Add a welcome message and environment checks.
+ADD build_env_checks.sh /root/build_env_checks.sh
+RUN chmod 755 /root/build_env_checks.sh
+ADD configure-for-user.sh /root/configure-for-user.sh
+RUN chmod 755 /root/configure-for-user.sh
+RUN echo '~/build_env_checks.sh' >> /root/.bashrc

Added: pig/branches/spark/dev-support/docker/build_env_checks.sh
URL: http://svn.apache.org/viewvc/pig/branches/spark/dev-support/docker/build_env_checks.sh?rev=1783988&view=auto
==============================================================================
--- pig/branches/spark/dev-support/docker/build_env_checks.sh (added)
+++ pig/branches/spark/dev-support/docker/build_env_checks.sh Wed Feb 22 09:43:41 2017
@@ -0,0 +1,120 @@
+#!/bin/bash
+
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# -------------------------------------------------------
+function showWelcome {
+
+# http://patorjk.com/software/taag/#p=display&f=Doom&t=Pig%20Builder
+cat <<Welcome-message
+
+______ _        ______       _ _     _
+| ___ (_)       | ___ \\     (_) |   | |
+| |_/ /_  __ _  | |_/ /_   _ _| | __| | ___ _ __
+|  __/| |/ _\` | | ___ \\ | | | | |/ _\` |/ _ \\ '__|
+| |   | | (_| | | |_/ / |_| | | | (_| |  __/ |_
+\\_|   |_|\\__, | \\____/ \\__,_|_|_|\\__,_|\\___|_|
+          __/ |
+         |___/
+
+This is the standard Apache Pig Developer build environment.
+This has all the right tools installed required to build
+Pig from source.
+
+Welcome-message
+}
+
+# -------------------------------------------------------
+
+function showAbort {
+  cat <<Abort-message
+
+  ___  _                _   _
+ / _ \\| |              | | (_)
+/ /_\\ \\ |__   ___  _ __| |_ _ _ __   __ _
+|  _  | '_ \\ / _ \\| '__| __| | '_ \\ / _\` |
+| | | | |_) | (_) | |  | |_| | | | | (_| |
+\\_| |_/_.__/ \\___/|_|   \\__|_|_| |_|\\__, |
+                                     __/ |
+                                    |___/
+
+Abort-message
+}
+
+# -------------------------------------------------------
+
+function failIfUserIsRoot {
+    if [ "$(id -u)" -eq "0" ]; # If you are root then something went wrong.
+    then
+        cat <<End-of-message
+
+Apparently you are inside this docker container as the user root.
+Putting it simply:
+
+   This should not occur.
+
+Known possible causes of this are:
+1) Running this script as the root user ( Just don't )
+2) Running an old docker version ( upgrade to 1.4.1 or higher )
+
+End-of-message
+
+    showAbort
+
+    logout
+
+    fi
+}
+
+# -------------------------------------------------------
+
+# Configurable low water mark in GiB
+MINIMAL_MEMORY_GiB=2
+
+function warnIfLowMemory {
+    MINIMAL_MEMORY=$((MINIMAL_MEMORY_GiB*1024*1024)) # Convert to KiB
+    INSTALLED_MEMORY=$(fgrep MemTotal /proc/meminfo | awk '{print $2}')
+    if [ $((INSTALLED_MEMORY)) -le $((MINIMAL_MEMORY)) ];
+    then
+        cat <<End-of-message
+
+ _                    ___  ___
+| |                   |  \\/  |
+| |     _____      __ | .  . | ___ _ __ ___   ___  _ __ _   _
+| |    / _ \\ \\ /\\ / / | |\\/| |/ _ \\ '_ \` _ \\ / _ \\| '__| | | |
+| |___| (_) \\ V  V /  | |  | |  __/ | | | | | (_) | |  | |_| |
+\\_____/\\___/ \\_/\\_/   \\_|  |_/\\___|_| |_| |_|\\___/|_|   \\__, |
+                                                         __/ |
+                                                        |___/
+
+Your system is running on very little memory.
+This means it may work but it will most likely be slower than needed.
+
+If you are running this via boot2docker you can simply increase
+the available memory to at least ${MINIMAL_MEMORY_GiB} GiB (you have $((INSTALLED_MEMORY/(1024*1024))) GiB )
+
+End-of-message
+    fi
+}
+
+# -------------------------------------------------------
+
+showWelcome
+warnIfLowMemory
+failIfUserIsRoot
+
+# -------------------------------------------------------

Added: pig/branches/spark/dev-support/docker/configure-for-user.sh
URL: http://svn.apache.org/viewvc/pig/branches/spark/dev-support/docker/configure-for-user.sh?rev=1783988&view=auto
==============================================================================
--- pig/branches/spark/dev-support/docker/configure-for-user.sh (added)
+++ pig/branches/spark/dev-support/docker/configure-for-user.sh Wed Feb 22 09:43:41 2017
@@ -0,0 +1,40 @@
+#!/bin/sh
+
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# This script is used to tweak the environment at the moment we know 
+# the real username of the person using this.
+# By making this script a part of the image we can extend and update 
+# it to fit future needs more easily.
+
+# Native Linux (direct or via sudo)
+USER_NAME=$1
+USER_ID=$2
+GROUP_ID=$3
+
+groupadd --non-unique -g ${GROUP_ID} ${USER_NAME}
+useradd -g ${GROUP_ID} -u ${USER_ID} -k /root -m ${USER_NAME}
+echo "export HOME=/home/${USER_NAME}" >> ~/.bashrc
+echo "export USER=${USER_NAME}" >> ~/.bashrc
+
+VBOXSF_GROUP_LINE=$4
+if [ -n ${VBOXSF_GROUP_LINE} ];
+then
+    echo ${VBOXSF_GROUP_LINE} >> /etc/group
+    usermod -aG vboxsf ${USER_NAME}
+fi
+
+echo "${USER_NAME}    ALL=(ALL) NOPASSWD: ALL" >> /etc/sudoers.d/${USER_NAME}

Modified: pig/branches/spark/ivy.xml
URL: http://svn.apache.org/viewvc/pig/branches/spark/ivy.xml?rev=1783988&r1=1783987&r2=1783988&view=diff
==============================================================================
--- pig/branches/spark/ivy.xml (original)
+++ pig/branches/spark/ivy.xml Wed Feb 22 09:43:41 2017
@@ -38,10 +38,8 @@
     <conf name="jdiff" visibility="private"/>
     <conf name="checkstyle" visibility="private"/>
     <conf name="buildJar" extends="compile,test" visibility="private"/>
-    <conf name="hadoop20" visibility="private"/>
-    <conf name="hadoop23" visibility="private"/>
-    <conf name="hbase94" visibility="private"/>
-    <conf name="hbase95" visibility="private"/>
+    <conf name="hadoop2" visibility="private"/>
+    <conf name="hbase1" visibility="private"/>
     <conf name="spark" visibility="private" />
   </configurations>
   <publications>
@@ -61,17 +59,17 @@
     <dependency org="commons-beanutils" name="commons-beanutils-core" rev="${commons-beanutils.version}"
       conf="checkstyle->master"/>
     <dependency org="xmlenc" name="xmlenc" rev="${xmlenc.version}"
-      conf="hadoop23->master"/>
+      conf="hadoop2->master"/>
     <dependency org="com.sun.jersey" name="jersey-bundle" rev="${jersey.version}"
-      conf="hadoop23->master"/>
+      conf="hadoop2->master"/>
     <dependency org="com.sun.jersey" name="jersey-server" rev="${jersey.version}"
-      conf="hadoop23->master"/>
+      conf="hadoop2->master"/>
     <dependency org="com.sun.jersey.contribs" name="jersey-guice" rev="${jersey.version}"
-      conf="hadoop23->master"/>
+      conf="hadoop2->master"/>
     <dependency org="commons-codec" name="commons-codec" rev="${commons-codec.version}"
-      conf="hadoop23->master"/>
+      conf="hadoop2->master"/>
     <dependency org="commons-httpclient" name="commons-httpclient" rev="${commons-httpclient.version}"
-      conf="hadoop23->master"/>
+      conf="hadoop2->master"/>
     <dependency org="commons-el" name="commons-el" rev="${commons-el.version}"
       conf="compile->master"/>
     <dependency org="commons-io" name="commons-io" rev="${commons-io.version}"
@@ -89,92 +87,86 @@
     <dependency org="nl.basjes.parse" name="parser-core" rev="${basjes-httpdlog-pigloader.version}"
       conf="compile->master"/>
     <dependency org="commons-configuration" name="commons-configuration" rev="${commons-configuration.version}"
-      conf="hadoop23->master"/>
+      conf="hadoop2->master"/>
     <dependency org="commons-collections" name="commons-collections" rev="${commons-collections.version}"
-      conf="hadoop23->master"/>
+      conf="hadoop2->master"/>
     <dependency org="javax.servlet" name="servlet-api" rev="${servlet-api.version}"
-      conf="hadoop23->master"/>
+      conf="hadoop2->master"/>
     <dependency org="javax.ws.rs" name="jsr311-api" rev="${jsr311-api.version}"
-      conf="hadoop23->master"/>
+      conf="hadoop2->master"/>
     <dependency org="com.google.protobuf" name="protobuf-java" rev="${protobuf-java.version}"
-      conf="hadoop23->master"/>
+      conf="hadoop2->master"/>
     <dependency org="javax.inject" name="javax.inject" rev="${javax-inject.version}"
-      conf="hadoop23->master"/>
+      conf="hadoop2->master"/>
     <dependency org="javax.xml.bind" name="jaxb-api" rev="${jaxb-api.version}"
-      conf="hadoop23->master"/>
+      conf="hadoop2->master"/>
     <dependency org="com.sun.xml.bind" name="jaxb-impl" rev="${jaxb-impl.version}"
-      conf="hadoop23->master"/> 
+      conf="hadoop2->master"/>
     <dependency org="com.google.inject" name="guice" rev="${guice.version}"
-      conf="hadoop23->master"/>
+      conf="hadoop2->master"/>
     <dependency org="com.google.inject.extensions" name="guice-servlet" rev="${guice-servlet.version}"
-      conf="hadoop23->master"/>
+      conf="hadoop2->master"/>
     <dependency org="aopalliance" name="aopalliance" rev="${aopalliance.version}"
-      conf="hadoop23->master"/>
+      conf="hadoop2->master"/>
     <dependency org="org.mortbay.jetty" name="jsp-2.1" rev="${jasper.version}"
-      conf="hadoop23->master"/>
+      conf="hadoop2->master"/>
     <dependency org="org.mortbay.jetty" name="jsp-api-2.1" rev="${jasper.version}"
-      conf="hadoop23->master"/>
+      conf="hadoop2->master"/>
     <dependency org="log4j" name="log4j" rev="${log4j.version}"
       conf="compile->master"/>
-    <dependency org="com.sun.jersey" name="jersey-core" rev="${jersey-core.version}"
-      conf="hadoop20->default"/>
-    <dependency org="org.apache.hadoop" name="hadoop-core" rev="${hadoop-core.version}"
-      conf="hadoop20->default"/>
-    <dependency org="org.apache.hadoop" name="hadoop-test" rev="${hadoop-test.version}"
-      conf="hadoop20->default"/>
-    <dependency org="org.apache.hadoop" name="hadoop-annotations" 
-      rev="${hadoop-common.version}" conf="hadoop23->master"/>
+    <dependency org="org.apache.hadoop" name="hadoop-annotations"
+      rev="${hadoop-common.version}" conf="hadoop2->master"/>
     <dependency org="org.apache.hadoop" name="hadoop-auth" 
-      rev="${hadoop-common.version}" conf="hadoop23->master"/>
+      rev="${hadoop-common.version}" conf="hadoop2->master"/>
     <dependency org="org.apache.hadoop" name="hadoop-common" 
-      rev="${hadoop-common.version}" conf="hadoop23->master">
+      rev="${hadoop-common.version}" conf="hadoop2->master">
       <artifact name="hadoop-common" ext="jar" />
       <artifact name="hadoop-common" type="tests" ext="jar" m:classifier="tests" />
     </dependency>
     <dependency org="org.apache.hadoop" name="hadoop-hdfs"
-      rev="${hadoop-hdfs.version}" conf="hadoop23->master">
+      rev="${hadoop-hdfs.version}" conf="hadoop2->master">
       <artifact name="hadoop-hdfs" ext="jar" />
       <artifact name="hadoop-hdfs" type="tests" ext="jar" m:classifier="tests" />
     </dependency>
     <dependency org="org.apache.hadoop" name="hadoop-mapreduce-client-core" rev="${hadoop-mapreduce.version}"
-      conf="hadoop23->master"/>
+      conf="hadoop2->master"/>
     <dependency org="org.apache.hadoop" name="hadoop-mapreduce-client-jobclient" rev="${hadoop-mapreduce.version}"
-      conf="hadoop23->master">
+      conf="hadoop2->master">
         <artifact name="hadoop-mapreduce-client-jobclient" ext="jar" />
         <artifact name="hadoop-mapreduce-client-jobclient" type="tests" ext="jar" m:classifier="tests"/>
         <exclude org="commons-daemon" module="commons-daemon"/><!--bad POM-->
         <exclude org="org.apache.commons" module="commons-daemon"/><!--bad POM-->
     </dependency>
     <dependency org="org.apache.hadoop" name="hadoop-yarn-server-tests" rev="${hadoop-mapreduce.version}"
-      conf="hadoop23->master">
+      conf="hadoop2->master">
       <artifact name="hadoop-yarn-server-tests" type="jar" m:classifier="tests"/>
     </dependency>
     <dependency org="org.apache.hadoop" name="hadoop-mapreduce-client-app" rev="${hadoop-mapreduce.version}"
-      conf="hadoop23->master" />
+      conf="hadoop2->master" />
     <dependency org="org.apache.hadoop" name="hadoop-mapreduce-client-shuffle" rev="${hadoop-mapreduce.version}"
-      conf="hadoop23->master" />
+      conf="hadoop2->master" />
     <dependency org="org.apache.hadoop" name="hadoop-mapreduce-client-common" 
-      rev="${hadoop-mapreduce.version}" conf="hadoop23->master"/>
+      rev="${hadoop-mapreduce.version}" conf="hadoop2->master"/>
     <dependency org="org.apache.hadoop" name="hadoop-yarn-api" 
-      rev="${hadoop-mapreduce.version}" conf="hadoop23->master"/>
+      rev="${hadoop-mapreduce.version}" conf="hadoop2->master"/>
     <dependency org="org.apache.hadoop" name="hadoop-yarn-common" 
-      rev="${hadoop-mapreduce.version}" conf="hadoop23->master"/>
+      rev="${hadoop-mapreduce.version}" conf="hadoop2->master"/>
     <dependency org="org.apache.hadoop" name="hadoop-yarn-server" 
-      rev="${hadoop-mapreduce.version}" conf="hadoop23->master"/>
+      rev="${hadoop-mapreduce.version}" conf="hadoop2->master"/>
     <dependency org="org.apache.hadoop" name="hadoop-yarn-server-web-proxy" 
-      rev="${hadoop-mapreduce.version}" conf="hadoop23->master"/>
+      rev="${hadoop-mapreduce.version}" conf="hadoop2->master"/>
     <dependency org="org.apache.hadoop" name="hadoop-yarn-server-common" 
-      rev="${hadoop-mapreduce.version}" conf="hadoop23->master"/>
+      rev="${hadoop-mapreduce.version}" conf="hadoop2->master"/>
     <dependency org="org.apache.hadoop" name="hadoop-yarn-server-nodemanager" 
-      rev="${hadoop-mapreduce.version}" conf="hadoop23->master"/>
+      rev="${hadoop-mapreduce.version}" conf="hadoop2->master"/>
     <dependency org="org.apache.hadoop" name="hadoop-yarn-server-resourcemanager" 
-      rev="${hadoop-mapreduce.version}" conf="hadoop23->master"/>
+      rev="${hadoop-mapreduce.version}" conf="hadoop2->master"/>
     <dependency org="org.apache.hadoop" name="hadoop-yarn-client" 
-      rev="${hadoop-mapreduce.version}" conf="hadoop23->master"/>
+      rev="${hadoop-mapreduce.version}" conf="hadoop2->master"/>
     <dependency org="org.apache.hadoop" name="hadoop-yarn-server-applicationhistoryservice" 
-      rev="${hadoop-mapreduce.version}" conf="hadoop23->master"/>
+      rev="${hadoop-mapreduce.version}" conf="hadoop2->master"/>
     <dependency org="org.apache.hadoop" name="hadoop-mapreduce-client-hs" 
-      rev="${hadoop-mapreduce.version}" conf="hadoop23->master"/>
+      rev="${hadoop-mapreduce.version}" conf="hadoop2->master"/>
     <dependency org="org.mortbay.jetty" name="jetty" rev="${jetty.version}"
       conf="compile->master">
       <artifact name="jetty" ext="jar" />
@@ -193,13 +185,7 @@
       <exclude org="org.codehaus.jackson" module="jackson-mapper-asl"/>
     </dependency>
     <dependency org="org.apache.avro" name="avro-mapred" rev="${avro.version}"
-      conf="hadoop20->default;checkstyle->master">
-      <exclude org="org.codehaus.jackson" module="jackson-core-asl"/>
-      <exclude org="org.codehaus.jackson" module="jackson-mapper-asl"/>
-      <exclude org="io.netty" module="netty"/>
-    </dependency>
-    <dependency org="org.apache.avro" name="avro-mapred" rev="${avro.version}"
-      conf="hadoop23->default;checkstyle->master">
+      conf="hadoop2->default;checkstyle->master">
       <artifact name="avro-mapred" type="jar" m:classifier="hadoop2"/>
       <exclude org="org.codehaus.jackson" module="jackson-core-asl"/>
       <exclude org="org.codehaus.jackson" module="jackson-mapper-asl"/>
@@ -261,37 +247,14 @@
     <dependency org="org.antlr" name="ST4" rev="${stringtemplate.version}" conf="compile->default"/>
     <dependency org="org.apache.zookeeper" name="zookeeper" rev="${zookeeper.version}" conf="compile->master"/>
     <dependency org="io.netty" name="netty" rev="${netty.version}" conf="test->master"/>
+    <dependency org="io.netty" name="netty-all" rev="${netty-all.version}" conf="test->master" />
     <dependency org="dk.brics.automaton" name="automaton" rev="1.11-8" conf="compile->default"/>
 
     <dependency org="org.jruby" name="jruby-complete" rev="${jruby.version}" conf="compile->master"/>
     <dependency org="asm" name="asm" rev="${asm.version}" conf="compile->default"/>
 
-    <!-- HBase dependency in format for releases up to 0.94 (including) -->
-    <dependency org="org.apache.hbase" name="hbase" rev="${hbase94.version}" conf="hbase94->master">
-      <artifact name="hbase" type="jar"/>
-      <artifact name="hbase" type="test-jar" ext="jar" m:classifier="tests"/>
-      <exclude org="org.apache.thrift" module="thrift"/>
-      <exclude org="org.apache.hadoop" module="hadoop-core"/>
-      <exclude org="org.apache.ant" module="ant" />
-      <exclude org="org.slf4j" module="slf4j"/>
-      <exclude org="org.slf4j" module="slf4j-api"/>
-      <exclude org="org.slf4j" module="slf4j-log4j12" />
-      <exclude org="org.slf4j" module="log4j12"/>
-      <exclude org="org.slf4j" module="log4j-over-slf4j"/>
-      <exclude org="stax" module="stax-api" />
-      <exclude org="javax.xml.bind" module="jaxb-api" />
-      <exclude org="javax.ws.rs" module="jsr311-api" />
-      <exclude org="tomcat" module="jasper-runtime"/>
-      <exclude org="tomcat" module="jasper-compiler"/>
-      <exclude org="com.google.protobuf" module="protobuf-java"/>
-      <exclude org="com.sun.jersey" module="jersey-core"/>
-      <exclude org="com.sun.jersey" module="jersey-server"/>
-      <exclude org="com.sun.jersey" module="jersey-json"/>
-      <exclude org="asm" module="asm"/>
-    </dependency>
-
     <!-- HBase dependency in format for releases higher or equal to 0.95 -->
-    <dependency org="org.apache.hbase" name="hbase-client" rev="${hbase95.version}" conf="hbase95->master">
+    <dependency org="org.apache.hbase" name="hbase-client" rev="${hbase1.version}" conf="hbase1->master">
       <artifact name="hbase-client" type="jar"/>
       <artifact name="hbase-client" type="test-jar" ext="jar" m:classifier="tests"/>
       <exclude org="org.slf4j" module="slf4j-api"/>
@@ -307,7 +270,7 @@
       <exclude org="asm" module="asm"/>
     </dependency>
 
-    <dependency org="org.apache.hbase" name="hbase-common" rev="${hbase95.version}" conf="hbase95->master">
+    <dependency org="org.apache.hbase" name="hbase-common" rev="${hbase1.version}" conf="hbase1->master">
       <artifact name="hbase-common" type="jar"/>
       <artifact name="hbase-common" type="test-jar" ext="jar" m:classifier="tests"/>
       <exclude org="org.apache.hadoop" module="hadoop-core"/>
@@ -322,7 +285,7 @@
       <exclude org="asm" module="asm"/>
     </dependency>
 
-    <dependency org="org.apache.hbase" name="hbase-server" rev="${hbase95.version}" conf="hbase95->master">
+    <dependency org="org.apache.hbase" name="hbase-server" rev="${hbase1.version}" conf="hbase1->master">
       <artifact name="hbase-server" type="jar"/>
       <artifact name="hbase-server" type="test-jar" ext="jar" m:classifier="tests"/>
       <exclude org="org.apache.hadoop" module="hadoop-core"/>
@@ -339,20 +302,20 @@
       <exclude org="asm" module="asm"/>
     </dependency>
 
-    <dependency org="org.apache.hbase" name="hbase-protocol" rev="${hbase95.version}" conf="hbase95->master">
+    <dependency org="org.apache.hbase" name="hbase-protocol" rev="${hbase1.version}" conf="hbase1->master">
       <artifact name="hbase-protocol" type="jar"/>
       <artifact name="hbase-protocol" type="test-jar" ext="jar" m:classifier="tests"/>
       <exclude org="com.google.protobuf" module="protobuf-java"/>
     </dependency>
 
-    <dependency org="org.apache.hbase" name="hbase-hadoop-compat" rev="${hbase95.version}" conf="hbase95->master">
+    <dependency org="org.apache.hbase" name="hbase-hadoop-compat" rev="${hbase1.version}" conf="hbase1->master">
       <artifact name="hbase-hadoop-compat" type="jar"/>
       <artifact name="hbase-hadoop-compat" type="test-jar" ext="jar" m:classifier="tests"/>
     </dependency>
 
-    <dependency org="org.apache.hbase" name="hbase-${hbase.hadoop.version}-compat" rev="${hbase95.version}" conf="hbase95->master">
-      <artifact name="hbase-${hbase.hadoop.version}-compat" type="jar"/>
-      <artifact name="hbase-${hbase.hadoop.version}-compat" type="test-jar" ext="jar" m:classifier="tests"/>
+    <dependency org="org.apache.hbase" name="hbase-hadoop2-compat" rev="${hbase1.version}" conf="hbase1->master">
+      <artifact name="hbase-hadoop2-compat" type="jar"/>
+      <artifact name="hbase-hadoop2-compat" type="test-jar" ext="jar" m:classifier="tests"/>
       <exclude org="org.apache.hadoop" module="hadoop-core"/>
       <exclude org="org.slf4j" module="slf4j-api"/>
       <exclude org="stax" module="stax-api" />
@@ -365,20 +328,17 @@
       <exclude org="asm" module="asm"/>
     </dependency>
 
-    <dependency org="org.htrace" name="htrace-core" rev="3.0.4" conf="hadoop23->master"/>
-    <dependency org="org.apache.htrace" name="htrace-core" rev="${htrace.version}" conf="hadoop23->master"/>
+    <dependency org="org.htrace" name="htrace-core" rev="3.0.4" conf="hadoop2->master"/>
+    <dependency org="org.apache.htrace" name="htrace-core" rev="${htrace.version}" conf="hadoop2->master"/>
     <dependency org="org.fusesource.leveldbjni" name="leveldbjni-all" rev="${leveldbjni.version}"
-      conf="hadoop23->master"/>
-    <dependency org="org.cloudera.htrace" name="htrace-core" rev="2.00" conf="hbase95->master">
+      conf="hadoop2->master"/>
+    <dependency org="org.cloudera.htrace" name="htrace-core" rev="2.00" conf="hbase1->master">
       <artifact name="htrace-core" type="jar"/>
     </dependency>
-    <dependency org="com.lmax" name="disruptor" rev="3.3.0" conf="hbase95->master"/>
+    <dependency org="com.lmax" name="disruptor" rev="3.3.0" conf="hbase1->master"/>
 
     <!-- for TestHBaseStorage -->
-    <dependency org="com.github.stephenc.high-scale-lib" name="high-scale-lib" rev="${high-scale-lib.version}"
-       conf="test->default"/>
-    <dependency org="com.google.protobuf" name="protobuf-java" rev="${protobuf-java.version}"
-       conf="test->default"/>
+    <dependency org="org.apache.hbase" name="hbase-procedure" rev="${hbase1.version}" conf="test->master"/>
     <dependency org="com.yammer.metrics" name="metrics-core" rev="${metrics-core.version}"
        conf="test->default"/>
 
@@ -428,10 +388,10 @@
       conf="compile->master" />
     <dependency org="org.apache.hive.shims" name="hive-shims-common" rev="${hive.version}" changing="true"
       conf="compile->master" />
+    <dependency org="org.apache.hive" name="hive-contrib" rev="${hive.version}" changing="true"
+                conf="test->master" />
     <dependency org="org.apache.hive.shims" name="hive-shims-0.23" rev="${hive.version}" changing="true"
-      conf="hadoop23->master" />
-    <dependency org="org.apache.hive.shims" name="hive-shims-0.20S" rev="${hive.version}" changing="true"
-      conf="hadoop20->master" />
+      conf="hadoop2->master" />
     <dependency org="org.iq80.snappy" name="snappy" rev="${snappy.version}"
       conf="test->master" />
     <dependency org="com.esotericsoftware.kryo" name="kryo" rev="${kryo.version}"
@@ -466,31 +426,31 @@
 
     <!-- for Tez integration -->
     <dependency org="org.apache.tez" name="tez" rev="${tez.version}"
-       conf="hadoop23->master"/>
+       conf="hadoop2->master"/>
     <dependency org="org.apache.tez" name="tez-common" rev="${tez.version}"
-       conf="hadoop23->master"/>
+       conf="hadoop2->master"/>
     <dependency org="org.apache.tez" name="tez-api" rev="${tez.version}"
-       conf="hadoop23->master"/>
+       conf="hadoop2->master"/>
     <dependency org="org.apache.tez" name="tez-dag" rev="${tez.version}"
-       conf="hadoop23->master"/>
+       conf="hadoop2->master"/>
     <dependency org="org.apache.tez" name="tez-runtime-internals" rev="${tez.version}"
-       conf="hadoop23->master"/>
+       conf="hadoop2->master"/>
     <dependency org="org.apache.tez" name="tez-runtime-library" rev="${tez.version}"
-       conf="hadoop23->master"/>
+       conf="hadoop2->master"/>
     <dependency org="org.apache.tez" name="tez-mapreduce" rev="${tez.version}"
-       conf="hadoop23->master"/>
+       conf="hadoop2->master"/>
     <dependency org="org.apache.tez" name="tez-yarn-timeline-history-with-acls" rev="${tez.version}"
-       conf="hadoop23->master"/>
+       conf="hadoop2->master"/>
     <dependency org="org.apache.commons" name="commons-collections4" rev="${commons-collections4.version}"
-      conf="hadoop23->master"/>
+      conf="hadoop2->master"/>
     <dependency org="org.codehaus.jettison" name="jettison" rev="${jettison.version}"
-      conf="hadoop23->master"/>
+      conf="hadoop2->master"/>
     <dependency org="org.apache.commons" name="commons-math3" rev="${commons-math3.version}"
-      conf="hadoop23->master"/>
+      conf="hadoop2->master"/>
     <dependency org="org.apache.curator" name="curator-framework" rev="${curator.version}"
-      conf="hadoop23->master"/>
+      conf="hadoop2->master"/>
     <dependency org="org.apache.curator" name="curator-client" rev="${curator.version}"
-      conf="hadoop23->master"/>
+      conf="hadoop2->master"/>
   </dependencies>
 </ivy-module>
 

Added: pig/branches/spark/ivy/ant-contrib-1.0b3.jar
URL: http://svn.apache.org/viewvc/pig/branches/spark/ivy/ant-contrib-1.0b3.jar?rev=1783988&view=auto
==============================================================================
Binary file - no diff available.

Propchange: pig/branches/spark/ivy/ant-contrib-1.0b3.jar
------------------------------------------------------------------------------
    svn:mime-type = application/octet-stream

Modified: pig/branches/spark/ivy/libraries.properties
URL: http://svn.apache.org/viewvc/pig/branches/spark/ivy/libraries.properties?rev=1783988&r1=1783987&r2=1783988&view=diff
==============================================================================
--- pig/branches/spark/ivy/libraries.properties (original)
+++ pig/branches/spark/ivy/libraries.properties Wed Feb 22 09:43:41 2017
@@ -40,14 +40,10 @@ ivy.version=2.2.0
 jasper.version=6.1.14
 groovy.version=2.4.5
 guava.version=11.0
-jersey-core.version=1.8
-hadoop-core.version=1.0.4
-hadoop-test.version=1.0.4
-hadoop-common.version=2.6.0
-hadoop-hdfs.version=2.6.0
-hadoop-mapreduce.version=2.6.0
-hbase94.version=0.94.1
-hbase95.version=0.98.12-${hbase.hadoop.version}
+hadoop-common.version=2.7.3
+hadoop-hdfs.version=2.7.3
+hadoop-mapreduce.version=2.7.3
+hbase1.version=1.2.4
 hsqldb.version=1.8.0.10
 hive.version=1.2.1
 httpcomponents.version=4.1
@@ -61,19 +57,20 @@ jdeb.version=0.8
 jdiff.version=1.0.9
 jettison.version=1.3.4
 jetty.version=6.1.26
-jline.version=1.0
-joda-time.version=2.8.2
+jline.version=2.11
+joda-time.version=2.9.3
 jopt.version=4.1
 json-simple.version=1.1
 junit.version=4.11
 jruby.version=1.6.7
-jython.version=2.5.3
-kryo.version=2.21
+jython.version=2.7.0
+kryo.version=2.22
 rhino.version=1.7R2
 antlr.version=3.4
 stringtemplate.version=4.0.4
 log4j.version=1.2.16
 netty.version=3.6.6.Final
+netty-all.version=4.0.23.Final
 rats-lib.version=0.5.1
 slf4j-api.version=1.6.1
 slf4j-log4j12.version=1.6.1
@@ -85,7 +82,6 @@ zookeeper.version=3.4.5
 servlet.version=4.0.6
 servlet-api.version=2.5
 protobuf-java.version=2.5.0
-high-scale-lib.version=1.1.1
 metrics-core.version=2.1.2
 guice.version=3.0
 guice-servlet.version=3.0

Modified: pig/branches/spark/ivy/pig-template.xml
URL: http://svn.apache.org/viewvc/pig/branches/spark/ivy/pig-template.xml?rev=1783988&r1=1783987&r2=1783988&view=diff
==============================================================================
--- pig/branches/spark/ivy/pig-template.xml (original)
+++ pig/branches/spark/ivy/pig-template.xml Wed Feb 22 09:43:41 2017
@@ -46,7 +46,7 @@
     <dependency>
       <groupId>commons-net</groupId>
       <artifactId>commons-net</artifactId>
-      <version>1.4.1</version>
+      <version>3.1</version>
     </dependency>
     <dependency>
       <groupId>org.mortbay.jetty</groupId>
@@ -59,16 +59,6 @@
       <version>6.1.26</version>
     </dependency>
     <dependency>
-      <groupId>tomcat</groupId>
-      <artifactId>jasper-runtime</artifactId>
-      <version>5.5.12</version>
-    </dependency>
-    <dependency>
-      <groupId>tomcat</groupId>
-      <artifactId>jasper-compiler</artifactId>
-      <version>5.5.12</version>
-    </dependency>
-    <dependency>
       <groupId>org.mortbay.jetty</groupId>
       <artifactId>jsp-api-2.1</artifactId>
       <version>6.1.14</version>
@@ -86,27 +76,17 @@
     <dependency>
       <groupId>net.java.dev.jets3t</groupId>
       <artifactId>jets3t</artifactId>
-      <version>0.7.1</version>
+      <version>0.9.0</version>
     </dependency>
     <dependency>
-      <groupId>commons-net</groupId>
-      <artifactId>commons-net</artifactId>
-      <version>1.4.1</version>
-    </dependency>
-    <dependency>
-      <groupId>org.mortbay.jetty</groupId>
-      <artifactId>servlet-api-2.5</artifactId>
-      <version>6.1.14</version>
-    </dependency>
-    <dependency>
-      <groupId>net.sf.kosmosfs</groupId>
-      <artifactId>kfs</artifactId>
-      <version>0.3</version>
+      <groupId>javax.servlet</groupId>
+      <artifactId>servlet-api</artifactId>
+      <version>2.5</version>
     </dependency>
     <dependency>
       <groupId>junit</groupId>
       <artifactId>junit</artifactId>
-      <version>4.8.1</version>
+      <version>4.11</version>
     </dependency>
     <dependency>
       <groupId>hsqldb</groupId>
@@ -114,14 +94,9 @@
       <version>1.8.0.10</version>
     </dependency>
     <dependency>
-      <groupId>oro</groupId>
-      <artifactId>oro</artifactId>
-      <version>2.0.8</version>
-    </dependency>
-    <dependency>
       <groupId>jline</groupId>
       <artifactId>jline</artifactId>
-      <version>1.0</version>
+      <version>2.11</version>
     </dependency>
     <dependency>
       <groupId>org.antlr</groupId>
@@ -146,22 +121,22 @@
     <dependency>
       <groupId>org.codehaus.jackson</groupId>
       <artifactId>jackson-mapper-asl</artifactId>
-      <version>1.8.8</version>
+      <version>1.9.13</version>
     </dependency>
     <dependency>
       <groupId>org.codehaus.jackson</groupId>
       <artifactId>jackson-core-asl</artifactId>
-      <version>1.8.8</version>
+      <version>1.9.13</version>
     </dependency>
     <dependency>
       <groupId>joda-time</groupId>
       <artifactId>joda-time</artifactId>
-      <version>2.1</version>
+      <version>2.9.3</version>
     </dependency>
     <dependency>
       <groupId>org.apache.avro</groupId>
       <artifactId>avro</artifactId>
-      <version>1.7.4</version>
+      <version>1.7.5</version>
       <exclusions>
         <exclusion>
           <!-- Don't pull in Avro's (later) version of Jetty.-->
@@ -183,7 +158,7 @@
     <dependency>
       <groupId>org.codehaus.groovy</groupId>
       <artifactId>groovy-all</artifactId>
-      <version>1.8.6</version>
+      <version>2.4.5</version>
     </dependency>
   </dependencies>
 </project>

Modified: pig/branches/spark/ivy/piggybank-template.xml
URL: http://svn.apache.org/viewvc/pig/branches/spark/ivy/piggybank-template.xml?rev=1783988&r1=1783987&r2=1783988&view=diff
==============================================================================
--- pig/branches/spark/ivy/piggybank-template.xml (original)
+++ pig/branches/spark/ivy/piggybank-template.xml Wed Feb 22 09:43:41 2017
@@ -51,7 +51,7 @@
     <dependency>
       <groupId>joda-time</groupId>
       <artifactId>joda-time</artifactId>
-      <version>2.1</version>
+      <version>2.9.3</version>
     </dependency>
     <dependency>
       <groupId>com.googlecode.json-simple</groupId>
@@ -61,7 +61,7 @@
     <dependency>
       <groupId>org.apache.avro</groupId>
       <artifactId>avro</artifactId>
-      <version>1.7.4</version>
+      <version>1.7.5</version>
       <exclusions>
         <exclusion>
           <!-- Don't pull in Avro's (later) version of Jetty.-->
@@ -78,7 +78,7 @@
     <dependency>
       <groupId>nl.basjes.parse.httpdlog</groupId>
       <artifactId>httpdlog-pigloader</artifactId>
-      <version>2.2</version>
+      <version>2.4</version>
     </dependency>
     <dependency>
       <groupId>org.apache.pig</groupId>

Added: pig/branches/spark/shims/src/hadoop2/org/apache/pig/backend/hadoop/executionengine/shims/HadoopShims.java
URL: http://svn.apache.org/viewvc/pig/branches/spark/shims/src/hadoop2/org/apache/pig/backend/hadoop/executionengine/shims/HadoopShims.java?rev=1783988&view=auto
==============================================================================
--- pig/branches/spark/shims/src/hadoop2/org/apache/pig/backend/hadoop/executionengine/shims/HadoopShims.java (added)
+++ pig/branches/spark/shims/src/hadoop2/org/apache/pig/backend/hadoop/executionengine/shims/HadoopShims.java Wed Feb 22 09:43:41 2017
@@ -0,0 +1,116 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.pig.backend.hadoop.executionengine.shims;
+
+import java.io.IOException;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.mapred.JobConf;
+import org.apache.hadoop.mapreduce.ContextFactory;
+import org.apache.hadoop.mapreduce.JobContext;
+import org.apache.hadoop.mapreduce.JobID;
+import org.apache.hadoop.mapreduce.TaskAttemptContext;
+import org.apache.hadoop.mapreduce.TaskAttemptID;
+import org.apache.hadoop.mapreduce.TaskType;
+import org.apache.hadoop.mapreduce.task.JobContextImpl;
+import org.apache.hadoop.mapreduce.task.TaskAttemptContextImpl;
+
+public class HadoopShims {
+
+    private static Log LOG = LogFactory.getLog(HadoopShims.class);
+
+    static public JobContext cloneJobContext(JobContext original) throws IOException, InterruptedException {
+        JobContext newContext = ContextFactory.cloneContext(original,
+                new JobConf(original.getConfiguration()));
+        return newContext;
+    }
+
+    static public TaskAttemptContext createTaskAttemptContext(Configuration conf,
+            TaskAttemptID taskId) {
+        if (conf instanceof JobConf) {
+            return new TaskAttemptContextImpl(new JobConf(conf), taskId);
+        } else {
+            return new TaskAttemptContextImpl(conf, taskId);
+        }
+    }
+
+    static public JobContext createJobContext(Configuration conf,
+            JobID jobId) {
+        if (conf instanceof JobConf) {
+            return new JobContextImpl(new JobConf(conf), jobId);
+        } else {
+            return new JobContextImpl(conf, jobId);
+        }
+    }
+
+    static public boolean isMap(TaskAttemptID taskAttemptID) {
+        TaskType type = taskAttemptID.getTaskType();
+        if (type==TaskType.MAP)
+            return true;
+
+        return false;
+    }
+
+    static public TaskAttemptID getNewTaskAttemptID() {
+        TaskAttemptID taskAttemptID = new TaskAttemptID("", 1, TaskType.MAP,
+                1, 1);
+        return taskAttemptID;
+    }
+
+    static public TaskAttemptID createTaskAttemptID(String jtIdentifier, int jobId, boolean isMap,
+            int taskId, int id) {
+        if (isMap) {
+            return new TaskAttemptID(jtIdentifier, jobId, TaskType.MAP, taskId, id);
+        } else {
+            return new TaskAttemptID(jtIdentifier, jobId, TaskType.REDUCE, taskId, id);
+        }
+    }
+
+    /**
+     * Returns whether the given path has a FileSystem implementation.
+     *
+     * @param path path
+     * @param conf configuration
+     * @return true if the given path's scheme has a FileSystem implementation,
+     *         false otherwise
+     */
+    public static boolean hasFileSystemImpl(Path path, Configuration conf) {
+        String scheme = path.toUri().getScheme();
+        if (scheme != null) {
+            // Hadoop 0.23
+            if (conf.get("fs.file.impl") != null) {
+                String fsImpl = conf.get("fs." + scheme + ".impl");
+                if (fsImpl == null) {
+                    return false;
+                }
+            } else {
+                try {
+                    Object fs = FileSystem.getFileSystemClass(scheme,conf);
+                    return fs == null ? false : true;
+                } catch (Exception e) {
+                    return false;
+                }
+            }
+        }
+        return true;
+    }
+}
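
A brief usage sketch for the hasFileSystemImpl helper added above (illustrative, not part of this patch; the HDFS URI is an assumed example):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.Path;
    import org.apache.pig.backend.hadoop.executionengine.shims.HadoopShims;

    public class ShimsExample {
        public static void main(String[] args) {
            Configuration conf = new Configuration();
            // False only when the path carries a scheme for which no FileSystem
            // implementation can be resolved from the configuration or classpath.
            boolean resolvable =
                    HadoopShims.hasFileSystemImpl(new Path("hdfs://namenode:8020/tmp/data"), conf);
            System.out.println("hdfs resolvable: " + resolvable);
        }
    }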

Modified: pig/branches/spark/src/META-INF/services/org.apache.pig.ExecType
URL: http://svn.apache.org/viewvc/pig/branches/spark/src/META-INF/services/org.apache.pig.ExecType?rev=1783988&r1=1783987&r2=1783988&view=diff
==============================================================================
--- pig/branches/spark/src/META-INF/services/org.apache.pig.ExecType (original)
+++ pig/branches/spark/src/META-INF/services/org.apache.pig.ExecType Wed Feb 22 09:43:41 2017
@@ -13,7 +13,7 @@
 #
 org.apache.pig.backend.hadoop.executionengine.mapReduceLayer.LocalExecType
 org.apache.pig.backend.hadoop.executionengine.mapReduceLayer.MRExecType
-#org.apache.pig.backend.hadoop.executionengine.tez.TezLocalExecType
-#org.apache.pig.backend.hadoop.executionengine.tez.TezExecType
+org.apache.pig.backend.hadoop.executionengine.tez.TezLocalExecType
+org.apache.pig.backend.hadoop.executionengine.tez.TezExecType
 org.apache.pig.backend.hadoop.executionengine.spark.SparkExecType
 org.apache.pig.backend.hadoop.executionengine.spark.SparkLocalExecType