Posted to commits@hive.apache.org by am...@apache.org on 2013/04/01 09:01:00 UTC

svn commit: r1463091 [16/16] - in /hive/branches/HIVE-4115: ./ bin/ bin/ext/ common/src/gen/ common/src/java/org/apache/hadoop/hive/conf/ conf/ contrib/src/java/org/apache/hadoop/hive/contrib/serde2/ contrib/src/java/org/apache/hadoop/hive/contrib/serd...

Modified: hive/branches/HIVE-4115/serde/src/java/org/apache/hadoop/hive/serde2/avro/AvroSerDe.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/serde/src/java/org/apache/hadoop/hive/serde2/avro/AvroSerDe.java?rev=1463091&r1=1463090&r2=1463091&view=diff
==============================================================================
--- hive/branches/HIVE-4115/serde/src/java/org/apache/hadoop/hive/serde2/avro/AvroSerDe.java (original)
+++ hive/branches/HIVE-4115/serde/src/java/org/apache/hadoop/hive/serde2/avro/AvroSerDe.java Mon Apr  1 07:00:00 2013
@@ -17,24 +17,24 @@
  */
 package org.apache.hadoop.hive.serde2.avro;
 
+import java.util.List;
+import java.util.Properties;
+
 import org.apache.avro.Schema;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hive.serde2.SerDe;
+import org.apache.hadoop.hive.serde2.AbstractSerDe;
 import org.apache.hadoop.hive.serde2.SerDeException;
 import org.apache.hadoop.hive.serde2.SerDeStats;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
 import org.apache.hadoop.io.Writable;
 
-import java.util.List;
-import java.util.Properties;
-
 /**
  * Read or write Avro data from Hive.
  */
-public class AvroSerDe implements SerDe {
+public class AvroSerDe extends AbstractSerDe {
   private static final Log LOG = LogFactory.getLog(AvroSerDe.class);
   private ObjectInspector oi;
   private List<String> columnNames;
@@ -48,8 +48,9 @@ public class AvroSerDe implements SerDe 
   @Override
   public void initialize(Configuration configuration, Properties properties) throws SerDeException {
     // Reset member variables so we don't get in a half-constructed state
-    if(schema != null)
+    if(schema != null) {
       LOG.info("Resetting already initialized AvroSerDe");
+    }
 
     schema = null;
     oi = null;
@@ -80,13 +81,17 @@ public class AvroSerDe implements SerDe 
 
   @Override
   public Writable serialize(Object o, ObjectInspector objectInspector) throws SerDeException {
-    if(badSchema) throw new BadSchemaException();
+    if(badSchema) {
+      throw new BadSchemaException();
+    }
     return getSerializer().serialize(o, objectInspector, columnNames, columnTypes, schema);
   }
 
   @Override
   public Object deserialize(Writable writable) throws SerDeException {
-    if(badSchema) throw new BadSchemaException();
+    if(badSchema) {
+      throw new BadSchemaException();
+    }
     return getDeserializer().deserialize(columnNames, columnTypes, writable, schema);
   }
 
@@ -102,13 +107,17 @@ public class AvroSerDe implements SerDe 
   }
 
   private AvroDeserializer getDeserializer() {
-    if(avroDeserializer == null) avroDeserializer = new AvroDeserializer();
+    if(avroDeserializer == null) {
+      avroDeserializer = new AvroDeserializer();
+    }
 
     return avroDeserializer;
   }
 
   private AvroSerializer getSerializer() {
-    if(avroSerializer == null) avroSerializer = new AvroSerializer();
+    if(avroSerializer == null) {
+      avroSerializer = new AvroSerializer();
+    }
 
     return avroSerializer;
   }
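
This hunk's pattern - extending AbstractSerDe instead of implementing the
SerDe interface - repeats across the rest of the commit. A minimal sketch of
a SerDe after the migration, assuming AbstractSerDe's abstract methods are
the six overridden below (the class name and trivial bodies are illustrative,
not part of this commit):

    import java.util.Properties;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hive.serde2.AbstractSerDe;
    import org.apache.hadoop.hive.serde2.SerDeException;
    import org.apache.hadoop.hive.serde2.SerDeStats;
    import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
    import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
    import org.apache.hadoop.io.Text;
    import org.apache.hadoop.io.Writable;

    public class PassThroughSerDe extends AbstractSerDe {
      @Override
      public void initialize(Configuration conf, Properties tbl) throws SerDeException {
        // parse table properties, build column names/types and the inspector
      }

      @Override
      public Class<? extends Writable> getSerializedClass() {
        return Text.class;
      }

      @Override
      public Writable serialize(Object obj, ObjectInspector oi) throws SerDeException {
        return new Text(String.valueOf(obj));
      }

      @Override
      public Object deserialize(Writable blob) throws SerDeException {
        return blob.toString();
      }

      @Override
      public ObjectInspector getObjectInspector() throws SerDeException {
        return PrimitiveObjectInspectorFactory.javaStringObjectInspector;
      }

      @Override
      public SerDeStats getSerDeStats() {
        return null; // no statistics support, mirroring AvroSerDe above
      }
    }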

Modified: hive/branches/HIVE-4115/serde/src/java/org/apache/hadoop/hive/serde2/binarysortable/BinarySortableSerDe.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/serde/src/java/org/apache/hadoop/hive/serde2/binarysortable/BinarySortableSerDe.java?rev=1463091&r1=1463090&r2=1463091&view=diff
==============================================================================
--- hive/branches/HIVE-4115/serde/src/java/org/apache/hadoop/hive/serde2/binarysortable/BinarySortableSerDe.java (original)
+++ hive/branches/HIVE-4115/serde/src/java/org/apache/hadoop/hive/serde2/binarysortable/BinarySortableSerDe.java Mon Apr  1 07:00:00 2013
@@ -33,7 +33,7 @@ import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.serde.serdeConstants;
-import org.apache.hadoop.hive.serde2.SerDe;
+import org.apache.hadoop.hive.serde2.AbstractSerDe;
 import org.apache.hadoop.hive.serde2.SerDeException;
 import org.apache.hadoop.hive.serde2.SerDeStats;
 import org.apache.hadoop.hive.serde2.io.BigDecimalWritable;
@@ -99,7 +99,7 @@ import org.apache.hadoop.io.Writable;
  * fields in the same top-level field will have the same sort order.
  *
  */
-public class BinarySortableSerDe implements SerDe {
+public class BinarySortableSerDe extends AbstractSerDe {
 
   public static final Log LOG = LogFactory.getLog(BinarySortableSerDe.class
       .getName());
@@ -111,7 +111,7 @@ public class BinarySortableSerDe impleme
   StructObjectInspector rowObjectInspector;
 
   boolean[] columnSortOrderIsDesc;
-  
+
   private static byte[] decimalBuffer = null;
   private static Charset decimalCharSet = Charset.forName("US-ASCII");
 
@@ -186,7 +186,7 @@ public class BinarySortableSerDe impleme
 
   static Object deserialize(InputByteBuffer buffer, TypeInfo type,
       boolean invert, Object reuse) throws IOException {
-      
+
     // Is this field a null?
     byte isNull = buffer.read(invert);
     if (isNull == 0) {
@@ -378,33 +378,33 @@ public class BinarySortableSerDe impleme
         }
         t.setBinarySortable(bytes, 0);
         return t;
-        
+
       case DECIMAL: {
         // See serialization of decimal for explanation (below)
 
         BigDecimalWritable bdw = (reuse == null ? new BigDecimalWritable() :
           (BigDecimalWritable) reuse);
-        
+
         int b = buffer.read(invert) - 1;
         assert (b == 1 || b == -1 || b == 0);
         boolean positive = b != -1;
-        
+
         int factor = buffer.read(invert) ^ 0x80;
         for (int i = 0; i < 3; i++) {
           factor = (factor << 8) + (buffer.read(invert) & 0xff);
         }
-        
+
         if (!positive) {
           factor = -factor;
         }
-        
+
         int start = buffer.tell();
         int length = 0;
-        
+
         do {
           b = buffer.read(positive ? invert : !invert);
           assert(b != 1);
-          
+
           if (b == 0) {
             // end of digits
             break;
@@ -412,7 +412,7 @@ public class BinarySortableSerDe impleme
 
           length++;
         } while (true);
-        
+
         if(decimalBuffer == null || decimalBuffer.length < length) {
           decimalBuffer = new byte[length];
         }
@@ -428,11 +428,11 @@ public class BinarySortableSerDe impleme
         String digits = new String(decimalBuffer, 0, length, decimalCharSet);
         BigInteger bi = new BigInteger(digits);
         BigDecimal bd = new BigDecimal(bi).scaleByPowerOfTen(factor-length);
-        
+
         if (!positive) {
           bd = bd.negate();
         }
-        
+
         bdw.set(bd);
         return bdw;
       }
@@ -443,7 +443,7 @@ public class BinarySortableSerDe impleme
       }
       }
     }
-    
+
     case LIST: {
       ListTypeInfo ltype = (ListTypeInfo) type;
       TypeInfo etype = ltype.getListElementTypeInfo();
@@ -690,32 +690,32 @@ public class BinarySortableSerDe impleme
 
         BigDecimalObjectInspector boi = (BigDecimalObjectInspector) poi;
         BigDecimal dec = boi.getPrimitiveJavaObject(o).stripTrailingZeros();
-        
+
         // get the sign of the big decimal
         int sign = dec.compareTo(BigDecimal.ZERO);
-        
+
         // we'll encode the absolute value (sign is separate)
         dec = dec.abs();
-        
+
         // get the scale factor to turn big decimal into a decimal < 1
         int factor = dec.precision() - dec.scale();
         factor = sign == 1 ? factor : -factor;
-        
+
         // convert the absolute big decimal to string
         dec.scaleByPowerOfTen(Math.abs(dec.scale()));
         String digits = dec.unscaledValue().toString();
-        
+
         // finally write out the pieces (sign, scale, digits)
         buffer.write((byte) ( sign + 1), invert);
         buffer.write((byte) ((factor >> 24) ^ 0x80), invert);
         buffer.write((byte) ( factor >> 16), invert);
         buffer.write((byte) ( factor >> 8), invert);
         buffer.write((byte)   factor, invert);
-        serializeBytes(buffer, digits.getBytes(decimalCharSet), 
+        serializeBytes(buffer, digits.getBytes(decimalCharSet),
             digits.length(), sign == -1 ? !invert : invert);
         return;
       }
-        
+
       default: {
         throw new RuntimeException("Unrecognized type: "
             + poi.getPrimitiveCategory());
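
The decimal branches above rely on a (sign, factor, digits) decomposition:
serialization writes a sign byte, a four-byte power-of-ten factor, then the
digit string; deserialization reads them back and rebuilds the value via
scaleByPowerOfTen(factor - length). A standalone, hedged sketch of the
decomposition only (the sample value and printout are illustrative):

    import java.math.BigDecimal;

    public class DecimalSortableDemo {
      public static void main(String[] args) {
        BigDecimal dec = new BigDecimal("-12.340").stripTrailingZeros(); // -12.34
        int sign = dec.compareTo(BigDecimal.ZERO);                       // -1
        dec = dec.abs();                                                 // 12.34
        int factor = dec.precision() - dec.scale();                      // 4 - 2 = 2
        factor = sign == 1 ? factor : -factor;                           // -2 for negatives
        String digits = dec.unscaledValue().toString();                  // "1234"
        System.out.println(sign + " " + factor + " " + digits);          // -1 -2 1234
      }
    }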

Modified: hive/branches/HIVE-4115/serde/src/java/org/apache/hadoop/hive/serde2/columnar/ColumnarSerDeBase.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/serde/src/java/org/apache/hadoop/hive/serde2/columnar/ColumnarSerDeBase.java?rev=1463091&r1=1463090&r2=1463091&view=diff
==============================================================================
--- hive/branches/HIVE-4115/serde/src/java/org/apache/hadoop/hive/serde2/columnar/ColumnarSerDeBase.java (original)
+++ hive/branches/HIVE-4115/serde/src/java/org/apache/hadoop/hive/serde2/columnar/ColumnarSerDeBase.java Mon Apr  1 07:00:00 2013
@@ -18,14 +18,14 @@
 
 package org.apache.hadoop.hive.serde2.columnar;
 
+import org.apache.hadoop.hive.serde2.AbstractSerDe;
 import org.apache.hadoop.hive.serde2.ByteStream;
-import org.apache.hadoop.hive.serde2.SerDe;
 import org.apache.hadoop.hive.serde2.SerDeException;
 import org.apache.hadoop.hive.serde2.SerDeStats;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.io.Writable;
 
-public abstract class ColumnarSerDeBase implements SerDe {
+public abstract class ColumnarSerDeBase extends AbstractSerDe {
 
   // The object for storing row data
   ColumnarStructBase cachedLazyStruct;

Modified: hive/branches/HIVE-4115/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDe.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDe.java?rev=1463091&r1=1463090&r2=1463091&view=diff
==============================================================================
--- hive/branches/HIVE-4115/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDe.java (original)
+++ hive/branches/HIVE-4115/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDe.java Mon Apr  1 07:00:00 2013
@@ -19,7 +19,6 @@
 package org.apache.hadoop.hive.serde2.dynamic_type;
 
 import java.io.ByteArrayInputStream;
-import java.io.Serializable;
 import java.util.ArrayList;
 import java.util.List;
 import java.util.Properties;
@@ -28,8 +27,8 @@ import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.serde.serdeConstants;
+import org.apache.hadoop.hive.serde2.AbstractSerDe;
 import org.apache.hadoop.hive.serde2.ByteStream;
-import org.apache.hadoop.hive.serde2.SerDe;
 import org.apache.hadoop.hive.serde2.SerDeException;
 import org.apache.hadoop.hive.serde2.SerDeStats;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
@@ -50,7 +49,7 @@ import org.apache.thrift.transport.TIOSt
  * DynamicSerDe.
  *
  */
-public class DynamicSerDe implements SerDe, Serializable {
+public class DynamicSerDe extends AbstractSerDe {
 
   public static final Log LOG = LogFactory.getLog(DynamicSerDe.class.getName());
 

Modified: hive/branches/HIVE-4115/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyArray.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyArray.java?rev=1463091&r1=1463090&r2=1463091&view=diff
==============================================================================
--- hive/branches/HIVE-4115/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyArray.java (original)
+++ hive/branches/HIVE-4115/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyArray.java Mon Apr  1 07:00:00 2013
@@ -187,7 +187,7 @@ public class LazyArray extends LazyNonPr
         && 0 == LazyUtils
         .compare(bytes.getData(), startPosition[index], elementLength,
         nullSequence.getBytes(), 0, nullSequence.getLength())) {
-      return null;
+      return arrayElements[index] = null;
     }
     arrayElements[index] = LazyFactory
         .createLazyObject(oi.getListElementObjectInspector());
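
The one-line change above exploits the fact that in Java an assignment is an
expression whose value is the assigned value: "return arrayElements[index] =
null;" both writes null into the cache slot and returns it, so a second
lookup of the same index can no longer hand back a stale element (the
HIVE-4149 test added further below exercises exactly this). A minimal
illustration with illustrative names:

    public class AssignAndReturnDemo {
      static Object[] slots = {null, "stale", null};

      static Object clearAndReturn(int i) {
        return slots[i] = null; // stores null, then returns it
      }

      public static void main(String[] args) {
        System.out.println(clearAndReturn(1)); // null
        System.out.println(slots[1]);          // null - the cache was updated too
      }
    }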

Modified: hive/branches/HIVE-4115/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazySimpleSerDe.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazySimpleSerDe.java?rev=1463091&r1=1463090&r2=1463091&view=diff
==============================================================================
--- hive/branches/HIVE-4115/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazySimpleSerDe.java (original)
+++ hive/branches/HIVE-4115/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazySimpleSerDe.java Mon Apr  1 07:00:00 2013
@@ -28,6 +28,7 @@ import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.serde.serdeConstants;
+import org.apache.hadoop.hive.serde2.AbstractSerDe;
 import org.apache.hadoop.hive.serde2.ByteStream;
 import org.apache.hadoop.hive.serde2.SerDe;
 import org.apache.hadoop.hive.serde2.SerDeException;
@@ -35,11 +36,11 @@ import org.apache.hadoop.hive.serde2.Ser
 import org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category;
 import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.StructField;
 import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.UnionObjectInspector;
-import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
 import org.apache.hadoop.hive.serde2.typeinfo.StructTypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
@@ -59,7 +60,7 @@ import org.apache.hadoop.io.Writable;
  * Also LazySimpleSerDe outputs typed columns instead of treating all columns as
  * String like MetadataTypedColumnsetSerDe.
  */
-public class LazySimpleSerDe implements SerDe {
+public class LazySimpleSerDe extends AbstractSerDe {
 
   public static final Log LOG = LogFactory.getLog(LazySimpleSerDe.class
       .getName());

Modified: hive/branches/HIVE-4115/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyTimestamp.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyTimestamp.java?rev=1463091&r1=1463090&r2=1463091&view=diff
==============================================================================
--- hive/branches/HIVE-4115/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyTimestamp.java (original)
+++ hive/branches/HIVE-4115/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyTimestamp.java Mon Apr  1 07:00:00 2013
@@ -66,12 +66,17 @@ public class LazyTimestamp extends LazyP
       s = "";
     }
 
-    Timestamp t;
+    Timestamp t = null;
     if (s.compareTo("NULL") == 0) {
-      t = null;
+      isNull = true;
       logExceptionMessage(bytes, start, length, "TIMESTAMP");
     } else {
-      t = Timestamp.valueOf(s);
+      try {
+        t = Timestamp.valueOf(s);
+      } catch (IllegalArgumentException e) {
+        isNull = true;
+        logExceptionMessage(bytes, start, length, "TIMESTAMP");
+      }
     }
     data.set(t);
   }
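
The new try/catch matters because java.sql.Timestamp.valueOf throws
IllegalArgumentException on anything not in yyyy-mm-dd hh:mm:ss[.f...]
format; with this change the value is logged and read as null instead of the
exception escaping. A hedged standalone sketch (input strings are
illustrative):

    import java.sql.Timestamp;

    public class TimestampParseDemo {
      public static void main(String[] args) {
        System.out.println(Timestamp.valueOf("2013-04-01 07:00:00")); // parses fine
        try {
          Timestamp.valueOf("not-a-timestamp"); // malformed input
        } catch (IllegalArgumentException e) {
          System.out.println("rejected - treated as null by the SerDe above");
        }
      }
    }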

Modified: hive/branches/HIVE-4115/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/LazyObjectInspectorFactory.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/LazyObjectInspectorFactory.java?rev=1463091&r1=1463090&r2=1463091&view=diff
==============================================================================
--- hive/branches/HIVE-4115/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/LazyObjectInspectorFactory.java (original)
+++ hive/branches/HIVE-4115/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/LazyObjectInspectorFactory.java Mon Apr  1 07:00:00 2013
@@ -19,8 +19,8 @@
 package org.apache.hadoop.hive.serde2.lazy.objectinspector;
 
 import java.util.ArrayList;
-import java.util.HashMap;
 import java.util.List;
+import java.util.concurrent.ConcurrentHashMap;
 
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.io.Text;
@@ -28,18 +28,18 @@ import org.apache.hadoop.io.Text;
 /**
  * ObjectInspectorFactory is the primary way to create new ObjectInspector
  * instances.
- * 
+ *
  * SerDe classes should call the static functions in this library to create an
  * ObjectInspector to return to the caller of SerDe2.getObjectInspector().
- * 
+ *
 * The reason for having caches here is that ObjectInspectors do not have an
  * internal state - so ObjectInspectors with the same construction parameters
  * should result in exactly the same ObjectInspector.
  */
 public final class LazyObjectInspectorFactory {
 
-  static HashMap<ArrayList<Object>, LazySimpleStructObjectInspector> cachedLazySimpleStructObjectInspector =
-      new HashMap<ArrayList<Object>, LazySimpleStructObjectInspector>();
+  static ConcurrentHashMap<ArrayList<Object>, LazySimpleStructObjectInspector> cachedLazySimpleStructObjectInspector =
+      new ConcurrentHashMap<ArrayList<Object>, LazySimpleStructObjectInspector>();
 
   public static LazySimpleStructObjectInspector getLazySimpleStructObjectInspector(
       List<String> structFieldNames,
@@ -78,7 +78,8 @@ public final class LazyObjectInspectorFa
     return result;
   }
 
-  static HashMap<ArrayList<Object>, LazyListObjectInspector> cachedLazySimpleListObjectInspector = new HashMap<ArrayList<Object>, LazyListObjectInspector>();
+  static ConcurrentHashMap<ArrayList<Object>, LazyListObjectInspector> cachedLazySimpleListObjectInspector =
+      new ConcurrentHashMap<ArrayList<Object>, LazyListObjectInspector>();
 
   public static LazyListObjectInspector getLazySimpleListObjectInspector(
       ObjectInspector listElementObjectInspector, byte separator,
@@ -99,7 +100,8 @@ public final class LazyObjectInspectorFa
     return result;
   }
 
-  static HashMap<ArrayList<Object>, LazyMapObjectInspector> cachedLazySimpleMapObjectInspector = new HashMap<ArrayList<Object>, LazyMapObjectInspector>();
+  static ConcurrentHashMap<ArrayList<Object>, LazyMapObjectInspector> cachedLazySimpleMapObjectInspector =
+      new ConcurrentHashMap<ArrayList<Object>, LazyMapObjectInspector>();
 
   public static LazyMapObjectInspector getLazySimpleMapObjectInspector(
       ObjectInspector mapKeyObjectInspector,
@@ -125,9 +127,9 @@ public final class LazyObjectInspectorFa
     return result;
   }
 
-  static HashMap<List<Object>, LazyUnionObjectInspector>
+  static ConcurrentHashMap<List<Object>, LazyUnionObjectInspector>
     cachedLazyUnionObjectInspector =
-      new HashMap<List<Object>, LazyUnionObjectInspector>();
+      new ConcurrentHashMap<List<Object>, LazyUnionObjectInspector>();
 
   public static LazyUnionObjectInspector getLazyUnionObjectInspector(
       List<ObjectInspector> ois, byte separator, Text nullSequence,
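
The HashMap-to-ConcurrentHashMap swap here (and in the two factory classes
changed below) matters because these caches are static and shared by every
query thread; an unsynchronized HashMap can be corrupted by concurrent puts.
The check-then-put idiom keeps a benign race - two threads may build the same
inspector, but equal construction parameters yield interchangeable results.
A hedged sketch of the idiom with illustrative names:

    import java.util.ArrayList;
    import java.util.concurrent.ConcurrentHashMap;

    public class InspectorCacheDemo {
      static final ConcurrentHashMap<ArrayList<Object>, Object> CACHE =
          new ConcurrentHashMap<ArrayList<Object>, Object>();

      static Object getCached(ArrayList<Object> signature) {
        Object result = CACHE.get(signature);
        if (result == null) {
          result = new Object();        // stand-in for constructing an ObjectInspector
          CACHE.put(signature, result); // a racing thread may overwrite with an equal value
        }
        return result;
      }
    }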

Modified: hive/branches/HIVE-4115/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinarySerDe.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinarySerDe.java?rev=1463091&r1=1463090&r2=1463091&view=diff
==============================================================================
--- hive/branches/HIVE-4115/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinarySerDe.java (original)
+++ hive/branches/HIVE-4115/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinarySerDe.java Mon Apr  1 07:00:00 2013
@@ -28,9 +28,9 @@ import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.serde.serdeConstants;
+import org.apache.hadoop.hive.serde2.AbstractSerDe;
 import org.apache.hadoop.hive.serde2.ByteStream;
 import org.apache.hadoop.hive.serde2.ByteStream.Output;
-import org.apache.hadoop.hive.serde2.SerDe;
 import org.apache.hadoop.hive.serde2.SerDeException;
 import org.apache.hadoop.hive.serde2.SerDeStats;
 import org.apache.hadoop.hive.serde2.io.BigDecimalWritable;
@@ -67,7 +67,7 @@ import org.apache.hadoop.io.Writable;
  * deserialized until required. Binary means a field is serialized in binary
  * compact format.
  */
-public class LazyBinarySerDe implements SerDe {
+public class LazyBinarySerDe extends AbstractSerDe {
 
   public static final Log LOG = LogFactory.getLog(LazyBinarySerDe.class
       .getName());

Modified: hive/branches/HIVE-4115/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/objectinspector/LazyBinaryObjectInspectorFactory.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/objectinspector/LazyBinaryObjectInspectorFactory.java?rev=1463091&r1=1463090&r2=1463091&view=diff
==============================================================================
--- hive/branches/HIVE-4115/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/objectinspector/LazyBinaryObjectInspectorFactory.java (original)
+++ hive/branches/HIVE-4115/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/objectinspector/LazyBinaryObjectInspectorFactory.java Mon Apr  1 07:00:00 2013
@@ -18,18 +18,18 @@
 package org.apache.hadoop.hive.serde2.lazybinary.objectinspector;
 
 import java.util.ArrayList;
-import java.util.HashMap;
 import java.util.List;
+import java.util.concurrent.ConcurrentHashMap;
 
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 
 /**
  * ObjectInspectorFactory is the primary way to create new ObjectInspector
  * instances.
- * 
+ *
  * SerDe classes should call the static functions in this library to create an
  * ObjectInspector to return to the caller of SerDe2.getObjectInspector().
- * 
+ *
 * The reason for having caches here is that ObjectInspectors do not have an
  * internal state - so ObjectInspectors with the same construction parameters
  * should result in exactly the same ObjectInspector.
@@ -37,7 +37,8 @@ import org.apache.hadoop.hive.serde2.obj
 
 public final class LazyBinaryObjectInspectorFactory {
 
-  static HashMap<ArrayList<Object>, LazyBinaryStructObjectInspector> cachedLazyBinaryStructObjectInspector = new HashMap<ArrayList<Object>, LazyBinaryStructObjectInspector>();
+  static ConcurrentHashMap<ArrayList<Object>, LazyBinaryStructObjectInspector> cachedLazyBinaryStructObjectInspector =
+      new ConcurrentHashMap<ArrayList<Object>, LazyBinaryStructObjectInspector>();
 
   public static LazyBinaryStructObjectInspector getLazyBinaryStructObjectInspector(
       List<String> structFieldNames,
@@ -65,7 +66,8 @@ public final class LazyBinaryObjectInspe
     return result;
   }
 
-  static HashMap<ArrayList<Object>, LazyBinaryListObjectInspector> cachedLazyBinaryListObjectInspector = new HashMap<ArrayList<Object>, LazyBinaryListObjectInspector>();
+  static ConcurrentHashMap<ArrayList<Object>, LazyBinaryListObjectInspector> cachedLazyBinaryListObjectInspector =
+      new ConcurrentHashMap<ArrayList<Object>, LazyBinaryListObjectInspector>();
 
   public static LazyBinaryListObjectInspector getLazyBinaryListObjectInspector(
       ObjectInspector listElementObjectInspector) {
@@ -80,7 +82,8 @@ public final class LazyBinaryObjectInspe
     return result;
   }
 
-  static HashMap<ArrayList<Object>, LazyBinaryMapObjectInspector> cachedLazyBinaryMapObjectInspector = new HashMap<ArrayList<Object>, LazyBinaryMapObjectInspector>();
+  static ConcurrentHashMap<ArrayList<Object>, LazyBinaryMapObjectInspector> cachedLazyBinaryMapObjectInspector =
+      new ConcurrentHashMap<ArrayList<Object>, LazyBinaryMapObjectInspector>();
 
   public static LazyBinaryMapObjectInspector getLazyBinaryMapObjectInspector(
       ObjectInspector mapKeyObjectInspector,

Modified: hive/branches/HIVE-4115/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/ObjectInspectorFactory.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/ObjectInspectorFactory.java?rev=1463091&r1=1463090&r2=1463091&view=diff
==============================================================================
--- hive/branches/HIVE-4115/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/ObjectInspectorFactory.java (original)
+++ hive/branches/HIVE-4115/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/ObjectInspectorFactory.java Mon Apr  1 07:00:00 2013
@@ -24,9 +24,9 @@ import java.lang.reflect.ParameterizedTy
 import java.lang.reflect.Type;
 import java.util.ArrayList;
 import java.util.Arrays;
-import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
+import java.util.concurrent.ConcurrentHashMap;
 
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils;
@@ -34,10 +34,10 @@ import org.apache.hadoop.hive.serde2.obj
 /**
  * ObjectInspectorFactory is the primary way to create new ObjectInspector
  * instances.
- * 
+ *
  * SerDe classes should call the static functions in this library to create an
  * ObjectInspector to return to the caller of SerDe2.getObjectInspector().
- * 
+ *
 * The reason for having caches here is that
  * ObjectInspectors do not have an internal state - so ObjectInspectors with the
  * same construction parameters should result in exactly the same
@@ -60,7 +60,7 @@ public final class ObjectInspectorFactor
     JAVA, THRIFT, PROTOCOL_BUFFERS
   };
 
-  private static HashMap<Type, ObjectInspector> objectInspectorCache = new HashMap<Type, ObjectInspector>();
+  private static ConcurrentHashMap<Type, ObjectInspector> objectInspectorCache = new ConcurrentHashMap<Type, ObjectInspector>();
 
   public static ObjectInspector getReflectionObjectInspector(Type t,
       ObjectInspectorOptions options) {
@@ -197,7 +197,8 @@ public final class ObjectInspectorFactor
     return oi;
   }
 
-  static HashMap<ObjectInspector, StandardListObjectInspector> cachedStandardListObjectInspector = new HashMap<ObjectInspector, StandardListObjectInspector>();
+  static ConcurrentHashMap<ObjectInspector, StandardListObjectInspector> cachedStandardListObjectInspector =
+      new ConcurrentHashMap<ObjectInspector, StandardListObjectInspector>();
 
   public static StandardListObjectInspector getStandardListObjectInspector(
       ObjectInspector listElementObjectInspector) {
@@ -216,7 +217,8 @@ public final class ObjectInspectorFactor
   }
 
 
-  static HashMap<List<ObjectInspector>, StandardMapObjectInspector> cachedStandardMapObjectInspector = new HashMap<List<ObjectInspector>, StandardMapObjectInspector>();
+  static ConcurrentHashMap<List<ObjectInspector>, StandardMapObjectInspector> cachedStandardMapObjectInspector =
+      new ConcurrentHashMap<List<ObjectInspector>, StandardMapObjectInspector>();
 
   public static StandardMapObjectInspector getStandardMapObjectInspector(
       ObjectInspector mapKeyObjectInspector,
@@ -242,9 +244,9 @@ public final class ObjectInspectorFactor
           mapValueObjectInspector, constantValue);
   }
 
-  static HashMap<List<ObjectInspector>, StandardUnionObjectInspector>
+  static ConcurrentHashMap<List<ObjectInspector>, StandardUnionObjectInspector>
     cachedStandardUnionObjectInspector =
-      new HashMap<List<ObjectInspector>, StandardUnionObjectInspector>();
+      new ConcurrentHashMap<List<ObjectInspector>, StandardUnionObjectInspector>();
 
   public static StandardUnionObjectInspector getStandardUnionObjectInspector(
       List<ObjectInspector> unionObjectInspectors) {
@@ -257,7 +259,8 @@ public final class ObjectInspectorFactor
     return result;
   }
 
-  static HashMap<ArrayList<List<?>>, StandardStructObjectInspector> cachedStandardStructObjectInspector = new HashMap<ArrayList<List<?>>, StandardStructObjectInspector>();
+  static ConcurrentHashMap<ArrayList<List<?>>, StandardStructObjectInspector> cachedStandardStructObjectInspector =
+      new ConcurrentHashMap<ArrayList<List<?>>, StandardStructObjectInspector>();
 
   public static StandardStructObjectInspector getStandardStructObjectInspector(
       List<String> structFieldNames,
@@ -283,7 +286,8 @@ public final class ObjectInspectorFactor
     return result;
   }
 
-  static HashMap<List<StructObjectInspector>, UnionStructObjectInspector> cachedUnionStructObjectInspector = new HashMap<List<StructObjectInspector>, UnionStructObjectInspector>();
+  static ConcurrentHashMap<List<StructObjectInspector>, UnionStructObjectInspector> cachedUnionStructObjectInspector =
+      new ConcurrentHashMap<List<StructObjectInspector>, UnionStructObjectInspector>();
 
   public static UnionStructObjectInspector getUnionStructObjectInspector(
       List<StructObjectInspector> structObjectInspectors) {
@@ -296,7 +300,8 @@ public final class ObjectInspectorFactor
     return result;
   }
 
-  static HashMap<ArrayList<Object>, ColumnarStructObjectInspector> cachedColumnarStructObjectInspector = new HashMap<ArrayList<Object>, ColumnarStructObjectInspector>();
+  static ConcurrentHashMap<ArrayList<Object>, ColumnarStructObjectInspector> cachedColumnarStructObjectInspector =
+      new ConcurrentHashMap<ArrayList<Object>, ColumnarStructObjectInspector>();
 
   public static ColumnarStructObjectInspector getColumnarStructObjectInspector(
       List<String> structFieldNames,

Modified: hive/branches/HIVE-4115/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/JavaBigDecimalObjectInspector.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/JavaBigDecimalObjectInspector.java?rev=1463091&r1=1463090&r2=1463091&view=diff
==============================================================================
--- hive/branches/HIVE-4115/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/JavaBigDecimalObjectInspector.java (original)
+++ hive/branches/HIVE-4115/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/JavaBigDecimalObjectInspector.java Mon Apr  1 07:00:00 2013
@@ -32,7 +32,14 @@ public class JavaBigDecimalObjectInspect
 
   @Override
   public BigDecimalWritable getPrimitiveWritableObject(Object o) {
-    return o == null ? null : new BigDecimalWritable((BigDecimal) o);
+    if (o == null) {
+      return null;
+    }
+
+    if (o instanceof String) {
+      o = new BigDecimal((String)o);
+    }
+    return new BigDecimalWritable((BigDecimal) o);
   }
 
   @Override
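
The hunk above adds a String-to-BigDecimal coercion before wrapping the value
in a BigDecimalWritable. The same coercion in isolation (method name and
sample values are illustrative):

    import java.math.BigDecimal;

    public class BigDecimalCoercionDemo {
      static BigDecimal coerce(Object o) {
        if (o == null) {
          return null;
        }
        if (o instanceof String) {
          o = new BigDecimal((String) o); // e.g. "12.34" -> 12.34
        }
        return (BigDecimal) o;
      }

      public static void main(String[] args) {
        System.out.println(coerce("12.34")); // 12.34
        System.out.println(coerce(null));    // null
      }
    }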

Modified: hive/branches/HIVE-4115/serde/src/java/org/apache/hadoop/hive/serde2/thrift/ThriftDeserializer.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/serde/src/java/org/apache/hadoop/hive/serde2/thrift/ThriftDeserializer.java?rev=1463091&r1=1463090&r2=1463091&view=diff
==============================================================================
--- hive/branches/HIVE-4115/serde/src/java/org/apache/hadoop/hive/serde2/thrift/ThriftDeserializer.java (original)
+++ hive/branches/HIVE-4115/serde/src/java/org/apache/hadoop/hive/serde2/thrift/ThriftDeserializer.java Mon Apr  1 07:00:00 2013
@@ -21,7 +21,7 @@ package org.apache.hadoop.hive.serde2.th
 import java.util.Properties;
 
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hive.serde2.Deserializer;
+import org.apache.hadoop.hive.serde2.AbstractDeserializer;
 import org.apache.hadoop.hive.serde2.SerDeException;
 import org.apache.hadoop.hive.serde2.SerDeStats;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
@@ -32,13 +32,14 @@ import org.apache.thrift.protocol.TProto
  * ThriftDeserializer.
  *
  */
-public class ThriftDeserializer implements Deserializer {
+public class ThriftDeserializer extends AbstractDeserializer {
 
   private ThriftByteStreamTypedSerDe tsd;
 
   public ThriftDeserializer() {
   }
 
+  @Override
   public void initialize(Configuration job, Properties tbl)
       throws SerDeException {
     try {
@@ -68,14 +69,17 @@ public class ThriftDeserializer implemen
     }
   }
 
+  @Override
   public Object deserialize(Writable field) throws SerDeException {
     return tsd.deserialize(field);
   }
 
+  @Override
   public ObjectInspector getObjectInspector() throws SerDeException {
     return tsd.getObjectInspector();
   }
 
+  @Override
   public SerDeStats getSerDeStats() {
     // no support for statistics
     return null;

Modified: hive/branches/HIVE-4115/serde/src/test/org/apache/hadoop/hive/serde2/lazy/TestLazyArrayMapStruct.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/serde/src/test/org/apache/hadoop/hive/serde2/lazy/TestLazyArrayMapStruct.java?rev=1463091&r1=1463090&r2=1463091&view=diff
==============================================================================
--- hive/branches/HIVE-4115/serde/src/test/org/apache/hadoop/hive/serde2/lazy/TestLazyArrayMapStruct.java (original)
+++ hive/branches/HIVE-4115/serde/src/test/org/apache/hadoop/hive/serde2/lazy/TestLazyArrayMapStruct.java Mon Apr  1 07:00:00 2013
@@ -95,6 +95,20 @@ public class TestLazyArrayMapStruct exte
       assertNull((b.getListElementObject(5)));
       assertEquals(5, b.getList().size());
 
+      // -- HIVE-4149
+      b = (LazyArray) LazyFactory.createLazyObject(oi);
+
+      data = new byte[] {'a', '\t', '\\', 'N'};
+      TestLazyPrimitive.initLazyObject(b, data, 0, data.length);
+      assertEquals(new Text("a"), ((LazyString) b.getListElementObject(0)).getWritableObject());
+      assertNull(b.getListElementObject(1));
+
+      data = new byte[] {'\\', 'N', '\t', 'a'};
+      TestLazyPrimitive.initLazyObject(b, data, 0, data.length);
+      assertNull(b.getListElementObject(0));
+      assertNull(b.getListElementObject(0));  // second call: must be null, not a stale cached element
+      assertEquals(new Text("a"), ((LazyString) b.getListElementObject(1)).getWritableObject());
+
     } catch (Throwable e) {
       e.printStackTrace();
       throw e;

Modified: hive/branches/HIVE-4115/shims/ivy.xml
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/shims/ivy.xml?rev=1463091&r1=1463090&r2=1463091&view=diff
==============================================================================
--- hive/branches/HIVE-4115/shims/ivy.xml (original)
+++ hive/branches/HIVE-4115/shims/ivy.xml Mon Apr  1 07:00:00 2013
@@ -91,6 +91,36 @@
       <exclude org="org.apache.commons" module="commons-daemon"/><!--bad POM-->
     </dependency>
 
+    <!-- jobclient tests dependency -->
+    <dependency org="org.apache.hadoop" name="hadoop-mapreduce-client-jobclient" rev="${hadoop-0.23.version}"
+                conf="hadoop0.23.shim->default" transitive="false">
+      <artifact name="hadoop-mapreduce-client-jobclient" ext="jar" />
+      <artifact name="hadoop-mapreduce-client-jobclient" type="tests" ext="jar" m:classifier="tests"/>
+      <exclude org="commons-daemon" module="commons-daemon"/><!--bad POM-->
+      <exclude org="org.apache.commons" module="commons-daemon"/><!--bad POM-->
+    </dependency>
+    <dependency org="org.apache.hadoop" name="hadoop-yarn-server-tests"
+                rev="${hadoop-0.23.version}"
+                conf="hadoop0.23.shim->default">
+      <artifact name="hadoop-yarn-server-tests" type="tests" ext="jar" m:classifier="tests"/>
+      <exclude org="commons-daemon" module="commons-daemon"/><!--bad POM-->
+      <exclude org="org.apache.commons" module="commons-daemon"/><!--bad POM-->
+    </dependency>
+    <dependency org="org.apache.hadoop" name="hadoop-mapreduce-client-app"
+                rev="${hadoop-0.23.version}"
+                conf="hadoop0.23.shim->default">
+      <include type="jar"/>
+      <exclude org="commons-daemon" module="commons-daemon"/><!--bad POM-->
+      <exclude org="org.apache.commons" module="commons-daemon"/><!--bad POM-->
+    </dependency>
+    <dependency org="org.apache.hadoop" name="hadoop-mapreduce-client-hs"
+                rev="${hadoop-0.23.version}"
+                conf="hadoop0.23.shim->default">
+      <include type="jar"/>
+      <exclude org="commons-daemon" module="commons-daemon"/><!--bad POM-->
+      <exclude org="org.apache.commons" module="commons-daemon"/><!--bad POM-->
+    </dependency>
+
     <!-- Hadoop 0.20 shim dependencies. Used for building 0.20 shims. -->
     <dependency org="com.google.guava" name="guava" rev="${guava-hadoop20.version}"
                 conf="hadoop0.20.shim->default" transitive="false"/>

Modified: hive/branches/HIVE-4115/shims/src/0.20/java/org/apache/hadoop/hive/shims/Hadoop20Shims.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/shims/src/0.20/java/org/apache/hadoop/hive/shims/Hadoop20Shims.java?rev=1463091&r1=1463090&r2=1463091&view=diff
==============================================================================
--- hive/branches/HIVE-4115/shims/src/0.20/java/org/apache/hadoop/hive/shims/Hadoop20Shims.java (original)
+++ hive/branches/HIVE-4115/shims/src/0.20/java/org/apache/hadoop/hive/shims/Hadoop20Shims.java Mon Apr  1 07:00:00 2013
@@ -52,6 +52,7 @@ import org.apache.hadoop.mapred.InputSpl
 import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapred.JobContext;
 import org.apache.hadoop.mapred.JobStatus;
+import org.apache.hadoop.mapred.MiniMRCluster;
 import org.apache.hadoop.mapred.OutputCommitter;
 import org.apache.hadoop.mapred.RecordReader;
 import org.apache.hadoop.mapred.Reporter;
@@ -100,6 +101,43 @@ public class Hadoop20Shims implements Ha
     // gone in 20+
   }
 
+
+  /**
+   * Returns a shim to wrap MiniMrCluster
+   */
+  public MiniMrShim getMiniMrCluster(Configuration conf, int numberOfTaskTrackers,
+                                     String nameNode, int numDir) throws IOException {
+    return new MiniMrShim(conf, numberOfTaskTrackers, nameNode, numDir);
+  }
+
+  /**
+   * Shim for MiniMrCluster
+   */
+  public class MiniMrShim implements HadoopShims.MiniMrShim {
+
+    private final MiniMRCluster mr;
+
+    public MiniMrShim(Configuration conf, int numberOfTaskTrackers,
+        String nameNode, int numDir) throws IOException {
+      this.mr = new MiniMRCluster(numberOfTaskTrackers, nameNode, numDir);
+    }
+
+    @Override
+    public int getJobTrackerPort() throws UnsupportedOperationException {
+      return mr.getJobTrackerPort();
+    }
+
+    @Override
+    public void shutdown() throws IOException {
+      mr.shutdown();
+    }
+
+    @Override
+    public void setupConfiguration(Configuration conf) {
+      setJobLauncherRpcAddress(conf, "localhost:" + mr.getJobTrackerPort());
+    }
+  }
+
   public HadoopShims.MiniDFSShim getMiniDfs(Configuration conf,
       int numDataNodes,
       boolean format,

Modified: hive/branches/HIVE-4115/shims/src/0.20S/java/org/apache/hadoop/hive/shims/Hadoop20SShims.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/shims/src/0.20S/java/org/apache/hadoop/hive/shims/Hadoop20SShims.java?rev=1463091&r1=1463090&r2=1463091&view=diff
==============================================================================
--- hive/branches/HIVE-4115/shims/src/0.20S/java/org/apache/hadoop/hive/shims/Hadoop20SShims.java (original)
+++ hive/branches/HIVE-4115/shims/src/0.20S/java/org/apache/hadoop/hive/shims/Hadoop20SShims.java Mon Apr  1 07:00:00 2013
@@ -26,6 +26,8 @@ import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.Trash;
 import org.apache.hadoop.hive.shims.HadoopShimsSecure;
+import org.apache.hadoop.hdfs.MiniDFSCluster;
+import org.apache.hadoop.mapred.MiniMRCluster;
 import org.apache.hadoop.mapred.ClusterStatus;
 import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapred.TaskLogServlet;
@@ -119,4 +121,71 @@ public class Hadoop20SShims extends Hado
   public short getDefaultReplication(FileSystem fs, Path path) {
     return fs.getDefaultReplication();
   }
+
+  /**
+   * Returns a shim to wrap MiniMrCluster
+   */
+  public MiniMrShim getMiniMrCluster(Configuration conf, int numberOfTaskTrackers, 
+                                     String nameNode, int numDir) throws IOException {
+    return new MiniMrShim(conf, numberOfTaskTrackers, nameNode, numDir);
+  }
+
+  /**
+   * Shim for MiniMrCluster
+   */
+  public class MiniMrShim implements HadoopShims.MiniMrShim {
+
+    private final MiniMRCluster mr;
+
+    public MiniMrShim(Configuration conf, int numberOfTaskTrackers,
+        String nameNode, int numDir) throws IOException {
+      this.mr = new MiniMRCluster(numberOfTaskTrackers, nameNode, numDir);
+    }
+
+    @Override
+    public int getJobTrackerPort() throws UnsupportedOperationException {
+      return mr.getJobTrackerPort();
+    }
+
+    @Override
+    public void shutdown() throws IOException {
+      mr.shutdown();
+    }
+
+    @Override
+    public void setupConfiguration(Configuration conf) {
+      setJobLauncherRpcAddress(conf, "localhost:" + mr.getJobTrackerPort());
+    }
+  }
+
+  // Don't move this code to the parent class. There's a binary
+  // incompatibility between hadoop 1 and 2 wrt MiniDFSCluster and we
+  // need to have two different shim classes even though they are
+  // exactly the same.
+  public HadoopShims.MiniDFSShim getMiniDfs(Configuration conf,
+      int numDataNodes,
+      boolean format,
+      String[] racks) throws IOException {
+    return new MiniDFSShim(new MiniDFSCluster(conf, numDataNodes, format, racks));
+  }
+
+  /**
+   * MiniDFSShim.
+   *
+   */
+  public class MiniDFSShim implements HadoopShims.MiniDFSShim {
+    private final MiniDFSCluster cluster;
+
+    public MiniDFSShim(MiniDFSCluster cluster) {
+      this.cluster = cluster;
+    }
+
+    public FileSystem getFileSystem() throws IOException {
+      return cluster.getFileSystem();
+    }
+
+    public void shutdown() {
+      cluster.shutdown();
+    }
+  }
 }

Modified: hive/branches/HIVE-4115/shims/src/0.23/java/org/apache/hadoop/hive/shims/Hadoop23Shims.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/shims/src/0.23/java/org/apache/hadoop/hive/shims/Hadoop23Shims.java?rev=1463091&r1=1463090&r2=1463091&view=diff
==============================================================================
--- hive/branches/HIVE-4115/shims/src/0.23/java/org/apache/hadoop/hive/shims/Hadoop23Shims.java (original)
+++ hive/branches/HIVE-4115/shims/src/0.23/java/org/apache/hadoop/hive/shims/Hadoop23Shims.java Mon Apr  1 07:00:00 2013
@@ -21,13 +21,17 @@ import java.io.IOException;
 import java.lang.Integer;
 import java.net.MalformedURLException;
 import java.net.URL;
+import java.util.Map;
 
+import org.apache.commons.lang.StringUtils;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.Trash;
 import org.apache.hadoop.hive.shims.HadoopShims.JobTrackerState;
 import org.apache.hadoop.hive.shims.HadoopShimsSecure;
+import org.apache.hadoop.hdfs.MiniDFSCluster;
+import org.apache.hadoop.mapred.MiniMRCluster;
 import org.apache.hadoop.mapred.ClusterStatus;
 import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapreduce.Job;
@@ -132,4 +136,91 @@ public class Hadoop23Shims extends Hadoo
           throws IOException {
     return Trash.moveToAppropriateTrash(fs, path, conf);
   }
+
+  /**
+   * Returns a shim to wrap MiniMrCluster
+   */
+  public MiniMrShim getMiniMrCluster(Configuration conf, int numberOfTaskTrackers, 
+                                     String nameNode, int numDir) throws IOException {
+    return new MiniMrShim(conf, numberOfTaskTrackers, nameNode, numDir);
+  }
+
+  /**
+   * Shim for MiniMrCluster
+   */
+  public class MiniMrShim implements HadoopShims.MiniMrShim {
+
+    private final MiniMRCluster mr;
+    private final Configuration conf;
+
+    public MiniMrShim(Configuration conf, int numberOfTaskTrackers, 
+                      String nameNode, int numDir) throws IOException {
+      this.conf = conf;
+
+      JobConf jConf = new JobConf(conf);
+      jConf.set("yarn.scheduler.capacity.root.queues", "default");
+      jConf.set("yarn.scheduler.capacity.root.default.capacity", "100");
+
+      mr = new MiniMRCluster(numberOfTaskTrackers, nameNode, numDir, null, null, jConf);
+    }
+
+    @Override
+    public int getJobTrackerPort() throws UnsupportedOperationException {
+      String address = conf.get("yarn.resourcemanager.address");
+      address = StringUtils.substringAfterLast(address, ":");
+
+      if (StringUtils.isBlank(address)) {
+        throw new IllegalArgumentException("Invalid YARN resource manager port.");
+      }
+
+      return Integer.parseInt(address);
+    }
+
+    @Override
+    public void shutdown() throws IOException {
+      mr.shutdown();
+    }
+
+    @Override
+    public void setupConfiguration(Configuration conf) {
+      JobConf jConf = mr.createJobConf();
+      for (Map.Entry<String, String> pair: jConf) {
+        conf.set(pair.getKey(), pair.getValue());
+      }
+    }
+  }
+
+  // Don't move this code to the parent class. There's a binary
+  // incompatibility between hadoop 1 and 2 wrt MiniDFSCluster and we
+  // need to have two different shim classes even though they are
+  // exactly the same.
+  public HadoopShims.MiniDFSShim getMiniDfs(Configuration conf,
+      int numDataNodes,
+      boolean format,
+      String[] racks) throws IOException {
+    return new MiniDFSShim(new MiniDFSCluster(conf, numDataNodes, format, racks));
+  }
+
+  /**
+   * MiniDFSShim.
+   *
+   */
+  public class MiniDFSShim implements HadoopShims.MiniDFSShim {
+    private final MiniDFSCluster cluster;
+
+    public MiniDFSShim(MiniDFSCluster cluster) {
+      this.cluster = cluster;
+    }
+
+    public FileSystem getFileSystem() throws IOException {
+      return cluster.getFileSystem();
+    }
+
+    public void shutdown() {
+      cluster.shutdown();
+    }
+  }
 }
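
Since there is no JobTracker on Hadoop 0.23/YARN, getJobTrackerPort above
derives a port from yarn.resourcemanager.address instead. That parsing in
isolation (the address value is illustrative):

    import org.apache.commons.lang.StringUtils;

    public class RmPortDemo {
      public static void main(String[] args) {
        String address = "localhost:8032"; // value of yarn.resourcemanager.address
        String port = StringUtils.substringAfterLast(address, ":");
        if (StringUtils.isBlank(port)) {
          throw new IllegalArgumentException("Invalid YARN resource manager port.");
        }
        System.out.println(Integer.parseInt(port)); // 8032
      }
    }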

Modified: hive/branches/HIVE-4115/shims/src/common-secure/java/org/apache/hadoop/hive/shims/HadoopShimsSecure.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/shims/src/common-secure/java/org/apache/hadoop/hive/shims/HadoopShimsSecure.java?rev=1463091&r1=1463090&r2=1463091&view=diff
==============================================================================
--- hive/branches/HIVE-4115/shims/src/common-secure/java/org/apache/hadoop/hive/shims/HadoopShimsSecure.java (original)
+++ hive/branches/HIVE-4115/shims/src/common-secure/java/org/apache/hadoop/hive/shims/HadoopShimsSecure.java Mon Apr  1 07:00:00 2013
@@ -37,7 +37,6 @@ import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.PathFilter;
-import org.apache.hadoop.hdfs.MiniDFSCluster;
 import org.apache.hadoop.hive.io.HiveIOExceptionHandlerUtil;
 import org.apache.hadoop.hive.thrift.DelegationTokenIdentifier;
 import org.apache.hadoop.hive.thrift.DelegationTokenSelector;
@@ -106,33 +105,6 @@ public abstract class HadoopShimsSecure 
     // gone in 20+
   }
 
-  public HadoopShims.MiniDFSShim getMiniDfs(Configuration conf,
-      int numDataNodes,
-      boolean format,
-      String[] racks) throws IOException {
-    return new MiniDFSShim(new MiniDFSCluster(conf, numDataNodes, format, racks));
-  }
-
-  /**
-   * MiniDFSShim.
-   *
-   */
-  public class MiniDFSShim implements HadoopShims.MiniDFSShim {
-    private final MiniDFSCluster cluster;
-
-    public MiniDFSShim(MiniDFSCluster cluster) {
-      this.cluster = cluster;
-    }
-
-    public FileSystem getFileSystem() throws IOException {
-      return cluster.getFileSystem();
-    }
-
-    public void shutdown() {
-      cluster.shutdown();
-    }
-  }
-
   /**
    * We define this function here to make the code compatible between
    * hadoop 0.17 and hadoop 0.20.

Modified: hive/branches/HIVE-4115/shims/src/common/java/org/apache/hadoop/hive/shims/HadoopShims.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/shims/src/common/java/org/apache/hadoop/hive/shims/HadoopShims.java?rev=1463091&r1=1463090&r2=1463091&view=diff
==============================================================================
--- hive/branches/HIVE-4115/shims/src/common/java/org/apache/hadoop/hive/shims/HadoopShims.java (original)
+++ hive/branches/HIVE-4115/shims/src/common/java/org/apache/hadoop/hive/shims/HadoopShims.java Mon Apr  1 07:00:00 2013
@@ -113,6 +113,21 @@ public interface HadoopShims {
   long getAccessTime(FileStatus file);
 
   /**
+   * Returns a shim to wrap MiniMrCluster
+   */
+  public MiniMrShim getMiniMrCluster(Configuration conf, int numberOfTaskTrackers,
+                                     String nameNode, int numDir) throws IOException;
+
+  /**
+   * Shim for MiniMrCluster
+   */
+  public interface MiniMrShim {
+    public int getJobTrackerPort() throws UnsupportedOperationException;
+    public void shutdown() throws IOException;
+    public void setupConfiguration(Configuration conf);
+  }
+
+  /**
    * Returns a shim to wrap MiniDFSCluster. This is necessary since this class
    * was moved from org.apache.hadoop.dfs to org.apache.hadoop.hdfs
    */
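
A hedged sketch of how a test might drive the new MiniMrShim contract;
ShimLoader is Hive's existing shim entry point, while the argument values and
flow below are illustrative:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hive.shims.HadoopShims;
    import org.apache.hadoop.hive.shims.ShimLoader;

    public class MiniMrShimDemo {
      public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        HadoopShims shims = ShimLoader.getHadoopShims();
        HadoopShims.MiniMrShim mr =
            shims.getMiniMrCluster(conf, 1, "file:///", 1); // 1 tracker, 1 dir
        try {
          mr.setupConfiguration(conf); // point job submission at the mini cluster
          System.out.println("job tracker port: " + mr.getJobTrackerPort());
        } finally {
          mr.shutdown();
        }
      }
    }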

Modified: hive/branches/HIVE-4115/testutils/ptest/hivetest.py
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/testutils/ptest/hivetest.py?rev=1463091&r1=1463090&r2=1463091&view=diff
==============================================================================
--- hive/branches/HIVE-4115/testutils/ptest/hivetest.py (original)
+++ hive/branches/HIVE-4115/testutils/ptest/hivetest.py Mon Apr  1 07:00:00 2013
@@ -342,6 +342,8 @@ def run_other_tests():
               'sed -e "s:[^/]*/::g"',
               'grep -v TestSerDe.class',
               'grep -v TestHiveMetaStore.class',
+              'grep -v TestBeeLineDriver.class',
+              'grep -v TestHiveServer2Concurrency.class',
               'grep -v TestCliDriver.class',
               'grep -v TestNegativeCliDriver.class',
               'grep -v ".*\$.*\.class"',
@@ -354,6 +356,8 @@ def run_other_tests():
               'sed -e "s:[^/]*/::g"',
               'grep -v TestSerDe.class',
               'grep -v TestHiveMetaStore.class',
+              'grep -v TestBeeLineDriver.class',
+              'grep -v TestHiveServer2Concurrency.class',
               'grep -v TestCliDriver.class',
               'grep -v TestNegativeCliDriver.class',
               'grep -v ".*\$.*\.class"',