Posted to commits@hive.apache.org by mm...@apache.org on 2018/04/22 04:58:14 UTC

hive git commit: HIVE-19167: Map data type doesn't keep the order of the key/values pairs as read (Part 2, The Sequel or SQL) (Matt McCline, reviewed by Deepak Jaiswal)

Repository: hive
Updated Branches:
  refs/heads/master 9fddd6d0c -> 84481cf95


HIVE-19167: Map data type doesn't keep the order of the key/values pairs as read (Part 2, The Sequel or SQL) (Matt McCline, reviewed by Deepak Jaiswal)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/84481cf9
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/84481cf9
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/84481cf9

Branch: refs/heads/master
Commit: 84481cf95693f8f6c83674e0494dba30e53ae945
Parents: 9fddd6d
Author: Matt McCline <mm...@hortonworks.com>
Authored: Sat Apr 21 23:58:07 2018 -0500
Committer: Matt McCline <mm...@hortonworks.com>
Committed: Sat Apr 21 23:58:07 2018 -0500

----------------------------------------------------------------------
 .../java/org/apache/hive/beeline/BeeLine.java   |  3 +-
 .../hive/hcatalog/data/HCatRecordSerDe.java     |  4 +--
 .../apache/hive/hcatalog/data/ReaderWriter.java |  4 +--
 .../hive/hcatalog/pig/HCatBaseStorer.java       |  3 +-
 .../hadoop/hive/ql/io/BatchToRowReader.java     | 15 ++++-----
 .../apache/hadoop/hive/ql/io/orc/OrcStruct.java |  4 +--
 .../hadoop/hive/ql/io/orc/RecordReaderImpl.java | 19 ++++++------
 .../serde/AbstractParquetMapInspector.java      |  9 +++---
 .../hive/ql/parse/TypeCheckProcFactory.java     |  2 +-
 .../exec/vector/mapjoin/fast/VerifyFastRow.java |  6 ++--
 .../clientpositive/llap/orc_create.q.out        |  4 +--
 .../llap/udaf_collect_set_2.q.out               | 32 ++++++++++----------
 .../clientpositive/orc_int_type_promotion.q.out |  6 ++--
 .../orc_nested_column_pruning.q.out             |  2 +-
 .../results/clientpositive/udf_sort_array.q.out |  2 +-
 .../hadoop/hive/serde2/avro/AvroSerializer.java |  4 +--
 .../binarysortable/BinarySortableSerDe.java     |  8 ++---
 .../dynamic_type/DynamicSerDeTypeMap.java       | 10 +++---
 .../objectinspector/ObjectInspectorUtils.java   |  4 +--
 .../StandardMapObjectInspector.java             | 10 +++---
 20 files changed, 77 insertions(+), 74 deletions(-)
----------------------------------------------------------------------
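
Context for the change (not part of the patch itself): java.util.HashMap iterates in an order determined by key hashing, while java.util.LinkedHashMap iterates in insertion order, so maps built while reading a row come back in the order the key/value pairs were read. A minimal, self-contained sketch of the difference:

    import java.util.HashMap;
    import java.util.LinkedHashMap;
    import java.util.Map;

    public class MapOrderDemo {
      public static void main(String[] args) {
        // Keys are inserted in "as read" order: key11, key12, key13.
        Map<String, String> hashed = new HashMap<String, String>();
        Map<String, String> linked = new LinkedHashMap<String, String>();
        for (String k : new String[] {"key11", "key12", "key13"}) {
          hashed.put(k, "value" + k.substring(3));
          linked.put(k, "value" + k.substring(3));
        }
        // HashMap iteration order depends on the hash of each key and may not match
        // insertion order; LinkedHashMap iteration order always matches insertion order.
        System.out.println("HashMap:       " + hashed);
        System.out.println("LinkedHashMap: " + linked);
      }
    }

The q.out changes further below (orc_create.q.out and friends) reflect exactly this: map values now print in the order they appear in the source data.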


http://git-wip-us.apache.org/repos/asf/hive/blob/84481cf9/beeline/src/java/org/apache/hive/beeline/BeeLine.java
----------------------------------------------------------------------
diff --git a/beeline/src/java/org/apache/hive/beeline/BeeLine.java b/beeline/src/java/org/apache/hive/beeline/BeeLine.java
index bdb65b1..4ae2e3f 100644
--- a/beeline/src/java/org/apache/hive/beeline/BeeLine.java
+++ b/beeline/src/java/org/apache/hive/beeline/BeeLine.java
@@ -60,6 +60,7 @@ import java.util.Enumeration;
 import java.util.HashMap;
 import java.util.HashSet;
 import java.util.Iterator;
+import java.util.LinkedHashMap;
 import java.util.LinkedList;
 import java.util.List;
 import java.util.ListIterator;
@@ -1892,7 +1893,7 @@ public class BeeLine implements Closeable {
 
 
   static Map<Object, Object> map(Object[] obs) {
-    Map<Object, Object> m = new HashMap<Object, Object>();
+    Map<Object, Object> m = new LinkedHashMap<Object, Object>();
     for (int i = 0; i < obs.length - 1; i += 2) {
       m.put(obs[i], obs[i + 1]);
     }

http://git-wip-us.apache.org/repos/asf/hive/blob/84481cf9/hcatalog/core/src/main/java/org/apache/hive/hcatalog/data/HCatRecordSerDe.java
----------------------------------------------------------------------
diff --git a/hcatalog/core/src/main/java/org/apache/hive/hcatalog/data/HCatRecordSerDe.java b/hcatalog/core/src/main/java/org/apache/hive/hcatalog/data/HCatRecordSerDe.java
index 7ac0be6..0b16b83 100644
--- a/hcatalog/core/src/main/java/org/apache/hive/hcatalog/data/HCatRecordSerDe.java
+++ b/hcatalog/core/src/main/java/org/apache/hive/hcatalog/data/HCatRecordSerDe.java
@@ -23,7 +23,7 @@ import java.util.Arrays;
 import java.util.List;
 import java.util.Map;
 import java.util.Properties;
-import java.util.HashMap;
+import java.util.LinkedHashMap;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.serde.serdeConstants;
@@ -219,7 +219,7 @@ public class HCatRecordSerDe extends AbstractSerDe {
   private static Map<?, ?> serializeMap(Object f, MapObjectInspector moi) throws SerDeException {
     ObjectInspector koi = moi.getMapKeyObjectInspector();
     ObjectInspector voi = moi.getMapValueObjectInspector();
-    Map<Object, Object> m = new HashMap<Object, Object>();
+    Map<Object, Object> m = new LinkedHashMap<Object, Object>();
 
     Map<?, ?> readMap = moi.getMap(f);
     if (readMap == null) {

http://git-wip-us.apache.org/repos/asf/hive/blob/84481cf9/hcatalog/core/src/main/java/org/apache/hive/hcatalog/data/ReaderWriter.java
----------------------------------------------------------------------
diff --git a/hcatalog/core/src/main/java/org/apache/hive/hcatalog/data/ReaderWriter.java b/hcatalog/core/src/main/java/org/apache/hive/hcatalog/data/ReaderWriter.java
index d2954e0..cb1c459 100644
--- a/hcatalog/core/src/main/java/org/apache/hive/hcatalog/data/ReaderWriter.java
+++ b/hcatalog/core/src/main/java/org/apache/hive/hcatalog/data/ReaderWriter.java
@@ -24,7 +24,7 @@ import java.io.DataOutput;
 import java.io.IOException;
 import java.sql.Date;
 import java.util.ArrayList;
-import java.util.HashMap;
+import java.util.LinkedHashMap;
 import java.util.Iterator;
 import java.util.List;
 import java.util.Map;
@@ -95,7 +95,7 @@ public abstract class ReaderWriter {
 
     case DataType.MAP:
       int size = in.readInt();
-      Map<Object, Object> m = new HashMap<Object, Object>(size);
+      Map<Object, Object> m = new LinkedHashMap<Object, Object>(size);
       for (int i = 0; i < size; i++) {
         m.put(readDatum(in), readDatum(in));
       }

http://git-wip-us.apache.org/repos/asf/hive/blob/84481cf9/hcatalog/hcatalog-pig-adapter/src/main/java/org/apache/hive/hcatalog/pig/HCatBaseStorer.java
----------------------------------------------------------------------
diff --git a/hcatalog/hcatalog-pig-adapter/src/main/java/org/apache/hive/hcatalog/pig/HCatBaseStorer.java b/hcatalog/hcatalog-pig-adapter/src/main/java/org/apache/hive/hcatalog/pig/HCatBaseStorer.java
index 9fca565..ec620d2 100644
--- a/hcatalog/hcatalog-pig-adapter/src/main/java/org/apache/hive/hcatalog/pig/HCatBaseStorer.java
+++ b/hcatalog/hcatalog-pig-adapter/src/main/java/org/apache/hive/hcatalog/pig/HCatBaseStorer.java
@@ -27,6 +27,7 @@ import java.util.ArrayList;
 import java.util.Collections;
 import java.util.HashMap;
 import java.util.Iterator;
+import java.util.LinkedHashMap;
 import java.util.List;
 import java.util.Map;
 import java.util.Map.Entry;
@@ -353,7 +354,7 @@ abstract class HCatBaseStorer extends StoreFunc implements StoreMetadata {
         return bagContents;
       case MAP:
         Map<?, ?> pigMap = (Map<?, ?>) pigObj;
-        Map<Object, Object> typeMap = new HashMap<Object, Object>();
+        Map<Object, Object> typeMap = new LinkedHashMap<Object, Object>();
         for (Entry<?, ?> entry : pigMap.entrySet()) {
           // the value has a schema and not a FieldSchema
           typeMap.put(

http://git-wip-us.apache.org/repos/asf/hive/blob/84481cf9/ql/src/java/org/apache/hadoop/hive/ql/io/BatchToRowReader.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/BatchToRowReader.java b/ql/src/java/org/apache/hadoop/hive/ql/io/BatchToRowReader.java
index 5f7f6e6..2b005c4 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/BatchToRowReader.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/BatchToRowReader.java
@@ -27,11 +27,12 @@ import java.util.Arrays;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-
 import java.io.IOException;
 import java.util.ArrayList;
-import java.util.HashMap;
+import java.util.LinkedHashMap;
 import java.util.List;
+import java.util.Map;
+
 import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.ColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector;
@@ -541,7 +542,7 @@ public abstract class BatchToRowReader<StructType, UnionType>
     }
   }
 
-  private HashMap<Object,Object> nextMap(
+  private Map<Object,Object> nextMap(
       ColumnVector vector, int row, MapTypeInfo schema, Object previous) {
     if (vector.isRepeating) {
       row = 0;
@@ -552,11 +553,11 @@ public abstract class BatchToRowReader<StructType, UnionType>
       int offset = (int) map.offsets[row];
       TypeInfo keyType = schema.getMapKeyTypeInfo();
       TypeInfo valueType = schema.getMapValueTypeInfo();
-      HashMap<Object,Object> result;
-      if (previous == null || previous.getClass() != HashMap.class) {
-        result = new HashMap<Object,Object>(length);
+      LinkedHashMap<Object,Object> result;
+      if (previous == null || previous.getClass() != LinkedHashMap.class) {
+        result = new LinkedHashMap<Object,Object>(length);
       } else {
-        result = (HashMap<Object,Object>) previous;
+        result = (LinkedHashMap<Object,Object>) previous;
         // I couldn't think of a good way to reuse the keys and value objects
         // without even more allocations, so take the easy and safe approach.
         result.clear();
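
As an aside (this is not from the patch), the reuse logic in nextMap above follows a simple pattern: reuse the previous object only when it is exactly the expected concrete class, otherwise allocate a fresh LinkedHashMap so insertion order is preserved. A hedged, stand-alone sketch of that pattern (the helper name is hypothetical):

    import java.util.LinkedHashMap;
    import java.util.Map;

    final class MapReuse {
      // Hypothetical helper, not a Hive API: reuse 'previous' only if it is exactly a
      // LinkedHashMap; otherwise allocate a new one sized for the expected entries.
      @SuppressWarnings("unchecked")
      static Map<Object, Object> reuseOrCreate(Object previous, int expectedSize) {
        if (previous == null || previous.getClass() != LinkedHashMap.class) {
          return new LinkedHashMap<Object, Object>(expectedSize);
        }
        Map<Object, Object> result = (LinkedHashMap<Object, Object>) previous;
        result.clear(); // drop stale entries before the caller refills the map
        return result;
      }
    }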

http://git-wip-us.apache.org/repos/asf/hive/blob/84481cf9/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcStruct.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcStruct.java b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcStruct.java
index c81bcfe..797008d 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcStruct.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcStruct.java
@@ -40,7 +40,7 @@ import java.io.DataInput;
 import java.io.DataOutput;
 import java.io.IOException;
 import java.util.ArrayList;
-import java.util.HashMap;
+import java.util.LinkedHashMap;
 import java.util.List;
 import java.util.Map;
 
@@ -379,7 +379,7 @@ final public class OrcStruct implements Writable {
 
     @Override
     public Object create() {
-      return new HashMap<Object,Object>();
+      return new LinkedHashMap<Object,Object>();
     }
 
     @Override

http://git-wip-us.apache.org/repos/asf/hive/blob/84481cf9/ql/src/java/org/apache/hadoop/hive/ql/io/orc/RecordReaderImpl.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/RecordReaderImpl.java b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/RecordReaderImpl.java
index 308495a..5b001a0 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/RecordReaderImpl.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/RecordReaderImpl.java
@@ -19,8 +19,9 @@ package org.apache.hadoop.hive.ql.io.orc;
 
 import java.io.IOException;
 import java.util.ArrayList;
-import java.util.HashMap;
+import java.util.LinkedHashMap;
 import java.util.List;
+import java.util.Map;
 
 import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.ColumnVector;
@@ -534,10 +535,10 @@ public class RecordReaderImpl extends org.apache.orc.impl.RecordReaderImpl
     }
   }
 
-  static HashMap<Object,Object> nextMap(ColumnVector vector,
-                                        int row,
-                                        TypeDescription schema,
-                                        Object previous) {
+  static Map<Object,Object> nextMap(ColumnVector vector,
+                                    int row,
+                                    TypeDescription schema,
+                                    Object previous) {
     if (vector.isRepeating) {
       row = 0;
     }
@@ -547,11 +548,11 @@ public class RecordReaderImpl extends org.apache.orc.impl.RecordReaderImpl
       int offset = (int) map.offsets[row];
       TypeDescription keyType = schema.getChildren().get(0);
       TypeDescription valueType = schema.getChildren().get(1);
-      HashMap<Object,Object> result;
-      if (previous == null || previous.getClass() != HashMap.class) {
-        result = new HashMap<Object,Object>(length);
+      LinkedHashMap<Object,Object> result;
+      if (previous == null || previous.getClass() != LinkedHashMap.class) {
+        result = new LinkedHashMap<Object,Object>(length);
       } else {
-        result = (HashMap<Object,Object>) previous;
+        result = (LinkedHashMap<Object,Object>) previous;
         // I couldn't think of a good way to reuse the keys and value objects
         // without even more allocations, so take the easy and safe approach.
         result.clear();

http://git-wip-us.apache.org/repos/asf/hive/blob/84481cf9/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/serde/AbstractParquetMapInspector.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/serde/AbstractParquetMapInspector.java b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/serde/AbstractParquetMapInspector.java
index 3f508bd..dfcb33f 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/serde/AbstractParquetMapInspector.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/serde/AbstractParquetMapInspector.java
@@ -13,7 +13,6 @@
  */
 package org.apache.hadoop.hive.ql.io.parquet.serde;
 
-import java.util.HashMap;
 import java.util.LinkedHashMap;
 import java.util.Map;
 
@@ -106,27 +105,27 @@ public abstract class AbstractParquetMapInspector implements SettableMapObjectIn
 
   @Override
   public Object create() {
-    Map<Object, Object> m = new HashMap<Object, Object>();
+    Map<Object, Object> m = new LinkedHashMap<Object, Object>();
     return m;
   }
 
   @Override
   public Object put(Object map, Object key, Object value) {
-    Map<Object, Object> m = (HashMap<Object, Object>) map;
+    Map<Object, Object> m = (Map<Object, Object>) map;
     m.put(key, value);
     return m;
   }
 
   @Override
   public Object remove(Object map, Object key) {
-    Map<Object, Object> m = (HashMap<Object, Object>) map;
+    Map<Object, Object> m = (Map<Object, Object>) map;
     m.remove(key);
     return m;
   }
 
   @Override
   public Object clear(Object map) {
-    Map<Object, Object> m = (HashMap<Object, Object>) map;
+    Map<Object, Object> m = (Map<Object, Object>) map;
     m.clear();
     return m;
   }

http://git-wip-us.apache.org/repos/asf/hive/blob/84481cf9/ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckProcFactory.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckProcFactory.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckProcFactory.java
index 3e7b3a1..2506172 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckProcFactory.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckProcFactory.java
@@ -771,7 +771,7 @@ public class TypeCheckProcFactory {
     PrimitiveObjectInspector keyPoi = (PrimitiveObjectInspector)keyOI;
     PrimitiveObjectInspector valuePoi = (PrimitiveObjectInspector)valueOI;
     Map<?,?> values = (Map<?,?>)((ConstantObjectInspector) inspector).getWritableConstantValue();
-    Map<Object, Object> constant = new HashMap<Object, Object>();
+    Map<Object, Object> constant = new LinkedHashMap<Object, Object>();
     for (Map.Entry<?, ?> e : values.entrySet()) {
       constant.put(keyPoi.getPrimitiveJavaObject(e.getKey()), valuePoi.getPrimitiveJavaObject(e.getValue()));
     }

http://git-wip-us.apache.org/repos/asf/hive/blob/84481cf9/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/mapjoin/fast/VerifyFastRow.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/mapjoin/fast/VerifyFastRow.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/mapjoin/fast/VerifyFastRow.java
index 4412425..f257363 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/mapjoin/fast/VerifyFastRow.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/mapjoin/fast/VerifyFastRow.java
@@ -23,7 +23,7 @@ import java.sql.Date;
 import java.sql.Timestamp;
 import java.util.ArrayList;
 import java.util.Arrays;
-import java.util.HashMap;
+import java.util.LinkedHashMap;
 import java.util.List;
 import java.util.Map;
 
@@ -457,7 +457,7 @@ public class VerifyFastRow {
         MapTypeInfo mapTypeInfo = (MapTypeInfo) typeInfo;
         TypeInfo keyTypeInfo = mapTypeInfo.getMapKeyTypeInfo();
         TypeInfo valueTypeInfo = mapTypeInfo.getMapValueTypeInfo();
-        HashMap<Object, Object> hashMap = (HashMap<Object, Object>) object;
+        Map<Object, Object> hashMap = (Map<Object, Object>) object;
         serializeWrite.beginMap(hashMap);
         boolean isFirst = true;
         for (Map.Entry<Object, Object> entry : hashMap.entrySet()) {
@@ -630,7 +630,7 @@ public class VerifyFastRow {
         MapTypeInfo mapTypeInfo = (MapTypeInfo) typeInfo;
         TypeInfo keyTypeInfo = mapTypeInfo.getMapKeyTypeInfo();
         TypeInfo valueTypeInfo = mapTypeInfo.getMapValueTypeInfo();
-        HashMap<Object, Object> hashMap = new HashMap<Object, Object>();
+        Map<Object, Object> hashMap = new LinkedHashMap<Object, Object>();
         Object keyObj;
         Object valueObj;
         boolean isNull;

http://git-wip-us.apache.org/repos/asf/hive/blob/84481cf9/ql/src/test/results/clientpositive/llap/orc_create.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/orc_create.q.out b/ql/src/test/results/clientpositive/llap/orc_create.q.out
index 44373da..72d94e5 100644
--- a/ql/src/test/results/clientpositive/llap/orc_create.q.out
+++ b/ql/src/test/results/clientpositive/llap/orc_create.q.out
@@ -391,7 +391,7 @@ POSTHOOK: query: SELECT * from orc_create_complex
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@orc_create_complex
 #### A masked pattern was here ####
-line1	{"key13":"value13","key11":"value11","key12":"value12"}	["a","b","c"]	{"A":"one","B":"two"}
+line1	{"key11":"value11","key12":"value12","key13":"value13"}	["a","b","c"]	{"A":"one","B":"two"}
 line2	{"key21":"value21","key22":"value22","key23":"value23"}	["d","e","f"]	{"A":"three","B":"four"}
 line3	{"key31":"value31","key32":"value32","key33":"value33"}	["g","h","i"]	{"A":"five","B":"six"}
 PREHOOK: query: SELECT str from orc_create_complex
@@ -413,7 +413,7 @@ POSTHOOK: query: SELECT mp from orc_create_complex
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@orc_create_complex
 #### A masked pattern was here ####
-{"key13":"value13","key11":"value11","key12":"value12"}
+{"key11":"value11","key12":"value12","key13":"value13"}
 {"key21":"value21","key22":"value22","key23":"value23"}
 {"key31":"value31","key32":"value32","key33":"value33"}
 PREHOOK: query: SELECT lst from orc_create_complex

http://git-wip-us.apache.org/repos/asf/hive/blob/84481cf9/ql/src/test/results/clientpositive/llap/udaf_collect_set_2.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/udaf_collect_set_2.q.out b/ql/src/test/results/clientpositive/llap/udaf_collect_set_2.q.out
index aa55979..3b6e2bd 100644
--- a/ql/src/test/results/clientpositive/llap/udaf_collect_set_2.q.out
+++ b/ql/src/test/results/clientpositive/llap/udaf_collect_set_2.q.out
@@ -216,7 +216,7 @@ POSTHOOK: type: QUERY
 POSTHOOK: Input: default@customers
 POSTHOOK: Input: default@nested_orders
 #### A masked pattern was here ####
-1	[{"name":"Chris","date":"2013-06-21","sub":{"\"juice\"":21.45,"\"bread\"":15.2}},{"name":"Chris","date":"2014-10-11","sub":{"\"grape\"":1200.5,"\"rice\"":29.36}}]
+1	[{"name":"Chris","date":"2013-06-21","sub":{"\"juice\"":21.45,"\"bread\"":15.2}},{"name":"Chris","date":"2014-10-11","sub":{"\"rice\"":29.36,"\"grape\"":1200.5}}]
 2	[{"name":"John","date":"2013-08-10","sub":{"\"yogurt\"":126.57,"\"beef\"":210.57}},{"name":"John","date":"2014-06-25","sub":{"\"chocolate\"":3.65,"\"water\"":420.36}},{"name":"John","date":"2015-01-15","sub":{"\"milk\"":27.45}}]
 3	[{"name":"Martin","date":"2014-05-11","sub":{"\"apple\"":30.5,"\"orange\"":41.35}},{"name":"Martin","date":"2014-12-12","sub":{"\"icecream\"":210.03,"\"banana\"":100.56,"\"coffee":500.0}}]
 PREHOOK: query: SELECT c.id, sort_array(collect_list(named_struct("name", c.name, "date", o.d, "sub", o.sub)))
@@ -235,7 +235,7 @@ POSTHOOK: type: QUERY
 POSTHOOK: Input: default@customers
 POSTHOOK: Input: default@nested_orders
 #### A masked pattern was here ####
-1	[{"name":"Chris","date":"2013-06-21","sub":{"\"juice\"":21.45,"\"bread\"":15.2}},{"name":"Chris","date":"2013-06-21","sub":{"\"juice\"":21.45,"\"bread\"":15.2}},{"name":"Chris","date":"2014-10-11","sub":{"\"grape\"":1200.5,"\"rice\"":29.36}}]
+1	[{"name":"Chris","date":"2013-06-21","sub":{"\"juice\"":21.45,"\"bread\"":15.2}},{"name":"Chris","date":"2013-06-21","sub":{"\"juice\"":21.45,"\"bread\"":15.2}},{"name":"Chris","date":"2014-10-11","sub":{"\"rice\"":29.36,"\"grape\"":1200.5}}]
 2	[{"name":"John","date":"2013-08-10","sub":{"\"yogurt\"":126.57,"\"beef\"":210.57}},{"name":"John","date":"2014-06-25","sub":{"\"chocolate\"":3.65,"\"water\"":420.36}},{"name":"John","date":"2015-01-15","sub":{"\"milk\"":27.45}}]
 3	[{"name":"Martin","date":"2014-05-11","sub":{"\"apple\"":30.5,"\"orange\"":41.35}},{"name":"Martin","date":"2014-12-12","sub":{"\"icecream\"":210.03,"\"banana\"":100.56,"\"coffee":500.0}}]
 PREHOOK: query: SELECT c.id, sort_array(collect_set(struct(c.name, o.d, o.sub)))
@@ -254,7 +254,7 @@ POSTHOOK: type: QUERY
 POSTHOOK: Input: default@customers
 POSTHOOK: Input: default@nested_orders
 #### A masked pattern was here ####
-1	[{"col1":"Chris","col2":"2013-06-21","col3":{"\"juice\"":21.45,"\"bread\"":15.2}},{"col1":"Chris","col2":"2014-10-11","col3":{"\"grape\"":1200.5,"\"rice\"":29.36}}]
+1	[{"col1":"Chris","col2":"2013-06-21","col3":{"\"juice\"":21.45,"\"bread\"":15.2}},{"col1":"Chris","col2":"2014-10-11","col3":{"\"rice\"":29.36,"\"grape\"":1200.5}}]
 2	[{"col1":"John","col2":"2013-08-10","col3":{"\"yogurt\"":126.57,"\"beef\"":210.57}},{"col1":"John","col2":"2014-06-25","col3":{"\"chocolate\"":3.65,"\"water\"":420.36}},{"col1":"John","col2":"2015-01-15","col3":{"\"milk\"":27.45}}]
 3	[{"col1":"Martin","col2":"2014-05-11","col3":{"\"apple\"":30.5,"\"orange\"":41.35}},{"col1":"Martin","col2":"2014-12-12","col3":{"\"icecream\"":210.03,"\"banana\"":100.56,"\"coffee":500.0}}]
 PREHOOK: query: SELECT c.id, sort_array(collect_list(struct(c.name, o.d, o.sub)))
@@ -273,7 +273,7 @@ POSTHOOK: type: QUERY
 POSTHOOK: Input: default@customers
 POSTHOOK: Input: default@nested_orders
 #### A masked pattern was here ####
-1	[{"col1":"Chris","col2":"2013-06-21","col3":{"\"juice\"":21.45,"\"bread\"":15.2}},{"col1":"Chris","col2":"2013-06-21","col3":{"\"juice\"":21.45,"\"bread\"":15.2}},{"col1":"Chris","col2":"2014-10-11","col3":{"\"grape\"":1200.5,"\"rice\"":29.36}}]
+1	[{"col1":"Chris","col2":"2013-06-21","col3":{"\"juice\"":21.45,"\"bread\"":15.2}},{"col1":"Chris","col2":"2013-06-21","col3":{"\"juice\"":21.45,"\"bread\"":15.2}},{"col1":"Chris","col2":"2014-10-11","col3":{"\"rice\"":29.36,"\"grape\"":1200.5}}]
 2	[{"col1":"John","col2":"2013-08-10","col3":{"\"yogurt\"":126.57,"\"beef\"":210.57}},{"col1":"John","col2":"2014-06-25","col3":{"\"chocolate\"":3.65,"\"water\"":420.36}},{"col1":"John","col2":"2015-01-15","col3":{"\"milk\"":27.45}}]
 3	[{"col1":"Martin","col2":"2014-05-11","col3":{"\"apple\"":30.5,"\"orange\"":41.35}},{"col1":"Martin","col2":"2014-12-12","col3":{"\"icecream\"":210.03,"\"banana\"":100.56,"\"coffee":500.0}}]
 PREHOOK: query: SELECT c.id, sort_array(collect_set(named_struct("name", c.name, "date", o.d, "sub", map_values(o.sub))))
@@ -292,7 +292,7 @@ POSTHOOK: type: QUERY
 POSTHOOK: Input: default@customers
 POSTHOOK: Input: default@nested_orders
 #### A masked pattern was here ####
-1	[{"name":"Chris","date":"2013-06-21","sub":[21.45,15.2]},{"name":"Chris","date":"2014-10-11","sub":[1200.5,29.36]}]
+1	[{"name":"Chris","date":"2013-06-21","sub":[21.45,15.2]},{"name":"Chris","date":"2014-10-11","sub":[29.36,1200.5]}]
 2	[{"name":"John","date":"2013-08-10","sub":[126.57,210.57]},{"name":"John","date":"2014-06-25","sub":[3.65,420.36]},{"name":"John","date":"2015-01-15","sub":[27.45]}]
 3	[{"name":"Martin","date":"2014-05-11","sub":[30.5,41.35]},{"name":"Martin","date":"2014-12-12","sub":[210.03,100.56,500.0]}]
 PREHOOK: query: SELECT c.id, sort_array(collect_list(named_struct("name", c.name, "date", o.d, "sub", map_values(o.sub))))
@@ -311,7 +311,7 @@ POSTHOOK: type: QUERY
 POSTHOOK: Input: default@customers
 POSTHOOK: Input: default@nested_orders
 #### A masked pattern was here ####
-1	[{"name":"Chris","date":"2013-06-21","sub":[21.45,15.2]},{"name":"Chris","date":"2013-06-21","sub":[21.45,15.2]},{"name":"Chris","date":"2014-10-11","sub":[1200.5,29.36]}]
+1	[{"name":"Chris","date":"2013-06-21","sub":[21.45,15.2]},{"name":"Chris","date":"2013-06-21","sub":[21.45,15.2]},{"name":"Chris","date":"2014-10-11","sub":[29.36,1200.5]}]
 2	[{"name":"John","date":"2013-08-10","sub":[126.57,210.57]},{"name":"John","date":"2014-06-25","sub":[3.65,420.36]},{"name":"John","date":"2015-01-15","sub":[27.45]}]
 3	[{"name":"Martin","date":"2014-05-11","sub":[30.5,41.35]},{"name":"Martin","date":"2014-12-12","sub":[210.03,100.56,500.0]}]
 PREHOOK: query: SELECT c.id, sort_array(collect_set(struct(c.name, o.d, map_values(o.sub))))
@@ -330,7 +330,7 @@ POSTHOOK: type: QUERY
 POSTHOOK: Input: default@customers
 POSTHOOK: Input: default@nested_orders
 #### A masked pattern was here ####
-1	[{"col1":"Chris","col2":"2013-06-21","col3":[21.45,15.2]},{"col1":"Chris","col2":"2014-10-11","col3":[1200.5,29.36]}]
+1	[{"col1":"Chris","col2":"2013-06-21","col3":[21.45,15.2]},{"col1":"Chris","col2":"2014-10-11","col3":[29.36,1200.5]}]
 2	[{"col1":"John","col2":"2013-08-10","col3":[126.57,210.57]},{"col1":"John","col2":"2014-06-25","col3":[3.65,420.36]},{"col1":"John","col2":"2015-01-15","col3":[27.45]}]
 3	[{"col1":"Martin","col2":"2014-05-11","col3":[30.5,41.35]},{"col1":"Martin","col2":"2014-12-12","col3":[210.03,100.56,500.0]}]
 PREHOOK: query: SELECT c.id, sort_array(collect_list(struct(c.name, o.d, map_values(o.sub))))
@@ -349,7 +349,7 @@ POSTHOOK: type: QUERY
 POSTHOOK: Input: default@customers
 POSTHOOK: Input: default@nested_orders
 #### A masked pattern was here ####
-1	[{"col1":"Chris","col2":"2013-06-21","col3":[21.45,15.2]},{"col1":"Chris","col2":"2013-06-21","col3":[21.45,15.2]},{"col1":"Chris","col2":"2014-10-11","col3":[1200.5,29.36]}]
+1	[{"col1":"Chris","col2":"2013-06-21","col3":[21.45,15.2]},{"col1":"Chris","col2":"2013-06-21","col3":[21.45,15.2]},{"col1":"Chris","col2":"2014-10-11","col3":[29.36,1200.5]}]
 2	[{"col1":"John","col2":"2013-08-10","col3":[126.57,210.57]},{"col1":"John","col2":"2014-06-25","col3":[3.65,420.36]},{"col1":"John","col2":"2015-01-15","col3":[27.45]}]
 3	[{"col1":"Martin","col2":"2014-05-11","col3":[30.5,41.35]},{"col1":"Martin","col2":"2014-12-12","col3":[210.03,100.56,500.0]}]
 PREHOOK: query: SELECT c.id, sort_array(collect_set(array(o.amount)))
@@ -444,7 +444,7 @@ POSTHOOK: type: QUERY
 POSTHOOK: Input: default@customers
 POSTHOOK: Input: default@nested_orders
 #### A masked pattern was here ####
-1	[[{"\"juice\"":21.45,"\"bread\"":15.2}],[{"\"grape\"":1200.5,"\"rice\"":29.36}]]
+1	[[{"\"juice\"":21.45,"\"bread\"":15.2}],[{"\"rice\"":29.36,"\"grape\"":1200.5}]]
 2	[[{"\"milk\"":27.45}],[{"\"yogurt\"":126.57,"\"beef\"":210.57}],[{"\"chocolate\"":3.65,"\"water\"":420.36}]]
 3	[[{"\"apple\"":30.5,"\"orange\"":41.35}],[{"\"icecream\"":210.03,"\"banana\"":100.56,"\"coffee":500.0}]]
 PREHOOK: query: SELECT c.id, sort_array(collect_list(array(o.sub)))
@@ -463,7 +463,7 @@ POSTHOOK: type: QUERY
 POSTHOOK: Input: default@customers
 POSTHOOK: Input: default@nested_orders
 #### A masked pattern was here ####
-1	[[{"\"juice\"":21.45,"\"bread\"":15.2}],[{"\"juice\"":21.45,"\"bread\"":15.2}],[{"\"grape\"":1200.5,"\"rice\"":29.36}]]
+1	[[{"\"juice\"":21.45,"\"bread\"":15.2}],[{"\"juice\"":21.45,"\"bread\"":15.2}],[{"\"rice\"":29.36,"\"grape\"":1200.5}]]
 2	[[{"\"milk\"":27.45}],[{"\"yogurt\"":126.57,"\"beef\"":210.57}],[{"\"chocolate\"":3.65,"\"water\"":420.36}]]
 3	[[{"\"apple\"":30.5,"\"orange\"":41.35}],[{"\"icecream\"":210.03,"\"banana\"":100.56,"\"coffee":500.0}]]
 PREHOOK: query: SELECT c.id, sort_array(collect_set(array(map_values(o.sub))))
@@ -482,7 +482,7 @@ POSTHOOK: type: QUERY
 POSTHOOK: Input: default@customers
 POSTHOOK: Input: default@nested_orders
 #### A masked pattern was here ####
-1	[[[21.45,15.2]],[[1200.5,29.36]]]
+1	[[[21.45,15.2]],[[29.36,1200.5]]]
 2	[[[3.65,420.36]],[[27.45]],[[126.57,210.57]]]
 3	[[[30.5,41.35]],[[210.03,100.56,500.0]]]
 PREHOOK: query: SELECT c.id, sort_array(collect_list(array(map_values(o.sub))))
@@ -501,7 +501,7 @@ POSTHOOK: type: QUERY
 POSTHOOK: Input: default@customers
 POSTHOOK: Input: default@nested_orders
 #### A masked pattern was here ####
-1	[[[21.45,15.2]],[[21.45,15.2]],[[1200.5,29.36]]]
+1	[[[21.45,15.2]],[[21.45,15.2]],[[29.36,1200.5]]]
 2	[[[3.65,420.36]],[[27.45]],[[126.57,210.57]]]
 3	[[[30.5,41.35]],[[210.03,100.56,500.0]]]
 PREHOOK: query: SELECT c.id, sort_array(collect_set(map("amount", o.amount)))
@@ -596,7 +596,7 @@ POSTHOOK: type: QUERY
 POSTHOOK: Input: default@customers
 POSTHOOK: Input: default@nested_orders
 #### A masked pattern was here ####
-1	[{"sub":{"\"juice\"":21.45,"\"bread\"":15.2}},{"sub":{"\"grape\"":1200.5,"\"rice\"":29.36}}]
+1	[{"sub":{"\"juice\"":21.45,"\"bread\"":15.2}},{"sub":{"\"rice\"":29.36,"\"grape\"":1200.5}}]
 2	[{"sub":{"\"milk\"":27.45}},{"sub":{"\"yogurt\"":126.57,"\"beef\"":210.57}},{"sub":{"\"chocolate\"":3.65,"\"water\"":420.36}}]
 3	[{"sub":{"\"apple\"":30.5,"\"orange\"":41.35}},{"sub":{"\"icecream\"":210.03,"\"banana\"":100.56,"\"coffee":500.0}}]
 PREHOOK: query: SELECT c.id, sort_array(collect_list(map("sub", o.sub)))
@@ -615,7 +615,7 @@ POSTHOOK: type: QUERY
 POSTHOOK: Input: default@customers
 POSTHOOK: Input: default@nested_orders
 #### A masked pattern was here ####
-1	[{"sub":{"\"juice\"":21.45,"\"bread\"":15.2}},{"sub":{"\"juice\"":21.45,"\"bread\"":15.2}},{"sub":{"\"grape\"":1200.5,"\"rice\"":29.36}}]
+1	[{"sub":{"\"juice\"":21.45,"\"bread\"":15.2}},{"sub":{"\"juice\"":21.45,"\"bread\"":15.2}},{"sub":{"\"rice\"":29.36,"\"grape\"":1200.5}}]
 2	[{"sub":{"\"milk\"":27.45}},{"sub":{"\"yogurt\"":126.57,"\"beef\"":210.57}},{"sub":{"\"chocolate\"":3.65,"\"water\"":420.36}}]
 3	[{"sub":{"\"apple\"":30.5,"\"orange\"":41.35}},{"sub":{"\"icecream\"":210.03,"\"banana\"":100.56,"\"coffee":500.0}}]
 PREHOOK: query: SELECT c.id, sort_array(collect_set(map("sub", map_values(o.sub))))
@@ -634,7 +634,7 @@ POSTHOOK: type: QUERY
 POSTHOOK: Input: default@customers
 POSTHOOK: Input: default@nested_orders
 #### A masked pattern was here ####
-1	[{"sub":[21.45,15.2]},{"sub":[1200.5,29.36]}]
+1	[{"sub":[21.45,15.2]},{"sub":[29.36,1200.5]}]
 2	[{"sub":[3.65,420.36]},{"sub":[27.45]},{"sub":[126.57,210.57]}]
 3	[{"sub":[30.5,41.35]},{"sub":[210.03,100.56,500.0]}]
 PREHOOK: query: SELECT c.id, sort_array(collect_list(map("sub", map_values(o.sub))))
@@ -653,7 +653,7 @@ POSTHOOK: type: QUERY
 POSTHOOK: Input: default@customers
 POSTHOOK: Input: default@nested_orders
 #### A masked pattern was here ####
-1	[{"sub":[21.45,15.2]},{"sub":[21.45,15.2]},{"sub":[1200.5,29.36]}]
+1	[{"sub":[21.45,15.2]},{"sub":[21.45,15.2]},{"sub":[29.36,1200.5]}]
 2	[{"sub":[3.65,420.36]},{"sub":[27.45]},{"sub":[126.57,210.57]}]
 3	[{"sub":[30.5,41.35]},{"sub":[210.03,100.56,500.0]}]
 PREHOOK: query: DROP TABLE customer

http://git-wip-us.apache.org/repos/asf/hive/blob/84481cf9/ql/src/test/results/clientpositive/orc_int_type_promotion.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/orc_int_type_promotion.q.out b/ql/src/test/results/clientpositive/orc_int_type_promotion.q.out
index d3837a3..4b7b0b0 100644
--- a/ql/src/test/results/clientpositive/orc_int_type_promotion.q.out
+++ b/ql/src/test/results/clientpositive/orc_int_type_promotion.q.out
@@ -126,7 +126,7 @@ POSTHOOK: query: select * from alltypes_orc
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@alltypes_orc
 #### A masked pattern was here ####
-true	10	100	1000	10000	4.0	20.0	4.222	1969-12-31 15:59:58.174	1970-01-01	string	hello	hello	{"k2":"v2","k1":"v1"}	[100,200]	{"c1":null,"c2":" \"foo\"}"}
+true	10	100	1000	10000	4.0	20.0	4.222	1969-12-31 15:59:58.174	1970-01-01	string	hello	hello	{"k1":"v1","k2":"v2"}	[100,200]	{"c1":null,"c2":" \"foo\"}"}
 false	20	200	2000	20000	8.0	40.0	2.222	1970-12-31 15:59:58.174	1971-01-01	abcd	world	world	{"k3":"v3","k4":"v4"}	[200,300]	{"c1":null,"c2":" \"bar\"}"}
 PREHOOK: query: alter table alltypes_orc change si si int
 PREHOOK: type: ALTERTABLE_RENAMECOL
@@ -144,7 +144,7 @@ POSTHOOK: query: select * from alltypes_orc
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@alltypes_orc
 #### A masked pattern was here ####
-true	10	100	1000	10000	4.0	20.0	4.222	1969-12-31 15:59:58.174	1970-01-01	string	hello	hello	{"k2":"v2","k1":"v1"}	[100,200]	{"c1":null,"c2":" \"foo\"}"}
+true	10	100	1000	10000	4.0	20.0	4.222	1969-12-31 15:59:58.174	1970-01-01	string	hello	hello	{"k1":"v1","k2":"v2"}	[100,200]	{"c1":null,"c2":" \"foo\"}"}
 false	20	200	2000	20000	8.0	40.0	2.222	1970-12-31 15:59:58.174	1971-01-01	abcd	world	world	{"k3":"v3","k4":"v4"}	[200,300]	{"c1":null,"c2":" \"bar\"}"}
 PREHOOK: query: alter table alltypes_orc change si si bigint
 PREHOOK: type: ALTERTABLE_RENAMECOL
@@ -170,7 +170,7 @@ POSTHOOK: query: select * from alltypes_orc
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@alltypes_orc
 #### A masked pattern was here ####
-true	10	100	1000	10000	4.0	20.0	4.222	1969-12-31 15:59:58.174	1970-01-01	string	hello	hello	{"k2":"v2","k1":"v1"}	[100,200]	{"c1":null,"c2":" \"foo\"}"}
+true	10	100	1000	10000	4.0	20.0	4.222	1969-12-31 15:59:58.174	1970-01-01	string	hello	hello	{"k1":"v1","k2":"v2"}	[100,200]	{"c1":null,"c2":" \"foo\"}"}
 false	20	200	2000	20000	8.0	40.0	2.222	1970-12-31 15:59:58.174	1971-01-01	abcd	world	world	{"k3":"v3","k4":"v4"}	[200,300]	{"c1":null,"c2":" \"bar\"}"}
 PREHOOK: query: explain select ti, si, i, bi from alltypes_orc
 PREHOOK: type: QUERY

http://git-wip-us.apache.org/repos/asf/hive/blob/84481cf9/ql/src/test/results/clientpositive/orc_nested_column_pruning.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/orc_nested_column_pruning.q.out b/ql/src/test/results/clientpositive/orc_nested_column_pruning.q.out
index e75ba9d..55a8463 100644
--- a/ql/src/test/results/clientpositive/orc_nested_column_pruning.q.out
+++ b/ql/src/test/results/clientpositive/orc_nested_column_pruning.q.out
@@ -515,7 +515,7 @@ POSTHOOK: query: SELECT s2.f8 FROM nested_tbl_1 WHERE s1.f2 = 'foo' AND size(s2.
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@nested_tbl_1
 #### A masked pattern was here ####
-{"f9":true,"f10":[10,11],"f11":{"key2":false,"key1":true}}
+{"f9":true,"f10":[10,11],"f11":{"key1":true,"key2":false}}
 PREHOOK: query: EXPLAIN SELECT col1, col2 FROM nested_tbl_1
 LATERAL VIEW explode(s2.f8.f10) tbl1 AS col1
 LATERAL VIEW explode(s3.f12) tbl2 AS col2

http://git-wip-us.apache.org/repos/asf/hive/blob/84481cf9/ql/src/test/results/clientpositive/udf_sort_array.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/udf_sort_array.q.out b/ql/src/test/results/clientpositive/udf_sort_array.q.out
index f375e85..0989a16 100644
--- a/ql/src/test/results/clientpositive/udf_sort_array.q.out
+++ b/ql/src/test/results/clientpositive/udf_sort_array.q.out
@@ -105,7 +105,7 @@ POSTHOOK: query: SELECT sort_array(array(map("b", 2, "a", 9, "c", 7), map("c", 3
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src
 #### A masked pattern was here ####
-[{"a":1,"b":5,"c":3},{"a":1,"b":8,"c":6},{"a":9,"b":2,"c":7}]
+[{"c":3,"b":5,"a":1},{"a":1,"c":6,"b":8},{"b":2,"a":9,"c":7}]
 PREHOOK: query: CREATE TABLE dest1 (
 	tinyints ARRAY<TINYINT>,
 	smallints ARRAY<SMALLINT>,

http://git-wip-us.apache.org/repos/asf/hive/blob/84481cf9/serde/src/java/org/apache/hadoop/hive/serde2/avro/AvroSerializer.java
----------------------------------------------------------------------
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/avro/AvroSerializer.java b/serde/src/java/org/apache/hadoop/hive/serde2/avro/AvroSerializer.java
index 83e5d68..b4c9c22 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/avro/AvroSerializer.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/avro/AvroSerializer.java
@@ -20,7 +20,7 @@ package org.apache.hadoop.hive.serde2.avro;
 import java.sql.Date;
 import java.sql.Timestamp;
 import java.util.ArrayList;
-import java.util.HashMap;
+import java.util.LinkedHashMap;
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
@@ -262,7 +262,7 @@ class AvroSerializer {
     Map<?,?> map = fieldOI.getMap(structFieldData);
     Schema valueType = schema.getValueType();
 
-    Map<Object, Object> deserialized = new HashMap<Object, Object>(fieldOI.getMapSize(structFieldData));
+    Map<Object, Object> deserialized = new LinkedHashMap<Object, Object>(fieldOI.getMapSize(structFieldData));
 
     for (Map.Entry<?, ?> entry : map.entrySet()) {
       deserialized.put(serialize(mapKeyTypeInfo, mapKeyObjectInspector, entry.getKey(), STRING_SCHEMA),

http://git-wip-us.apache.org/repos/asf/hive/blob/84481cf9/serde/src/java/org/apache/hadoop/hive/serde2/binarysortable/BinarySortableSerDe.java
----------------------------------------------------------------------
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/binarysortable/BinarySortableSerDe.java b/serde/src/java/org/apache/hadoop/hive/serde2/binarysortable/BinarySortableSerDe.java
index e5b90c2..a48d4fe 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/binarysortable/BinarySortableSerDe.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/binarysortable/BinarySortableSerDe.java
@@ -23,7 +23,7 @@ import java.math.BigInteger;
 import java.nio.charset.Charset;
 import java.util.ArrayList;
 import java.util.Arrays;
-import java.util.HashMap;
+import java.util.LinkedHashMap;
 import java.util.List;
 import java.util.Map;
 import java.util.Properties;
@@ -550,10 +550,10 @@ public class BinarySortableSerDe extends AbstractSerDe {
 
       // Create the map if needed
       Map<Object, Object> r;
-      if (reuse == null) {
-        r = new HashMap<Object, Object>();
+      if (reuse == null || reuse.getClass() != LinkedHashMap.class) {
+        r = new LinkedHashMap<Object, Object>();
       } else {
-        r = (HashMap<Object, Object>) reuse;
+        r = (Map<Object, Object>) reuse;
         r.clear();
       }
 

http://git-wip-us.apache.org/repos/asf/hive/blob/84481cf9/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeTypeMap.java
----------------------------------------------------------------------
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeTypeMap.java b/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeTypeMap.java
index c7f4930..3f086cd 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeTypeMap.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDeTypeMap.java
@@ -19,7 +19,7 @@
 package org.apache.hadoop.hive.serde2.dynamic_type;
 
 import java.util.Collections;
-import java.util.HashMap;
+import java.util.LinkedHashMap;
 import java.util.Map;
 
 import org.apache.hadoop.hive.serde2.SerDeException;
@@ -93,12 +93,12 @@ public class DynamicSerDeTypeMap extends DynamicSerDeTypeBase {
   @Override
   public Map<Object, Object> deserialize(Object reuse, TProtocol iprot)
       throws SerDeException, TException, IllegalAccessException {
-    HashMap<Object, Object> deserializeReuse;
-    if (reuse != null) {
-      deserializeReuse = (HashMap<Object, Object>) reuse;
+    Map<Object, Object> deserializeReuse;
+    if (reuse != null && reuse.getClass() == LinkedHashMap.class) {
+      deserializeReuse = (Map<Object, Object>) reuse;
       deserializeReuse.clear();
     } else {
-      deserializeReuse = new HashMap<Object, Object>();
+      deserializeReuse = new LinkedHashMap<Object, Object>();
     }
     TMap themap = iprot.readMapBegin();
     if (themap == null) {

http://git-wip-us.apache.org/repos/asf/hive/blob/84481cf9/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/ObjectInspectorUtils.java
----------------------------------------------------------------------
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/ObjectInspectorUtils.java b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/ObjectInspectorUtils.java
index 8823d41..635ea04 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/ObjectInspectorUtils.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/ObjectInspectorUtils.java
@@ -23,8 +23,8 @@ import java.lang.reflect.Modifier;
 import java.lang.reflect.Type;
 import java.util.ArrayList;
 import java.util.Arrays;
-import java.util.HashMap;
 import java.util.Iterator;
+import java.util.LinkedHashMap;
 import java.util.List;
 import java.util.Map;
 
@@ -445,7 +445,7 @@ public final class ObjectInspectorUtils {
     }
     case MAP: {
       MapObjectInspector moi = (MapObjectInspector) oi;
-      HashMap<Object, Object> map = new HashMap<Object, Object>();
+      Map<Object, Object> map = new LinkedHashMap<Object, Object>();
       Map<? extends Object, ? extends Object> omap = moi.getMap(o);
       for (Map.Entry<? extends Object, ? extends Object> entry : omap
           .entrySet()) {

http://git-wip-us.apache.org/repos/asf/hive/blob/84481cf9/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/StandardMapObjectInspector.java
----------------------------------------------------------------------
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/StandardMapObjectInspector.java b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/StandardMapObjectInspector.java
index de41b97..bb28e65 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/StandardMapObjectInspector.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/StandardMapObjectInspector.java
@@ -18,7 +18,7 @@
 
 package org.apache.hadoop.hive.serde2.objectinspector;
 
-import java.util.HashMap;
+import java.util.LinkedHashMap;
 import java.util.Map;
 
 /**
@@ -100,27 +100,27 @@ public class StandardMapObjectInspector implements SettableMapObjectInspector {
   // SettableMapObjectInspector
   @Override
   public Object create() {
-    Map<Object, Object> m = new HashMap<Object, Object>();
+    Map<Object, Object> m = new LinkedHashMap<Object, Object>();
     return m;
   }
 
   @Override
   public Object clear(Object map) {
-    Map<Object, Object> m = (HashMap<Object, Object>) map;
+    Map<Object, Object> m = (Map<Object, Object>) map;
     m.clear();
     return m;
   }
 
   @Override
   public Object put(Object map, Object key, Object value) {
-    Map<Object, Object> m = (HashMap<Object, Object>) map;
+    Map<Object, Object> m = (Map<Object, Object>) map;
     m.put(key, value);
     return m;
   }
 
   @Override
   public Object remove(Object map, Object key) {
-    Map<Object, Object> m = (HashMap<Object, Object>) map;
+    Map<Object, Object> m = (Map<Object, Object>) map;
     m.remove(key);
     return m;
   }
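
A rough usage sketch of the settable-map path after this change (not part of the patch; it assumes the usual ObjectInspectorFactory/PrimitiveObjectInspectorFactory entry points, while the create()/put() signatures are the ones shown in the diff above):

    import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
    import org.apache.hadoop.hive.serde2.objectinspector.StandardMapObjectInspector;
    import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;

    public class OrderedMapInspectorDemo {
      public static void main(String[] args) {
        StandardMapObjectInspector moi = ObjectInspectorFactory.getStandardMapObjectInspector(
            PrimitiveObjectInspectorFactory.javaStringObjectInspector,
            PrimitiveObjectInspectorFactory.javaStringObjectInspector);
        Object map = moi.create();   // backed by a LinkedHashMap after this commit
        moi.put(map, "k1", "v1");
        moi.put(map, "k2", "v2");
        moi.put(map, "k3", "v3");
        System.out.println(map);     // prints {k1=v1, k2=v2, k3=v3}, in insertion order
      }
    }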