Posted to commits@asterixdb.apache.org by ti...@apache.org on 2016/02/02 06:19:14 UTC

[1/3] incubator-asterixdb git commit: Some exception cleanup

Repository: incubator-asterixdb
Updated Branches:
  refs/heads/master f79a896da -> 9dcba3c9f


http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/9dcba3c9/asterix-om/src/main/java/org/apache/asterix/builders/RecordBuilder.java
----------------------------------------------------------------------
diff --git a/asterix-om/src/main/java/org/apache/asterix/builders/RecordBuilder.java b/asterix-om/src/main/java/org/apache/asterix/builders/RecordBuilder.java
index a36238d..ffef923 100644
--- a/asterix-om/src/main/java/org/apache/asterix/builders/RecordBuilder.java
+++ b/asterix-om/src/main/java/org/apache/asterix/builders/RecordBuilder.java
@@ -26,7 +26,6 @@ import java.io.DataOutput;
 import java.io.IOException;
 import java.util.Arrays;
 
-import org.apache.asterix.common.exceptions.AsterixException;
 import org.apache.asterix.dataflow.data.nontagged.serde.SerializerDeserializerUtil;
 import org.apache.asterix.om.types.ARecordType;
 import org.apache.asterix.om.types.ATypeTag;
@@ -177,29 +176,20 @@ public class RecordBuilder implements IARecordBuilder {
     }
 
     @Override
-    public void addField(IValueReference name, IValueReference value) throws AsterixException {
+    public void addField(IValueReference name, IValueReference value) throws HyracksDataException {
         if (numberOfOpenFields == openPartOffsets.length) {
             openPartOffsets = Arrays.copyOf(openPartOffsets, openPartOffsets.length + DEFAULT_NUM_OPEN_FIELDS);
             openFieldNameLengths = Arrays.copyOf(openFieldNameLengths,
                     openFieldNameLengths.length + DEFAULT_NUM_OPEN_FIELDS);
         }
-        int fieldNameHashCode;
-        try {
-            fieldNameHashCode = utf8HashFunction.hash(name.getByteArray(), name.getStartOffset() + 1,
+        int fieldNameHashCode = utf8HashFunction.hash(name.getByteArray(), name.getStartOffset() + 1,
                     name.getLength() - 1);
-        } catch (HyracksDataException e1) {
-            throw new AsterixException(e1);
-        }
         if (recType != null) {
             int cFieldPos;
-            try {
                 cFieldPos = recTypeInfo.getFieldIndex(name.getByteArray(), name.getStartOffset() + 1,
                         name.getLength() - 1);
-            } catch (HyracksDataException e) {
-                throw new AsterixException(e);
-            }
             if (cFieldPos >= 0) {
-                throw new AsterixException("Open field \"" + recType.getFieldNames()[cFieldPos]
+                throw new HyracksDataException("Open field \"" + recType.getFieldNames()[cFieldPos]
                         + "\" has the same field name as closed field at index " + cFieldPos);
             }
         }
@@ -212,7 +202,7 @@ public class RecordBuilder implements IARecordBuilder {
     }
 
     @Override
-    public void write(DataOutput out, boolean writeTypeTag) throws IOException, AsterixException {
+    public void write(DataOutput out, boolean writeTypeTag) throws HyracksDataException {
         int h = headerSize;
         int recordLength;
         // prepare the open part
@@ -231,7 +221,7 @@ public class RecordBuilder implements IARecordBuilder {
                             openBytes, (int) openPartOffsets[i], openFieldNameLengths[i]) == 0) {
                         String field = utf8SerDer.deserialize(new DataInputStream(new ByteArrayInputStream(openBytes,
                                 (int) openPartOffsets[i], openFieldNameLengths[i])));
-                        throw new AsterixException(
+                        throw new HyracksDataException(
                                 "Open fields " + (i - 1) + " and " + i + " have the same field name \"" + field + "\"");
                     }
                 }
@@ -248,37 +238,46 @@ public class RecordBuilder implements IARecordBuilder {
                 offsetPosition += 8;
             }
             recordLength = openPartOffset + 4 + openPartOffsetArraySize + openPartOutputStream.size();
-        } else
+        } else {
             recordLength = h + numberOfSchemaFields * 4 + closedPartOutputStream.size();
-
-        // write the record header
-        if (writeTypeTag) {
-            out.writeByte(ATypeTag.SERIALIZED_RECORD_TYPE_TAG);
-        }
-        out.writeInt(recordLength);
-        if (isOpen) {
-            if (this.numberOfOpenFields > 0) {
-                out.writeBoolean(true);
-                out.writeInt(openPartOffset);
-            } else
-                out.writeBoolean(false);
         }
+        writeRecord(out, writeTypeTag, h, recordLength);
+    }
 
-        // write the closed part
-        if (numberOfSchemaFields > 0) {
-            out.writeInt(numberOfClosedFields);
-            if (isNullable)
-                out.write(nullBitMap, 0, nullBitMapSize);
-            for (int i = 0; i < numberOfSchemaFields; i++)
-                out.writeInt(closedPartOffsets[i] + h + (numberOfSchemaFields * 4));
-            out.write(closedPartOutputStream.toByteArray());
-        }
+    private void writeRecord(DataOutput out, boolean writeTypeTag, int headerSize, int recordLength)
+            throws HyracksDataException {
+        try {
+            // write the record header
+            if (writeTypeTag) {
+                out.writeByte(ATypeTag.SERIALIZED_RECORD_TYPE_TAG);
+            }
+            out.writeInt(recordLength);
+            if (isOpen) {
+                if (this.numberOfOpenFields > 0) {
+                    out.writeBoolean(true);
+                    out.writeInt(openPartOffset);
+                } else
+                    out.writeBoolean(false);
+            }
 
-        // write the open part
-        if (numberOfOpenFields > 0) {
-            out.writeInt(numberOfOpenFields);
-            out.write(openPartOffsetArray, 0, openPartOffsetArraySize);
-            out.write(openPartOutputStream.toByteArray());
+            // write the closed part
+            if (numberOfSchemaFields > 0) {
+                out.writeInt(numberOfClosedFields);
+                if (isNullable)
+                    out.write(nullBitMap, 0, nullBitMapSize);
+                for (int i = 0; i < numberOfSchemaFields; i++)
+                    out.writeInt(closedPartOffsets[i] + headerSize + (numberOfSchemaFields * 4));
+                out.write(closedPartOutputStream.toByteArray());
+            }
+
+            // write the open part
+            if (numberOfOpenFields > 0) {
+                out.writeInt(numberOfOpenFields);
+                out.write(openPartOffsetArray, 0, openPartOffsetArraySize);
+                out.write(openPartOutputStream.toByteArray());
+            }
+        } catch (IOException e) {
+            throw new HyracksDataException(e);
         }
     }
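
The net effect of the hunk above is that RecordBuilder's write path now funnels every checked IOException from DataOutput through a single catch in the new writeRecord helper and rethrows it as HyracksDataException, so write() no longer declares IOException or AsterixException. A minimal standalone sketch of that idiom (the class and method names here are illustrative, not part of the commit):

    import java.io.DataOutput;
    import java.io.IOException;
    import org.apache.hyracks.api.exceptions.HyracksDataException;

    class WriteSketch {
        // One wrap point for the whole write path, mirroring the new
        // RecordBuilder.writeRecord above.
        static void writeAll(DataOutput out, byte[] payload) throws HyracksDataException {
            try {
                out.writeInt(payload.length); // may throw IOException
                out.write(payload);           // may throw IOException
            } catch (IOException e) {
                throw new HyracksDataException(e);
            }
        }
    }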
 

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/9dcba3c9/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/serde/ARecordSerializerDeserializer.java
----------------------------------------------------------------------
diff --git a/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/serde/ARecordSerializerDeserializer.java b/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/serde/ARecordSerializerDeserializer.java
index 543b20e..53943b8 100644
--- a/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/serde/ARecordSerializerDeserializer.java
+++ b/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/serde/ARecordSerializerDeserializer.java
@@ -182,11 +182,7 @@ public class ARecordSerializerDeserializer implements ISerializerDeserializer<AR
                 serializers[fieldIndex].serialize(instance.getValueByPos(fieldIndex), fieldValue.getDataOutput());
                 recordBuilder.addField(fieldIndex, fieldValue);
             }
-            try {
-                recordBuilder.write(out, writeTypeTag);
-            } catch (IOException | AsterixException e) {
-                throw new HyracksDataException(e);
-            }
+            recordBuilder.write(out, writeTypeTag);
         } else {
             throw new NotImplementedException("Serializer for schemaless records is not implemented.");
         }

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/9dcba3c9/asterix-om/src/main/java/org/apache/asterix/om/base/AMutableInterval.java
----------------------------------------------------------------------
diff --git a/asterix-om/src/main/java/org/apache/asterix/om/base/AMutableInterval.java b/asterix-om/src/main/java/org/apache/asterix/om/base/AMutableInterval.java
index fa9b587..15bda6b 100644
--- a/asterix-om/src/main/java/org/apache/asterix/om/base/AMutableInterval.java
+++ b/asterix-om/src/main/java/org/apache/asterix/om/base/AMutableInterval.java
@@ -18,7 +18,7 @@
  */
 package org.apache.asterix.om.base;
 
-import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
 
 public class AMutableInterval extends AInterval {
 
@@ -26,9 +26,9 @@ public class AMutableInterval extends AInterval {
         super(intervalStart, intervalEnd, typetag);
     }
 
-    public void setValue(long intervalStart, long intervalEnd, byte typetag) throws AlgebricksException {
+    public void setValue(long intervalStart, long intervalEnd, byte typetag) throws HyracksDataException {
         if (intervalStart >= intervalEnd) {
-            throw new AlgebricksException("Invalid interval: the starting time should be less than the ending time.");
+            throw new HyracksDataException("Invalid interval: the starting time should be less than the ending time.");
         }
         this.intervalStart = intervalStart;
         this.intervalEnd = intervalEnd;
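
Since setValue() now throws HyracksDataException instead of AlgebricksException, runtime callers wrap it at the evaluator boundary, as IntervalBinDescriptor does later in this patch. A hypothetical caller sketch, assuming only the signatures shown in this diff:

    import org.apache.asterix.om.base.AMutableInterval;
    import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;
    import org.apache.hyracks.api.exceptions.HyracksDataException;

    class IntervalCallerSketch {
        static void setBin(AMutableInterval interval, long start, long end, byte tag)
                throws AlgebricksException {
            try {
                interval.setValue(start, end, tag); // throws HyracksDataException when start >= end
            } catch (HyracksDataException e) {
                throw new AlgebricksException(e);
            }
        }
    }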

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/9dcba3c9/asterix-om/src/main/java/org/apache/asterix/om/typecomputer/impl/RecordAddFieldsTypeComputer.java
----------------------------------------------------------------------
diff --git a/asterix-om/src/main/java/org/apache/asterix/om/typecomputer/impl/RecordAddFieldsTypeComputer.java b/asterix-om/src/main/java/org/apache/asterix/om/typecomputer/impl/RecordAddFieldsTypeComputer.java
index 4f8d1f2..05002a6 100644
--- a/asterix-om/src/main/java/org/apache/asterix/om/typecomputer/impl/RecordAddFieldsTypeComputer.java
+++ b/asterix-om/src/main/java/org/apache/asterix/om/typecomputer/impl/RecordAddFieldsTypeComputer.java
@@ -19,7 +19,6 @@
 
 package org.apache.asterix.om.typecomputer.impl;
 
-import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collections;
@@ -85,16 +84,12 @@ public class RecordAddFieldsTypeComputer implements IResultTypeComputer {
         Collections.sort(resultFieldNames);
 
         for (String fieldName : resultFieldNames) {
-            try {
-                if (inputRecordType.getFieldType(fieldName).getTypeTag() == ATypeTag.RECORD) {
-                    ARecordType nestedType = (ARecordType) inputRecordType.getFieldType(fieldName);
-                    //Deep Copy prevents altering of input types
-                    resultFieldTypes.add(nestedType.deepCopy(nestedType));
-                } else {
-                    resultFieldTypes.add(inputRecordType.getFieldType(fieldName));
-                }
-            } catch (IOException e) {
-                throw new IllegalStateException(e);
+            if (inputRecordType.getFieldType(fieldName).getTypeTag() == ATypeTag.RECORD) {
+                ARecordType nestedType = (ARecordType) inputRecordType.getFieldType(fieldName);
+                //Deep Copy prevents altering of input types
+                resultFieldTypes.add(nestedType.deepCopy(nestedType));
+            } else {
+                resultFieldTypes.add(inputRecordType.getFieldType(fieldName));
             }
         }
 

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/9dcba3c9/asterix-om/src/main/java/org/apache/asterix/om/typecomputer/impl/RecordMergeTypeComputer.java
----------------------------------------------------------------------
diff --git a/asterix-om/src/main/java/org/apache/asterix/om/typecomputer/impl/RecordMergeTypeComputer.java b/asterix-om/src/main/java/org/apache/asterix/om/typecomputer/impl/RecordMergeTypeComputer.java
index 653b26e..9611b9c 100644
--- a/asterix-om/src/main/java/org/apache/asterix/om/typecomputer/impl/RecordMergeTypeComputer.java
+++ b/asterix-om/src/main/java/org/apache/asterix/om/typecomputer/impl/RecordMergeTypeComputer.java
@@ -19,10 +19,10 @@
 
 package org.apache.asterix.om.typecomputer.impl;
 
-import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Collections;
 import java.util.List;
+
 import org.apache.asterix.common.exceptions.AsterixException;
 import org.apache.asterix.om.typecomputer.base.IResultTypeComputer;
 import org.apache.asterix.om.types.ARecordType;
@@ -54,8 +54,8 @@ public class RecordMergeTypeComputer implements IResultTypeComputer {
         ARecordType recType1 = TypeComputerUtils.extractRecordType(t1);
 
         if (recType0 == null || recType1 == null) {
-            throw new AlgebricksException("record-merge expects possibly NULL records as arguments, but got (" + t0
-                    + ", " + t1 + ")");
+            throw new AlgebricksException(
+                    "record-merge expects possibly NULL records as arguments, but got (" + t0 + ", " + t1 + ")");
         }
 
         List<String> resultFieldNames = new ArrayList<>();
@@ -66,16 +66,12 @@ public class RecordMergeTypeComputer implements IResultTypeComputer {
 
         List<IAType> resultFieldTypes = new ArrayList<>();
         for (String fieldName : resultFieldNames) {
-            try {
-                if (recType0.getFieldType(fieldName).getTypeTag() == ATypeTag.RECORD) {
-                    ARecordType nestedType = (ARecordType) recType0.getFieldType(fieldName);
-                    //Deep Copy prevents altering of input types
-                    resultFieldTypes.add(nestedType.deepCopy(nestedType));
-                } else {
-                    resultFieldTypes.add(recType0.getFieldType(fieldName));
-                }
-            } catch (IOException e) {
-                throw new IllegalStateException(e);
+            if (recType0.getFieldType(fieldName).getTypeTag() == ATypeTag.RECORD) {
+                ARecordType nestedType = (ARecordType) recType0.getFieldType(fieldName);
+                //Deep Copy prevents altering of input types
+                resultFieldTypes.add(nestedType.deepCopy(nestedType));
+            } else {
+                resultFieldTypes.add(recType0.getFieldType(fieldName));
             }
         }
 
@@ -111,7 +107,7 @@ public class RecordMergeTypeComputer implements IResultTypeComputer {
         boolean isOpen = recType0.isOpen() || recType1.isOpen();
 
         IAType resultType = new ARecordType(resultTypeName, resultFieldNames.toArray(new String[] {}),
-                    resultFieldTypes.toArray(new IAType[] {}), isOpen);
+                resultFieldTypes.toArray(new IAType[] {}), isOpen);
 
         if (nullable) {
             resultType = AUnionType.createNullableType(resultType);
@@ -142,12 +138,12 @@ public class RecordMergeTypeComputer implements IResultTypeComputer {
                 } else {
                     IAType[] combinedFieldTypes = ArrayUtils.addAll(resultType.getFieldTypes().clone(),
                             fieldType1Copy.getFieldTypes()[i]);
-                    resultType = new ARecordType(resultType.getTypeName(), ArrayUtils.addAll(
-                            resultType.getFieldNames(), fieldType1Copy.getFieldNames()[i]), combinedFieldTypes,
-                            resultType.isOpen());
+                    resultType = new ARecordType(resultType.getTypeName(),
+                            ArrayUtils.addAll(resultType.getFieldNames(), fieldType1Copy.getFieldNames()[i]),
+                            combinedFieldTypes, resultType.isOpen());
                 }
 
-            } catch (IOException | AsterixException e) {
+            } catch (AsterixException e) {
                 throw new AlgebricksException(e);
             }
         }

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/9dcba3c9/asterix-om/src/main/java/org/apache/asterix/om/typecomputer/impl/RecordRemoveFieldsTypeComputer.java
----------------------------------------------------------------------
diff --git a/asterix-om/src/main/java/org/apache/asterix/om/typecomputer/impl/RecordRemoveFieldsTypeComputer.java b/asterix-om/src/main/java/org/apache/asterix/om/typecomputer/impl/RecordRemoveFieldsTypeComputer.java
index 9f4d155..69a4068 100644
--- a/asterix-om/src/main/java/org/apache/asterix/om/typecomputer/impl/RecordRemoveFieldsTypeComputer.java
+++ b/asterix-om/src/main/java/org/apache/asterix/om/typecomputer/impl/RecordRemoveFieldsTypeComputer.java
@@ -19,7 +19,6 @@
 
 package org.apache.asterix.om.typecomputer.impl;
 
-import java.io.IOException;
 import java.util.ArrayDeque;
 import java.util.ArrayList;
 import java.util.Deque;
@@ -196,21 +195,15 @@ public class RecordRemoveFieldsTypeComputer implements IResultTypeComputer {
         return false;
     }
 
-    private void addField(ARecordType inputRecordType,  String fieldName, List<String> resultFieldNames, List<IAType>
-            resultFieldTypes)
-            throws AlgebricksException {
-        try {
-            resultFieldNames.add(fieldName);
-            if (inputRecordType.getFieldType(fieldName).getTypeTag() == ATypeTag.RECORD) {
-                ARecordType nestedType = (ARecordType) inputRecordType.getFieldType(fieldName);
-                //Deep Copy prevents altering of input types
-                resultFieldTypes.add(nestedType.deepCopy(nestedType));
-            } else {
-                resultFieldTypes.add(inputRecordType.getFieldType(fieldName));
-            }
-
-        } catch (IOException e) {
-            throw new AlgebricksException(e);
+    private void addField(ARecordType inputRecordType, String fieldName, List<String> resultFieldNames,
+            List<IAType> resultFieldTypes) throws AlgebricksException {
+        resultFieldNames.add(fieldName);
+        if (inputRecordType.getFieldType(fieldName).getTypeTag() == ATypeTag.RECORD) {
+            ARecordType nestedType = (ARecordType) inputRecordType.getFieldType(fieldName);
+            //Deep Copy prevents altering of input types
+            resultFieldTypes.add(nestedType.deepCopy(nestedType));
+        } else {
+            resultFieldTypes.add(inputRecordType.getFieldType(fieldName));
         }
     }
 
@@ -245,7 +238,7 @@ public class RecordRemoveFieldsTypeComputer implements IResultTypeComputer {
         String resultTypeName = "result-record(" + inputRecordType.getTypeName() + ")";
 
         return new ARecordType(resultTypeName, resultFieldNames.toArray(new String[n]),
-                    resultFieldTypes.toArray(new IAType[n]), true); // Make the output type open always
+                resultFieldTypes.toArray(new IAType[n]), true); // Make the output type open always
 
     }
 
@@ -285,9 +278,8 @@ public class RecordRemoveFieldsTypeComputer implements IResultTypeComputer {
         A method to deep copy a record the path validation
              i.e., keep only fields that are valid
      */
-    private ARecordType deepCheckAndCopy(Deque<String> fieldPath, ARecordType srcRecType, List<List<String>>
-            pathList, boolean isOpen)
-            throws AlgebricksException {
+    private ARecordType deepCheckAndCopy(Deque<String> fieldPath, ARecordType srcRecType, List<List<String>> pathList,
+            boolean isOpen) throws AlgebricksException {
         // Make sure the current path is valid before going further
         if (isRemovePath(fieldPath, pathList)) {
             return null;
@@ -322,7 +314,7 @@ public class RecordRemoveFieldsTypeComputer implements IResultTypeComputer {
             return null;
         }
         return new ARecordType(srcRecType.getTypeName(), destFieldNames.toArray(new String[n]),
-                    destFieldTypes.toArray(new IAType[n]), isOpen);
+                destFieldTypes.toArray(new IAType[n]), isOpen);
     }
 
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/9dcba3c9/asterix-om/src/main/java/org/apache/asterix/om/types/AOrderedListType.java
----------------------------------------------------------------------
diff --git a/asterix-om/src/main/java/org/apache/asterix/om/types/AOrderedListType.java b/asterix-om/src/main/java/org/apache/asterix/om/types/AOrderedListType.java
index 23e6cf5..e16633e 100644
--- a/asterix-om/src/main/java/org/apache/asterix/om/types/AOrderedListType.java
+++ b/asterix-om/src/main/java/org/apache/asterix/om/types/AOrderedListType.java
@@ -18,11 +18,10 @@
  */
 package org.apache.asterix.om.types;
 
+import org.apache.asterix.om.base.IAObject;
 import org.json.JSONException;
 import org.json.JSONObject;
 
-import org.apache.asterix.om.base.IAObject;
-
 public class AOrderedListType extends AbstractCollectionType {
 
     private static final long serialVersionUID = 1L;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/9dcba3c9/asterix-om/src/main/java/org/apache/asterix/om/types/ARecordType.java
----------------------------------------------------------------------
diff --git a/asterix-om/src/main/java/org/apache/asterix/om/types/ARecordType.java b/asterix-om/src/main/java/org/apache/asterix/om/types/ARecordType.java
index 1062b88..c2eae36 100644
--- a/asterix-om/src/main/java/org/apache/asterix/om/types/ARecordType.java
+++ b/asterix-om/src/main/java/org/apache/asterix/om/types/ARecordType.java
@@ -31,8 +31,6 @@ import org.apache.asterix.common.exceptions.AsterixException;
 import org.apache.asterix.om.base.IAObject;
 import org.apache.asterix.om.util.NonTaggedFormatUtil;
 import org.apache.asterix.om.visitors.IOMVisitor;
-import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;
-import org.apache.hyracks.api.exceptions.HyracksDataException;
 import org.json.JSONArray;
 import org.json.JSONException;
 import org.json.JSONObject;
@@ -61,9 +59,6 @@ public class ARecordType extends AbstractComplexType {
      *            the types of the closed fields
      * @param isOpen
      *            whether the record is open
-     * @throws AsterixException
-     *             if there are duplicate field names or if there is an error serializing the field names
-     * @throws HyracksDataException
      */
     public ARecordType(String typeName, String[] fieldNames, IAType[] fieldTypes, boolean isOpen) {
         super(typeName);
@@ -128,7 +123,7 @@ public class ARecordType extends AbstractComplexType {
      *            the name of the field whose position is sought
      * @return the position of the field in the closed schema or -1 if the field does not exist.
      */
-    public int getFieldIndex(String fieldName) throws IOException {
+    public int getFieldIndex(String fieldName) {
         if (fieldNames == null) {
             return -1;
         }
@@ -146,10 +141,9 @@ public class ARecordType extends AbstractComplexType {
      * @param parent
      *            The type of the parent
      * @return the type of the child
-     * @throws IOException
      */
 
-    public IAType getSubFieldType(List<String> subFieldName, IAType parent) throws IOException {
+    public IAType getSubFieldType(List<String> subFieldName, IAType parent) {
         ARecordType subRecordType = (ARecordType) parent;
         for (int i = 0; i < subFieldName.size() - 1; i++) {
             subRecordType = (ARecordType) subRecordType.getFieldType(subFieldName.get(i));
@@ -189,10 +183,8 @@ public class ARecordType extends AbstractComplexType {
      * @param fieldName
      *            the fieldName whose type is sought
      * @return the field type of the field name if it exists, otherwise null
-     * @throws IOException
-     *             if an error occurs while serializing the field name
      */
-    public IAType getFieldType(String fieldName) throws IOException {
+    public IAType getFieldType(String fieldName) {
         int fieldPos = getFieldIndex(fieldName);
         if (fieldPos < 0 || fieldPos >= fieldTypes.length) {
             return null;
@@ -206,9 +198,8 @@ public class ARecordType extends AbstractComplexType {
      * @param fieldName
      *            the name of the field to check
      * @return true if fieldName is a closed field, otherwise false
-     * @throws IOException
      */
-    public boolean isClosedField(String fieldName) throws IOException {
+    public boolean isClosedField(String fieldName) {
         return getFieldIndex(fieldName) != -1;
     }
 
@@ -221,7 +212,7 @@ public class ARecordType extends AbstractComplexType {
         return false;
     }
 
-    public ARecordType deepCopy(ARecordType type) throws AlgebricksException {
+    public ARecordType deepCopy(ARecordType type) {
         IAType[] newTypes = new IAType[type.fieldNames.length];
         for (int i = 0; i < type.fieldTypes.length; i++) {
             if (type.fieldTypes[i].getTypeTag() == ATypeTag.RECORD) {

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/9dcba3c9/asterix-om/src/main/java/org/apache/asterix/om/types/AUnorderedListType.java
----------------------------------------------------------------------
diff --git a/asterix-om/src/main/java/org/apache/asterix/om/types/AUnorderedListType.java b/asterix-om/src/main/java/org/apache/asterix/om/types/AUnorderedListType.java
index ab7bba5..80b13b5 100644
--- a/asterix-om/src/main/java/org/apache/asterix/om/types/AUnorderedListType.java
+++ b/asterix-om/src/main/java/org/apache/asterix/om/types/AUnorderedListType.java
@@ -18,11 +18,10 @@
  */
 package org.apache.asterix.om.types;
 
+import org.apache.asterix.om.base.IAObject;
 import org.json.JSONException;
 import org.json.JSONObject;
 
-import org.apache.asterix.om.base.IAObject;
-
 public class AUnorderedListType extends AbstractCollectionType {
 
     private static final long serialVersionUID = 1L;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/9dcba3c9/asterix-om/src/main/java/org/apache/asterix/om/types/EnumDeserializer.java
----------------------------------------------------------------------
diff --git a/asterix-om/src/main/java/org/apache/asterix/om/types/EnumDeserializer.java b/asterix-om/src/main/java/org/apache/asterix/om/types/EnumDeserializer.java
index cf516af..8ec8989 100644
--- a/asterix-om/src/main/java/org/apache/asterix/om/types/EnumDeserializer.java
+++ b/asterix-om/src/main/java/org/apache/asterix/om/types/EnumDeserializer.java
@@ -23,7 +23,8 @@ import java.util.Map;
 
 public class EnumDeserializer<E extends Enum<E> & IEnumSerializer> {
 
-    public static final EnumDeserializer<ATypeTag> ATYPETAGDESERIALIZER = new EnumDeserializer<ATypeTag>(ATypeTag.class);
+    public static final EnumDeserializer<ATypeTag> ATYPETAGDESERIALIZER = new EnumDeserializer<ATypeTag>(
+            ATypeTag.class);
 
     private Map<Byte, E> enumvalMap = new HashMap<Byte, E>();
 

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/9dcba3c9/asterix-om/src/main/java/org/apache/asterix/om/types/TypeHierarchy.java
----------------------------------------------------------------------
diff --git a/asterix-om/src/main/java/org/apache/asterix/om/types/TypeHierarchy.java b/asterix-om/src/main/java/org/apache/asterix/om/types/TypeHierarchy.java
deleted file mode 100644
index ded3d5c..0000000
--- a/asterix-om/src/main/java/org/apache/asterix/om/types/TypeHierarchy.java
+++ /dev/null
@@ -1,44 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.asterix.om.types;
-
-import java.util.Hashtable;
-
-/*
- * Author: Guangqiang Li
- * Created on Sep 24, 2009
- */
-public class TypeHierarchy {
-    private static Hashtable<String, String> parentMap = new Hashtable<String, String>();
-    static {
-        parentMap.put("integer", "decimal");
-        parentMap.put("double", "decimal");
-        parentMap.put("decimal", "numeric");
-    }
-
-    public static boolean isSubType(String sub, String par) {
-        String parent = parentMap.get(sub);
-        if (parent != null)
-            if (parent.equals(par))
-                return true;
-            else
-                return isSubType(parent, par);
-        return false;
-    }
-}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/9dcba3c9/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/common/ClosedRecordConstructorEvalFactory.java
----------------------------------------------------------------------
diff --git a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/common/ClosedRecordConstructorEvalFactory.java b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/common/ClosedRecordConstructorEvalFactory.java
index 55e7d15..ed8ce03 100644
--- a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/common/ClosedRecordConstructorEvalFactory.java
+++ b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/common/ClosedRecordConstructorEvalFactory.java
@@ -30,6 +30,7 @@ import org.apache.asterix.om.types.ATypeTag;
 import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;
 import org.apache.hyracks.algebricks.runtime.base.ICopyEvaluator;
 import org.apache.hyracks.algebricks.runtime.base.ICopyEvaluatorFactory;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
 import org.apache.hyracks.data.std.api.IDataOutputProvider;
 import org.apache.hyracks.data.std.util.ArrayBackedValueStorage;
 import org.apache.hyracks.dataflow.common.data.accessors.IFrameTupleReference;
@@ -91,7 +92,7 @@ public class ClosedRecordConstructorEvalFactory implements ICopyEvaluatorFactory
                     }
                 }
                 recBuilder.write(out, true);
-            } catch (IOException | AsterixException e) {
+            } catch (HyracksDataException e) {
                 throw new AlgebricksException(e);
             }
         }
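
The catch here narrows from (IOException | AsterixException) to HyracksDataException because the record-builder methods it calls now declare only HyracksDataException. The matching IARecordBuilder interface change is not included in this excerpt, so the following is only a sketch of the contract these call sites appear to assume (names follow the RecordBuilder hunk above; the interface body itself is an assumption):

    import java.io.DataOutput;
    import org.apache.hyracks.api.exceptions.HyracksDataException;
    import org.apache.hyracks.data.std.api.IValueReference;

    interface RecordBuilderContractSketch {
        void addField(IValueReference name, IValueReference value) throws HyracksDataException;
        void write(DataOutput out, boolean writeTypeTag) throws HyracksDataException;
    }

With only HyracksDataException able to escape write(), the evaluators in this and the following files can legally drop IOException and AsterixException from their catch clauses.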

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/9dcba3c9/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/constructors/OpenRecordConstructorDescriptor.java
----------------------------------------------------------------------
diff --git a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/constructors/OpenRecordConstructorDescriptor.java b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/constructors/OpenRecordConstructorDescriptor.java
index 6117452..66631d5 100644
--- a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/constructors/OpenRecordConstructorDescriptor.java
+++ b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/constructors/OpenRecordConstructorDescriptor.java
@@ -34,6 +34,7 @@ import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;
 import org.apache.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
 import org.apache.hyracks.algebricks.runtime.base.ICopyEvaluator;
 import org.apache.hyracks.algebricks.runtime.base.ICopyEvaluatorFactory;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
 import org.apache.hyracks.data.std.api.IDataOutputProvider;
 import org.apache.hyracks.data.std.util.ArrayBackedValueStorage;
 import org.apache.hyracks.dataflow.common.data.accessors.IFrameTupleReference;
@@ -107,7 +108,7 @@ public class OpenRecordConstructorDescriptor extends AbstractScalarFunctionDynam
                                 }
                             }
                             recBuilder.write(out, true);
-                        } catch (IOException | AsterixException e) {
+                        } catch (HyracksDataException e) {
                             throw new AlgebricksException(e);
                         }
                     }

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/9dcba3c9/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/records/RecordAddFieldsDescriptor.java
----------------------------------------------------------------------
diff --git a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/records/RecordAddFieldsDescriptor.java b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/records/RecordAddFieldsDescriptor.java
index d9323a5..9e7c4ac 100644
--- a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/records/RecordAddFieldsDescriptor.java
+++ b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/records/RecordAddFieldsDescriptor.java
@@ -184,7 +184,7 @@ public class RecordAddFieldsDescriptor extends AbstractScalarFunctionDynamicDesc
                             }
                             addFields(recordPointable, listPointable);
                             recordBuilder.write(output.getDataOutput(), true);
-                        } catch (IOException | AsterixException e) {
+                        } catch (HyracksDataException e) {
                             throw new AlgebricksException(e);
                         }
                     }

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/9dcba3c9/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/IntervalBinDescriptor.java
----------------------------------------------------------------------
diff --git a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/IntervalBinDescriptor.java b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/IntervalBinDescriptor.java
index 73d5cd9..2e493c5 100644
--- a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/IntervalBinDescriptor.java
+++ b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/IntervalBinDescriptor.java
@@ -168,8 +168,8 @@ public class IntervalBinDescriptor extends AbstractScalarFunctionDynamicDescript
                         switch (type2) {
                             case YEARMONTHDURATION:
 
-                                yearMonth = AYearMonthDurationSerializerDeserializer.getYearMonth(
-                                        argOut2.getByteArray(), 1);
+                                yearMonth = AYearMonthDurationSerializerDeserializer
+                                        .getYearMonth(argOut2.getByteArray(), 1);
 
                                 int yearStart = GREG_CAL.getYear(chrononToStart);
                                 int monthStart = GREG_CAL.getMonthOfYear(chrononToStart, yearStart);
@@ -182,13 +182,13 @@ public class IntervalBinDescriptor extends AbstractScalarFunctionDynamicDescript
                                         + ((totalMonths < 0 && totalMonths % yearMonth != 0) ? -1 : 0);
 
                                 if (binIndex > Integer.MAX_VALUE) {
-                                    throw new AlgebricksException(getIdentifier().getName()
-                                            + ": Overflowing time value to be binned!");
+                                    throw new AlgebricksException(
+                                            getIdentifier().getName() + ": Overflowing time value to be binned!");
                                 }
 
                                 if (binIndex < Integer.MIN_VALUE) {
-                                    throw new AlgebricksException(getIdentifier().getName()
-                                            + ": Underflowing time value to be binned!");
+                                    throw new AlgebricksException(
+                                            getIdentifier().getName() + ": Underflowing time value to be binned!");
                                 }
 
                                 break;
@@ -210,43 +210,42 @@ public class IntervalBinDescriptor extends AbstractScalarFunctionDynamicDescript
                                 }
                                 return;
                             default:
-                                throw new AlgebricksException(
-                                        getIdentifier().getName()
-                                                + ": expecting YEARMONTHDURATION/DAYTIMEDURATION for the thrid argument but got "
-                                                + type2);
+                                throw new AlgebricksException(getIdentifier().getName()
+                                        + ": expecting YEARMONTHDURATION/DAYTIMEDURATION for the thrid argument but got "
+                                        + type2);
                         }
 
                         switch (type0) {
                             case DATE:
-                                binStartChronon = DurationArithmeticOperations.addDuration(chrononToStart, yearMonth
-                                        * (int) binIndex, dayTime * binIndex, false);
-                                binEndChronon = DurationArithmeticOperations.addDuration(chrononToStart, yearMonth
-                                        * ((int) binIndex + 1), dayTime * (binIndex + 1), false);
-
-                                binStartChronon = binStartChronon
-                                        / GregorianCalendarSystem.CHRONON_OF_DAY
-                                        + ((binStartChronon < 0 && binStartChronon
-                                                % GregorianCalendarSystem.CHRONON_OF_DAY != 0) ? -1 : 0);
-                                binEndChronon = binEndChronon
-                                        / GregorianCalendarSystem.CHRONON_OF_DAY
-                                        + ((binEndChronon < 0 && binEndChronon % GregorianCalendarSystem.CHRONON_OF_DAY != 0) ? -1
-                                                : 0);
+                                binStartChronon = DurationArithmeticOperations.addDuration(chrononToStart,
+                                        yearMonth * (int) binIndex, dayTime * binIndex, false);
+                                binEndChronon = DurationArithmeticOperations.addDuration(chrononToStart,
+                                        yearMonth * ((int) binIndex + 1), dayTime * (binIndex + 1), false);
+
+                                binStartChronon = binStartChronon / GregorianCalendarSystem.CHRONON_OF_DAY
+                                        + ((binStartChronon < 0
+                                                && binStartChronon % GregorianCalendarSystem.CHRONON_OF_DAY != 0) ? -1
+                                                        : 0);
+                                binEndChronon = binEndChronon / GregorianCalendarSystem.CHRONON_OF_DAY
+                                        + ((binEndChronon < 0
+                                                && binEndChronon % GregorianCalendarSystem.CHRONON_OF_DAY != 0) ? -1
+                                                        : 0);
                                 break;
                             case TIME:
                                 if (yearMonth != 0) {
                                     throw new AlgebricksException(getIdentifier().getName()
                                             + ": cannot create year-month bin for a time value");
                                 }
-                                binStartChronon = DurationArithmeticOperations.addDuration(chrononToStart, yearMonth
-                                        * (int) binIndex, dayTime * binIndex, true);
-                                binEndChronon = DurationArithmeticOperations.addDuration(chrononToStart, yearMonth
-                                        * ((int) binIndex + 1), dayTime * (binIndex + 1), true);
+                                binStartChronon = DurationArithmeticOperations.addDuration(chrononToStart,
+                                        yearMonth * (int) binIndex, dayTime * binIndex, true);
+                                binEndChronon = DurationArithmeticOperations.addDuration(chrononToStart,
+                                        yearMonth * ((int) binIndex + 1), dayTime * (binIndex + 1), true);
                                 break;
                             case DATETIME:
-                                binStartChronon = DurationArithmeticOperations.addDuration(chrononToStart, yearMonth
-                                        * (int) binIndex, dayTime * binIndex, false);
-                                binEndChronon = DurationArithmeticOperations.addDuration(chrononToStart, yearMonth
-                                        * ((int) binIndex + 1), dayTime * (binIndex + 1), false);
+                                binStartChronon = DurationArithmeticOperations.addDuration(chrononToStart,
+                                        yearMonth * (int) binIndex, dayTime * binIndex, false);
+                                binEndChronon = DurationArithmeticOperations.addDuration(chrononToStart,
+                                        yearMonth * ((int) binIndex + 1), dayTime * (binIndex + 1), false);
                                 break;
                             case NULL:
                                 try {
@@ -260,14 +259,13 @@ public class IntervalBinDescriptor extends AbstractScalarFunctionDynamicDescript
                                         + ": the first argument should be DATE/TIME/DATETIME/NULL but got " + type0);
 
                         }
-                        aInterval.setValue(binStartChronon, binEndChronon, type0.serialize());
                         try {
+                            aInterval.setValue(binStartChronon, binEndChronon, type0.serialize());
                             intervalSerde.serialize(aInterval, out);
                             return;
                         } catch (HyracksDataException ex) {
                             throw new AlgebricksException(ex);
                         }
-
                     }
                 };
             }



[2/3] incubator-asterixdb git commit: Some exception cleanup

Posted by ti...@apache.org.
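
The MetadataNode changes below repeatedly replace a blanket catch (Exception e) with a multi-catch over the checked exceptions the narrowed helper signatures can actually throw. A minimal sketch of the resulting shape; the class and helper are hypothetical stand-ins, the import paths follow the modules referenced in this patch, and the comments on the exception hierarchy are my reading of Hyracks rather than something stated in the commit:

    import java.io.IOException;
    import org.apache.asterix.common.exceptions.ACIDException;
    import org.apache.asterix.metadata.MetadataException;
    import org.apache.hyracks.api.exceptions.HyracksDataException;
    import org.apache.hyracks.storage.am.common.api.IndexException;
    import org.apache.hyracks.storage.am.common.exceptions.TreeIndexDuplicateKeyException;

    class MetadataCatchSketch {
        // Hypothetical stand-in for MetadataNode.insertTupleIntoIndex with the
        // narrowed throws clause from the diff below.
        private void insertTupleIntoIndex() throws ACIDException, HyracksDataException, IndexException {
            // index insert elided
        }

        public void addEntity() throws MetadataException {
            try {
                insertTupleIntoIndex();
            } catch (TreeIndexDuplicateKeyException e) {
                // a subtype of IndexException: keep it first to preserve the
                // more specific "already exists" message
                throw new MetadataException("This entity already exists.", e);
            } catch (ACIDException | IndexException | IOException e) {
                // HyracksDataException extends IOException, so it lands here too
                throw new MetadataException(e);
            }
        }
    }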
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/9dcba3c9/asterix-metadata/src/main/java/org/apache/asterix/metadata/MetadataNode.java
----------------------------------------------------------------------
diff --git a/asterix-metadata/src/main/java/org/apache/asterix/metadata/MetadataNode.java b/asterix-metadata/src/main/java/org/apache/asterix/metadata/MetadataNode.java
index ed586aa..4e6a3df 100644
--- a/asterix-metadata/src/main/java/org/apache/asterix/metadata/MetadataNode.java
+++ b/asterix-metadata/src/main/java/org/apache/asterix/metadata/MetadataNode.java
@@ -19,6 +19,7 @@
 
 package org.apache.asterix.metadata;
 
+import java.io.IOException;
 import java.rmi.RemoteException;
 import java.util.ArrayList;
 import java.util.List;
@@ -29,7 +30,6 @@ import org.apache.asterix.common.config.DatasetConfig.DatasetType;
 import org.apache.asterix.common.config.DatasetConfig.IndexType;
 import org.apache.asterix.common.dataflow.AsterixLSMIndexUtil;
 import org.apache.asterix.common.exceptions.ACIDException;
-import org.apache.asterix.common.exceptions.AsterixException;
 import org.apache.asterix.common.functions.FunctionSignature;
 import org.apache.asterix.common.transactions.AbstractOperationCallback;
 import org.apache.asterix.common.transactions.DatasetId;
@@ -95,6 +95,7 @@ import org.apache.hyracks.storage.am.common.api.IIndexAccessor;
 import org.apache.hyracks.storage.am.common.api.IIndexCursor;
 import org.apache.hyracks.storage.am.common.api.IModificationOperationCallback;
 import org.apache.hyracks.storage.am.common.api.ITreeIndexCursor;
+import org.apache.hyracks.storage.am.common.api.IndexException;
 import org.apache.hyracks.storage.am.common.api.TreeIndexException;
 import org.apache.hyracks.storage.am.common.exceptions.TreeIndexDuplicateKeyException;
 import org.apache.hyracks.storage.am.common.impls.NoOpOperationCallback;
@@ -169,7 +170,7 @@ public class MetadataNode implements IMetadataNode {
         } catch (TreeIndexDuplicateKeyException e) {
             throw new MetadataException(
                     "A dataverse with this name " + dataverse.getDataverseName() + " already exists.", e);
-        } catch (Exception e) {
+        } catch (ACIDException|IndexException|IOException e) {
             throw new MetadataException(e);
         }
     }
@@ -194,7 +195,7 @@ public class MetadataNode implements IMetadataNode {
         } catch (TreeIndexDuplicateKeyException e) {
             throw new MetadataException("A dataset with this name " + dataset.getDatasetName()
                     + " already exists in dataverse '" + dataset.getDataverseName() + "'.", e);
-        } catch (Exception e) {
+        } catch (ACIDException|IndexException|IOException e) {
             throw new MetadataException(e);
         }
     }
@@ -207,7 +208,7 @@ public class MetadataNode implements IMetadataNode {
             insertTupleIntoIndex(jobId, MetadataPrimaryIndexes.INDEX_DATASET, tuple);
         } catch (TreeIndexDuplicateKeyException e) {
             throw new MetadataException("An index with name '" + index.getIndexName() + "' already exists.", e);
-        } catch (Exception e) {
+        } catch (ACIDException|IndexException|IOException e) {
             throw new MetadataException(e);
         }
     }
@@ -220,7 +221,7 @@ public class MetadataNode implements IMetadataNode {
             insertTupleIntoIndex(jobId, MetadataPrimaryIndexes.NODE_DATASET, tuple);
         } catch (TreeIndexDuplicateKeyException e) {
             throw new MetadataException("A node with name '" + node.getNodeName() + "' already exists.", e);
-        } catch (Exception e) {
+        } catch (ACIDException|IndexException|IOException e) {
             throw new MetadataException(e);
         }
     }
@@ -234,7 +235,7 @@ public class MetadataNode implements IMetadataNode {
         } catch (TreeIndexDuplicateKeyException e) {
             throw new MetadataException("A nodegroup with name '" + nodeGroup.getNodeGroupName() + "' already exists.",
                     e);
-        } catch (Exception e) {
+        } catch (ACIDException|IndexException|IOException e) {
             throw new MetadataException(e);
         }
     }
@@ -247,7 +248,7 @@ public class MetadataNode implements IMetadataNode {
             insertTupleIntoIndex(jobId, MetadataPrimaryIndexes.DATATYPE_DATASET, tuple);
         } catch (TreeIndexDuplicateKeyException e) {
             throw new MetadataException("A datatype with name '" + datatype.getDatatypeName() + "' already exists.", e);
-        } catch (Exception e) {
+        } catch (ACIDException|IndexException|IOException e) {
             throw new MetadataException(e);
         }
     }
@@ -263,13 +264,13 @@ public class MetadataNode implements IMetadataNode {
         } catch (TreeIndexDuplicateKeyException e) {
             throw new MetadataException("A function with this name " + function.getName() + " and arity "
                     + function.getArity() + " already exists in dataverse '" + function.getDataverseName() + "'.", e);
-        } catch (Exception e) {
+        } catch (ACIDException|IndexException|IOException e) {
             throw new MetadataException(e);
         }
     }
 
     private void insertTupleIntoIndex(JobId jobId, IMetadataIndex metadataIndex, ITupleReference tuple)
-            throws Exception {
+            throws ACIDException, HyracksDataException, IndexException {
         long resourceID = metadataIndex.getResourceID();
         String resourceName = metadataIndex.getFile().toString();
         ILSMIndex lsmIndex = (ILSMIndex) datasetLifecycleManager.getIndex(resourceName);
@@ -292,15 +293,13 @@ public class MetadataNode implements IMetadataNode {
 
             // TODO: fix exceptions once new BTree exception model is in hyracks.
             indexAccessor.forceInsert(tuple);
-        } catch (Exception e) {
-            throw e;
         } finally {
             datasetLifecycleManager.close(resourceName);
         }
     }
 
     private IModificationOperationCallback createIndexModificationCallback(JobId jobId, long resourceId,
-            IMetadataIndex metadataIndex, ILSMIndex lsmIndex, IndexOperation indexOp) throws Exception {
+            IMetadataIndex metadataIndex, ILSMIndex lsmIndex, IndexOperation indexOp) throws ACIDException {
         ITransactionContext txnCtx = transactionSubsystem.getTransactionManager().getTransactionContext(jobId, false);
 
         if (metadataIndex.isPrimaryIndex()) {
@@ -383,7 +382,7 @@ public class MetadataNode implements IMetadataNode {
             // BTreeKeyDoesNotExistException.
         } catch (TreeIndexException e) {
             throw new MetadataException("Cannot drop dataverse '" + dataverseName + "' because it doesn't exist.", e);
-        } catch (Exception e) {
+        } catch (ACIDException | IndexException | IOException e) {
             throw new MetadataException(e);
         }
     }
@@ -523,7 +522,7 @@ public class MetadataNode implements IMetadataNode {
         }
     }
 
-    private void forceDropDatatype(JobId jobId, String dataverseName, String datatypeName) throws AsterixException {
+    private void forceDropDatatype(JobId jobId, String dataverseName, String datatypeName) throws MetadataException {
         try {
             ITupleReference searchKey = createTuple(dataverseName, datatypeName);
             // Searches the index for the tuple to be deleted. Acquires an S
@@ -533,16 +532,14 @@ public class MetadataNode implements IMetadataNode {
             // TODO: Change this to be a BTree specific exception, e.g.,
             // BTreeKeyDoesNotExistException.
         } catch (TreeIndexException e) {
-            throw new AsterixException("Cannot drop type '" + datatypeName + "' because it doesn't exist", e);
-        } catch (AsterixException e) {
-            throw e;
-        } catch (Exception e) {
-            throw new AsterixException(e);
+            throw new MetadataException("Cannot drop type '" + datatypeName + "' because it doesn't exist", e);
+        } catch (ACIDException | IndexException | IOException e) {
+            throw new MetadataException(e);
         }
     }
 
     private void deleteTupleFromIndex(JobId jobId, IMetadataIndex metadataIndex, ITupleReference tuple)
-            throws Exception {
+            throws ACIDException, HyracksDataException, IndexException {
         long resourceID = metadataIndex.getResourceID();
         String resourceName = metadataIndex.getFile().toString();
         ILSMIndex lsmIndex = (ILSMIndex) datasetLifecycleManager.getIndex(resourceName);
@@ -562,8 +559,6 @@ public class MetadataNode implements IMetadataNode {
             AsterixLSMIndexUtil.checkAndSetFirstLSN((AbstractLSMIndex) lsmIndex, transactionSubsystem.getLogManager());
 
             indexAccessor.forceDelete(tuple);
-        } catch (Exception e) {
-            throw e;
         } finally {
             datasetLifecycleManager.close(resourceName);
         }
@@ -580,15 +575,13 @@ public class MetadataNode implements IMetadataNode {
                 return null;
             }
             return results;
-        } catch (Exception e) {
+        } catch (IndexException | IOException e) {
             throw new MetadataException(e);
         }
-
     }
 
     @Override
     public Dataverse getDataverse(JobId jobId, String dataverseName) throws MetadataException, RemoteException {
-
         try {
             ITupleReference searchKey = createTuple(dataverseName);
             DataverseTupleTranslator tupleReaderWriter = new DataverseTupleTranslator(false);
@@ -599,10 +592,9 @@ public class MetadataNode implements IMetadataNode {
                 return null;
             }
             return results.get(0);
-        } catch (Exception e) {
+        } catch (IndexException | IOException e) {
             throw new MetadataException(e);
         }
-
     }
 
     @Override
@@ -615,7 +607,7 @@ public class MetadataNode implements IMetadataNode {
             List<Dataset> results = new ArrayList<Dataset>();
             searchIndex(jobId, MetadataPrimaryIndexes.DATASET_DATASET, searchKey, valueExtractor, results);
             return results;
-        } catch (Exception e) {
+        } catch (IndexException | IOException e) {
             throw new MetadataException(e);
         }
     }
@@ -629,7 +621,7 @@ public class MetadataNode implements IMetadataNode {
             List<Feed> results = new ArrayList<Feed>();
             searchIndex(jobId, MetadataPrimaryIndexes.FEED_DATASET, searchKey, valueExtractor, results);
             return results;
-        } catch (Exception e) {
+        } catch (IndexException | IOException e) {
             throw new MetadataException(e);
         }
     }
@@ -644,7 +636,7 @@ public class MetadataNode implements IMetadataNode {
             List<Library> results = new ArrayList<Library>();
             searchIndex(jobId, MetadataPrimaryIndexes.LIBRARY_DATASET, searchKey, valueExtractor, results);
             return results;
-        } catch (Exception e) {
+        } catch (IndexException | IOException e) {
             throw new MetadataException(e);
         }
     }
@@ -658,7 +650,7 @@ public class MetadataNode implements IMetadataNode {
             List<Datatype> results = new ArrayList<Datatype>();
             searchIndex(jobId, MetadataPrimaryIndexes.DATATYPE_DATASET, searchKey, valueExtractor, results);
             return results;
-        } catch (Exception e) {
+        } catch (IndexException | IOException e) {
             throw new MetadataException(e);
         }
     }
@@ -676,7 +668,7 @@ public class MetadataNode implements IMetadataNode {
                 return null;
             }
             return results.get(0);
-        } catch (Exception e) {
+        } catch (IndexException | IOException e) {
             throw new MetadataException(e);
         }
     }
@@ -689,7 +681,7 @@ public class MetadataNode implements IMetadataNode {
             List<Dataset> results = new ArrayList<Dataset>();
             searchIndex(jobId, MetadataPrimaryIndexes.DATASET_DATASET, searchKey, valueExtractor, results);
             return results;
-        } catch (Exception e) {
+        } catch (IndexException | IOException e) {
             throw new MetadataException(e);
         }
     }
@@ -702,7 +694,7 @@ public class MetadataNode implements IMetadataNode {
             List<Datatype> results = new ArrayList<Datatype>();
             searchIndex(jobId, MetadataPrimaryIndexes.DATATYPE_DATASET, searchKey, valueExtractor, results);
             return results;
-        } catch (Exception e) {
+        } catch (IndexException | IOException e) {
             throw new MetadataException(e);
         }
     }
@@ -802,7 +794,7 @@ public class MetadataNode implements IMetadataNode {
                 return null;
             }
             return results.get(0);
-        } catch (Exception e) {
+        } catch (IndexException | IOException e) {
             throw new MetadataException(e);
         }
     }
@@ -817,7 +809,7 @@ public class MetadataNode implements IMetadataNode {
             List<Index> results = new ArrayList<Index>();
             searchIndex(jobId, MetadataPrimaryIndexes.INDEX_DATASET, searchKey, valueExtractor, results);
             return results;
-        } catch (Exception e) {
+        } catch (IndexException | IOException e) {
             throw new MetadataException(e);
         }
     }
@@ -835,8 +827,7 @@ public class MetadataNode implements IMetadataNode {
                 return null;
             }
             return results.get(0);
-        } catch (Exception e) {
-            e.printStackTrace();
+        } catch (IndexException | IOException e) {
             throw new MetadataException(e);
         }
     }
@@ -853,7 +844,7 @@ public class MetadataNode implements IMetadataNode {
                 return null;
             }
             return results.get(0);
-        } catch (Exception e) {
+        } catch (IndexException | IOException e) {
             throw new MetadataException(e);
         }
     }
@@ -872,8 +863,7 @@ public class MetadataNode implements IMetadataNode {
                 return null;
             }
             return results.get(0);
-        } catch (Exception e) {
-            e.printStackTrace();
+        } catch (IndexException | IOException e) {
             throw new MetadataException(e);
         }
     }
@@ -882,12 +872,8 @@ public class MetadataNode implements IMetadataNode {
     public void dropFunction(JobId jobId, FunctionSignature functionSignature)
             throws MetadataException, RemoteException {
 
-        Function function;
-        try {
-            function = getFunction(jobId, functionSignature);
-        } catch (Exception e) {
-            throw new MetadataException(e);
-        }
+        Function function = getFunction(jobId, functionSignature);
+
         if (function == null) {
             throw new MetadataException(
                     "Cannot drop function '" + functionSignature.toString() + "' because it doesn't exist.");
@@ -907,13 +893,13 @@ public class MetadataNode implements IMetadataNode {
         } catch (TreeIndexException e) {
             throw new MetadataException("There is no function with the name " + functionSignature.getName()
                     + " and arity " + functionSignature.getArity(), e);
-        } catch (Exception e) {
+        } catch (ACIDException | IndexException | IOException e) {
             throw new MetadataException(e);
         }
     }
 
     private ITupleReference getTupleToBeDeleted(JobId jobId, IMetadataIndex metadataIndex, ITupleReference searchKey)
-            throws Exception {
+            throws MetadataException, IndexException, IOException {
         IValueExtractor<ITupleReference> valueExtractor = new TupleCopyValueExtractor(metadataIndex.getTypeTraits());
         List<ITupleReference> results = new ArrayList<ITupleReference>();
         searchIndex(jobId, metadataIndex, searchKey, valueExtractor, results);
@@ -1011,7 +997,8 @@ public class MetadataNode implements IMetadataNode {
     }
 
     private <ResultType> void searchIndex(JobId jobId, IMetadataIndex index, ITupleReference searchKey,
-            IValueExtractor<ResultType> valueExtractor, List<ResultType> results) throws Exception {
+            IValueExtractor<ResultType> valueExtractor, List<ResultType> results)
+                    throws MetadataException, IndexException, IOException {
         IBinaryComparatorFactory[] comparatorFactories = index.getKeyBinaryComparatorFactory();
         String resourceName = index.getFile().toString();
         IIndex indexInstance = datasetLifecycleManager.getIndex(resourceName);
@@ -1083,7 +1070,7 @@ public class MetadataNode implements IMetadataNode {
                 datasetLifecycleManager.close(resourceName);
             }
 
-        } catch (Exception e) {
+        } catch (IndexException | IOException e) {
             throw new MetadataException(e);
         }
 
@@ -1117,7 +1104,7 @@ public class MetadataNode implements IMetadataNode {
             List<Function> results = new ArrayList<Function>();
             searchIndex(jobId, MetadataPrimaryIndexes.FUNCTION_DATASET, searchKey, valueExtractor, results);
             return results;
-        } catch (Exception e) {
+        } catch (IndexException | IOException e) {
             throw new MetadataException(e);
         }
     }
@@ -1135,21 +1122,15 @@ public class MetadataNode implements IMetadataNode {
                     "A adapter with this name " + adapter.getAdapterIdentifier().getName()
                             + " already exists in dataverse '" + adapter.getAdapterIdentifier().getNamespace() + "'.",
                     e);
-        } catch (Exception e) {
+        } catch (ACIDException | IndexException | IOException e) {
             throw new MetadataException(e);
         }
-
     }
 
     @Override
     public void dropAdapter(JobId jobId, String dataverseName, String adapterName)
             throws MetadataException, RemoteException {
-        DatasourceAdapter adapter;
-        try {
-            adapter = getAdapter(jobId, dataverseName, adapterName);
-        } catch (Exception e) {
-            throw new MetadataException(e);
-        }
+        DatasourceAdapter adapter = getAdapter(jobId, dataverseName, adapterName);
         if (adapter == null) {
             throw new MetadataException("Cannot drop adapter '" + adapter + "' because it doesn't exist.");
         }
@@ -1166,7 +1147,7 @@ public class MetadataNode implements IMetadataNode {
             // BTreeKeyDoesNotExistException.
         } catch (TreeIndexException e) {
             throw new MetadataException("Cannot drop adapter '" + adapterName, e);
-        } catch (Exception e) {
+        } catch (ACIDException | IndexException | IOException e) {
             throw new MetadataException(e);
         }
 
@@ -1186,7 +1167,7 @@ public class MetadataNode implements IMetadataNode {
                 return null;
             }
             return results.get(0);
-        } catch (Exception e) {
+        } catch (IndexException | IOException e) {
             throw new MetadataException(e);
         }
     }
@@ -1203,16 +1184,14 @@ public class MetadataNode implements IMetadataNode {
         } catch (TreeIndexDuplicateKeyException e) {
             throw new MetadataException("A compcation policy with this name " + compactionPolicy.getPolicyName()
                     + " already exists in dataverse '" + compactionPolicy.getPolicyName() + "'.", e);
-        } catch (Exception e) {
+        } catch (ACIDException | IndexException | IOException e) {
             throw new MetadataException(e);
         }
-
     }
 
     @Override
     public CompactionPolicy getCompactionPolicy(JobId jobId, String dataverse, String policyName)
             throws MetadataException, RemoteException {
-
         try {
             ITupleReference searchKey = createTuple(dataverse, policyName);
             CompactionPolicyTupleTranslator tupleReaderWriter = new CompactionPolicyTupleTranslator(false);
@@ -1224,7 +1203,7 @@ public class MetadataNode implements IMetadataNode {
                 return results.get(0);
             }
             return null;
-        } catch (Exception e) {
+        } catch (IndexException | IOException e) {
             throw new MetadataException(e);
         }
     }
@@ -1240,7 +1219,7 @@ public class MetadataNode implements IMetadataNode {
             List<DatasourceAdapter> results = new ArrayList<DatasourceAdapter>();
             searchIndex(jobId, MetadataPrimaryIndexes.DATASOURCE_ADAPTER_DATASET, searchKey, valueExtractor, results);
             return results;
-        } catch (Exception e) {
+        } catch (IndexException | IOException e) {
             throw new MetadataException(e);
         }
     }
@@ -1256,21 +1235,15 @@ public class MetadataNode implements IMetadataNode {
         } catch (TreeIndexException e) {
             throw new MetadataException("A library with this name " + library.getDataverseName()
                     + " already exists in dataverse '" + library.getDataverseName() + "'.", e);
-        } catch (Exception e) {
+        } catch (ACIDException | IndexException | IOException e) {
             throw new MetadataException(e);
         }
-
     }
 
     @Override
     public void dropLibrary(JobId jobId, String dataverseName, String libraryName)
             throws MetadataException, RemoteException {
-        Library library;
-        try {
-            library = getLibrary(jobId, dataverseName, libraryName);
-        } catch (Exception e) {
-            throw new MetadataException(e);
-        }
+        Library library = getLibrary(jobId, dataverseName, libraryName);
         if (library == null) {
             throw new MetadataException("Cannot drop library '" + library + "' because it doesn't exist.");
         }
@@ -1287,7 +1260,7 @@ public class MetadataNode implements IMetadataNode {
             // BTreeKeyDoesNotExistException.
         } catch (TreeIndexException e) {
             throw new MetadataException("Cannot drop library '" + libraryName, e);
-        } catch (Exception e) {
+        } catch (ACIDException | IndexException | IOException e) {
             throw new MetadataException(e);
         }
 
@@ -1306,7 +1279,7 @@ public class MetadataNode implements IMetadataNode {
                 return null;
             }
             return results.get(0);
-        } catch (Exception e) {
+        } catch (IndexException | IOException e) {
             throw new MetadataException(e);
         }
     }
@@ -1327,16 +1300,14 @@ public class MetadataNode implements IMetadataNode {
         } catch (TreeIndexException e) {
             throw new MetadataException("A feed policy with this name " + feedPolicy.getPolicyName()
                     + " already exists in dataverse '" + feedPolicy.getPolicyName() + "'.", e);
-        } catch (Exception e) {
+        } catch (ACIDException | IndexException | IOException e) {
             throw new MetadataException(e);
         }
-
     }
 
     @Override
     public FeedPolicyEntity getFeedPolicy(JobId jobId, String dataverse, String policyName)
             throws MetadataException, RemoteException {
-
         try {
             ITupleReference searchKey = createTuple(dataverse, policyName);
             FeedPolicyTupleTranslator tupleReaderWriter = new FeedPolicyTupleTranslator(false);
@@ -1348,7 +1319,7 @@ public class MetadataNode implements IMetadataNode {
                 return results.get(0);
             }
             return null;
-        } catch (Exception e) {
+        } catch (IndexException | IOException e) {
             throw new MetadataException(e);
         }
     }
@@ -1364,7 +1335,7 @@ public class MetadataNode implements IMetadataNode {
         } catch (TreeIndexException e) {
             throw new MetadataException("A feed with this name " + feed.getFeedName() + " already exists in dataverse '"
                     + feed.getDataverseName() + "'.", e);
-        } catch (Exception e) {
+        } catch (ACIDException | IndexException | IOException e) {
             throw new MetadataException(e);
         }
     }
@@ -1381,14 +1352,13 @@ public class MetadataNode implements IMetadataNode {
                 return results.get(0);
             }
             return null;
-        } catch (Exception e) {
+        } catch (IndexException | IOException e) {
             throw new MetadataException(e);
         }
     }
 
     @Override
     public void dropFeed(JobId jobId, String dataverse, String feedName) throws MetadataException, RemoteException {
-
         try {
             ITupleReference searchKey = createTuple(dataverse, feedName);
             // Searches the index for the tuple to be deleted. Acquires an S
@@ -1399,10 +1369,9 @@ public class MetadataNode implements IMetadataNode {
             // BTreeKeyDoesNotExistException.
         } catch (TreeIndexException e) {
             throw new MetadataException("Cannot drop feed '" + feedName + "' because it doesn't exist", e);
-        } catch (Exception e) {
+        } catch (ACIDException | IndexException | IOException e) {
             throw new MetadataException(e);
         }
-
     }
 
     @Override
@@ -1414,7 +1383,7 @@ public class MetadataNode implements IMetadataNode {
             deleteTupleFromIndex(jobId, MetadataPrimaryIndexes.FEED_POLICY_DATASET, tuple);
         } catch (TreeIndexException e) {
             throw new MetadataException("Unknown feed policy " + policyName, e);
-        } catch (Exception e) {
+        } catch (ACIDException | IndexException | IOException e) {
             throw new MetadataException(e);
         }
     }
@@ -1430,7 +1399,7 @@ public class MetadataNode implements IMetadataNode {
             List<FeedPolicyEntity> results = new ArrayList<FeedPolicyEntity>();
             searchIndex(jobId, MetadataPrimaryIndexes.FEED_POLICY_DATASET, searchKey, valueExtractor, results);
             return results;
-        } catch (Exception e) {
+        } catch (IndexException | IOException e) {
             throw new MetadataException(e);
         }
     }
@@ -1446,7 +1415,7 @@ public class MetadataNode implements IMetadataNode {
             throw new MetadataException("An externalFile with this number " + externalFile.getFileNumber()
                     + " already exists in dataset '" + externalFile.getDatasetName() + "' in dataverse '"
                     + externalFile.getDataverseName() + "'.", e);
-        } catch (Exception e) {
+        } catch (ACIDException | IndexException | IOException e) {
             throw new MetadataException(e);
         }
     }
@@ -1461,7 +1430,7 @@ public class MetadataNode implements IMetadataNode {
             List<ExternalFile> results = new ArrayList<ExternalFile>();
             searchIndex(jobId, MetadataPrimaryIndexes.EXTERNAL_FILE_DATASET, searchKey, valueExtractor, results);
             return results;
-        } catch (Exception e) {
+        } catch (IndexException | IOException e) {
             throw new MetadataException(e);
         }
     }
@@ -1479,27 +1448,18 @@ public class MetadataNode implements IMetadataNode {
             deleteTupleFromIndex(jobId, MetadataPrimaryIndexes.EXTERNAL_FILE_DATASET, datasetTuple);
         } catch (TreeIndexException e) {
             throw new MetadataException("Couldn't drop externalFile.", e);
-        } catch (Exception e) {
+        } catch (ACIDException | IndexException | IOException e) {
             throw new MetadataException(e);
         }
     }
 
     @Override
     public void dropExternalFiles(JobId jobId, Dataset dataset) throws MetadataException, RemoteException {
-        List<ExternalFile> files;
-        try {
-            files = getExternalFiles(jobId, dataset);
-        } catch (Exception e) {
-            throw new MetadataException(e);
-        }
-        try {
-            //loop through files and delete them
-            for (int i = 0; i < files.size(); i++) {
-                dropExternalFile(jobId, files.get(i).getDataverseName(), files.get(i).getDatasetName(),
-                        files.get(i).getFileNumber());
-            }
-        } catch (Exception e) {
-            throw new MetadataException(e);
+        List<ExternalFile> files = getExternalFiles(jobId, dataset);
+        //loop through files and delete them
+        for (int i = 0; i < files.size(); i++) {
+            dropExternalFile(jobId, files.get(i).getDataverseName(), files.get(i).getDatasetName(),
+                    files.get(i).getFileNumber());
         }
     }
 
@@ -1548,7 +1508,7 @@ public class MetadataNode implements IMetadataNode {
                 return null;
             }
             return results.get(0);
-        } catch (Exception e) {
+        } catch (IndexException | IOException e) {
             throw new MetadataException(e);
         }
     }
@@ -1570,7 +1530,7 @@ public class MetadataNode implements IMetadataNode {
             DatasetTupleTranslator tupleReaderWriter = new DatasetTupleTranslator(true);
             datasetTuple = tupleReaderWriter.getTupleFromMetadataEntity(dataset);
             insertTupleIntoIndex(jobId, MetadataPrimaryIndexes.DATASET_DATASET, datasetTuple);
-        } catch (Exception e) {
+        } catch (ACIDException | IndexException | IOException e) {
             throw new MetadataException(e);
         }
     }
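
Every MetadataNode hunk above follows the same shape: the blanket catch (Exception e) is narrowed to the checked exceptions the index operations actually declare (IndexException and IOException, plus ACIDException on the insert/delete paths), and only those are wrapped in MetadataException, so runtime exceptions are no longer silently converted. The following is a self-contained sketch of that idiom; MetadataException, IndexException and searchIndex are simplified stand-ins here, not the real AsterixDB/Hyracks classes.

    import java.io.IOException;

    public class NarrowedCatchSketch {
        // Stand-ins for MetadataException and IndexException; not the real classes.
        static class MetadataException extends Exception {
            MetadataException(Throwable cause) { super(cause); }
        }
        static class IndexException extends Exception {
            IndexException(String msg) { super(msg); }
        }

        // Plays the role of searchIndex(): declares exactly what it can throw.
        static void searchIndex(boolean failIndex) throws IndexException, IOException {
            if (failIndex) {
                throw new IndexException("cursor failure");
            }
        }

        static void lookup(boolean failIndex) throws MetadataException {
            try {
                searchIndex(failIndex);
            } catch (IndexException | IOException e) {
                // Only the declared checked exceptions are wrapped; a RuntimeException
                // now propagates instead of being swallowed by a blanket catch.
                throw new MetadataException(e);
            }
        }

        public static void main(String[] args) {
            try {
                lookup(true);
            } catch (MetadataException e) {
                System.out.println("wrapped cause: " + e.getCause());
            }
        }
    }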

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/9dcba3c9/asterix-metadata/src/main/java/org/apache/asterix/metadata/MetadataTransactionContext.java
----------------------------------------------------------------------
diff --git a/asterix-metadata/src/main/java/org/apache/asterix/metadata/MetadataTransactionContext.java b/asterix-metadata/src/main/java/org/apache/asterix/metadata/MetadataTransactionContext.java
index 3d07a00..84a75ef 100644
--- a/asterix-metadata/src/main/java/org/apache/asterix/metadata/MetadataTransactionContext.java
+++ b/asterix-metadata/src/main/java/org/apache/asterix/metadata/MetadataTransactionContext.java
@@ -99,7 +99,7 @@ public class MetadataTransactionContext extends MetadataCache {
         logAndApply(new MetadataLogicalOperation(datatype, true));
     }
 
-    public void addNogeGroup(NodeGroup nodeGroup) {
+    public void addNodeGroup(NodeGroup nodeGroup) {
         droppedCache.dropNodeGroup(nodeGroup);
         logAndApply(new MetadataLogicalOperation(nodeGroup, true));
     }

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/9dcba3c9/asterix-metadata/src/main/java/org/apache/asterix/metadata/api/IValueExtractor.java
----------------------------------------------------------------------
diff --git a/asterix-metadata/src/main/java/org/apache/asterix/metadata/api/IValueExtractor.java b/asterix-metadata/src/main/java/org/apache/asterix/metadata/api/IValueExtractor.java
index 5effd7d..e90e29b 100644
--- a/asterix-metadata/src/main/java/org/apache/asterix/metadata/api/IValueExtractor.java
+++ b/asterix-metadata/src/main/java/org/apache/asterix/metadata/api/IValueExtractor.java
@@ -46,5 +46,5 @@ public interface IValueExtractor<T> {
      * @throws HyracksDataException
      * @throws IOException
      */
-    public T getValue(JobId jobId, ITupleReference tuple) throws MetadataException, HyracksDataException, IOException;
+    public T getValue(JobId jobId, ITupleReference tuple) throws MetadataException, IOException;
 }
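
Dropping HyracksDataException from this throws clause is safe because HyracksDataException is an IOException subtype (via HyracksException), so an implementation that throws it still satisfies the narrower signature. The sketch below illustrates the rule with placeholder classes; DataLayerException, ValueExtractor and TrimExtractor are illustrative names, not the Hyracks types.

    import java.io.IOException;

    public class NarrowedThrowsSketch {
        // Placeholder standing in for HyracksDataException, which extends IOException.
        static class DataLayerException extends IOException {
            DataLayerException(String msg) { super(msg); }
        }

        interface ValueExtractor<T> {
            // Declaring IOException alone already covers every IOException subtype.
            T getValue(String tuple) throws IOException;
        }

        static class TrimExtractor implements ValueExtractor<String> {
            @Override
            public String getValue(String tuple) throws IOException {
                if (tuple == null) {
                    throw new DataLayerException("empty tuple"); // still allowed by the signature
                }
                return tuple.trim();
            }
        }

        public static void main(String[] args) throws IOException {
            System.out.println(new TrimExtractor().getValue("  a  "));
        }
    }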

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/9dcba3c9/asterix-metadata/src/main/java/org/apache/asterix/metadata/entities/ExternalDatasetDetails.java
----------------------------------------------------------------------
diff --git a/asterix-metadata/src/main/java/org/apache/asterix/metadata/entities/ExternalDatasetDetails.java b/asterix-metadata/src/main/java/org/apache/asterix/metadata/entities/ExternalDatasetDetails.java
index b16344d..ab1d2a5 100644
--- a/asterix-metadata/src/main/java/org/apache/asterix/metadata/entities/ExternalDatasetDetails.java
+++ b/asterix-metadata/src/main/java/org/apache/asterix/metadata/entities/ExternalDatasetDetails.java
@@ -20,7 +20,6 @@
 package org.apache.asterix.metadata.entities;
 
 import java.io.DataOutput;
-import java.io.IOException;
 import java.util.Date;
 import java.util.Map;
 
@@ -29,7 +28,6 @@ import org.apache.asterix.builders.OrderedListBuilder;
 import org.apache.asterix.builders.RecordBuilder;
 import org.apache.asterix.common.config.DatasetConfig.DatasetType;
 import org.apache.asterix.common.config.DatasetConfig.ExternalDatasetTransactionState;
-import org.apache.asterix.common.exceptions.AsterixException;
 import org.apache.asterix.formats.nontagged.AqlSerializerDeserializerProvider;
 import org.apache.asterix.metadata.IDatasetDetails;
 import org.apache.asterix.metadata.bootstrap.MetadataRecordTypes;
@@ -124,12 +122,7 @@ public class ExternalDatasetDetails implements IDatasetDetails {
         intSerde.serialize(new AInt32(state.ordinal()), fieldValue.getDataOutput());
         externalRecordBuilder.addField(MetadataRecordTypes.EXTERNAL_DETAILS_ARECORD_TRANSACTION_STATE_FIELD_INDEX,
                 fieldValue);
-        try {
-            externalRecordBuilder.write(out, true);
-        } catch (IOException | AsterixException e) {
-            throw new HyracksDataException(e);
-        }
-
+        externalRecordBuilder.write(out, true);
     }
 
     @Override

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/9dcba3c9/asterix-metadata/src/main/java/org/apache/asterix/metadata/entities/Index.java
----------------------------------------------------------------------
diff --git a/asterix-metadata/src/main/java/org/apache/asterix/metadata/entities/Index.java b/asterix-metadata/src/main/java/org/apache/asterix/metadata/entities/Index.java
index 6d047a2..d401864 100644
--- a/asterix-metadata/src/main/java/org/apache/asterix/metadata/entities/Index.java
+++ b/asterix-metadata/src/main/java/org/apache/asterix/metadata/entities/Index.java
@@ -144,20 +144,16 @@ public class Index implements IMetadataEntity<Index>, Comparable<Index> {
     public static Pair<IAType, Boolean> getNonNullableOpenFieldType(IAType fieldType, List<String> fieldName,
             ARecordType recType) throws AlgebricksException {
         Pair<IAType, Boolean> keyPairType = null;
-
-        try {
-            IAType subType = recType;
-            for (int i = 0; i < fieldName.size(); i++) {
-                subType = ((ARecordType) subType).getFieldType(fieldName.get(i));
-                if (subType == null) {
-                    keyPairType = Index.getNonNullableType(fieldType);
-                    break;
-                }
+        IAType subType = recType;
+        for (int i = 0; i < fieldName.size(); i++) {
+            subType = ((ARecordType) subType).getFieldType(fieldName.get(i));
+            if (subType == null) {
+                keyPairType = Index.getNonNullableType(fieldType);
+                break;
             }
-            if (subType != null)
-                keyPairType = Index.getNonNullableKeyFieldType(fieldName, recType);
-        } catch (IOException e) {
-            throw new AlgebricksException(e);
+        }
+        if (subType != null) {
+            keyPairType = Index.getNonNullableKeyFieldType(fieldName, recType);
         }
         return keyPairType;
     }

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/9dcba3c9/asterix-metadata/src/main/java/org/apache/asterix/metadata/entities/InternalDatasetDetails.java
----------------------------------------------------------------------
diff --git a/asterix-metadata/src/main/java/org/apache/asterix/metadata/entities/InternalDatasetDetails.java b/asterix-metadata/src/main/java/org/apache/asterix/metadata/entities/InternalDatasetDetails.java
index 73c263f..a772c39 100644
--- a/asterix-metadata/src/main/java/org/apache/asterix/metadata/entities/InternalDatasetDetails.java
+++ b/asterix-metadata/src/main/java/org/apache/asterix/metadata/entities/InternalDatasetDetails.java
@@ -20,14 +20,12 @@
 package org.apache.asterix.metadata.entities;
 
 import java.io.DataOutput;
-import java.io.IOException;
 import java.util.List;
 
 import org.apache.asterix.builders.IARecordBuilder;
 import org.apache.asterix.builders.OrderedListBuilder;
 import org.apache.asterix.builders.RecordBuilder;
 import org.apache.asterix.common.config.DatasetConfig.DatasetType;
-import org.apache.asterix.common.exceptions.AsterixException;
 import org.apache.asterix.formats.nontagged.AqlSerializerDeserializerProvider;
 import org.apache.asterix.metadata.IDatasetDetails;
 import org.apache.asterix.metadata.bootstrap.MetadataRecordTypes;
@@ -156,8 +154,8 @@ public class InternalDatasetDetails implements IDatasetDetails {
                 fieldValue);
 
         // write field 2
-        primaryKeyListBuilder
-                .reset((AOrderedListType) MetadataRecordTypes.INTERNAL_DETAILS_RECORDTYPE.getFieldTypes()[MetadataRecordTypes.INTERNAL_DETAILS_ARECORD_PARTITIONKEY_FIELD_INDEX]);
+        primaryKeyListBuilder.reset((AOrderedListType) MetadataRecordTypes.INTERNAL_DETAILS_RECORDTYPE
+                .getFieldTypes()[MetadataRecordTypes.INTERNAL_DETAILS_ARECORD_PARTITIONKEY_FIELD_INDEX]);
         for (List<String> field : partitioningKeys) {
             listBuilder.reset(stringList);
             for (String subField : field) {
@@ -176,8 +174,8 @@ public class InternalDatasetDetails implements IDatasetDetails {
                 fieldValue);
 
         // write field 3
-        primaryKeyListBuilder
-                .reset((AOrderedListType) MetadataRecordTypes.INTERNAL_DETAILS_RECORDTYPE.getFieldTypes()[MetadataRecordTypes.INTERNAL_DETAILS_ARECORD_PRIMARYKEY_FIELD_INDEX]);
+        primaryKeyListBuilder.reset((AOrderedListType) MetadataRecordTypes.INTERNAL_DETAILS_RECORDTYPE
+                .getFieldTypes()[MetadataRecordTypes.INTERNAL_DETAILS_ARECORD_PRIMARYKEY_FIELD_INDEX]);
         for (List<String> field : primaryKeys) {
             listBuilder.reset(stringList);
             for (String subField : field) {
@@ -216,18 +214,10 @@ public class InternalDatasetDetails implements IDatasetDetails {
             }
             fieldValue.reset();
             listBuilder.write(fieldValue.getDataOutput(), true);
-            try {
-                internalRecordBuilder.addField(nameValue, fieldValue);
-            } catch (AsterixException e) {
-                throw new HyracksDataException(e);
-            }
+            internalRecordBuilder.addField(nameValue, fieldValue);
         }
 
-        try {
-            internalRecordBuilder.write(out, true);
-        } catch (IOException | AsterixException e) {
-            throw new HyracksDataException(e);
-        }
+        internalRecordBuilder.write(out, true);
     }
 
     protected void writePropertyTypeRecord(String name, String value, DataOutput out, ARecordType recordType)
@@ -252,11 +242,7 @@ public class InternalDatasetDetails implements IDatasetDetails {
         stringSerde.serialize(aString, fieldValue.getDataOutput());
         propertyRecordBuilder.addField(1, fieldValue);
 
-        try {
-            propertyRecordBuilder.write(out, true);
-        } catch (IOException | AsterixException e) {
-            throw new HyracksDataException(e);
-        }
+        propertyRecordBuilder.write(out, true);
     }
 
 }

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/9dcba3c9/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/CompactionPolicyTupleTranslator.java
----------------------------------------------------------------------
diff --git a/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/CompactionPolicyTupleTranslator.java b/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/CompactionPolicyTupleTranslator.java
index 9ff90ca..e29b9dd 100644
--- a/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/CompactionPolicyTupleTranslator.java
+++ b/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/CompactionPolicyTupleTranslator.java
@@ -24,7 +24,6 @@ import java.io.DataInput;
 import java.io.DataInputStream;
 import java.io.IOException;
 
-import org.apache.asterix.common.exceptions.AsterixException;
 import org.apache.asterix.formats.nontagged.AqlSerializerDeserializerProvider;
 import org.apache.asterix.metadata.MetadataException;
 import org.apache.asterix.metadata.bootstrap.MetadataPrimaryIndexes;
@@ -71,7 +70,7 @@ public class CompactionPolicyTupleTranslator extends AbstractTupleTranslator<Com
         CompactionPolicy compactionPolicy = null;
         String dataverseName = ((AString) compactionPolicyRecord
                 .getValueByPos(MetadataRecordTypes.COMPACTION_POLICY_ARECORD_DATAVERSE_NAME_FIELD_INDEX))
-                .getStringValue();
+                        .getStringValue();
         String policyName = ((AString) compactionPolicyRecord
                 .getValueByPos(MetadataRecordTypes.COMPACTION_POLICY_ARECORD_POLICY_NAME_FIELD_INDEX)).getStringValue();
         String className = ((AString) compactionPolicyRecord
@@ -82,8 +81,8 @@ public class CompactionPolicyTupleTranslator extends AbstractTupleTranslator<Com
     }
 
     @Override
-    public ITupleReference getTupleFromMetadataEntity(CompactionPolicy compactionPolicy) throws IOException,
-            MetadataException {
+    public ITupleReference getTupleFromMetadataEntity(CompactionPolicy compactionPolicy)
+            throws IOException, MetadataException {
 
         tupleBuilder.reset();
         aString.setValue(compactionPolicy.getDataverseName());
@@ -115,11 +114,7 @@ public class CompactionPolicyTupleTranslator extends AbstractTupleTranslator<Com
         recordBuilder.addField(MetadataRecordTypes.COMPACTION_POLICY_ARECORD_CLASSNAME_FIELD_INDEX, fieldValue);
 
         // write record
-        try {
-            recordBuilder.write(tupleBuilder.getDataOutput(), true);
-        } catch (AsterixException e) {
-            throw new MetadataException(e);
-        }
+        recordBuilder.write(tupleBuilder.getDataOutput(), true);
         tupleBuilder.addFieldEndOffset();
 
         tuple.reset(tupleBuilder.getFieldEndOffsets(), tupleBuilder.getByteArray());

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/9dcba3c9/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/DatasetTupleTranslator.java
----------------------------------------------------------------------
diff --git a/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/DatasetTupleTranslator.java b/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/DatasetTupleTranslator.java
index 00f46d4..f283a83 100644
--- a/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/DatasetTupleTranslator.java
+++ b/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/DatasetTupleTranslator.java
@@ -38,7 +38,6 @@ import org.apache.asterix.builders.RecordBuilder;
 import org.apache.asterix.builders.UnorderedListBuilder;
 import org.apache.asterix.common.config.DatasetConfig.DatasetType;
 import org.apache.asterix.common.config.DatasetConfig.ExternalDatasetTransactionState;
-import org.apache.asterix.common.exceptions.AsterixException;
 import org.apache.asterix.formats.nontagged.AqlSerializerDeserializerProvider;
 import org.apache.asterix.metadata.IDatasetDetails;
 import org.apache.asterix.metadata.MetadataException;
@@ -343,11 +342,7 @@ public class DatasetTupleTranslator extends AbstractTupleTranslator<Dataset> {
         recordBuilder.addField(MetadataRecordTypes.DATASET_ARECORD_PENDINGOP_FIELD_INDEX, fieldValue);
 
         // write record
-        try {
-            recordBuilder.write(tupleBuilder.getDataOutput(), true);
-        } catch (AsterixException e) {
-            throw new MetadataException(e);
-        }
+        recordBuilder.write(tupleBuilder.getDataOutput(), true);
         tupleBuilder.addFieldEndOffset();
 
         tuple.reset(tupleBuilder.getFieldEndOffsets(), tupleBuilder.getByteArray());
@@ -406,11 +401,7 @@ public class DatasetTupleTranslator extends AbstractTupleTranslator<Dataset> {
         stringSerde.serialize(aString, fieldValue.getDataOutput());
         propertyRecordBuilder.addField(1, fieldValue);
 
-        try {
-            propertyRecordBuilder.write(out, true);
-        } catch (IOException | AsterixException ioe) {
-            throw new HyracksDataException(ioe);
-        }
+        propertyRecordBuilder.write(out, true);
     }
 
 }

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/9dcba3c9/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/DatasourceAdapterTupleTranslator.java
----------------------------------------------------------------------
diff --git a/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/DatasourceAdapterTupleTranslator.java b/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/DatasourceAdapterTupleTranslator.java
index b0d5f76..a824551 100644
--- a/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/DatasourceAdapterTupleTranslator.java
+++ b/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/DatasourceAdapterTupleTranslator.java
@@ -25,9 +25,7 @@ import java.io.DataInputStream;
 import java.io.IOException;
 import java.util.Calendar;
 
-import org.apache.asterix.common.exceptions.AsterixException;
 import org.apache.asterix.external.api.IDataSourceAdapter;
-import org.apache.asterix.external.api.IDataSourceAdapter.AdapterType;
 import org.apache.asterix.external.dataset.adapter.AdapterIdentifier;
 import org.apache.asterix.formats.nontagged.AqlSerializerDeserializerProvider;
 import org.apache.asterix.metadata.MetadataException;
@@ -72,13 +70,14 @@ public class DatasourceAdapterTupleTranslator extends AbstractTupleTranslator<Da
     private DatasourceAdapter createAdapterFromARecord(ARecord adapterRecord) {
         String dataverseName = ((AString) adapterRecord
                 .getValueByPos(MetadataRecordTypes.DATASOURCE_ADAPTER_ARECORD_DATAVERSENAME_FIELD_INDEX))
-                .getStringValue();
+                        .getStringValue();
         String adapterName = ((AString) adapterRecord
                 .getValueByPos(MetadataRecordTypes.DATASOURCE_ADAPTER_ARECORD_NAME_FIELD_INDEX)).getStringValue();
         String classname = ((AString) adapterRecord
                 .getValueByPos(MetadataRecordTypes.DATASOURCE_ADAPTER_ARECORD_CLASSNAME_FIELD_INDEX)).getStringValue();
-        IDataSourceAdapter.AdapterType adapterType = IDataSourceAdapter.AdapterType.valueOf(((AString) adapterRecord
-                .getValueByPos(MetadataRecordTypes.DATASOURCE_ADAPTER_ARECORD_TYPE_FIELD_INDEX)).getStringValue());
+        IDataSourceAdapter.AdapterType adapterType = IDataSourceAdapter.AdapterType.valueOf(
+                ((AString) adapterRecord.getValueByPos(MetadataRecordTypes.DATASOURCE_ADAPTER_ARECORD_TYPE_FIELD_INDEX))
+                        .getStringValue());
 
         return new DatasourceAdapter(new AdapterIdentifier(dataverseName, adapterName), classname, adapterType);
     }
@@ -129,11 +128,7 @@ public class DatasourceAdapterTupleTranslator extends AbstractTupleTranslator<Da
         recordBuilder.addField(MetadataRecordTypes.DATASOURCE_ADAPTER_ARECORD_TIMESTAMP_FIELD_INDEX, fieldValue);
 
         // write record
-        try {
-            recordBuilder.write(tupleBuilder.getDataOutput(), true);
-        } catch (AsterixException e) {
-            throw new MetadataException(e);
-        }
+        recordBuilder.write(tupleBuilder.getDataOutput(), true);
         tupleBuilder.addFieldEndOffset();
 
         tuple.reset(tupleBuilder.getFieldEndOffsets(), tupleBuilder.getByteArray());

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/9dcba3c9/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/DatatypeTupleTranslator.java
----------------------------------------------------------------------
diff --git a/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/DatatypeTupleTranslator.java b/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/DatatypeTupleTranslator.java
index 8948f43..a1b23d6 100644
--- a/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/DatatypeTupleTranslator.java
+++ b/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/DatatypeTupleTranslator.java
@@ -24,12 +24,12 @@ import java.io.DataInput;
 import java.io.DataInputStream;
 import java.io.DataOutput;
 import java.io.IOException;
+import java.rmi.RemoteException;
 import java.util.Calendar;
 
 import org.apache.asterix.builders.IARecordBuilder;
 import org.apache.asterix.builders.OrderedListBuilder;
 import org.apache.asterix.builders.RecordBuilder;
-import org.apache.asterix.common.exceptions.AsterixException;
 import org.apache.asterix.common.transactions.JobId;
 import org.apache.asterix.formats.nontagged.AqlSerializerDeserializerProvider;
 import org.apache.asterix.metadata.MetadataException;
@@ -207,11 +207,7 @@ public class DatatypeTupleTranslator extends AbstractTupleTranslator<Datatype> {
         // write field 3
         if (fieldType.getTypeTag().isDerivedType()) {
             fieldValue.reset();
-            try {
-                writeDerivedTypeRecord(dataType, (AbstractComplexType) fieldType, fieldValue.getDataOutput());
-            } catch (AsterixException e) {
-                throw new MetadataException(e);
-            }
+            writeDerivedTypeRecord(dataType, (AbstractComplexType) fieldType, fieldValue.getDataOutput());
             recordBuilder.addField(MetadataRecordTypes.DATATYPE_ARECORD_DERIVED_FIELD_INDEX, fieldValue);
         }
 
@@ -222,11 +218,7 @@ public class DatatypeTupleTranslator extends AbstractTupleTranslator<Datatype> {
         recordBuilder.addField(MetadataRecordTypes.DATATYPE_ARECORD_TIMESTAMP_FIELD_INDEX, fieldValue);
 
         // write record
-        try {
-            recordBuilder.write(tupleBuilder.getDataOutput(), true);
-        } catch (AsterixException e) {
-            throw new MetadataException(e);
-        }
+        recordBuilder.write(tupleBuilder.getDataOutput(), true);
         tupleBuilder.addFieldEndOffset();
 
         tuple.reset(tupleBuilder.getFieldEndOffsets(), tupleBuilder.getByteArray());
@@ -234,7 +226,7 @@ public class DatatypeTupleTranslator extends AbstractTupleTranslator<Datatype> {
     }
 
     private void writeDerivedTypeRecord(Datatype type, AbstractComplexType derivedDatatype, DataOutput out)
-            throws IOException, AsterixException {
+            throws HyracksDataException {
         DerivedTypeTag tag = null;
         IARecordBuilder derivedRecordBuilder = new RecordBuilder();
         ArrayBackedValueStorage fieldValue = new ArrayBackedValueStorage();
@@ -292,15 +284,16 @@ public class DatatypeTupleTranslator extends AbstractTupleTranslator<Datatype> {
             throws HyracksDataException {
         AbstractCollectionType listType = (AbstractCollectionType) type;
         IAType itemType = listType.getItemType();
-        if (itemType.getTypeTag().isDerivedType())
+        if (itemType.getTypeTag().isDerivedType()) {
             handleNestedDerivedType(itemType.getTypeName(), (AbstractComplexType) itemType, instance,
                     instance.getDataverseName(), instance.getDatatypeName());
+        }
         aString.setValue(listType.getItemType().getTypeName());
         stringSerde.serialize(aString, out);
     }
 
     private void writeRecordType(Datatype instance, AbstractComplexType type, DataOutput out)
-            throws IOException, AsterixException {
+            throws HyracksDataException {
 
         ArrayBackedValueStorage fieldValue = new ArrayBackedValueStorage();
         ArrayBackedValueStorage itemValue = new ArrayBackedValueStorage();
@@ -369,13 +362,13 @@ public class DatatypeTupleTranslator extends AbstractTupleTranslator<Datatype> {
             String dataverseName, String datatypeName) throws HyracksDataException {
         try {
             metadataNode.addDatatype(jobId, new Datatype(dataverseName, typeName, nestedType, true));
-
         } catch (MetadataException e) {
             // The nested record type may have been inserted by a previous DDL statement or by
             // a previous nested type.
-            if (!e.getCause().getClass().equals(TreeIndexDuplicateKeyException.class))
+            if (!(e.getCause() instanceof TreeIndexDuplicateKeyException)) {
                 throw new HyracksDataException(e);
-        } catch (Exception e) {
+            }
+        } catch (RemoteException e) {
             // TODO: This should not be a HyracksDataException. Can't
             // fix this currently because of BTree exception model whose
             // fixes must get in.
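
The switch above from e.getCause().getClass().equals(TreeIndexDuplicateKeyException.class) to an instanceof test is more than a style change: instanceof also matches subclasses and evaluates to false when the cause is null, rather than throwing NullPointerException. A standalone illustration with stand-in exception classes:

    public class CauseCheckSketch {
        // Stand-ins for TreeIndexDuplicateKeyException and a hypothetical subclass.
        static class DuplicateKeyException extends Exception {
        }
        static class SpecialDuplicateKeyException extends DuplicateKeyException {
        }

        public static void main(String[] args) {
            Throwable subclassCause = new SpecialDuplicateKeyException();
            Throwable missingCause = null;

            // Exact-class comparison misses subclasses and cannot handle a null cause.
            System.out.println(subclassCause.getClass().equals(DuplicateKeyException.class)); // false
            // instanceof accepts subclasses and is null-safe.
            System.out.println(subclassCause instanceof DuplicateKeyException);               // true
            System.out.println(missingCause instanceof DuplicateKeyException);                // false
        }
    }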

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/9dcba3c9/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/DataverseTupleTranslator.java
----------------------------------------------------------------------
diff --git a/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/DataverseTupleTranslator.java b/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/DataverseTupleTranslator.java
index ecc53da..74f85ab 100644
--- a/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/DataverseTupleTranslator.java
+++ b/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/DataverseTupleTranslator.java
@@ -25,7 +25,6 @@ import java.io.DataInputStream;
 import java.io.IOException;
 import java.util.Calendar;
 
-import org.apache.asterix.common.exceptions.AsterixException;
 import org.apache.asterix.formats.nontagged.AqlSerializerDeserializerProvider;
 import org.apache.asterix.metadata.MetadataException;
 import org.apache.asterix.metadata.bootstrap.MetadataPrimaryIndexes;
@@ -110,11 +109,7 @@ public class DataverseTupleTranslator extends AbstractTupleTranslator<Dataverse>
         aInt32Serde.serialize(aInt32, fieldValue.getDataOutput());
         recordBuilder.addField(MetadataRecordTypes.DATAVERSE_ARECORD_PENDINGOP_FIELD_INDEX, fieldValue);
 
-        try {
-            recordBuilder.write(tupleBuilder.getDataOutput(), true);
-        } catch (AsterixException e) {
-            throw new MetadataException(e);
-        }
+        recordBuilder.write(tupleBuilder.getDataOutput(), true);
         tupleBuilder.addFieldEndOffset();
 
         tuple.reset(tupleBuilder.getFieldEndOffsets(), tupleBuilder.getByteArray());

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/9dcba3c9/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/ExternalFileTupleTranslator.java
----------------------------------------------------------------------
diff --git a/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/ExternalFileTupleTranslator.java b/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/ExternalFileTupleTranslator.java
index efb9318..1c3c7c9 100644
--- a/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/ExternalFileTupleTranslator.java
+++ b/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/ExternalFileTupleTranslator.java
@@ -25,7 +25,6 @@ import java.io.IOException;
 import java.util.Date;
 
 import org.apache.asterix.common.config.DatasetConfig.ExternalFilePendingOp;
-import org.apache.asterix.common.exceptions.AsterixException;
 import org.apache.asterix.external.indexing.ExternalFile;
 import org.apache.asterix.formats.nontagged.AqlSerializerDeserializerProvider;
 import org.apache.asterix.metadata.MetadataException;
@@ -97,14 +96,13 @@ public class ExternalFileTupleTranslator extends AbstractTupleTranslator<Externa
                 .getValueByPos(MetadataRecordTypes.EXTERNAL_FILE_ARECORD_FILE_NAME_FIELD_INDEX)).getStringValue();
         long fileSize = ((AInt64) externalFileRecord
                 .getValueByPos(MetadataRecordTypes.EXTERNAL_FILE_ARECORD_FILE_SIZE_FIELD_INDEX)).getLongValue();
-        Date lastMoDifiedDate = new Date(
-                ((ADateTime) externalFileRecord
-                        .getValueByPos(MetadataRecordTypes.EXTERNAL_FILE_ARECORD_FILE_MOD_DATE_FIELD_INDEX))
-                        .getChrononTime());
+        Date lastMoDifiedDate = new Date(((ADateTime) externalFileRecord
+                .getValueByPos(MetadataRecordTypes.EXTERNAL_FILE_ARECORD_FILE_MOD_DATE_FIELD_INDEX)).getChrononTime());
         ExternalFilePendingOp pendingOp = ExternalFilePendingOp.values()[((AInt32) externalFileRecord
                 .getValueByPos(MetadataRecordTypes.EXTERNAL_FILE_ARECORD_FILE_PENDING_OP_FIELD_INDEX))
-                .getIntegerValue()];
-        return new ExternalFile(dataverseName, datasetName, fileNumber, fileName, lastMoDifiedDate, fileSize, pendingOp);
+                        .getIntegerValue()];
+        return new ExternalFile(dataverseName, datasetName, fileNumber, fileName, lastMoDifiedDate, fileSize,
+                pendingOp);
     }
 
     @Override
@@ -170,11 +168,7 @@ public class ExternalFileTupleTranslator extends AbstractTupleTranslator<Externa
         recordBuilder.addField(MetadataRecordTypes.EXTERNAL_FILE_ARECORD_FILE_PENDING_OP_FIELD_INDEX, fieldValue);
 
         // write record
-        try {
-            recordBuilder.write(tupleBuilder.getDataOutput(), true);
-        } catch (AsterixException e) {
-            throw new MetadataException(e);
-        }
+        recordBuilder.write(tupleBuilder.getDataOutput(), true);
         tupleBuilder.addFieldEndOffset();
 
         tuple.reset(tupleBuilder.getFieldEndOffsets(), tupleBuilder.getByteArray());

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/9dcba3c9/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/FeedPolicyTupleTranslator.java
----------------------------------------------------------------------
diff --git a/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/FeedPolicyTupleTranslator.java b/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/FeedPolicyTupleTranslator.java
index 00e3e63..4839065 100644
--- a/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/FeedPolicyTupleTranslator.java
+++ b/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/FeedPolicyTupleTranslator.java
@@ -30,7 +30,6 @@ import java.util.Map;
 import org.apache.asterix.builders.IARecordBuilder;
 import org.apache.asterix.builders.RecordBuilder;
 import org.apache.asterix.builders.UnorderedListBuilder;
-import org.apache.asterix.common.exceptions.AsterixException;
 import org.apache.asterix.formats.nontagged.AqlSerializerDeserializerProvider;
 import org.apache.asterix.metadata.MetadataException;
 import org.apache.asterix.metadata.bootstrap.MetadataPrimaryIndexes;
@@ -111,7 +110,8 @@ public class FeedPolicyTupleTranslator extends AbstractTupleTranslator<FeedPolic
     }
 
     @Override
-    public ITupleReference getTupleFromMetadataEntity(FeedPolicyEntity feedPolicy) throws IOException, MetadataException {
+    public ITupleReference getTupleFromMetadataEntity(FeedPolicyEntity feedPolicy)
+            throws IOException, MetadataException {
         // write the key in the first three fields of the tuple
         ArrayBackedValueStorage itemValue = new ArrayBackedValueStorage();
 
@@ -161,11 +161,7 @@ public class FeedPolicyTupleTranslator extends AbstractTupleTranslator<FeedPolic
         recordBuilder.addField(MetadataRecordTypes.FEED_POLICY_ARECORD_PROPERTIES_FIELD_INDEX, fieldValue);
 
         // write record
-        try {
-            recordBuilder.write(tupleBuilder.getDataOutput(), true);
-        } catch (AsterixException e) {
-            throw new MetadataException(e);
-        }
+        recordBuilder.write(tupleBuilder.getDataOutput(), true);
         tupleBuilder.addFieldEndOffset();
 
         tuple.reset(tupleBuilder.getFieldEndOffsets(), tupleBuilder.getByteArray());
@@ -192,10 +188,6 @@ public class FeedPolicyTupleTranslator extends AbstractTupleTranslator<FeedPolic
         stringSerde.serialize(aString, fieldValue.getDataOutput());
         propertyRecordBuilder.addField(1, fieldValue);
 
-        try {
-            propertyRecordBuilder.write(out, true);
-        } catch (IOException | AsterixException e) {
-            throw new HyracksDataException(e);
-        }
+        propertyRecordBuilder.write(out, true);
     }
 }

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/9dcba3c9/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/FeedTupleTranslator.java
----------------------------------------------------------------------
diff --git a/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/FeedTupleTranslator.java b/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/FeedTupleTranslator.java
index dc9fb50..955249c 100644
--- a/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/FeedTupleTranslator.java
+++ b/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/FeedTupleTranslator.java
@@ -31,7 +31,6 @@ import java.util.Map;
 import org.apache.asterix.builders.IARecordBuilder;
 import org.apache.asterix.builders.OrderedListBuilder;
 import org.apache.asterix.builders.RecordBuilder;
-import org.apache.asterix.common.exceptions.AsterixException;
 import org.apache.asterix.common.functions.FunctionSignature;
 import org.apache.asterix.external.feed.api.IFeed;
 import org.apache.asterix.external.feed.api.IFeed.FeedType;
@@ -198,11 +197,7 @@ public class FeedTupleTranslator extends AbstractTupleTranslator<Feed> {
         recordBuilder.addField(MetadataRecordTypes.FEED_ARECORD_TIMESTAMP_FIELD_INDEX, fieldValue);
 
         // write record
-        try {
-            recordBuilder.write(tupleBuilder.getDataOutput(), true);
-        } catch (AsterixException e) {
-            throw new MetadataException(e);
-        }
+        recordBuilder.write(tupleBuilder.getDataOutput(), true);
         tupleBuilder.addFieldEndOffset();
 
         tuple.reset(tupleBuilder.getFieldEndOffsets(), tupleBuilder.getByteArray());
@@ -250,11 +245,7 @@ public class FeedTupleTranslator extends AbstractTupleTranslator<Feed> {
                         MetadataRecordTypes.FEED_ARECORD_PRIMARY_FIELD_DETAILS_ADAPTOR_CONFIGURATION_FIELD_INDEX,
                         primaryRecordfieldValue);
 
-                try {
-                    primaryDetailsRecordBuilder.write(fieldValue.getDataOutput(), true);
-                } catch (IOException | AsterixException e) {
-                    throw new HyracksDataException(e);
-                }
+                primaryDetailsRecordBuilder.write(fieldValue.getDataOutput(), true);
 
                 recordBuilder.addField(MetadataRecordTypes.FEED_ARECORD_PRIMARY_TYPE_DETAILS_FIELD_INDEX, fieldValue);
             }
@@ -273,11 +264,7 @@ public class FeedTupleTranslator extends AbstractTupleTranslator<Feed> {
                         MetadataRecordTypes.FEED_ARECORD_SECONDARY_FIELD_DETAILS_SOURCE_FEED_NAME_FIELD_INDEX,
                         secondaryFieldValue);
 
-                try {
-                    secondaryDetailsRecordBuilder.write(fieldValue.getDataOutput(), true);
-                } catch (IOException | AsterixException e) {
-                    throw new HyracksDataException(e);
-                }
+                secondaryDetailsRecordBuilder.write(fieldValue.getDataOutput(), true);
                 recordBuilder.addField(MetadataRecordTypes.FEED_ARECORD_SECONDARY_TYPE_DETAILS_FIELD_INDEX, fieldValue);
                 break;
         }
@@ -305,10 +292,6 @@ public class FeedTupleTranslator extends AbstractTupleTranslator<Feed> {
         stringSerde.serialize(aString, fieldValue.getDataOutput());
         propertyRecordBuilder.addField(1, fieldValue);
 
-        try {
-            propertyRecordBuilder.write(out, true);
-        } catch (IOException | AsterixException e) {
-            throw new HyracksDataException(e);
-        }
+        propertyRecordBuilder.write(out, true);
     }
 }

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/9dcba3c9/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/FunctionTupleTranslator.java
----------------------------------------------------------------------
diff --git a/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/FunctionTupleTranslator.java b/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/FunctionTupleTranslator.java
index 7850c3d..afa2f6e 100644
--- a/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/FunctionTupleTranslator.java
+++ b/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/FunctionTupleTranslator.java
@@ -27,7 +27,6 @@ import java.util.ArrayList;
 import java.util.List;
 
 import org.apache.asterix.builders.OrderedListBuilder;
-import org.apache.asterix.common.exceptions.AsterixException;
 import org.apache.asterix.formats.nontagged.AqlSerializerDeserializerProvider;
 import org.apache.asterix.metadata.MetadataException;
 import org.apache.asterix.metadata.bootstrap.MetadataPrimaryIndexes;
@@ -146,8 +145,8 @@ public class FunctionTupleTranslator extends AbstractTupleTranslator<Function> {
         // write field 3
         OrderedListBuilder listBuilder = new OrderedListBuilder();
         ArrayBackedValueStorage itemValue = new ArrayBackedValueStorage();
-        listBuilder
-                .reset((AOrderedListType) MetadataRecordTypes.FUNCTION_RECORDTYPE.getFieldTypes()[MetadataRecordTypes.FUNCTION_ARECORD_FUNCTION_PARAM_LIST_FIELD_INDEX]);
+        listBuilder.reset((AOrderedListType) MetadataRecordTypes.FUNCTION_RECORDTYPE
+                .getFieldTypes()[MetadataRecordTypes.FUNCTION_ARECORD_FUNCTION_PARAM_LIST_FIELD_INDEX]);
         for (String param : function.getParams()) {
             itemValue.reset();
             aString.setValue(param);
@@ -183,11 +182,7 @@ public class FunctionTupleTranslator extends AbstractTupleTranslator<Function> {
         recordBuilder.addField(MetadataRecordTypes.FUNCTION_ARECORD_FUNCTION_KIND_FIELD_INDEX, fieldValue);
 
         // write record
-        try {
-            recordBuilder.write(tupleBuilder.getDataOutput(), true);
-        } catch (AsterixException e) {
-            throw new MetadataException(e);
-        }
+        recordBuilder.write(tupleBuilder.getDataOutput(), true);
         tupleBuilder.addFieldEndOffset();
 
         tuple.reset(tupleBuilder.getFieldEndOffsets(), tupleBuilder.getByteArray());

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/9dcba3c9/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/IndexTupleTranslator.java
----------------------------------------------------------------------
diff --git a/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/IndexTupleTranslator.java b/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/IndexTupleTranslator.java
index 611c703..37f0e48 100644
--- a/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/IndexTupleTranslator.java
+++ b/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/IndexTupleTranslator.java
@@ -29,7 +29,6 @@ import java.util.List;
 
 import org.apache.asterix.builders.OrderedListBuilder;
 import org.apache.asterix.common.config.DatasetConfig.IndexType;
-import org.apache.asterix.common.exceptions.AsterixException;
 import org.apache.asterix.common.transactions.JobId;
 import org.apache.asterix.formats.nontagged.AqlSerializerDeserializerProvider;
 import org.apache.asterix.metadata.MetadataException;
@@ -95,7 +94,7 @@ public class IndexTupleTranslator extends AbstractTupleTranslator<Index> {
     }
 
     @Override
-    public Index getMetadataEntityFromTuple(ITupleReference frameTuple) throws IOException, MetadataException {
+    public Index getMetadataEntityFromTuple(ITupleReference frameTuple) throws MetadataException, IOException {
         byte[] serRecord = frameTuple.getFieldData(INDEX_PAYLOAD_TUPLE_FIELD_INDEX);
         int recordStartOffset = frameTuple.getFieldStart(INDEX_PAYLOAD_TUPLE_FIELD_INDEX);
         int recordLength = frameTuple.getFieldLength(INDEX_PAYLOAD_TUPLE_FIELD_INDEX);
@@ -167,7 +166,7 @@ public class IndexTupleTranslator extends AbstractTupleTranslator<Index> {
     }
 
     @Override
-    public ITupleReference getTupleFromMetadataEntity(Index instance) throws IOException, MetadataException {
+    public ITupleReference getTupleFromMetadataEntity(Index instance) throws IOException {
         // write the key in the first 3 fields of the tuple
         tupleBuilder.reset();
         aString.setValue(instance.getDataverseName());
@@ -253,11 +252,7 @@ public class IndexTupleTranslator extends AbstractTupleTranslator<Index> {
             aString.setValue(GRAM_LENGTH_FIELD_NAME);
             stringSerde.serialize(aString, nameValue.getDataOutput());
             intSerde.serialize(new AInt32(instance.getGramLength()), fieldValue.getDataOutput());
-            try {
-                recordBuilder.addField(nameValue, fieldValue);
-            } catch (AsterixException e) {
-                throw new MetadataException(e);
-            }
+            recordBuilder.addField(nameValue, fieldValue);
         }
 
         if (instance.isEnforcingKeyFileds()) {
@@ -279,11 +274,7 @@ public class IndexTupleTranslator extends AbstractTupleTranslator<Index> {
             }
             fieldValue.reset();
             typeListBuilder.write(fieldValue.getDataOutput(), true);
-            try {
-                recordBuilder.addField(nameValue, fieldValue);
-            } catch (AsterixException e) {
-                throw new MetadataException(e);
-            }
+            recordBuilder.addField(nameValue, fieldValue);
 
             // write optional field 10
             fieldValue.reset();
@@ -294,19 +285,11 @@ public class IndexTupleTranslator extends AbstractTupleTranslator<Index> {
 
             booleanSerde.serialize(ABoolean.TRUE, fieldValue.getDataOutput());
 
-            try {
-                recordBuilder.addField(nameValue, fieldValue);
-            } catch (AsterixException e) {
-                throw new MetadataException(e);
-            }
+            recordBuilder.addField(nameValue, fieldValue);
         }
 
         // write record
-        try {
-            recordBuilder.write(tupleBuilder.getDataOutput(), true);
-        } catch (AsterixException e) {
-            throw new MetadataException(e);
-        }
+        recordBuilder.write(tupleBuilder.getDataOutput(), true);
         tupleBuilder.addFieldEndOffset();
 
         tuple.reset(tupleBuilder.getFieldEndOffsets(), tupleBuilder.getByteArray());

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/9dcba3c9/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/LibraryTupleTranslator.java
----------------------------------------------------------------------
diff --git a/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/LibraryTupleTranslator.java b/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/LibraryTupleTranslator.java
index 77c81a1..e500575 100644
--- a/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/LibraryTupleTranslator.java
+++ b/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/LibraryTupleTranslator.java
@@ -25,7 +25,6 @@ import java.io.DataInputStream;
 import java.io.IOException;
 import java.util.Calendar;
 
-import org.apache.asterix.common.exceptions.AsterixException;
 import org.apache.asterix.formats.nontagged.AqlSerializerDeserializerProvider;
 import org.apache.asterix.metadata.MetadataException;
 import org.apache.asterix.metadata.bootstrap.MetadataPrimaryIndexes;
@@ -111,11 +110,7 @@ public class LibraryTupleTranslator extends AbstractTupleTranslator<Library> {
         recordBuilder.addField(MetadataRecordTypes.LIBRARY_ARECORD_TIMESTAMP_FIELD_INDEX, fieldValue);
 
         // write record
-        try {
-            recordBuilder.write(tupleBuilder.getDataOutput(), true);
-        } catch (AsterixException e) {
-            throw new MetadataException(e);
-        }
+        recordBuilder.write(tupleBuilder.getDataOutput(), true);
         tupleBuilder.addFieldEndOffset();
         tuple.reset(tupleBuilder.getFieldEndOffsets(), tupleBuilder.getByteArray());
         return tuple;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/9dcba3c9/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/NodeGroupTupleTranslator.java
----------------------------------------------------------------------
diff --git a/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/NodeGroupTupleTranslator.java b/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/NodeGroupTupleTranslator.java
index 4cb0cf8..af83ee9 100644
--- a/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/NodeGroupTupleTranslator.java
+++ b/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/NodeGroupTupleTranslator.java
@@ -28,7 +28,6 @@ import java.util.Calendar;
 import java.util.List;
 
 import org.apache.asterix.builders.UnorderedListBuilder;
-import org.apache.asterix.common.exceptions.AsterixException;
 import org.apache.asterix.formats.nontagged.AqlSerializerDeserializerProvider;
 import org.apache.asterix.metadata.MetadataException;
 import org.apache.asterix.metadata.bootstrap.MetadataPrimaryIndexes;
@@ -101,8 +100,8 @@ public class NodeGroupTupleTranslator extends AbstractTupleTranslator<NodeGroup>
         recordBuilder.addField(MetadataRecordTypes.NODEGROUP_ARECORD_GROUPNAME_FIELD_INDEX, fieldValue);
 
         // write field 1
-        listBuilder
-                .reset((AUnorderedListType) MetadataRecordTypes.NODEGROUP_RECORDTYPE.getFieldTypes()[MetadataRecordTypes.NODEGROUP_ARECORD_NODENAMES_FIELD_INDEX]);
+        listBuilder.reset((AUnorderedListType) MetadataRecordTypes.NODEGROUP_RECORDTYPE
+                .getFieldTypes()[MetadataRecordTypes.NODEGROUP_ARECORD_NODENAMES_FIELD_INDEX]);
         this.nodeNames = instance.getNodeNames();
         for (String nodeName : this.nodeNames) {
             itemValue.reset();
@@ -120,11 +119,7 @@ public class NodeGroupTupleTranslator extends AbstractTupleTranslator<NodeGroup>
         stringSerde.serialize(aString, fieldValue.getDataOutput());
         recordBuilder.addField(MetadataRecordTypes.NODEGROUP_ARECORD_TIMESTAMP_FIELD_INDEX, fieldValue);
 
-        try {
-            recordBuilder.write(tupleBuilder.getDataOutput(), true);
-        } catch (AsterixException e) {
-            throw new MetadataException(e);
-        }
+        recordBuilder.write(tupleBuilder.getDataOutput(), true);
         tupleBuilder.addFieldEndOffset();
 
         tuple.reset(tupleBuilder.getFieldEndOffsets(), tupleBuilder.getByteArray());

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/9dcba3c9/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/NodeTupleTranslator.java
----------------------------------------------------------------------
diff --git a/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/NodeTupleTranslator.java b/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/NodeTupleTranslator.java
index a592460..55b61db 100644
--- a/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/NodeTupleTranslator.java
+++ b/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/NodeTupleTranslator.java
@@ -21,7 +21,6 @@ package org.apache.asterix.metadata.entitytupletranslators;
 
 import java.io.IOException;
 
-import org.apache.asterix.common.exceptions.AsterixException;
 import org.apache.asterix.formats.nontagged.AqlSerializerDeserializerProvider;
 import org.apache.asterix.metadata.MetadataException;
 import org.apache.asterix.metadata.bootstrap.MetadataPrimaryIndexes;
@@ -127,11 +126,7 @@ public class NodeTupleTranslator extends AbstractTupleTranslator<Node> {
         // listBuilder.write(fieldValue.getDataOutput());
         // recordBuilder.addField(3, fieldValue);
 
-        try {
-            recordBuilder.write(tupleBuilder.getDataOutput(), true);
-        } catch (AsterixException e) {
-            throw new MetadataException(e);
-        }
+        recordBuilder.write(tupleBuilder.getDataOutput(), true);
         tupleBuilder.addFieldEndOffset();
         tuple.reset(tupleBuilder.getFieldEndOffsets(), tupleBuilder.getByteArray());
         return tuple;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/9dcba3c9/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/DatasetUtils.java
----------------------------------------------------------------------
diff --git a/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/DatasetUtils.java b/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/DatasetUtils.java
index f011f04..9919962 100644
--- a/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/DatasetUtils.java
+++ b/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/DatasetUtils.java
@@ -26,8 +26,8 @@ import java.util.Map;
 
 import org.apache.asterix.builders.IARecordBuilder;
 import org.apache.asterix.builders.RecordBuilder;
-import org.apache.asterix.common.config.MetadataConstants;
 import org.apache.asterix.common.config.DatasetConfig.DatasetType;
+import org.apache.asterix.common.config.MetadataConstants;
 import org.apache.asterix.common.context.CorrelatedPrefixMergePolicyFactory;
 import org.apache.asterix.common.exceptions.AsterixException;
 import org.apache.asterix.external.indexing.IndexingConstants;
@@ -276,10 +276,6 @@ public class DatasetUtils {
         stringSerde.serialize(aString, fieldValue.getDataOutput());
         propertyRecordBuilder.addField(1, fieldValue);
 
-        try {
-            propertyRecordBuilder.write(out, true);
-        } catch (IOException | AsterixException e) {
-            throw new HyracksDataException(e);
-        }
+        propertyRecordBuilder.write(out, true);
     }
 }

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/9dcba3c9/asterix-metadata/src/main/java/org/apache/asterix/metadata/valueextractors/MetadataEntityValueExtractor.java
----------------------------------------------------------------------
diff --git a/asterix-metadata/src/main/java/org/apache/asterix/metadata/valueextractors/MetadataEntityValueExtractor.java b/asterix-metadata/src/main/java/org/apache/asterix/metadata/valueextractors/MetadataEntityValueExtractor.java
index 969feea..1841e5d 100644
--- a/asterix-metadata/src/main/java/org/apache/asterix/metadata/valueextractors/MetadataEntityValueExtractor.java
+++ b/asterix-metadata/src/main/java/org/apache/asterix/metadata/valueextractors/MetadataEntityValueExtractor.java
@@ -25,7 +25,6 @@ import org.apache.asterix.common.transactions.JobId;
 import org.apache.asterix.metadata.MetadataException;
 import org.apache.asterix.metadata.api.IMetadataEntityTupleTranslator;
 import org.apache.asterix.metadata.api.IValueExtractor;
-import org.apache.hyracks.api.exceptions.HyracksDataException;
 import org.apache.hyracks.dataflow.common.data.accessors.ITupleReference;
 
 /**
@@ -39,7 +38,7 @@ public class MetadataEntityValueExtractor<T> implements IValueExtractor<T> {
     }
 
     @Override
-    public T getValue(JobId jobId, ITupleReference tuple) throws MetadataException, HyracksDataException, IOException {
+    public T getValue(JobId jobId, ITupleReference tuple) throws MetadataException, IOException {
         return tupleReaderWriter.getMetadataEntityFromTuple(tuple);
     }
 }

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/9dcba3c9/asterix-metadata/src/main/java/org/apache/asterix/metadata/valueextractors/NestedDatatypeNameValueExtractor.java
----------------------------------------------------------------------
diff --git a/asterix-metadata/src/main/java/org/apache/asterix/metadata/valueextractors/NestedDatatypeNameValueExtractor.java b/asterix-metadata/src/main/java/org/apache/asterix/metadata/valueextractors/NestedDatatypeNameValueExtractor.java
index bdc36e8..9f63ebf 100644
--- a/asterix-metadata/src/main/java/org/apache/asterix/metadata/valueextractors/NestedDatatypeNameValueExtractor.java
+++ b/asterix-metadata/src/main/java/org/apache/asterix/metadata/valueextractors/NestedDatatypeNameValueExtractor.java
@@ -27,7 +27,6 @@ import java.io.IOException;
 import org.apache.asterix.common.transactions.JobId;
 import org.apache.asterix.metadata.MetadataException;
 import org.apache.asterix.metadata.api.IValueExtractor;
-import org.apache.hyracks.api.exceptions.HyracksDataException;
 import org.apache.hyracks.dataflow.common.data.accessors.ITupleReference;
 import org.apache.hyracks.util.string.UTF8StringReader;
 
@@ -35,6 +34,8 @@ import org.apache.hyracks.util.string.UTF8StringReader;
  * Extracts the value of field 'DataypeName' of the first nested type from an
  * ITupleReference that contains a serialized representation of a Datatype
  * metadata entity.
+ *
+ * TODO Is this class used?
  */
 public class NestedDatatypeNameValueExtractor implements IValueExtractor<String> {
 
@@ -47,24 +48,20 @@ public class NestedDatatypeNameValueExtractor implements IValueExtractor<String>
     private final UTF8StringReader reader = new UTF8StringReader();
 
     @Override
-    public String getValue(JobId jobId, ITupleReference tuple) throws MetadataException, HyracksDataException {
+    public String getValue(JobId jobId, ITupleReference tuple) throws MetadataException, IOException {
         byte[] serRecord = tuple.getFieldData(2);
         int recordStartOffset = tuple.getFieldStart(2);
         int recordLength = tuple.getFieldLength(2);
         ByteArrayInputStream stream = new ByteArrayInputStream(serRecord, recordStartOffset, recordLength);
         DataInput in = new DataInputStream(stream);
-        try {
-            String nestedType = reader.readUTF(in);
-            if (nestedType.equals(datatypeName)) {
-                recordStartOffset = tuple.getFieldStart(1);
-                recordLength = tuple.getFieldLength(1);
-                stream = new ByteArrayInputStream(serRecord, recordStartOffset, recordLength);
-                in = new DataInputStream(stream);
-                return reader.readUTF(in);
-            }
-            return null;
-        } catch (IOException e) {
-            throw new HyracksDataException(e);
+        String nestedType = reader.readUTF(in);
+        if (nestedType.equals(datatypeName)) {
+            recordStartOffset = tuple.getFieldStart(1);
+            recordLength = tuple.getFieldLength(1);
+            stream = new ByteArrayInputStream(serRecord, recordStartOffset, recordLength);
+            in = new DataInputStream(stream);
+            return reader.readUTF(in);
         }
+        return null;
     }
 }

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/9dcba3c9/asterix-metadata/src/main/java/org/apache/asterix/metadata/valueextractors/TupleCopyValueExtractor.java
----------------------------------------------------------------------
diff --git a/asterix-metadata/src/main/java/org/apache/asterix/metadata/valueextractors/TupleCopyValueExtractor.java b/asterix-metadata/src/main/java/org/apache/asterix/metadata/valueextractors/TupleCopyValueExtractor.java
index b9ca7bd..33f8969 100644
--- a/asterix-metadata/src/main/java/org/apache/asterix/metadata/valueextractors/TupleCopyValueExtractor.java
+++ b/asterix-metadata/src/main/java/org/apache/asterix/metadata/valueextractors/TupleCopyValueExtractor.java
@@ -26,7 +26,6 @@ import org.apache.asterix.common.transactions.JobId;
 import org.apache.asterix.metadata.MetadataException;
 import org.apache.asterix.metadata.api.IValueExtractor;
 import org.apache.hyracks.api.dataflow.value.ITypeTraits;
-import org.apache.hyracks.api.exceptions.HyracksDataException;
 import org.apache.hyracks.dataflow.common.data.accessors.ITupleReference;
 import org.apache.hyracks.storage.am.common.tuples.TypeAwareTupleReference;
 import org.apache.hyracks.storage.am.common.tuples.TypeAwareTupleWriter;
@@ -49,8 +48,7 @@ public class TupleCopyValueExtractor implements IValueExtractor<ITupleReference>
     }
 
     @Override
-    public ITupleReference getValue(JobId jobId, ITupleReference tuple) throws MetadataException, HyracksDataException,
-            IOException {
+    public ITupleReference getValue(JobId jobId, ITupleReference tuple) throws MetadataException, IOException {
         int numBytes = tupleWriter.bytesRequired(tuple);
         tupleBytes = new byte[numBytes];
         tupleWriter.writeTuple(tuple, tupleBytes, 0);

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/9dcba3c9/asterix-om/src/main/java/org/apache/asterix/builders/IARecordBuilder.java
----------------------------------------------------------------------
diff --git a/asterix-om/src/main/java/org/apache/asterix/builders/IARecordBuilder.java b/asterix-om/src/main/java/org/apache/asterix/builders/IARecordBuilder.java
index ee15d5d..ea3bd51 100644
--- a/asterix-om/src/main/java/org/apache/asterix/builders/IARecordBuilder.java
+++ b/asterix-om/src/main/java/org/apache/asterix/builders/IARecordBuilder.java
@@ -24,6 +24,7 @@ import java.io.IOException;
 
 import org.apache.asterix.common.exceptions.AsterixException;
 import org.apache.asterix.om.types.ARecordType;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
 import org.apache.hyracks.data.std.api.IValueReference;
 
 /**
@@ -57,7 +58,7 @@ public interface IARecordBuilder {
      * @throws AsterixException
      *             if the field name conflicts with a closed field name
      */
-    public void addField(IValueReference name, IValueReference value) throws AsterixException;
+    public void addField(IValueReference name, IValueReference value) throws HyracksDataException;
 
     /**
      * @param out
@@ -69,7 +70,7 @@ public interface IARecordBuilder {
      * @throws AsterixException
      *             if any open field names conflict with each other
      */
-    public void write(DataOutput out, boolean writeTypeTag) throws IOException, AsterixException;
+    public void write(DataOutput out, boolean writeTypeTag) throws HyracksDataException;
 
     public int getFieldId(String fieldName);
 
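A small usage sketch of the revised IARecordBuilder contract (illustrative only, not from the patch): both addField overloads and write(...) are now callable from any method declared with HyracksDataException, and an open-field name that clashes with a closed field surfaces as a HyracksDataException instead of an AsterixException. The name/value arguments are assumed to hold already-serialized ADM data (e.g. filled through an ArrayBackedValueStorage), and the builder is assumed to have been reset for its record type beforehand.

    import java.io.DataOutput;

    import org.apache.asterix.builders.IARecordBuilder;
    import org.apache.hyracks.api.exceptions.HyracksDataException;
    import org.apache.hyracks.data.std.api.IValueReference;

    public class OpenFieldSketch {
        static void addOpenFieldAndWrite(IARecordBuilder rb, IValueReference name, IValueReference value,
                DataOutput out) throws HyracksDataException {
            rb.addField(name, value); // rejects names that collide with a closed field
            rb.write(out, true);      // 'true' asks the builder to emit the record type tag
        }
    }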


[3/3] incubator-asterixdb git commit: Some exception cleanup

Posted by ti...@apache.org.
Some exception cleanup

- Use HyracksDataException and IOException for errors ingesting external
  data (instead of a mixture of AlgebricksException, AsterixException, and
  HyracksDataException).
- Avoid unnecessary wrapping of exceptions.
- Wrap exceptions at boundaries (e.g. HiveRecordParser).
- ADMLexerException and ADMDataParser.ParseException now extend
  HyracksDataException.
- Tighter exception declarations and handling for internal methods.

Change-Id: Icf9591046c44f5fa2281874ff0c98d780e741267
Reviewed-on: https://asterix-gerrit.ics.uci.edu/603
Tested-by: Jenkins <je...@fulliautomatix.ics.uci.edu>
Reviewed-by: Murtadha Hubail <hu...@gmail.com>
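
The "wrap exceptions at boundaries" bullet can be pictured with a small, hypothetical boundary class (names below are invented; the actual HiveRecordParser changes are in the full diff, not in this excerpt): a checked exception coming out of an external library is converted into HyracksDataException exactly once, at the point where foreign code meets the Hyracks runtime, rather than being re-wrapped at every internal call site.

    import org.apache.hyracks.api.exceptions.HyracksDataException;

    public class BoundaryWrapSketch {

        // stands in for a checked exception of an external library (e.g. a Hive SerDe failure)
        static class ThirdPartyException extends Exception {
            private static final long serialVersionUID = 1L;
            ThirdPartyException(String msg) { super(msg); }
        }

        static Object readWithExternalLibrary() throws ThirdPartyException {
            throw new ThirdPartyException("external failure");
        }

        // the boundary: foreign exceptions are wrapped here, once
        static Object parseRecord() throws HyracksDataException {
            try {
                return readWithExternalLibrary();
            } catch (ThirdPartyException e) {
                throw new HyracksDataException(e);
            }
        }
    }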


Project: http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/commit/9dcba3c9
Tree: http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/tree/9dcba3c9
Diff: http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/diff/9dcba3c9

Branch: refs/heads/master
Commit: 9dcba3c9fec7c83b9f834c1289fe8dca70be4d32
Parents: f79a896
Author: Till Westmann <ti...@apache.org>
Authored: Mon Feb 1 19:42:32 2016 -0800
Committer: Till Westmann <ti...@apache.org>
Committed: Mon Feb 1 21:14:27 2016 -0800

----------------------------------------------------------------------
 .../optimizer/rules/ConstantFoldingRule.java    |  20 +-
 .../LangExpressionToPlanTranslator.java         |   6 +-
 .../queries/records/RecordsQueries.xml          |   4 +-
 .../queries_sqlpp/records/RecordsQueries.xml    |   4 +-
 .../src/test/resources/runtimets/testsuite.xml  |   2 +-
 .../asterix/external/api/IDataParser.java       |   8 +-
 .../asterix/external/api/IFunctionHelper.java   |   6 +-
 .../apache/asterix/external/api/IJObject.java   |   3 +-
 .../asterix/external/api/IRecordDataParser.java |   3 +-
 .../asterix/external/api/IStreamDataParser.java |   6 +-
 .../external/library/JavaFunctionHelper.java    |  22 +-
 .../external/library/java/JObjectAccessors.java |  19 +-
 .../external/library/java/JObjectUtil.java      |  13 +-
 .../asterix/external/library/java/JObjects.java |  17 +-
 .../asterix/external/parser/ADMDataParser.java  |  67 ++----
 .../external/parser/AbstractDataParser.java     | 232 +++++++------------
 .../external/parser/DelimitedDataParser.java    |   9 +-
 .../external/parser/HiveRecordParser.java       |  39 ++--
 .../asterix/external/parser/RSSParser.java      |   2 +-
 .../parser/RecordWithMetadataParser.java        |  28 ++-
 .../asterix/external/parser/TweetParser.java    |   2 +-
 .../src/main/resources/adm.grammar              |   5 +-
 .../asterix/lexergenerator/LexerGenerator.java  |   8 +-
 .../src/main/resources/LexerException.java      |   2 +-
 .../src/main/resources/default.config           |   7 +-
 .../asterix/metadata/MetadataManager.java       |   4 +-
 .../apache/asterix/metadata/MetadataNode.java   | 174 ++++++--------
 .../metadata/MetadataTransactionContext.java    |   2 +-
 .../asterix/metadata/api/IValueExtractor.java   |   2 +-
 .../entities/ExternalDatasetDetails.java        |   9 +-
 .../apache/asterix/metadata/entities/Index.java |  22 +-
 .../entities/InternalDatasetDetails.java        |  28 +--
 .../CompactionPolicyTupleTranslator.java        |  13 +-
 .../DatasetTupleTranslator.java                 |  13 +-
 .../DatasourceAdapterTupleTranslator.java       |  15 +-
 .../DatatypeTupleTranslator.java                |  27 +--
 .../DataverseTupleTranslator.java               |   7 +-
 .../ExternalFileTupleTranslator.java            |  18 +-
 .../FeedPolicyTupleTranslator.java              |  16 +-
 .../FeedTupleTranslator.java                    |  25 +-
 .../FunctionTupleTranslator.java                |  11 +-
 .../IndexTupleTranslator.java                   |  29 +--
 .../LibraryTupleTranslator.java                 |   7 +-
 .../NodeGroupTupleTranslator.java               |  11 +-
 .../NodeTupleTranslator.java                    |   7 +-
 .../asterix/metadata/utils/DatasetUtils.java    |   8 +-
 .../MetadataEntityValueExtractor.java           |   3 +-
 .../NestedDatatypeNameValueExtractor.java       |  25 +-
 .../TupleCopyValueExtractor.java                |   4 +-
 .../asterix/builders/IARecordBuilder.java       |   5 +-
 .../apache/asterix/builders/RecordBuilder.java  |  83 ++++---
 .../serde/ARecordSerializerDeserializer.java    |   6 +-
 .../asterix/om/base/AMutableInterval.java       |   6 +-
 .../impl/RecordAddFieldsTypeComputer.java       |  17 +-
 .../impl/RecordMergeTypeComputer.java           |  32 ++-
 .../impl/RecordRemoveFieldsTypeComputer.java    |  34 ++-
 .../asterix/om/types/AOrderedListType.java      |   3 +-
 .../apache/asterix/om/types/ARecordType.java    |  19 +-
 .../asterix/om/types/AUnorderedListType.java    |   3 +-
 .../asterix/om/types/EnumDeserializer.java      |   3 +-
 .../apache/asterix/om/types/TypeHierarchy.java  |  44 ----
 .../ClosedRecordConstructorEvalFactory.java     |   3 +-
 .../OpenRecordConstructorDescriptor.java        |   3 +-
 .../records/RecordAddFieldsDescriptor.java      |   2 +-
 .../temporal/IntervalBinDescriptor.java         |  64 +++--
 65 files changed, 490 insertions(+), 851 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/9dcba3c9/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/ConstantFoldingRule.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/ConstantFoldingRule.java b/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/ConstantFoldingRule.java
index f51d454..effb973 100644
--- a/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/ConstantFoldingRule.java
+++ b/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/ConstantFoldingRule.java
@@ -20,22 +20,19 @@
 package org.apache.asterix.optimizer.rules;
 
 import java.io.DataInputStream;
-import java.io.IOException;
 import java.nio.ByteBuffer;
 import java.util.List;
 
-import org.apache.commons.lang3.mutable.Mutable;
-
 import org.apache.asterix.common.config.GlobalConfig;
 import org.apache.asterix.dataflow.data.common.AqlExpressionTypeComputer;
 import org.apache.asterix.dataflow.data.common.AqlNullableTypeComputer;
 import org.apache.asterix.dataflow.data.nontagged.AqlNullWriterFactory;
+import org.apache.asterix.formats.nontagged.AqlADMPrinterFactoryProvider;
 import org.apache.asterix.formats.nontagged.AqlBinaryBooleanInspectorImpl;
 import org.apache.asterix.formats.nontagged.AqlBinaryComparatorFactoryProvider;
 import org.apache.asterix.formats.nontagged.AqlBinaryHashFunctionFactoryProvider;
 import org.apache.asterix.formats.nontagged.AqlBinaryHashFunctionFamilyProvider;
 import org.apache.asterix.formats.nontagged.AqlBinaryIntegerInspector;
-import org.apache.asterix.formats.nontagged.AqlADMPrinterFactoryProvider;
 import org.apache.asterix.formats.nontagged.AqlSerializerDeserializerProvider;
 import org.apache.asterix.formats.nontagged.AqlTypeTraitProvider;
 import org.apache.asterix.jobgen.QueryLogicalExpressionJobGen;
@@ -47,6 +44,7 @@ import org.apache.asterix.om.typecomputer.base.TypeComputerUtilities;
 import org.apache.asterix.om.types.ARecordType;
 import org.apache.asterix.om.types.ATypeTag;
 import org.apache.asterix.om.types.AbstractCollectionType;
+import org.apache.commons.lang3.mutable.Mutable;
 import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;
 import org.apache.hyracks.algebricks.common.utils.Pair;
 import org.apache.hyracks.algebricks.core.algebra.base.ILogicalExpression;
@@ -123,7 +121,8 @@ public class ConstantFoldingRule implements IAlgebraicRewriteRule {
     private static final IOperatorSchema[] _emptySchemas = new IOperatorSchema[] {};
 
     @Override
-    public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context) throws AlgebricksException {
+    public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context)
+            throws AlgebricksException {
         return false;
     }
 
@@ -182,8 +181,8 @@ public class ConstantFoldingRule implements IAlgebraicRewriteRule {
             if (expr.getFunctionIdentifier().equals(AsterixBuiltinFunctions.UNORDERED_LIST_CONSTRUCTOR)
                     || expr.getFunctionIdentifier().equals(AsterixBuiltinFunctions.ORDERED_LIST_CONSTRUCTOR)) {
                 AbstractCollectionType listType = (AbstractCollectionType) TypeComputerUtilities.getRequiredType(expr);
-                if (listType != null
-                        && (listType.getItemType().getTypeTag() == ATypeTag.ANY || listType.getItemType() instanceof AbstractCollectionType)) {
+                if (listType != null && (listType.getItemType().getTypeTag() == ATypeTag.ANY
+                        || listType.getItemType() instanceof AbstractCollectionType)) {
                     //case1: listType == null,  could be a nested list inside a list<ANY>
                     //case2: itemType = ANY
                     //case3: itemType = a nested list
@@ -194,12 +193,7 @@ public class ConstantFoldingRule implements IAlgebraicRewriteRule {
                 ARecordType rt = (ARecordType) _emptyTypeEnv.getType(expr.getArguments().get(0).getValue());
                 String str = ((AString) ((AsterixConstantValue) ((ConstantExpression) expr.getArguments().get(1)
                         .getValue()).getValue()).getObject()).getStringValue();
-                int k;
-                try {
-                    k = rt.getFieldIndex(str);
-                } catch (IOException e) {
-                    throw new AlgebricksException(e);
-                }
+                int k = rt.getFieldIndex(str);
                 if (k >= 0) {
                     // wait for the ByNameToByIndex rule to apply
                     return new Pair<Boolean, ILogicalExpression>(changed, expr);

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/9dcba3c9/asterix-algebra/src/main/java/org/apache/asterix/translator/LangExpressionToPlanTranslator.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/org/apache/asterix/translator/LangExpressionToPlanTranslator.java b/asterix-algebra/src/main/java/org/apache/asterix/translator/LangExpressionToPlanTranslator.java
index b8fe0e1..a1a6d4b 100644
--- a/asterix-algebra/src/main/java/org/apache/asterix/translator/LangExpressionToPlanTranslator.java
+++ b/asterix-algebra/src/main/java/org/apache/asterix/translator/LangExpressionToPlanTranslator.java
@@ -364,11 +364,7 @@ class LangExpressionToPlanTranslator
                     upsertOp.setPrevRecordType(recordType);
                     if (additionalFilteringField != null) {
                         upsertOp.setPrevFilterVar(context.newVar());
-                        try {
-                            upsertOp.setPrevFilterType(recordType.getFieldType(additionalFilteringField.get(0)));
-                        } catch (IOException e) {
-                            throw new AlgebricksException("unable to get the type of filter field");
-                        }
+                        upsertOp.setPrevFilterType(recordType.getFieldType(additionalFilteringField.get(0)));
                     }
                     leafOperator = new SinkOperator();
                     leafOperator.getInputs().add(new MutableObject<ILogicalOperator>(upsertOp));

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/9dcba3c9/asterix-app/src/test/resources/runtimets/queries/records/RecordsQueries.xml
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/resources/runtimets/queries/records/RecordsQueries.xml b/asterix-app/src/test/resources/runtimets/queries/records/RecordsQueries.xml
index ad30289..0e89dd8 100644
--- a/asterix-app/src/test/resources/runtimets/queries/records/RecordsQueries.xml
+++ b/asterix-app/src/test/resources/runtimets/queries/records/RecordsQueries.xml
@@ -120,13 +120,13 @@
         <test-case FilePath="records">
             <compilation-unit name="open-closed-fieldname-conflict_issue173">
                 <output-dir compare="Text">open-closed-fieldname-conflict_issue173</output-dir>
-                <expected-error>org.apache.asterix.common.exceptions.AsterixException</expected-error>
+                <expected-error>org.apache.hyracks.api.exceptions.HyracksDataException: Open field "name" has the same field name as closed field at index 0</expected-error>
             </compilation-unit>
         </test-case>
         <test-case FilePath="records">
             <compilation-unit name="open-open-fieldname-conflict_issue173">
                 <output-dir compare="Text">open-open-fieldname-conflict_issue173</output-dir>
-                <expected-error>org.apache.asterix.common.exceptions.AsterixException</expected-error>
+                <expected-error>org.apache.hyracks.api.exceptions.HyracksDataException: Open fields 0 and 1 have the same field name "name"</expected-error>
             </compilation-unit>
         </test-case>
         <!-- RECORD MANIPULATION TESTS -->

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/9dcba3c9/asterix-app/src/test/resources/runtimets/queries_sqlpp/records/RecordsQueries.xml
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/resources/runtimets/queries_sqlpp/records/RecordsQueries.xml b/asterix-app/src/test/resources/runtimets/queries_sqlpp/records/RecordsQueries.xml
index 48ce410..a4121ea 100644
--- a/asterix-app/src/test/resources/runtimets/queries_sqlpp/records/RecordsQueries.xml
+++ b/asterix-app/src/test/resources/runtimets/queries_sqlpp/records/RecordsQueries.xml
@@ -121,13 +121,13 @@
         <test-case FilePath="records">
             <compilation-unit name="open-closed-fieldname-conflict_issue173">
                 <output-dir compare="Text">open-closed-fieldname-conflict_issue173</output-dir>
-                <expected-error>org.apache.asterix.common.exceptions.AsterixException</expected-error>
+                <expected-error>org.apache.hyracks.api.exceptions.HyracksDataException: Open field "name" has the same field name as closed field at index 0</expected-error>
             </compilation-unit>
         </test-case>
         <test-case FilePath="records">
             <compilation-unit name="open-open-fieldname-conflict_issue173">
                 <output-dir compare="Text">open-open-fieldname-conflict_issue173</output-dir>
-                <expected-error>org.apache.asterix.common.exceptions.AsterixException</expected-error>
+                <expected-error>org.apache.hyracks.api.exceptions.HyracksDataException: Open fields 0 and 1 have the same field name "name"</expected-error>
             </compilation-unit>
         </test-case>
     </test-group>

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/9dcba3c9/asterix-app/src/test/resources/runtimets/testsuite.xml
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/resources/runtimets/testsuite.xml b/asterix-app/src/test/resources/runtimets/testsuite.xml
index 621c73a..d593039 100644
--- a/asterix-app/src/test/resources/runtimets/testsuite.xml
+++ b/asterix-app/src/test/resources/runtimets/testsuite.xml
@@ -5706,7 +5706,7 @@
         <test-case FilePath="cross-dataverse">
             <compilation-unit name="drop-dataverse">
                 <output-dir compare="Text">drop-dataverse</output-dir>
-                <expected-error>SyntaxError: org.apache.asterix.metadata.MetadataException: Cannot drop dataverse. Type a.a used by dataset b.b1</expected-error>
+                <expected-error>org.apache.asterix.metadata.MetadataException: Cannot drop dataverse. Type a.a used by dataset b.b1</expected-error>
             </compilation-unit>
         </test-case>
         <test-case FilePath="cross-dataverse">

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/9dcba3c9/asterix-external-data/src/main/java/org/apache/asterix/external/api/IDataParser.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/api/IDataParser.java b/asterix-external-data/src/main/java/org/apache/asterix/external/api/IDataParser.java
index 4ad4c4f..e680822 100644
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/api/IDataParser.java
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/api/IDataParser.java
@@ -55,18 +55,16 @@ public interface IDataParser {
      *            2. The query compiler.
      * @param recordType
      *            The expected record type
-     * @throws HyracksDataException
      * @throws IOException
      */
-    public void configure(Map<String, String> configuration, ARecordType recordType)
-            throws HyracksDataException, IOException;
+    public void configure(Map<String, String> configuration, ARecordType recordType) throws IOException;
 
     /*
      * The following two static methods are expensive. right now, they are used by RSSFeeds and Twitter feed
      * TODO: Get rid of them
      */
     public static void writeRecord(AMutableRecord record, DataOutput dataOutput, IARecordBuilder recordBuilder)
-            throws IOException, AsterixException {
+            throws HyracksDataException {
         ArrayBackedValueStorage fieldValue = new ArrayBackedValueStorage();
         int numFields = record.getType().getFieldNames().length;
         for (int pos = 0; pos < numFields; pos++) {
@@ -79,7 +77,7 @@ public interface IDataParser {
     }
 
     @SuppressWarnings("unchecked")
-    public static void writeObject(IAObject obj, DataOutput dataOutput) throws IOException, AsterixException {
+    public static void writeObject(IAObject obj, DataOutput dataOutput) throws HyracksDataException {
         switch (obj.getType().getTypeTag()) {
             case RECORD: {
                 IARecordBuilder recordBuilder = new RecordBuilder();

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/9dcba3c9/asterix-external-data/src/main/java/org/apache/asterix/external/api/IFunctionHelper.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/api/IFunctionHelper.java b/asterix-external-data/src/main/java/org/apache/asterix/external/api/IFunctionHelper.java
index ebd0757..b8be685 100755
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/api/IFunctionHelper.java
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/api/IFunctionHelper.java
@@ -18,10 +18,8 @@
  */
 package org.apache.asterix.external.api;
 
-import org.apache.asterix.common.exceptions.AsterixException;
 import org.apache.asterix.external.library.java.JTypeTag;
-
-import java.io.IOException;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
 
 public interface IFunctionHelper {
 
@@ -29,7 +27,7 @@ public interface IFunctionHelper {
 
     public IJObject getResultObject();
 
-    public void setResult(IJObject result) throws IOException, AsterixException;
+    public void setResult(IJObject result) throws HyracksDataException;
 
     public boolean isValidResult();
 

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/9dcba3c9/asterix-external-data/src/main/java/org/apache/asterix/external/api/IJObject.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/api/IJObject.java b/asterix-external-data/src/main/java/org/apache/asterix/external/api/IJObject.java
index a88f47d..250e4fa 100644
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/api/IJObject.java
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/api/IJObject.java
@@ -22,7 +22,6 @@ import java.io.DataOutput;
 
 import org.apache.asterix.om.base.IAObject;
 import org.apache.asterix.om.types.ATypeTag;
-import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
 
 public interface IJObject {
@@ -33,5 +32,5 @@ public interface IJObject {
 
     public void serialize(DataOutput dataOutput, boolean writeTypeTag) throws HyracksDataException;
 
-    public void reset() throws AlgebricksException;
+    public void reset() throws HyracksDataException;
 }

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/9dcba3c9/asterix-external-data/src/main/java/org/apache/asterix/external/api/IRecordDataParser.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/api/IRecordDataParser.java b/asterix-external-data/src/main/java/org/apache/asterix/external/api/IRecordDataParser.java
index cc24847..3cb8f37 100644
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/api/IRecordDataParser.java
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/api/IRecordDataParser.java
@@ -19,6 +19,7 @@
 package org.apache.asterix.external.api;
 
 import java.io.DataOutput;
+import java.io.IOException;
 
 public interface IRecordDataParser<T> extends IDataParser {
 
@@ -27,7 +28,7 @@ public interface IRecordDataParser<T> extends IDataParser {
      * @param out
      * @throws Exception
      */
-    public void parse(IRawRecord<? extends T> record, DataOutput out) throws Exception;
+    public void parse(IRawRecord<? extends T> record, DataOutput out) throws IOException;
 
     /**
      * @return the record class

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/9dcba3c9/asterix-external-data/src/main/java/org/apache/asterix/external/api/IStreamDataParser.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/api/IStreamDataParser.java b/asterix-external-data/src/main/java/org/apache/asterix/external/api/IStreamDataParser.java
index 531d050..f596efa 100644
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/api/IStreamDataParser.java
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/api/IStreamDataParser.java
@@ -26,20 +26,22 @@ public interface IStreamDataParser extends IDataParser {
     /**
      * Sets the inputStream for the parser. called only for parsers that support InputStreams
      */
-    public void setInputStream(InputStream in) throws Exception;
+    public void setInputStream(InputStream in) throws IOException;
 
     /**
      * Parse data into output AsterixDataModel binary records.
      * Used with parsers that support stream sources
+     *
      * @param out
      *            DataOutput instance that for writing the parser output.
      */
-    public boolean parse(DataOutput out) throws Exception;
+    public boolean parse(DataOutput out) throws IOException;
 
     /**
      * reset the parser state. this is called when a failure takes place
      * and the job needs to continue and to do that, the parser need to
      * be in a consistent state
+     *
      * @return true if reset was successful, false, otherwise
      * @throws IOException
      */
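
With setInputStream(...) and parse(...) narrowed from "throws Exception" to "throws IOException", a read loop over an IStreamDataParser can be written against IOException alone. The sketch below is illustrative (not from the patch) and assumes, as ADMDataParser's parseAdmInstance does on TOKEN_EOF, that parse(out) returns false once the stream is exhausted.

    import java.io.DataOutput;
    import java.io.IOException;
    import java.io.InputStream;

    import org.apache.asterix.external.api.IStreamDataParser;

    public class StreamParseLoopSketch {
        static void parseAll(IStreamDataParser parser, InputStream in, DataOutput out) throws IOException {
            parser.setInputStream(in);
            while (parser.parse(out)) {
                // each successful iteration wrote one record to 'out'
            }
        }
    }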

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/9dcba3c9/asterix-external-data/src/main/java/org/apache/asterix/external/library/JavaFunctionHelper.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/library/JavaFunctionHelper.java b/asterix-external-data/src/main/java/org/apache/asterix/external/library/JavaFunctionHelper.java
index 13ea589..34e3878 100644
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/library/JavaFunctionHelper.java
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/library/JavaFunctionHelper.java
@@ -18,6 +18,10 @@
  */
 package org.apache.asterix.external.library;
 
+import java.io.IOException;
+import java.util.HashMap;
+import java.util.Map;
+
 import org.apache.asterix.common.exceptions.AsterixException;
 import org.apache.asterix.external.api.IFunctionHelper;
 import org.apache.asterix.external.api.IJObject;
@@ -40,10 +44,6 @@ import org.apache.hyracks.api.exceptions.HyracksDataException;
 import org.apache.hyracks.data.std.api.IDataOutputProvider;
 import org.apache.hyracks.data.std.api.IValueReference;
 
-import java.io.IOException;
-import java.util.HashMap;
-import java.util.Map;
-
 public class JavaFunctionHelper implements IFunctionHelper {
 
     private final IExternalFunctionInfo finfo;
@@ -80,18 +80,14 @@ public class JavaFunctionHelper implements IFunctionHelper {
     }
 
     @Override
-    public void setResult(IJObject result) throws IOException, AsterixException {
+    public void setResult(IJObject result) throws HyracksDataException {
         if (result == null) {
             JNull.INSTANCE.serialize(outputProvider.getDataOutput(), true);
             isValidResult = false;
         } else {
-            try {
-                isValidResult = true;
-                result.serialize(outputProvider.getDataOutput(), true);
-                result.reset();
-            } catch (IOException | AlgebricksException e) {
-                throw new HyracksDataException(e);
-            }
+            isValidResult = true;
+            result.serialize(outputProvider.getDataOutput(), true);
+            result.reset();
         }
     }
 
@@ -99,7 +95,7 @@ public class JavaFunctionHelper implements IFunctionHelper {
      * Gets the value of the result flag
      *
      * @return
-     *    boolean True is the setResult is called and result is not null
+     *         boolean True is the setResult is called and result is not null
      */
     @Override
     public boolean isValidResult() {

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/9dcba3c9/asterix-external-data/src/main/java/org/apache/asterix/external/library/java/JObjectAccessors.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/library/java/JObjectAccessors.java b/asterix-external-data/src/main/java/org/apache/asterix/external/library/java/JObjectAccessors.java
index 1835739..5923354 100644
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/library/java/JObjectAccessors.java
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/library/java/JObjectAccessors.java
@@ -88,7 +88,6 @@ import org.apache.asterix.om.types.BuiltinType;
 import org.apache.asterix.om.types.EnumDeserializer;
 import org.apache.asterix.om.types.IAType;
 import org.apache.asterix.om.util.container.IObjectPool;
-import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
 import org.apache.hyracks.util.string.UTF8StringReader;
 
@@ -337,11 +336,7 @@ public class JObjectAccessors {
             long intervalEnd = AIntervalSerializerDeserializer.getIntervalEnd(b, s);
             byte intervalType = AIntervalSerializerDeserializer.getIntervalTimeType(b, s);
             IJObject jObject = objectPool.allocate(BuiltinType.AINTERVAL);
-            try {
-                ((JInterval) jObject).setValue(intervalStart, intervalEnd, intervalType);
-            } catch (AlgebricksException e) {
-                throw new HyracksDataException(e);
-            }
+            ((JInterval) jObject).setValue(intervalStart, intervalEnd, intervalType);
             return jObject;
         }
     }
@@ -464,11 +459,7 @@ public class JObjectAccessors {
         @Override
         public JRecord access(ARecordVisitablePointable pointable, IObjectPool<IJObject, IAType> objectPool,
                 ARecordType recordType, JObjectPointableVisitor pointableVisitor) throws HyracksDataException {
-            try {
-                jRecord.reset();
-            } catch (AlgebricksException e) {
-                throw new HyracksDataException(e);
-            }
+            jRecord.reset();
             ARecordVisitablePointable recordPointable = pointable;
             List<IVisitablePointable> fieldPointables = recordPointable.getFieldValues();
             List<IVisitablePointable> fieldTypeTags = recordPointable.getFieldTypeTags();
@@ -525,11 +516,7 @@ public class JObjectAccessors {
         }
 
         public void reset() throws HyracksDataException {
-            try {
-                jRecord.reset();
-            } catch (AlgebricksException e) {
-                throw new HyracksDataException(e);
-            }
+            jRecord.reset();
             openFields.clear();
         }
 

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/9dcba3c9/asterix-external-data/src/main/java/org/apache/asterix/external/library/java/JObjectUtil.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/library/java/JObjectUtil.java b/asterix-external-data/src/main/java/org/apache/asterix/external/library/java/JObjectUtil.java
index e7c1ec1..ed3cd32 100644
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/library/java/JObjectUtil.java
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/library/java/JObjectUtil.java
@@ -22,7 +22,6 @@ import java.io.IOException;
 import java.util.ArrayList;
 import java.util.List;
 
-import org.apache.asterix.common.exceptions.AsterixException;
 import org.apache.asterix.dataflow.data.nontagged.serde.AStringSerializerDeserializer;
 import org.apache.asterix.dataflow.data.nontagged.serde.SerializerDeserializerUtil;
 import org.apache.asterix.external.api.IJObject;
@@ -57,13 +56,13 @@ import org.apache.asterix.om.types.EnumDeserializer;
 import org.apache.asterix.om.types.IAType;
 import org.apache.asterix.om.util.NonTaggedFormatUtil;
 import org.apache.asterix.om.util.container.IObjectPool;
-import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;
 
 public class JObjectUtil {
 
     /**
      * Normalize an input string by removing linebreaks, and replace them with space
      * Also remove non-readable special characters
+     *
      * @param originalString
      *            The input String
      * @return
@@ -88,7 +87,7 @@ public class JObjectUtil {
     }
 
     public static IJObject getJType(ATypeTag typeTag, IAType type, ByteArrayAccessibleDataInputStream dis,
-            IObjectPool<IJObject, IAType> objectPool) throws IOException, AsterixException {
+            IObjectPool<IJObject, IAType> objectPool) throws IOException {
         IJObject jObject;
 
         switch (typeTag) {
@@ -160,11 +159,7 @@ public class JObjectUtil {
                 long start = dis.readLong();
                 long end = dis.readLong();
                 byte intervalType = dis.readByte();
-                try {
-                    ((JInterval) jObject).setValue(start, end, intervalType);
-                } catch (AlgebricksException e) {
-                    throw new AsterixException(e);
-                }
+                ((JInterval) jObject).setValue(start, end, intervalType);
                 break;
             }
 
@@ -408,7 +403,7 @@ public class JObjectUtil {
         return fields;
     }
 
-    private static ARecordType mergeRecordTypes(ARecordType recType1, ARecordType recType2) throws AsterixException {
+    private static ARecordType mergeRecordTypes(ARecordType recType1, ARecordType recType2) {
 
         String[] fieldNames = new String[recType1.getFieldNames().length + recType2.getFieldNames().length];
         IAType[] fieldTypes = new IAType[recType1.getFieldTypes().length + recType2.getFieldTypes().length];

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/9dcba3c9/asterix-external-data/src/main/java/org/apache/asterix/external/library/java/JObjects.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/library/java/JObjects.java b/asterix-external-data/src/main/java/org/apache/asterix/external/library/java/JObjects.java
index 97fe983..d5e849c 100644
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/library/java/JObjects.java
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/library/java/JObjects.java
@@ -91,7 +91,6 @@ import org.apache.asterix.om.types.ARecordType;
 import org.apache.asterix.om.types.ATypeTag;
 import org.apache.asterix.om.types.AUnorderedListType;
 import org.apache.asterix.om.types.IAType;
-import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
 import org.apache.hyracks.data.std.util.ArrayBackedValueStorage;
 
@@ -548,7 +547,7 @@ public class JObjects {
             super(new AMutableInterval(intervalStart, intervalEnd, (byte) 0));
         }
 
-        public void setValue(long intervalStart, long intervalEnd, byte typetag) throws AlgebricksException {
+        public void setValue(long intervalStart, long intervalEnd, byte typetag) throws HyracksDataException {
             ((AMutableInterval) value).setValue(intervalStart, intervalEnd, typetag);
         }
 
@@ -577,7 +576,7 @@ public class JObjects {
         }
 
         @Override
-        public void reset() throws AlgebricksException {
+        public void reset() throws HyracksDataException {
             ((AMutableInterval) value).setValue(0L, 0L, (byte) 0);
         }
 
@@ -1097,14 +1096,10 @@ public class JObjects {
                         recordBuilder.addField(openFieldName, openFieldValue);
                     }
                 }
-            } catch (IOException | AsterixException ae) {
+            } catch (IOException ae) {
                 throw new HyracksDataException(ae);
             }
-            try {
-                recordBuilder.write(output, writeTypeTag);
-            } catch (IOException | AsterixException e) {
-                throw new HyracksDataException(e);
-            }
+            recordBuilder.write(output, writeTypeTag);
         }
 
         @Override
@@ -1113,7 +1108,7 @@ public class JObjects {
         }
 
         @Override
-        public void reset() throws AlgebricksException {
+        public void reset() throws HyracksDataException {
             if (openFields != null && !openFields.isEmpty()) {
                 openFields.clear();
             }
@@ -1126,7 +1121,7 @@ public class JObjects {
             }
         }
 
-        public void reset(IJObject[] fields, LinkedHashMap<String, IJObject> openFields) throws AlgebricksException {
+        public void reset(IJObject[] fields, LinkedHashMap<String, IJObject> openFields) throws HyracksDataException {
             this.reset();
             this.fields = fields;
             this.openFields = openFields;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/9dcba3c9/asterix-external-data/src/main/java/org/apache/asterix/external/parser/ADMDataParser.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/parser/ADMDataParser.java b/asterix-external-data/src/main/java/org/apache/asterix/external/parser/ADMDataParser.java
index d523c6e..d9a93ff 100644
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/parser/ADMDataParser.java
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/parser/ADMDataParser.java
@@ -33,7 +33,6 @@ import org.apache.asterix.builders.ListBuilderFactory;
 import org.apache.asterix.builders.OrderedListBuilder;
 import org.apache.asterix.builders.RecordBuilderFactory;
 import org.apache.asterix.builders.UnorderedListBuilder;
-import org.apache.asterix.common.exceptions.AsterixException;
 import org.apache.asterix.dataflow.data.nontagged.serde.APolygonSerializerDeserializer;
 import org.apache.asterix.external.api.IExternalDataSourceFactory.DataSourceType;
 import org.apache.asterix.external.api.IRawRecord;
@@ -54,7 +53,6 @@ import org.apache.asterix.om.util.NonTaggedFormatUtil;
 import org.apache.asterix.om.util.container.IObjectPool;
 import org.apache.asterix.om.util.container.ListObjectPool;
 import org.apache.asterix.runtime.operators.file.adm.AdmLexer;
-import org.apache.asterix.runtime.operators.file.adm.AdmLexerException;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
 import org.apache.hyracks.data.std.api.IMutableValueStorage;
 import org.apache.hyracks.data.std.util.ArrayBackedValueStorage;
@@ -82,7 +80,7 @@ public class ADMDataParser extends AbstractDataParser implements IStreamDataPars
     private String mismatchErrorMessage2 = " got a value of type ";
     private Map<String, String> configuration;
 
-    static class ParseException extends AsterixException {
+    static class ParseException extends HyracksDataException {
         private static final long serialVersionUID = 1L;
         private String filename;
         private int line = -1;
@@ -137,17 +135,15 @@ public class ADMDataParser extends AbstractDataParser implements IStreamDataPars
     }
 
     @Override
-    public boolean parse(DataOutput out) throws AsterixException {
+    public boolean parse(DataOutput out) throws IOException {
         try {
             resetPools();
             return parseAdmInstance(recordType, out);
-        } catch (IOException e) {
-            throw new ParseException(e, filename, admLexer.getLine(), admLexer.getColumn());
-        } catch (AdmLexerException e) {
-            throw new AsterixException(e);
         } catch (ParseException e) {
             e.setLocation(filename, admLexer.getLine(), admLexer.getColumn());
             throw e;
+        } catch (IOException e) {
+            throw new ParseException(e, filename, admLexer.getLine(), admLexer.getColumn());
         }
     }
 
@@ -168,18 +164,16 @@ public class ADMDataParser extends AbstractDataParser implements IStreamDataPars
     }
 
     @Override
-    public void parse(IRawRecord<? extends char[]> record, DataOutput out) throws Exception {
+    public void parse(IRawRecord<? extends char[]> record, DataOutput out) throws IOException {
         try {
             resetPools();
             admLexer.setBuffer(record.get());
             parseAdmInstance(recordType, out);
-        } catch (IOException e) {
-            throw new ParseException(e, filename, admLexer.getLine(), admLexer.getColumn());
-        } catch (AdmLexerException e) {
-            throw new AsterixException(e);
         } catch (ParseException e) {
             e.setLocation(filename, admLexer.getLine(), admLexer.getColumn());
             throw e;
+        } catch (IOException e) {
+            throw new ParseException(e, filename, admLexer.getLine(), admLexer.getColumn());
         }
     }
 
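Because ParseException now extends HyracksDataException (and therefore IOException), the catch order in the two parse(...) methods above is significant: the more specific ParseException must be caught before the general IOException, otherwise the location-annotating branch would be unreachable and the method would not compile. A compact, illustrative rendering of that shape, with an invented exception name standing in for ADMDataParser.ParseException:

    import java.io.IOException;

    import org.apache.hyracks.api.exceptions.HyracksDataException;

    public class CatchOrderSketch {

        static class ParseErrorSketch extends HyracksDataException {
            private static final long serialVersionUID = 1L;
            ParseErrorSketch(String msg) { super(msg); }
        }

        static void parse(boolean lexerFailure) throws IOException {
            try {
                if (lexerFailure) {
                    throw new ParseErrorSketch("bad token");
                }
                throw new IOException("I/O failure while reading the input");
            } catch (ParseErrorSketch e) { // most specific first: annotate and rethrow as-is
                throw e;
            } catch (IOException e) {      // any other I/O failure becomes a parse error
                throw new ParseErrorSketch(e.getMessage());
            }
        }
    }
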
@@ -189,12 +183,11 @@ public class ADMDataParser extends AbstractDataParser implements IStreamDataPars
     }
 
     @Override
-    public void setInputStream(InputStream in) throws Exception {
+    public void setInputStream(InputStream in) throws IOException {
         admLexer = new AdmLexer(new java.io.InputStreamReader(in));
     }
 
-    protected boolean parseAdmInstance(IAType objectType, DataOutput out)
-            throws AsterixException, IOException, AdmLexerException {
+    protected boolean parseAdmInstance(IAType objectType, DataOutput out) throws IOException {
         int token = admLexer.next();
         if (token == AdmLexer.TOKEN_EOF) {
             return false;
@@ -204,8 +197,7 @@ public class ADMDataParser extends AbstractDataParser implements IStreamDataPars
         }
     }
 
-    private void admFromLexerStream(int token, IAType objectType, DataOutput out)
-            throws AsterixException, IOException, AdmLexerException {
+    private void admFromLexerStream(int token, IAType objectType, DataOutput out) throws IOException {
 
         switch (token) {
             case AdmLexer.TOKEN_NULL_LITERAL: {
@@ -529,7 +521,7 @@ public class ADMDataParser extends AbstractDataParser implements IStreamDataPars
         return null; // wont get here
     }
 
-    private ATypeTag getTargetTypeTag(ATypeTag expectedTypeTag, IAType aObjectType) throws IOException {
+    private ATypeTag getTargetTypeTag(ATypeTag expectedTypeTag, IAType aObjectType) throws HyracksDataException {
         if (aObjectType == null) {
             return expectedTypeTag;
         }
@@ -559,8 +551,7 @@ public class ADMDataParser extends AbstractDataParser implements IStreamDataPars
         return getTargetTypeTag(expectedTypeTag, aObjectType) != null;
     }
 
-    private void parseRecord(ARecordType recType, DataOutput out)
-            throws IOException, AsterixException, AdmLexerException {
+    private void parseRecord(ARecordType recType, DataOutput out) throws IOException {
 
         ArrayBackedValueStorage fieldValueBuffer = getTempBuffer();
         ArrayBackedValueStorage fieldNameBuffer = getTempBuffer();
@@ -702,8 +693,7 @@ public class ADMDataParser extends AbstractDataParser implements IStreamDataPars
         return -1;
     }
 
-    private void parseOrderedList(AOrderedListType oltype, DataOutput out)
-            throws IOException, AsterixException, AdmLexerException {
+    private void parseOrderedList(AOrderedListType oltype, DataOutput out) throws IOException {
         ArrayBackedValueStorage itemBuffer = getTempBuffer();
         OrderedListBuilder orderedListBuilder = (OrderedListBuilder) getOrderedListBuilder();
 
@@ -744,8 +734,7 @@ public class ADMDataParser extends AbstractDataParser implements IStreamDataPars
         orderedListBuilder.write(out, true);
     }
 
-    private void parseUnorderedList(AUnorderedListType uoltype, DataOutput out)
-            throws IOException, AsterixException, AdmLexerException {
+    private void parseUnorderedList(AUnorderedListType uoltype, DataOutput out) throws IOException {
         ArrayBackedValueStorage itemBuffer = getTempBuffer();
         UnorderedListBuilder unorderedListBuilder = (UnorderedListBuilder) getUnorderedListBuilder();
 
@@ -821,16 +810,14 @@ public class ADMDataParser extends AbstractDataParser implements IStreamDataPars
         }
     }
 
-    private void parseToNumericTarget(ATypeTag typeTag, IAType objectType, DataOutput out)
-            throws AsterixException, IOException {
+    private void parseToNumericTarget(ATypeTag typeTag, IAType objectType, DataOutput out) throws IOException {
         final ATypeTag targetTypeTag = getTargetTypeTag(typeTag, objectType);
         if (targetTypeTag == null || !parseValue(admLexer.getLastTokenImage(), targetTypeTag, out)) {
             throw new ParseException(mismatchErrorMessage + objectType.getTypeName() + mismatchErrorMessage2 + typeTag);
         }
     }
 
-    private void parseAndCastNumeric(ATypeTag typeTag, IAType objectType, DataOutput out)
-            throws AsterixException, IOException {
+    private void parseAndCastNumeric(ATypeTag typeTag, IAType objectType, DataOutput out) throws IOException {
         final ATypeTag targetTypeTag = getTargetTypeTag(typeTag, objectType);
         DataOutput dataOutput = out;
         if (targetTypeTag != typeTag) {
@@ -848,8 +835,7 @@ public class ADMDataParser extends AbstractDataParser implements IStreamDataPars
                 // can promote typeTag to targetTypeTag
                 ITypeConvertComputer promoteComputer = ATypeHierarchy.getTypePromoteComputer(typeTag, targetTypeTag);
                 if (promoteComputer == null) {
-                    throw new AsterixException(
-                            "Can't cast the " + typeTag + " type to the " + targetTypeTag + " type.");
+                    throw new ParseException("Can't cast the " + typeTag + " type to the " + targetTypeTag + " type.");
                 }
                 // do the promotion; note that the type tag field should be skipped
                 promoteComputer.convertType(castBuffer.getByteArray(), castBuffer.getStartOffset() + 1,
@@ -858,8 +844,7 @@ public class ADMDataParser extends AbstractDataParser implements IStreamDataPars
                 //can demote source type to the target type
                 ITypeConvertComputer demoteComputer = ATypeHierarchy.getTypeDemoteComputer(typeTag, targetTypeTag);
                 if (demoteComputer == null) {
-                    throw new AsterixException(
-                            "Can't cast the " + typeTag + " type to the " + targetTypeTag + " type.");
+                    throw new ParseException("Can't cast the " + typeTag + " type to the " + targetTypeTag + " type.");
                 }
                 // do the demotion; note that the type tag field should be skipped
                 demoteComputer.convertType(castBuffer.getByteArray(), castBuffer.getStartOffset() + 1,
@@ -868,8 +853,7 @@ public class ADMDataParser extends AbstractDataParser implements IStreamDataPars
         }
     }
 
-    private void parseConstructor(ATypeTag typeTag, IAType objectType, DataOutput out)
-            throws AsterixException, AdmLexerException, IOException {
+    private void parseConstructor(ATypeTag typeTag, IAType objectType, DataOutput out) throws IOException {
         final ATypeTag targetTypeTag = getTargetTypeTag(typeTag, objectType);
         if (targetTypeTag != null) {
             DataOutput dataOutput = out;
@@ -906,8 +890,7 @@ public class ADMDataParser extends AbstractDataParser implements IStreamDataPars
         throw new ParseException(mismatchErrorMessage + objectType.getTypeName() + ". Got " + typeTag + " instead.");
     }
 
-    private boolean parseValue(final String unquoted, ATypeTag typeTag, DataOutput out)
-            throws AsterixException, HyracksDataException, IOException {
+    private boolean parseValue(final String unquoted, ATypeTag typeTag, DataOutput out) throws HyracksDataException {
         switch (typeTag) {
             case BOOLEAN:
                 parseBoolean(unquoted, out);
@@ -981,7 +964,7 @@ public class ADMDataParser extends AbstractDataParser implements IStreamDataPars
         }
     }
 
-    private void parseBoolean(String bool, DataOutput out) throws AsterixException, HyracksDataException {
+    private void parseBoolean(String bool, DataOutput out) throws HyracksDataException {
         String errorMessage = "This can not be an instance of boolean";
         if (bool.equals("true")) {
             booleanSerde.serialize(ABoolean.TRUE, out);
@@ -992,7 +975,7 @@ public class ADMDataParser extends AbstractDataParser implements IStreamDataPars
         }
     }
 
-    private void parseInt8(String int8, DataOutput out) throws AsterixException, HyracksDataException {
+    private void parseInt8(String int8, DataOutput out) throws HyracksDataException {
         String errorMessage = "This can not be an instance of int8";
         boolean positive = true;
         byte value = 0;
@@ -1023,7 +1006,7 @@ public class ADMDataParser extends AbstractDataParser implements IStreamDataPars
         int8Serde.serialize(aInt8, out);
     }
 
-    private void parseInt16(String int16, DataOutput out) throws AsterixException, HyracksDataException {
+    private void parseInt16(String int16, DataOutput out) throws HyracksDataException {
         String errorMessage = "This can not be an instance of int16";
         boolean positive = true;
         short value = 0;
@@ -1055,7 +1038,7 @@ public class ADMDataParser extends AbstractDataParser implements IStreamDataPars
         int16Serde.serialize(aInt16, out);
     }
 
-    private void parseInt32(String int32, DataOutput out) throws AsterixException, HyracksDataException {
+    private void parseInt32(String int32, DataOutput out) throws HyracksDataException {
         String errorMessage = "This can not be an instance of int32";
         boolean positive = true;
         int value = 0;
@@ -1088,7 +1071,7 @@ public class ADMDataParser extends AbstractDataParser implements IStreamDataPars
         int32Serde.serialize(aInt32, out);
     }
 
-    private void parseInt64(String int64, DataOutput out) throws AsterixException, HyracksDataException {
+    private void parseInt64(String int64, DataOutput out) throws HyracksDataException {
         String errorMessage = "This can not be an instance of int64";
         boolean positive = true;
         long value = 0;

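With ParseException now extending HyracksDataException and the parse methods declaring plain IOException, callers of ADMDataParser no longer need separate handling for AsterixException or AdmLexerException. A minimal caller-side sketch of that effect (hypothetical code, not part of this patch; the class name AdmParseLoopSketch and method parseAll are invented, and the org.apache.asterix.external.parser package for ADMDataParser is assumed from the sibling parsers below):

import java.io.DataOutput;
import java.io.IOException;

import org.apache.asterix.external.parser.ADMDataParser;
import org.apache.hyracks.api.exceptions.HyracksDataException;

public class AdmParseLoopSketch {
    // Drains every ADM instance from an already-configured parser into 'out'.
    public static void parseAll(ADMDataParser parser, DataOutput out) throws HyracksDataException {
        try {
            // parse() returns false at end of input; a ParseException surfaces as a
            // HyracksDataException already carrying filename/line/column.
            while (parser.parse(out)) {
                // one serialized ADM instance was appended to 'out'
            }
        } catch (HyracksDataException e) {
            throw e;                           // keep parser exceptions as-is
        } catch (IOException e) {
            throw new HyracksDataException(e); // wrap any other I/O failure
        }
    }
}
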
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/9dcba3c9/asterix-external-data/src/main/java/org/apache/asterix/external/parser/AbstractDataParser.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/parser/AbstractDataParser.java b/asterix-external-data/src/main/java/org/apache/asterix/external/parser/AbstractDataParser.java
index f5f2793..20b4124 100644
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/parser/AbstractDataParser.java
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/parser/AbstractDataParser.java
@@ -73,7 +73,6 @@ import org.apache.asterix.om.base.temporal.ATimeParserFactory;
 import org.apache.asterix.om.base.temporal.GregorianCalendarSystem;
 import org.apache.asterix.om.types.ATypeTag;
 import org.apache.asterix.om.types.BuiltinType;
-import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;
 import org.apache.hyracks.api.dataflow.value.ISerializerDeserializer;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
 import org.apache.hyracks.util.bytes.Base64Parser;
@@ -198,23 +197,13 @@ public abstract class AbstractDataParser implements IDataParser {
     }
 
     protected void parseTime(String time, DataOutput out) throws HyracksDataException {
-        int chrononTimeInMs;
-        try {
-            chrononTimeInMs = ATimeParserFactory.parseTimePart(time, 0, time.length());
-        } catch (Exception e) {
-            throw new HyracksDataException(e);
-        }
+        int chrononTimeInMs = ATimeParserFactory.parseTimePart(time, 0, time.length());
         aTime.setValue(chrononTimeInMs);
         timeSerde.serialize(aTime, out);
     }
 
     protected void parseDate(String date, DataOutput out) throws HyracksDataException {
-        long chrononTimeInMs = 0;
-        try {
-            chrononTimeInMs = ADateParserFactory.parseDatePart(date, 0, date.length());
-        } catch (Exception e) {
-            throw new HyracksDataException(e);
-        }
+        long chrononTimeInMs = ADateParserFactory.parseDatePart(date, 0, date.length());
         short temp = 0;
         if (chrononTimeInMs < 0 && chrononTimeInMs % GregorianCalendarSystem.CHRONON_OF_DAY != 0) {
             temp = 1;
@@ -224,56 +213,39 @@ public abstract class AbstractDataParser implements IDataParser {
     }
 
     protected void parseDateTime(String datetime, DataOutput out) throws HyracksDataException {
-        long chrononTimeInMs = 0;
-        try {
-            // +1 if it is negative (-)
-            short timeOffset = (short) ((datetime.charAt(0) == '-') ? 1 : 0);
+        // +1 if it is negative (-)
+        short timeOffset = (short) ((datetime.charAt(0) == '-') ? 1 : 0);
 
-            timeOffset += 8;
+        timeOffset += 8;
 
+        if (datetime.charAt(timeOffset) != 'T') {
+            timeOffset += 2;
             if (datetime.charAt(timeOffset) != 'T') {
-                timeOffset += 2;
-                if (datetime.charAt(timeOffset) != 'T') {
-                    throw new AlgebricksException("This can not be an instance of datetime: missing T");
-                }
+                throw new HyracksDataException("This can not be an instance of datetime: missing T");
             }
-            chrononTimeInMs = ADateParserFactory.parseDatePart(datetime, 0, timeOffset);
-            chrononTimeInMs += ATimeParserFactory.parseTimePart(datetime, timeOffset + 1,
-                    datetime.length() - timeOffset - 1);
-        } catch (Exception e) {
-            throw new HyracksDataException(e);
         }
+        long chrononTimeInMs = ADateParserFactory.parseDatePart(datetime, 0, timeOffset);
+        chrononTimeInMs += ATimeParserFactory.parseTimePart(datetime, timeOffset + 1,
+                datetime.length() - timeOffset - 1);
         aDateTime.setValue(chrononTimeInMs);
         datetimeSerde.serialize(aDateTime, out);
     }
 
     protected void parseDuration(String duration, DataOutput out) throws HyracksDataException {
-        try {
-            ADurationParserFactory.parseDuration(duration, 0, duration.length(), aDuration, ADurationParseOption.All);
-            durationSerde.serialize(aDuration, out);
-        } catch (Exception e) {
-            throw new HyracksDataException(e);
-        }
+        ADurationParserFactory.parseDuration(duration, 0, duration.length(), aDuration, ADurationParseOption.All);
+        durationSerde.serialize(aDuration, out);
     }
 
     protected void parseDateTimeDuration(String durationString, DataOutput out) throws HyracksDataException {
-        try {
-            ADurationParserFactory.parseDuration(durationString, 0, durationString.length(), aDayTimeDuration,
-                    ADurationParseOption.All);
-            dayTimeDurationSerde.serialize(aDayTimeDuration, out);
-        } catch (Exception e) {
-            throw new HyracksDataException(e);
-        }
+        ADurationParserFactory.parseDuration(durationString, 0, durationString.length(), aDayTimeDuration,
+                ADurationParseOption.All);
+        dayTimeDurationSerde.serialize(aDayTimeDuration, out);
     }
 
     protected void parseYearMonthDuration(String durationString, DataOutput out) throws HyracksDataException {
-        try {
-            ADurationParserFactory.parseDuration(durationString, 0, durationString.length(), aYearMonthDuration,
-                    ADurationParseOption.All);
-            yearMonthDurationSerde.serialize(aYearMonthDuration, out);
-        } catch (Exception e) {
-            throw new HyracksDataException(e);
-        }
+        ADurationParserFactory.parseDuration(durationString, 0, durationString.length(), aYearMonthDuration,
+                ADurationParseOption.All);
+        yearMonthDurationSerde.serialize(aYearMonthDuration, out);
     }
 
     protected void parsePoint(String point, DataOutput out) throws HyracksDataException {
@@ -362,138 +334,103 @@ public abstract class AbstractDataParser implements IDataParser {
     }
 
     protected void parseDateTimeInterval(String interval, DataOutput out) throws HyracksDataException {
-        long chrononTimeInMsStart = 0;
-        long chrononTimeInMsEnd = 0;
-        try {
-            // the starting point for parsing (so for the accessor)
-            int startOffset = 0;
-            int endOffset, timeSeperatorOffsetInDatetimeString;
-
-            // Get the index for the comma
-            int commaIndex = interval.indexOf(',');
-            if (commaIndex < 1) {
-                throw new AlgebricksException("comma is missing for a string of interval");
-            }
+        // the starting point for parsing (so for the accessor)
+        int startOffset = 0;
+        int endOffset, timeSeperatorOffsetInDatetimeString;
+
+        // Get the index for the comma
+        int commaIndex = interval.indexOf(',');
+        if (commaIndex < 1) {
+            throw new HyracksDataException("comma is missing for a string of interval");
+        }
 
-            endOffset = commaIndex - 1;
-            timeSeperatorOffsetInDatetimeString = interval.indexOf('T');
+        endOffset = commaIndex - 1;
+        timeSeperatorOffsetInDatetimeString = interval.indexOf('T');
 
-            if (timeSeperatorOffsetInDatetimeString < 0) {
-                throw new AlgebricksException(
-                        "This can not be an instance of interval: missing T for a datetime value.");
-            }
+        if (timeSeperatorOffsetInDatetimeString < 0) {
+            throw new HyracksDataException("This can not be an instance of interval: missing T for a datetime value.");
+        }
 
-            chrononTimeInMsStart = parseDatePart(interval, startOffset, timeSeperatorOffsetInDatetimeString - 1);
+        long chrononTimeInMsStart = parseDatePart(interval, startOffset, timeSeperatorOffsetInDatetimeString - 1);
 
-            chrononTimeInMsStart += parseTimePart(interval, timeSeperatorOffsetInDatetimeString + 1, endOffset);
+        chrononTimeInMsStart += parseTimePart(interval, timeSeperatorOffsetInDatetimeString + 1, endOffset);
 
-            // Interval End
-            startOffset = commaIndex + 1;
-            endOffset = interval.length() - 1;
+        // Interval End
+        startOffset = commaIndex + 1;
+        endOffset = interval.length() - 1;
 
-            timeSeperatorOffsetInDatetimeString = interval.indexOf('T', startOffset);
+        timeSeperatorOffsetInDatetimeString = interval.indexOf('T', startOffset);
 
-            if (timeSeperatorOffsetInDatetimeString < 0) {
-                throw new AlgebricksException(
-                        "This can not be an instance of interval: missing T for a datetime value.");
-            }
+        if (timeSeperatorOffsetInDatetimeString < 0) {
+            throw new HyracksDataException("This can not be an instance of interval: missing T for a datetime value.");
+        }
 
-            chrononTimeInMsEnd = parseDatePart(interval, startOffset, timeSeperatorOffsetInDatetimeString - 1);
+        long chrononTimeInMsEnd = parseDatePart(interval, startOffset, timeSeperatorOffsetInDatetimeString - 1);
 
-            chrononTimeInMsEnd += parseTimePart(interval, timeSeperatorOffsetInDatetimeString + 1, endOffset);
-        } catch (Exception e) {
-            throw new HyracksDataException(e);
-        }
+        chrononTimeInMsEnd += parseTimePart(interval, timeSeperatorOffsetInDatetimeString + 1, endOffset);
 
-        try {
-            aInterval.setValue(chrononTimeInMsStart, chrononTimeInMsEnd, ATypeTag.DATETIME.serialize());
-        } catch (AlgebricksException e) {
-            throw new HyracksDataException(e);
-        }
+        aInterval.setValue(chrononTimeInMsStart, chrononTimeInMsEnd, ATypeTag.DATETIME.serialize());
 
         intervalSerde.serialize(aInterval, out);
     }
 
     protected void parseTimeInterval(String interval, DataOutput out) throws HyracksDataException {
-        long chrononTimeInMsStart = 0;
-        long chrononTimeInMsEnd = 0;
-        try {
-            int startOffset = 0;
-            int endOffset;
-
-            // Get the index for the comma
-            int commaIndex = interval.indexOf(',');
-            if (commaIndex < 0) {
-                throw new AlgebricksException("comma is missing for a string of interval");
-            }
+        int startOffset = 0;
+        int endOffset;
 
-            endOffset = commaIndex - 1;
-            // Interval Start
-            chrononTimeInMsStart = parseTimePart(interval, startOffset, endOffset);
+        // Get the index for the comma
+        int commaIndex = interval.indexOf(',');
+        if (commaIndex < 0) {
+            throw new HyracksDataException("comma is missing for a string of interval");
+        }
 
-            if (chrononTimeInMsStart < 0) {
-                chrononTimeInMsStart += GregorianCalendarSystem.CHRONON_OF_DAY;
-            }
+        endOffset = commaIndex - 1;
+        // Interval Start
+        long chrononTimeInMsStart = parseTimePart(interval, startOffset, endOffset);
 
-            // Interval End
-            startOffset = commaIndex + 1;
-            endOffset = interval.length() - 1;
+        if (chrononTimeInMsStart < 0) {
+            chrononTimeInMsStart += GregorianCalendarSystem.CHRONON_OF_DAY;
+        }
 
-            chrononTimeInMsEnd = parseTimePart(interval, startOffset, endOffset);
-            if (chrononTimeInMsEnd < 0) {
-                chrononTimeInMsEnd += GregorianCalendarSystem.CHRONON_OF_DAY;
-            }
+        // Interval End
+        startOffset = commaIndex + 1;
+        endOffset = interval.length() - 1;
 
-        } catch (Exception e) {
-            throw new HyracksDataException(e);
+        long chrononTimeInMsEnd = parseTimePart(interval, startOffset, endOffset);
+        if (chrononTimeInMsEnd < 0) {
+            chrononTimeInMsEnd += GregorianCalendarSystem.CHRONON_OF_DAY;
         }
 
-        try {
-            aInterval.setValue(chrononTimeInMsStart, chrononTimeInMsEnd, ATypeTag.TIME.serialize());
-        } catch (AlgebricksException e) {
-            throw new HyracksDataException(e);
-        }
+        aInterval.setValue(chrononTimeInMsStart, chrononTimeInMsEnd, ATypeTag.TIME.serialize());
         intervalSerde.serialize(aInterval, out);
     }
 
     protected void parseDateInterval(String interval, DataOutput out) throws HyracksDataException {
-        long chrononTimeInMsStart = 0;
-        long chrononTimeInMsEnd = 0;
-        try {
-            // the starting point for parsing (so for the accessor)
-            int startOffset = 0;
-            int endOffset;
-
-            // Get the index for the comma
-            int commaIndex = interval.indexOf(',');
-            if (commaIndex < 1) {
-                throw new AlgebricksException("comma is missing for a string of interval");
-            }
-
-            endOffset = commaIndex - 1;
-            chrononTimeInMsStart = parseDatePart(interval, startOffset, endOffset);
+        // the starting point for parsing (so for the accessor)
+        int startOffset = 0;
+        int endOffset;
+
+        // Get the index for the comma
+        int commaIndex = interval.indexOf(',');
+        if (commaIndex < 1) {
+            throw new HyracksDataException("comma is missing for a string of interval");
+        }
 
-            // Interval End
-            startOffset = commaIndex + 1;
-            endOffset = interval.length() - 1;
+        endOffset = commaIndex - 1;
+        long chrononTimeInMsStart = parseDatePart(interval, startOffset, endOffset);
 
-            chrononTimeInMsEnd = parseDatePart(interval, startOffset, endOffset);
+        // Interval End
+        startOffset = commaIndex + 1;
+        endOffset = interval.length() - 1;
 
-        } catch (Exception e) {
-            throw new HyracksDataException(e);
-        }
+        long chrononTimeInMsEnd = parseDatePart(interval, startOffset, endOffset);
 
-        try {
-            aInterval.setValue((chrononTimeInMsStart / GregorianCalendarSystem.CHRONON_OF_DAY),
-                    (chrononTimeInMsEnd / GregorianCalendarSystem.CHRONON_OF_DAY), ATypeTag.DATE.serialize());
-        } catch (AlgebricksException e) {
-            throw new HyracksDataException(e);
-        }
+        aInterval.setValue((chrononTimeInMsStart / GregorianCalendarSystem.CHRONON_OF_DAY),
+                (chrononTimeInMsEnd / GregorianCalendarSystem.CHRONON_OF_DAY), ATypeTag.DATE.serialize());
         intervalSerde.serialize(aInterval, out);
     }
 
-    private long parseDatePart(String interval, int startOffset, int endOffset)
-            throws AlgebricksException, HyracksDataException {
+    private long parseDatePart(String interval, int startOffset, int endOffset) throws HyracksDataException {
 
         while (interval.charAt(endOffset) == '"' || interval.charAt(endOffset) == ' ') {
             endOffset--;
@@ -506,8 +443,7 @@ public abstract class AbstractDataParser implements IDataParser {
         return ADateParserFactory.parseDatePart(interval, startOffset, endOffset - startOffset + 1);
     }
 
-    private int parseTimePart(String interval, int startOffset, int endOffset)
-            throws AlgebricksException, HyracksDataException {
+    private int parseTimePart(String interval, int startOffset, int endOffset) throws HyracksDataException {
 
         while (interval.charAt(endOffset) == '"' || interval.charAt(endOffset) == ' ') {
             endOffset--;

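The rewritten parseDateTime above keeps the original offset arithmetic: after an optional leading '-', the date part is either 8 or 10 characters long, so the 'T' separator is probed at offset 8 and, failing that, at offset 10. A standalone illustration (hypothetical snippet, not part of this patch; the class name and sample strings are invented):

public class DatetimeOffsetSketch {
    public static void main(String[] args) {
        String extended = "2016-02-02T06:19:14";  // '-'-separated date part, 'T' at index 10
        // a compact date part such as "20160202T061914" would put 'T' at index 8
        short timeOffset = (short) ((extended.charAt(0) == '-') ? 1 : 0); // +1 for a negative year
        timeOffset += 8;
        if (extended.charAt(timeOffset) != 'T') {
            timeOffset += 2;                      // extended form: account for the two '-' separators
        }
        System.out.println(timeOffset);           // prints 10; the time part starts at timeOffset + 1
    }
}
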
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/9dcba3c9/asterix-external-data/src/main/java/org/apache/asterix/external/parser/DelimitedDataParser.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/parser/DelimitedDataParser.java b/asterix-external-data/src/main/java/org/apache/asterix/external/parser/DelimitedDataParser.java
index 2f0fc86..7e231a4 100644
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/parser/DelimitedDataParser.java
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/parser/DelimitedDataParser.java
@@ -26,7 +26,6 @@ import java.util.Map;
 
 import org.apache.asterix.builders.IARecordBuilder;
 import org.apache.asterix.builders.RecordBuilder;
-import org.apache.asterix.common.exceptions.AsterixException;
 import org.apache.asterix.dataflow.data.nontagged.serde.ANullSerializerDeserializer;
 import org.apache.asterix.external.api.IDataParser;
 import org.apache.asterix.external.api.IExternalDataSourceFactory.DataSourceType;
@@ -72,7 +71,7 @@ public class DelimitedDataParser extends AbstractDataParser implements IStreamDa
     }
 
     @Override
-    public boolean parse(DataOutput out) throws AsterixException, IOException {
+    public boolean parse(DataOutput out) throws IOException {
         while (cursor.nextRecord()) {
             parseRecord(out);
             if (!areAllNullFields) {
@@ -83,7 +82,7 @@ public class DelimitedDataParser extends AbstractDataParser implements IStreamDa
         return false;
     }
 
-    private void parseRecord(DataOutput out) throws AsterixException, IOException {
+    private void parseRecord(DataOutput out) throws IOException {
         recBuilder.reset(recordType);
         recBuilder.init();
         areAllNullFields = true;
@@ -100,7 +99,7 @@ public class DelimitedDataParser extends AbstractDataParser implements IStreamDa
                 // NULL. Note that string type can also process empty field as an
                 // empty string
                 if (!NonTaggedFormatUtil.isOptional(recordType.getFieldTypes()[i])) {
-                    throw new AsterixException("At record: " + cursor.recordCount + " - Field " + cursor.fieldCount
+                    throw new HyracksDataException("At record: " + cursor.recordCount + " - Field " + cursor.fieldCount
                             + " is not an optional type so it cannot accept null value. ");
                 }
                 fieldValueBufferOutput.writeByte(ATypeTag.NULL.serialize());
@@ -174,7 +173,7 @@ public class DelimitedDataParser extends AbstractDataParser implements IStreamDa
     }
 
     @Override
-    public void parse(IRawRecord<? extends char[]> record, DataOutput out) throws Exception {
+    public void parse(IRawRecord<? extends char[]> record, DataOutput out) throws IOException {
         cursor.nextRecord(record.get(), record.size());
         parseRecord(out);
         if (!areAllNullFields) {

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/9dcba3c9/asterix-external-data/src/main/java/org/apache/asterix/external/parser/HiveRecordParser.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/parser/HiveRecordParser.java b/asterix-external-data/src/main/java/org/apache/asterix/external/parser/HiveRecordParser.java
index fb61339..1c91130 100644
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/parser/HiveRecordParser.java
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/parser/HiveRecordParser.java
@@ -119,25 +119,28 @@ public class HiveRecordParser implements IRecordDataParser<Writable> {
     }
 
     @Override
-    public void parse(IRawRecord<? extends Writable> record, DataOutput out) throws Exception {
-        Writable hiveRawRecord = record.get();
-        Object hiveObject = hiveSerde.deserialize(hiveRawRecord);
-        int n = aRecord.getFieldNames().length;
-        List<Object> attributesValues = oi.getStructFieldsDataAsList(hiveObject);
-        recBuilder.reset(aRecord);
-        recBuilder.init();
-        for (int i = 0; i < n; i++) {
-            final Object value = attributesValues.get(i);
-            final ObjectInspector foi = fieldRefs.get(i).getFieldObjectInspector();
-            fieldValueBuffer.reset();
-            final DataOutput dataOutput = fieldValueBuffer.getDataOutput();
-            dataOutput.writeByte(fieldTypeTags[i]);
-            //get field type
-            parseItem(fieldTypes[i], value, foi, dataOutput, false);
-            recBuilder.addField(i, fieldValueBuffer);
+    public void parse(IRawRecord<? extends Writable> record, DataOutput out) throws HyracksDataException {
+        try {
+            Writable hiveRawRecord = record.get();
+            Object hiveObject = hiveSerde.deserialize(hiveRawRecord);
+            int n = aRecord.getFieldNames().length;
+            List<Object> attributesValues = oi.getStructFieldsDataAsList(hiveObject);
+            recBuilder.reset(aRecord);
+            recBuilder.init();
+            for (int i = 0; i < n; i++) {
+                final Object value = attributesValues.get(i);
+                final ObjectInspector foi = fieldRefs.get(i).getFieldObjectInspector();
+                fieldValueBuffer.reset();
+                final DataOutput dataOutput = fieldValueBuffer.getDataOutput();
+                dataOutput.writeByte(fieldTypeTags[i]);
+                //get field type
+                parseItem(fieldTypes[i], value, foi, dataOutput, false);
+                recBuilder.addField(i, fieldValueBuffer);
+            }
+            recBuilder.write(out, true);
+        } catch (Exception e) {
+            throw new HyracksDataException(e);
         }
-        recBuilder.write(out, true);
-
     }
 
     private void parseItem(IAType itemType, Object value, ObjectInspector foi, DataOutput dataOutput,

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/9dcba3c9/asterix-external-data/src/main/java/org/apache/asterix/external/parser/RSSParser.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/parser/RSSParser.java b/asterix-external-data/src/main/java/org/apache/asterix/external/parser/RSSParser.java
index 4d93dc5..3a3bd7d 100644
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/parser/RSSParser.java
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/parser/RSSParser.java
@@ -59,7 +59,7 @@ public class RSSParser implements IRecordDataParser<SyndEntryImpl> {
     }
 
     @Override
-    public void parse(IRawRecord<? extends SyndEntryImpl> record, DataOutput out) throws Exception {
+    public void parse(IRawRecord<? extends SyndEntryImpl> record, DataOutput out) throws IOException {
         SyndEntryImpl entry = record.get();
         tupleFieldValues[0] = idPrefix + ":" + id;
         tupleFieldValues[1] = entry.getTitle();

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/9dcba3c9/asterix-external-data/src/main/java/org/apache/asterix/external/parser/RecordWithMetadataParser.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/parser/RecordWithMetadataParser.java b/asterix-external-data/src/main/java/org/apache/asterix/external/parser/RecordWithMetadataParser.java
index ecdb03d..67d84b5 100644
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/parser/RecordWithMetadataParser.java
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/parser/RecordWithMetadataParser.java
@@ -89,19 +89,23 @@ public class RecordWithMetadataParser<T> implements IRecordDataParser<RecordWith
     }
 
     @Override
-    public void parse(IRawRecord<? extends RecordWithMetadata<T>> record, DataOutput out) throws Exception {
-        recBuilder.reset(recordType);
-        valueBuffer.reset();
-        recBuilder.init();
-        RecordWithMetadata<T> rwm = record.get();
-        for (int i = 0; i < numberOfFields; i++) {
-            if (i == valueIndex) {
-                valueParser.parse(rwm.getRecord(), valueBuffer.getDataOutput());
-                recBuilder.addField(i, valueBuffer);
-            } else {
-                recBuilder.addField(i, rwm.getMetadata(metaIndexes[i]));
+    public void parse(IRawRecord<? extends RecordWithMetadata<T>> record, DataOutput out) throws HyracksDataException {
+        try {
+            recBuilder.reset(recordType);
+            valueBuffer.reset();
+            recBuilder.init();
+            RecordWithMetadata<T> rwm = record.get();
+            for (int i = 0; i < numberOfFields; i++) {
+                if (i == valueIndex) {
+                    valueParser.parse(rwm.getRecord(), valueBuffer.getDataOutput());
+                    recBuilder.addField(i, valueBuffer);
+                } else {
+                    recBuilder.addField(i, rwm.getMetadata(metaIndexes[i]));
+                }
             }
+            recBuilder.write(out, true);
+        } catch (IOException e) {
+            throw new HyracksDataException(e);
         }
-        recBuilder.write(out, true);
     }
 }

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/9dcba3c9/asterix-external-data/src/main/java/org/apache/asterix/external/parser/TweetParser.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/parser/TweetParser.java b/asterix-external-data/src/main/java/org/apache/asterix/external/parser/TweetParser.java
index b9cd60b..7ae3303 100644
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/parser/TweetParser.java
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/parser/TweetParser.java
@@ -92,7 +92,7 @@ public class TweetParser implements IRecordDataParser<Status> {
     }
 
     @Override
-    public void parse(IRawRecord<? extends Status> record, DataOutput out) throws Exception {
+    public void parse(IRawRecord<? extends Status> record, DataOutput out) throws HyracksDataException {
         Status tweet = record.get();
         User user = tweet.getUser();
         // Tweet user data

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/9dcba3c9/asterix-external-data/src/main/resources/adm.grammar
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/resources/adm.grammar b/asterix-external-data/src/main/resources/adm.grammar
index 1910436..973c2b9 100644
--- a/asterix-external-data/src/main/resources/adm.grammar
+++ b/asterix-external-data/src/main/resources/adm.grammar
@@ -20,8 +20,9 @@
 # Place *first* the generic configuration
 # then list your grammar.
 
-PACKAGE:          org.apache.asterix.runtime.operators.file.adm
-LEXER_NAME:       AdmLexer
+PACKAGE:               org.apache.asterix.runtime.operators.file.adm
+LEXER_NAME:            AdmLexer
+EXCEPTION_SUPER_CLASS: org.apache.hyracks.api.exceptions.HyracksDataException
 
 TOKENS:
 

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/9dcba3c9/asterix-maven-plugins/lexer-generator-maven-plugin/src/main/java/org/apache/asterix/lexergenerator/LexerGenerator.java
----------------------------------------------------------------------
diff --git a/asterix-maven-plugins/lexer-generator-maven-plugin/src/main/java/org/apache/asterix/lexergenerator/LexerGenerator.java b/asterix-maven-plugins/lexer-generator-maven-plugin/src/main/java/org/apache/asterix/lexergenerator/LexerGenerator.java
index 1588c39..3ae7339 100644
--- a/asterix-maven-plugins/lexer-generator-maven-plugin/src/main/java/org/apache/asterix/lexergenerator/LexerGenerator.java
+++ b/asterix-maven-plugins/lexer-generator-maven-plugin/src/main/java/org/apache/asterix/lexergenerator/LexerGenerator.java
@@ -131,14 +131,14 @@ public class LexerGenerator {
         Set<String> functions = main.neededAuxFunctions();
         for (String token : functions) {
             result.append("private int parse_" + token
-                    + "(char currentChar) throws IOException, [LEXER_NAME]Exception{\n");
+                    + "(char currentChar) throws IOException {\n");
             result.append(tokens.get(token).getNode().toJavaAuxFunction());
             result.append("\n}\n\n");
         }
         return result.toString();
     }
 
-    private static String readFile(Reader input) throws FileNotFoundException, IOException {
+    private static String readFile(Reader input) throws IOException {
         StringBuffer fileData = new StringBuffer(1000);
         BufferedReader reader = new BufferedReader(input);
         char[] buf = new char[1024];
@@ -152,14 +152,14 @@ public class LexerGenerator {
         return fileData.toString();
     }
 
-    private static String readFile(InputStream input) throws FileNotFoundException, IOException {
+    private static String readFile(InputStream input) throws IOException {
         if (input == null) {
             throw new FileNotFoundException();
         }
         return readFile(new InputStreamReader(input));
     }
 
-    private static String readFile(String fileName) throws FileNotFoundException, IOException {
+    private static String readFile(String fileName) throws IOException {
         return readFile(new FileReader(fileName));
     }
 

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/9dcba3c9/asterix-maven-plugins/lexer-generator-maven-plugin/src/main/resources/LexerException.java
----------------------------------------------------------------------
diff --git a/asterix-maven-plugins/lexer-generator-maven-plugin/src/main/resources/LexerException.java b/asterix-maven-plugins/lexer-generator-maven-plugin/src/main/resources/LexerException.java
index 9030eb2..2701319 100644
--- a/asterix-maven-plugins/lexer-generator-maven-plugin/src/main/resources/LexerException.java
+++ b/asterix-maven-plugins/lexer-generator-maven-plugin/src/main/resources/LexerException.java
@@ -18,7 +18,7 @@
  */
 package [PACKAGE];
 
-public class [LEXER_NAME]Exception extends Exception {
+public class [LEXER_NAME]Exception extends [EXCEPTION_SUPER_CLASS] {
 
     public [LEXER_NAME]Exception(String message) {
         super(message);

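With the adm.grammar settings above (PACKAGE, LEXER_NAME, and the new EXCEPTION_SUPER_CLASS), straight placeholder substitution in this template would yield roughly the following generated class (a sketch only; the license header and anything the generator adds beyond this template excerpt are omitted):

package org.apache.asterix.runtime.operators.file.adm;

public class AdmLexerException extends org.apache.hyracks.api.exceptions.HyracksDataException {

    public AdmLexerException(String message) {
        super(message);
    }
}
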
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/9dcba3c9/asterix-maven-plugins/lexer-generator-maven-plugin/src/main/resources/default.config
----------------------------------------------------------------------
diff --git a/asterix-maven-plugins/lexer-generator-maven-plugin/src/main/resources/default.config b/asterix-maven-plugins/lexer-generator-maven-plugin/src/main/resources/default.config
index 63e0dd3..03a98dd 100644
--- a/asterix-maven-plugins/lexer-generator-maven-plugin/src/main/resources/default.config
+++ b/asterix-maven-plugins/lexer-generator-maven-plugin/src/main/resources/default.config
@@ -20,9 +20,10 @@
 # Place *first* the generic configuration
 # then list your grammar.
 
-PACKAGE:          com.my.lexer
-LEXER_NAME:       MyLexer
-OUTPUT_DIR:       output
+PACKAGE:               com.my.lexer
+LEXER_NAME:            MyLexer
+EXCEPTION_SUPER_CLASS: java.io.IOException
+OUTPUT_DIR:            output
 
 TOKENS:
 

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/9dcba3c9/asterix-metadata/src/main/java/org/apache/asterix/metadata/MetadataManager.java
----------------------------------------------------------------------
diff --git a/asterix-metadata/src/main/java/org/apache/asterix/metadata/MetadataManager.java b/asterix-metadata/src/main/java/org/apache/asterix/metadata/MetadataManager.java
index 8f4e763..9786ebf 100644
--- a/asterix-metadata/src/main/java/org/apache/asterix/metadata/MetadataManager.java
+++ b/asterix-metadata/src/main/java/org/apache/asterix/metadata/MetadataManager.java
@@ -543,7 +543,7 @@ public class MetadataManager implements IMetadataManager {
         } catch (RemoteException e) {
             throw new MetadataException(e);
         }
-        ctx.addNogeGroup(nodeGroup);
+        ctx.addNodeGroup(nodeGroup);
     }
 
     @Override
@@ -584,7 +584,7 @@ public class MetadataManager implements IMetadataManager {
         // We fetched the nodeGroup from the MetadataNode. Add it to the cache
         // when this transaction commits.
         if (nodeGroup != null) {
-            ctx.addNogeGroup(nodeGroup);
+            ctx.addNodeGroup(nodeGroup);
         }
         return nodeGroup;
     }