Posted to commits@storm.apache.org by pt...@apache.org on 2015/12/01 23:05:14 UTC

[36/51] [partial] storm git commit: Update JStorm to latest release 2.1.0

http://git-wip-us.apache.org/repos/asf/storm/blob/7eaf0651/jstorm-core/src/main/java/backtype/storm/generated/TopologyTaskHbInfo.java
----------------------------------------------------------------------
diff --git a/jstorm-core/src/main/java/backtype/storm/generated/TopologyTaskHbInfo.java b/jstorm-core/src/main/java/backtype/storm/generated/TopologyTaskHbInfo.java
new file mode 100644
index 0000000..2734f4f
--- /dev/null
+++ b/jstorm-core/src/main/java/backtype/storm/generated/TopologyTaskHbInfo.java
@@ -0,0 +1,663 @@
+/**
+ * Autogenerated by Thrift Compiler (0.9.2)
+ *
+ * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
+ *  @generated
+ */
+package backtype.storm.generated;
+
+import org.apache.thrift.scheme.IScheme;
+import org.apache.thrift.scheme.SchemeFactory;
+import org.apache.thrift.scheme.StandardScheme;
+
+import org.apache.thrift.scheme.TupleScheme;
+import org.apache.thrift.protocol.TTupleProtocol;
+import org.apache.thrift.protocol.TProtocolException;
+import org.apache.thrift.EncodingUtils;
+import org.apache.thrift.TException;
+import org.apache.thrift.async.AsyncMethodCallback;
+import org.apache.thrift.server.AbstractNonblockingServer.*;
+import java.util.List;
+import java.util.ArrayList;
+import java.util.Map;
+import java.util.HashMap;
+import java.util.EnumMap;
+import java.util.Set;
+import java.util.HashSet;
+import java.util.EnumSet;
+import java.util.Collections;
+import java.util.BitSet;
+import java.nio.ByteBuffer;
+import java.util.Arrays;
+import javax.annotation.Generated;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+@SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-20")
+public class TopologyTaskHbInfo implements org.apache.thrift.TBase<TopologyTaskHbInfo, TopologyTaskHbInfo._Fields>, java.io.Serializable, Cloneable, Comparable<TopologyTaskHbInfo> {
+  private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TopologyTaskHbInfo");
+
+  private static final org.apache.thrift.protocol.TField TOPOLOGY_ID_FIELD_DESC = new org.apache.thrift.protocol.TField("topologyId", org.apache.thrift.protocol.TType.STRING, (short)1);
+  private static final org.apache.thrift.protocol.TField TOPOLOGY_MASTER_ID_FIELD_DESC = new org.apache.thrift.protocol.TField("topologyMasterId", org.apache.thrift.protocol.TType.I32, (short)2);
+  private static final org.apache.thrift.protocol.TField TASK_HBS_FIELD_DESC = new org.apache.thrift.protocol.TField("taskHbs", org.apache.thrift.protocol.TType.MAP, (short)3);
+
+  private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>();
+  static {
+    schemes.put(StandardScheme.class, new TopologyTaskHbInfoStandardSchemeFactory());
+    schemes.put(TupleScheme.class, new TopologyTaskHbInfoTupleSchemeFactory());
+  }
+
+  private String topologyId; // required
+  private int topologyMasterId; // required
+  private Map<Integer,TaskHeartbeat> taskHbs; // optional
+
+  /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
+  public enum _Fields implements org.apache.thrift.TFieldIdEnum {
+    TOPOLOGY_ID((short)1, "topologyId"),
+    TOPOLOGY_MASTER_ID((short)2, "topologyMasterId"),
+    TASK_HBS((short)3, "taskHbs");
+
+    private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();
+
+    static {
+      for (_Fields field : EnumSet.allOf(_Fields.class)) {
+        byName.put(field.getFieldName(), field);
+      }
+    }
+
+    /**
+     * Find the _Fields constant that matches fieldId, or null if its not found.
+     */
+    public static _Fields findByThriftId(int fieldId) {
+      switch(fieldId) {
+        case 1: // TOPOLOGY_ID
+          return TOPOLOGY_ID;
+        case 2: // TOPOLOGY_MASTER_ID
+          return TOPOLOGY_MASTER_ID;
+        case 3: // TASK_HBS
+          return TASK_HBS;
+        default:
+          return null;
+      }
+    }
+
+    /**
+     * Find the _Fields constant that matches fieldId, throwing an exception
+     * if it is not found.
+     */
+    public static _Fields findByThriftIdOrThrow(int fieldId) {
+      _Fields fields = findByThriftId(fieldId);
+      if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
+      return fields;
+    }
+
+    /**
+     * Find the _Fields constant that matches name, or null if its not found.
+     */
+    public static _Fields findByName(String name) {
+      return byName.get(name);
+    }
+
+    private final short _thriftId;
+    private final String _fieldName;
+
+    _Fields(short thriftId, String fieldName) {
+      _thriftId = thriftId;
+      _fieldName = fieldName;
+    }
+
+    public short getThriftFieldId() {
+      return _thriftId;
+    }
+
+    public String getFieldName() {
+      return _fieldName;
+    }
+  }
+
+  // isset id assignments
+  private static final int __TOPOLOGYMASTERID_ISSET_ID = 0;
+  private byte __isset_bitfield = 0;
+  private static final _Fields optionals[] = {_Fields.TASK_HBS};
+  public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
+  static {
+    Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
+    tmpMap.put(_Fields.TOPOLOGY_ID, new org.apache.thrift.meta_data.FieldMetaData("topologyId", org.apache.thrift.TFieldRequirementType.REQUIRED, 
+        new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)));
+    tmpMap.put(_Fields.TOPOLOGY_MASTER_ID, new org.apache.thrift.meta_data.FieldMetaData("topologyMasterId", org.apache.thrift.TFieldRequirementType.REQUIRED, 
+        new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.I32)));
+    tmpMap.put(_Fields.TASK_HBS, new org.apache.thrift.meta_data.FieldMetaData("taskHbs", org.apache.thrift.TFieldRequirementType.OPTIONAL, 
+        new org.apache.thrift.meta_data.MapMetaData(org.apache.thrift.protocol.TType.MAP, 
+            new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.I32), 
+            new org.apache.thrift.meta_data.StructMetaData(org.apache.thrift.protocol.TType.STRUCT, TaskHeartbeat.class))));
+    metaDataMap = Collections.unmodifiableMap(tmpMap);
+    org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(TopologyTaskHbInfo.class, metaDataMap);
+  }
+
+  public TopologyTaskHbInfo() {
+  }
+
+  public TopologyTaskHbInfo(
+    String topologyId,
+    int topologyMasterId)
+  {
+    this();
+    this.topologyId = topologyId;
+    this.topologyMasterId = topologyMasterId;
+    set_topologyMasterId_isSet(true);
+  }
+
+  /**
+   * Performs a deep copy on <i>other</i>.
+   */
+  public TopologyTaskHbInfo(TopologyTaskHbInfo other) {
+    __isset_bitfield = other.__isset_bitfield;
+    if (other.is_set_topologyId()) {
+      this.topologyId = other.topologyId;
+    }
+    this.topologyMasterId = other.topologyMasterId;
+    if (other.is_set_taskHbs()) {
+      Map<Integer,TaskHeartbeat> __this__taskHbs = new HashMap<Integer,TaskHeartbeat>(other.taskHbs.size());
+      for (Map.Entry<Integer, TaskHeartbeat> other_element : other.taskHbs.entrySet()) {
+
+        Integer other_element_key = other_element.getKey();
+        TaskHeartbeat other_element_value = other_element.getValue();
+
+        Integer __this__taskHbs_copy_key = other_element_key;
+
+        TaskHeartbeat __this__taskHbs_copy_value = new TaskHeartbeat(other_element_value);
+
+        __this__taskHbs.put(__this__taskHbs_copy_key, __this__taskHbs_copy_value);
+      }
+      this.taskHbs = __this__taskHbs;
+    }
+  }
+
+  public TopologyTaskHbInfo deepCopy() {
+    return new TopologyTaskHbInfo(this);
+  }
+
+  @Override
+  public void clear() {
+    this.topologyId = null;
+    set_topologyMasterId_isSet(false);
+    this.topologyMasterId = 0;
+    this.taskHbs = null;
+  }
+
+  public String get_topologyId() {
+    return this.topologyId;
+  }
+
+  public void set_topologyId(String topologyId) {
+    this.topologyId = topologyId;
+  }
+
+  public void unset_topologyId() {
+    this.topologyId = null;
+  }
+
+  /** Returns true if field topologyId is set (has been assigned a value) and false otherwise */
+  public boolean is_set_topologyId() {
+    return this.topologyId != null;
+  }
+
+  public void set_topologyId_isSet(boolean value) {
+    if (!value) {
+      this.topologyId = null;
+    }
+  }
+
+  public int get_topologyMasterId() {
+    return this.topologyMasterId;
+  }
+
+  public void set_topologyMasterId(int topologyMasterId) {
+    this.topologyMasterId = topologyMasterId;
+    set_topologyMasterId_isSet(true);
+  }
+
+  public void unset_topologyMasterId() {
+    __isset_bitfield = EncodingUtils.clearBit(__isset_bitfield, __TOPOLOGYMASTERID_ISSET_ID);
+  }
+
+  /** Returns true if field topologyMasterId is set (has been assigned a value) and false otherwise */
+  public boolean is_set_topologyMasterId() {
+    return EncodingUtils.testBit(__isset_bitfield, __TOPOLOGYMASTERID_ISSET_ID);
+  }
+
+  public void set_topologyMasterId_isSet(boolean value) {
+    __isset_bitfield = EncodingUtils.setBit(__isset_bitfield, __TOPOLOGYMASTERID_ISSET_ID, value);
+  }
+
+  public int get_taskHbs_size() {
+    return (this.taskHbs == null) ? 0 : this.taskHbs.size();
+  }
+
+  public void put_to_taskHbs(int key, TaskHeartbeat val) {
+    if (this.taskHbs == null) {
+      this.taskHbs = new HashMap<Integer,TaskHeartbeat>();
+    }
+    this.taskHbs.put(key, val);
+  }
+
+  public Map<Integer,TaskHeartbeat> get_taskHbs() {
+    return this.taskHbs;
+  }
+
+  public void set_taskHbs(Map<Integer,TaskHeartbeat> taskHbs) {
+    this.taskHbs = taskHbs;
+  }
+
+  public void unset_taskHbs() {
+    this.taskHbs = null;
+  }
+
+  /** Returns true if field taskHbs is set (has been assigned a value) and false otherwise */
+  public boolean is_set_taskHbs() {
+    return this.taskHbs != null;
+  }
+
+  public void set_taskHbs_isSet(boolean value) {
+    if (!value) {
+      this.taskHbs = null;
+    }
+  }
+
+  public void setFieldValue(_Fields field, Object value) {
+    switch (field) {
+    case TOPOLOGY_ID:
+      if (value == null) {
+        unset_topologyId();
+      } else {
+        set_topologyId((String)value);
+      }
+      break;
+
+    case TOPOLOGY_MASTER_ID:
+      if (value == null) {
+        unset_topologyMasterId();
+      } else {
+        set_topologyMasterId((Integer)value);
+      }
+      break;
+
+    case TASK_HBS:
+      if (value == null) {
+        unset_taskHbs();
+      } else {
+        set_taskHbs((Map<Integer,TaskHeartbeat>)value);
+      }
+      break;
+
+    }
+  }
+
+  public Object getFieldValue(_Fields field) {
+    switch (field) {
+    case TOPOLOGY_ID:
+      return get_topologyId();
+
+    case TOPOLOGY_MASTER_ID:
+      return Integer.valueOf(get_topologyMasterId());
+
+    case TASK_HBS:
+      return get_taskHbs();
+
+    }
+    throw new IllegalStateException();
+  }
+
+  /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
+  public boolean isSet(_Fields field) {
+    if (field == null) {
+      throw new IllegalArgumentException();
+    }
+
+    switch (field) {
+    case TOPOLOGY_ID:
+      return is_set_topologyId();
+    case TOPOLOGY_MASTER_ID:
+      return is_set_topologyMasterId();
+    case TASK_HBS:
+      return is_set_taskHbs();
+    }
+    throw new IllegalStateException();
+  }
+
+  @Override
+  public boolean equals(Object that) {
+    if (that == null)
+      return false;
+    if (that instanceof TopologyTaskHbInfo)
+      return this.equals((TopologyTaskHbInfo)that);
+    return false;
+  }
+
+  public boolean equals(TopologyTaskHbInfo that) {
+    if (that == null)
+      return false;
+
+    boolean this_present_topologyId = true && this.is_set_topologyId();
+    boolean that_present_topologyId = true && that.is_set_topologyId();
+    if (this_present_topologyId || that_present_topologyId) {
+      if (!(this_present_topologyId && that_present_topologyId))
+        return false;
+      if (!this.topologyId.equals(that.topologyId))
+        return false;
+    }
+
+    boolean this_present_topologyMasterId = true;
+    boolean that_present_topologyMasterId = true;
+    if (this_present_topologyMasterId || that_present_topologyMasterId) {
+      if (!(this_present_topologyMasterId && that_present_topologyMasterId))
+        return false;
+      if (this.topologyMasterId != that.topologyMasterId)
+        return false;
+    }
+
+    boolean this_present_taskHbs = true && this.is_set_taskHbs();
+    boolean that_present_taskHbs = true && that.is_set_taskHbs();
+    if (this_present_taskHbs || that_present_taskHbs) {
+      if (!(this_present_taskHbs && that_present_taskHbs))
+        return false;
+      if (!this.taskHbs.equals(that.taskHbs))
+        return false;
+    }
+
+    return true;
+  }
+
+  @Override
+  public int hashCode() {
+    List<Object> list = new ArrayList<Object>();
+
+    boolean present_topologyId = true && (is_set_topologyId());
+    list.add(present_topologyId);
+    if (present_topologyId)
+      list.add(topologyId);
+
+    boolean present_topologyMasterId = true;
+    list.add(present_topologyMasterId);
+    if (present_topologyMasterId)
+      list.add(topologyMasterId);
+
+    boolean present_taskHbs = true && (is_set_taskHbs());
+    list.add(present_taskHbs);
+    if (present_taskHbs)
+      list.add(taskHbs);
+
+    return list.hashCode();
+  }
+
+  @Override
+  public int compareTo(TopologyTaskHbInfo other) {
+    if (!getClass().equals(other.getClass())) {
+      return getClass().getName().compareTo(other.getClass().getName());
+    }
+
+    int lastComparison = 0;
+
+    lastComparison = Boolean.valueOf(is_set_topologyId()).compareTo(other.is_set_topologyId());
+    if (lastComparison != 0) {
+      return lastComparison;
+    }
+    if (is_set_topologyId()) {
+      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.topologyId, other.topologyId);
+      if (lastComparison != 0) {
+        return lastComparison;
+      }
+    }
+    lastComparison = Boolean.valueOf(is_set_topologyMasterId()).compareTo(other.is_set_topologyMasterId());
+    if (lastComparison != 0) {
+      return lastComparison;
+    }
+    if (is_set_topologyMasterId()) {
+      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.topologyMasterId, other.topologyMasterId);
+      if (lastComparison != 0) {
+        return lastComparison;
+      }
+    }
+    lastComparison = Boolean.valueOf(is_set_taskHbs()).compareTo(other.is_set_taskHbs());
+    if (lastComparison != 0) {
+      return lastComparison;
+    }
+    if (is_set_taskHbs()) {
+      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.taskHbs, other.taskHbs);
+      if (lastComparison != 0) {
+        return lastComparison;
+      }
+    }
+    return 0;
+  }
+
+  public _Fields fieldForId(int fieldId) {
+    return _Fields.findByThriftId(fieldId);
+  }
+
+  public void read(org.apache.thrift.protocol.TProtocol iprot) throws TException {
+    schemes.get(iprot.getScheme()).getScheme().read(iprot, this);
+  }
+
+  public void write(org.apache.thrift.protocol.TProtocol oprot) throws TException {
+    schemes.get(oprot.getScheme()).getScheme().write(oprot, this);
+  }
+
+  @Override
+  public String toString() {
+    StringBuilder sb = new StringBuilder("TopologyTaskHbInfo(");
+    boolean first = true;
+
+    sb.append("topologyId:");
+    if (this.topologyId == null) {
+      sb.append("null");
+    } else {
+      sb.append(this.topologyId);
+    }
+    first = false;
+    if (!first) sb.append(", ");
+    sb.append("topologyMasterId:");
+    sb.append(this.topologyMasterId);
+    first = false;
+    if (is_set_taskHbs()) {
+      if (!first) sb.append(", ");
+      sb.append("taskHbs:");
+      if (this.taskHbs == null) {
+        sb.append("null");
+      } else {
+        sb.append(this.taskHbs);
+      }
+      first = false;
+    }
+    sb.append(")");
+    return sb.toString();
+  }
+
+  public void validate() throws TException {
+    // check for required fields
+    if (!is_set_topologyId()) {
+      throw new TProtocolException("Required field 'topologyId' is unset! Struct:" + toString());
+    }
+
+    if (!is_set_topologyMasterId()) {
+      throw new TProtocolException("Required field 'topologyMasterId' is unset! Struct:" + toString());
+    }
+
+    // check for sub-struct validity
+  }
+
+  private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
+    try {
+      write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out)));
+    } catch (TException te) {
+      throw new java.io.IOException(te);
+    }
+  }
+
+  private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
+    try {
+      // it doesn't seem like you should have to do this, but java serialization is wacky, and doesn't call the default constructor.
+      __isset_bitfield = 0;
+      read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));
+    } catch (TException te) {
+      throw new java.io.IOException(te);
+    }
+  }
+
+  private static class TopologyTaskHbInfoStandardSchemeFactory implements SchemeFactory {
+    public TopologyTaskHbInfoStandardScheme getScheme() {
+      return new TopologyTaskHbInfoStandardScheme();
+    }
+  }
+
+  private static class TopologyTaskHbInfoStandardScheme extends StandardScheme<TopologyTaskHbInfo> {
+
+    public void read(org.apache.thrift.protocol.TProtocol iprot, TopologyTaskHbInfo struct) throws TException {
+      org.apache.thrift.protocol.TField schemeField;
+      iprot.readStructBegin();
+      while (true)
+      {
+        schemeField = iprot.readFieldBegin();
+        if (schemeField.type == org.apache.thrift.protocol.TType.STOP) { 
+          break;
+        }
+        switch (schemeField.id) {
+          case 1: // TOPOLOGY_ID
+            if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {
+              struct.topologyId = iprot.readString();
+              struct.set_topologyId_isSet(true);
+            } else { 
+              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+            }
+            break;
+          case 2: // TOPOLOGY_MASTER_ID
+            if (schemeField.type == org.apache.thrift.protocol.TType.I32) {
+              struct.topologyMasterId = iprot.readI32();
+              struct.set_topologyMasterId_isSet(true);
+            } else { 
+              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+            }
+            break;
+          case 3: // TASK_HBS
+            if (schemeField.type == org.apache.thrift.protocol.TType.MAP) {
+              {
+                org.apache.thrift.protocol.TMap _map222 = iprot.readMapBegin();
+                struct.taskHbs = new HashMap<Integer,TaskHeartbeat>(2*_map222.size);
+                int _key223;
+                TaskHeartbeat _val224;
+                for (int _i225 = 0; _i225 < _map222.size; ++_i225)
+                {
+                  _key223 = iprot.readI32();
+                  _val224 = new TaskHeartbeat();
+                  _val224.read(iprot);
+                  struct.taskHbs.put(_key223, _val224);
+                }
+                iprot.readMapEnd();
+              }
+              struct.set_taskHbs_isSet(true);
+            } else { 
+              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+            }
+            break;
+          default:
+            org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+        }
+        iprot.readFieldEnd();
+      }
+      iprot.readStructEnd();
+      struct.validate();
+    }
+
+    public void write(org.apache.thrift.protocol.TProtocol oprot, TopologyTaskHbInfo struct) throws TException {
+      struct.validate();
+
+      oprot.writeStructBegin(STRUCT_DESC);
+      if (struct.topologyId != null) {
+        oprot.writeFieldBegin(TOPOLOGY_ID_FIELD_DESC);
+        oprot.writeString(struct.topologyId);
+        oprot.writeFieldEnd();
+      }
+      oprot.writeFieldBegin(TOPOLOGY_MASTER_ID_FIELD_DESC);
+      oprot.writeI32(struct.topologyMasterId);
+      oprot.writeFieldEnd();
+      if (struct.taskHbs != null) {
+        if (struct.is_set_taskHbs()) {
+          oprot.writeFieldBegin(TASK_HBS_FIELD_DESC);
+          {
+            oprot.writeMapBegin(new org.apache.thrift.protocol.TMap(org.apache.thrift.protocol.TType.I32, org.apache.thrift.protocol.TType.STRUCT, struct.taskHbs.size()));
+            for (Map.Entry<Integer, TaskHeartbeat> _iter226 : struct.taskHbs.entrySet())
+            {
+              oprot.writeI32(_iter226.getKey());
+              _iter226.getValue().write(oprot);
+            }
+            oprot.writeMapEnd();
+          }
+          oprot.writeFieldEnd();
+        }
+      }
+      oprot.writeFieldStop();
+      oprot.writeStructEnd();
+    }
+
+  }
+
+  private static class TopologyTaskHbInfoTupleSchemeFactory implements SchemeFactory {
+    public TopologyTaskHbInfoTupleScheme getScheme() {
+      return new TopologyTaskHbInfoTupleScheme();
+    }
+  }
+
+  private static class TopologyTaskHbInfoTupleScheme extends TupleScheme<TopologyTaskHbInfo> {
+
+    @Override
+    public void write(org.apache.thrift.protocol.TProtocol prot, TopologyTaskHbInfo struct) throws TException {
+      TTupleProtocol oprot = (TTupleProtocol) prot;
+      oprot.writeString(struct.topologyId);
+      oprot.writeI32(struct.topologyMasterId);
+      BitSet optionals = new BitSet();
+      if (struct.is_set_taskHbs()) {
+        optionals.set(0);
+      }
+      oprot.writeBitSet(optionals, 1);
+      if (struct.is_set_taskHbs()) {
+        {
+          oprot.writeI32(struct.taskHbs.size());
+          for (Map.Entry<Integer, TaskHeartbeat> _iter227 : struct.taskHbs.entrySet())
+          {
+            oprot.writeI32(_iter227.getKey());
+            _iter227.getValue().write(oprot);
+          }
+        }
+      }
+    }
+
+    @Override
+    public void read(org.apache.thrift.protocol.TProtocol prot, TopologyTaskHbInfo struct) throws TException {
+      TTupleProtocol iprot = (TTupleProtocol) prot;
+      struct.topologyId = iprot.readString();
+      struct.set_topologyId_isSet(true);
+      struct.topologyMasterId = iprot.readI32();
+      struct.set_topologyMasterId_isSet(true);
+      BitSet incoming = iprot.readBitSet(1);
+      if (incoming.get(0)) {
+        {
+          org.apache.thrift.protocol.TMap _map228 = new org.apache.thrift.protocol.TMap(org.apache.thrift.protocol.TType.I32, org.apache.thrift.protocol.TType.STRUCT, iprot.readI32());
+          struct.taskHbs = new HashMap<Integer,TaskHeartbeat>(2*_map228.size);
+          int _key229;
+          TaskHeartbeat _val230;
+          for (int _i231 = 0; _i231 < _map228.size; ++_i231)
+          {
+            _key229 = iprot.readI32();
+            _val230 = new TaskHeartbeat();
+            _val230.read(iprot);
+            struct.taskHbs.put(_key229, _val230);
+          }
+        }
+        struct.set_taskHbs_isSet(true);
+      }
+    }
+  }
+
+}
+

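For readers skimming the patch, here is a minimal usage sketch of the newly added TopologyTaskHbInfo struct. It is not part of the commit: it assumes only the constructor, validate(), and toString() visible in the generated file above, plus the stock libthrift 0.9.x TSerializer/TDeserializer API; the topology id and master task id are made-up placeholders, and the optional taskHbs map is left empty since TaskHeartbeat's schema is not shown in this diff.

import org.apache.thrift.TDeserializer;
import org.apache.thrift.TSerializer;
import org.apache.thrift.protocol.TCompactProtocol;

import backtype.storm.generated.TopologyTaskHbInfo;

public class TopologyTaskHbInfoExample {
    public static void main(String[] args) throws Exception {
        // The generated two-arg constructor covers both required fields;
        // the optional taskHbs map could be filled via put_to_taskHbs(...)
        // once TaskHeartbeat values are fully populated.
        TopologyTaskHbInfo info = new TopologyTaskHbInfo("demo-topology-1", 1);
        info.validate(); // throws TProtocolException if a required field is unset

        // Round-trip through the compact protocol, the same protocol the
        // struct's writeObject/readObject methods use above.
        TSerializer serializer = new TSerializer(new TCompactProtocol.Factory());
        byte[] bytes = serializer.serialize(info);

        TopologyTaskHbInfo copy = new TopologyTaskHbInfo();
        new TDeserializer(new TCompactProtocol.Factory()).deserialize(copy, bytes);
        System.out.println(copy); // generated toString()
    }
}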
http://git-wip-us.apache.org/repos/asf/storm/blob/7eaf0651/jstorm-core/src/main/java/backtype/storm/generated/WorkerSummary.java
----------------------------------------------------------------------
diff --git a/jstorm-core/src/main/java/backtype/storm/generated/WorkerSummary.java b/jstorm-core/src/main/java/backtype/storm/generated/WorkerSummary.java
index 9faaee7..0fbe63f 100644
--- a/jstorm-core/src/main/java/backtype/storm/generated/WorkerSummary.java
+++ b/jstorm-core/src/main/java/backtype/storm/generated/WorkerSummary.java
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-7-27")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-20")
 public class WorkerSummary implements org.apache.thrift.TBase<WorkerSummary, WorkerSummary._Fields>, java.io.Serializable, Cloneable, Comparable<WorkerSummary> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("WorkerSummary");
 
@@ -505,11 +505,11 @@ public class WorkerSummary implements org.apache.thrift.TBase<WorkerSummary, Wor
     return _Fields.findByThriftId(fieldId);
   }
 
-  public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException {
+  public void read(org.apache.thrift.protocol.TProtocol iprot) throws TException {
     schemes.get(iprot.getScheme()).getScheme().read(iprot, this);
   }
 
-  public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {
+  public void write(org.apache.thrift.protocol.TProtocol oprot) throws TException {
     schemes.get(oprot.getScheme()).getScheme().write(oprot, this);
   }
 
@@ -545,22 +545,22 @@ public class WorkerSummary implements org.apache.thrift.TBase<WorkerSummary, Wor
     return sb.toString();
   }
 
-  public void validate() throws org.apache.thrift.TException {
+  public void validate() throws TException {
     // check for required fields
     if (!is_set_port()) {
-      throw new org.apache.thrift.protocol.TProtocolException("Required field 'port' is unset! Struct:" + toString());
+      throw new TProtocolException("Required field 'port' is unset! Struct:" + toString());
     }
 
     if (!is_set_uptime()) {
-      throw new org.apache.thrift.protocol.TProtocolException("Required field 'uptime' is unset! Struct:" + toString());
+      throw new TProtocolException("Required field 'uptime' is unset! Struct:" + toString());
     }
 
     if (!is_set_topology()) {
-      throw new org.apache.thrift.protocol.TProtocolException("Required field 'topology' is unset! Struct:" + toString());
+      throw new TProtocolException("Required field 'topology' is unset! Struct:" + toString());
     }
 
     if (!is_set_tasks()) {
-      throw new org.apache.thrift.protocol.TProtocolException("Required field 'tasks' is unset! Struct:" + toString());
+      throw new TProtocolException("Required field 'tasks' is unset! Struct:" + toString());
     }
 
     // check for sub-struct validity
@@ -569,7 +569,7 @@ public class WorkerSummary implements org.apache.thrift.TBase<WorkerSummary, Wor
   private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
     try {
       write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out)));
-    } catch (org.apache.thrift.TException te) {
+    } catch (TException te) {
       throw new java.io.IOException(te);
     }
   }
@@ -579,7 +579,7 @@ public class WorkerSummary implements org.apache.thrift.TBase<WorkerSummary, Wor
       // it doesn't seem like you should have to do this, but java serialization is wacky, and doesn't call the default constructor.
       __isset_bitfield = 0;
       read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));
-    } catch (org.apache.thrift.TException te) {
+    } catch (TException te) {
       throw new java.io.IOException(te);
     }
   }
@@ -592,7 +592,7 @@ public class WorkerSummary implements org.apache.thrift.TBase<WorkerSummary, Wor
 
   private static class WorkerSummaryStandardScheme extends StandardScheme<WorkerSummary> {
 
-    public void read(org.apache.thrift.protocol.TProtocol iprot, WorkerSummary struct) throws org.apache.thrift.TException {
+    public void read(org.apache.thrift.protocol.TProtocol iprot, WorkerSummary struct) throws TException {
       org.apache.thrift.protocol.TField schemeField;
       iprot.readStructBegin();
       while (true)
@@ -654,7 +654,7 @@ public class WorkerSummary implements org.apache.thrift.TBase<WorkerSummary, Wor
       struct.validate();
     }
 
-    public void write(org.apache.thrift.protocol.TProtocol oprot, WorkerSummary struct) throws org.apache.thrift.TException {
+    public void write(org.apache.thrift.protocol.TProtocol oprot, WorkerSummary struct) throws TException {
       struct.validate();
 
       oprot.writeStructBegin(STRUCT_DESC);
@@ -696,7 +696,7 @@ public class WorkerSummary implements org.apache.thrift.TBase<WorkerSummary, Wor
   private static class WorkerSummaryTupleScheme extends TupleScheme<WorkerSummary> {
 
     @Override
-    public void write(org.apache.thrift.protocol.TProtocol prot, WorkerSummary struct) throws org.apache.thrift.TException {
+    public void write(org.apache.thrift.protocol.TProtocol prot, WorkerSummary struct) throws TException {
       TTupleProtocol oprot = (TTupleProtocol) prot;
       oprot.writeI32(struct.port);
       oprot.writeI32(struct.uptime);
@@ -711,7 +711,7 @@ public class WorkerSummary implements org.apache.thrift.TBase<WorkerSummary, Wor
     }
 
     @Override
-    public void read(org.apache.thrift.protocol.TProtocol prot, WorkerSummary struct) throws org.apache.thrift.TException {
+    public void read(org.apache.thrift.protocol.TProtocol prot, WorkerSummary struct) throws TException {
       TTupleProtocol iprot = (TTupleProtocol) prot;
       struct.port = iprot.readI32();
       struct.set_port_isSet(true);

http://git-wip-us.apache.org/repos/asf/storm/blob/7eaf0651/jstorm-core/src/main/java/backtype/storm/generated/WorkerUploadMetrics.java
----------------------------------------------------------------------
diff --git a/jstorm-core/src/main/java/backtype/storm/generated/WorkerUploadMetrics.java b/jstorm-core/src/main/java/backtype/storm/generated/WorkerUploadMetrics.java
index 934cfb3..7258105 100644
--- a/jstorm-core/src/main/java/backtype/storm/generated/WorkerUploadMetrics.java
+++ b/jstorm-core/src/main/java/backtype/storm/generated/WorkerUploadMetrics.java
@@ -34,16 +34,14 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-7-27")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-20")
 public class WorkerUploadMetrics implements org.apache.thrift.TBase<WorkerUploadMetrics, WorkerUploadMetrics._Fields>, java.io.Serializable, Cloneable, Comparable<WorkerUploadMetrics> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("WorkerUploadMetrics");
 
-  private static final org.apache.thrift.protocol.TField TOPOLOGY_ID_FIELD_DESC = new org.apache.thrift.protocol.TField("topology_id", org.apache.thrift.protocol.TType.STRING, (short)1);
-  private static final org.apache.thrift.protocol.TField SUPERVISOR_ID_FIELD_DESC = new org.apache.thrift.protocol.TField("supervisor_id", org.apache.thrift.protocol.TType.STRING, (short)2);
+  private static final org.apache.thrift.protocol.TField TOPOLOGY_ID_FIELD_DESC = new org.apache.thrift.protocol.TField("topologyId", org.apache.thrift.protocol.TType.STRING, (short)1);
+  private static final org.apache.thrift.protocol.TField SUPERVISOR_ID_FIELD_DESC = new org.apache.thrift.protocol.TField("supervisorId", org.apache.thrift.protocol.TType.STRING, (short)2);
   private static final org.apache.thrift.protocol.TField PORT_FIELD_DESC = new org.apache.thrift.protocol.TField("port", org.apache.thrift.protocol.TType.I32, (short)3);
-  private static final org.apache.thrift.protocol.TField WORKER_METRIC_FIELD_DESC = new org.apache.thrift.protocol.TField("workerMetric", org.apache.thrift.protocol.TType.STRUCT, (short)4);
-  private static final org.apache.thrift.protocol.TField NETTY_METRIC_FIELD_DESC = new org.apache.thrift.protocol.TField("nettyMetric", org.apache.thrift.protocol.TType.STRUCT, (short)5);
-  private static final org.apache.thrift.protocol.TField TASK_METRIC_FIELD_DESC = new org.apache.thrift.protocol.TField("taskMetric", org.apache.thrift.protocol.TType.MAP, (short)6);
+  private static final org.apache.thrift.protocol.TField ALL_METRICS_FIELD_DESC = new org.apache.thrift.protocol.TField("allMetrics", org.apache.thrift.protocol.TType.STRUCT, (short)4);
 
   private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>();
   static {
@@ -51,21 +49,17 @@ public class WorkerUploadMetrics implements org.apache.thrift.TBase<WorkerUpload
     schemes.put(TupleScheme.class, new WorkerUploadMetricsTupleSchemeFactory());
   }
 
-  private String topology_id; // required
-  private String supervisor_id; // required
+  private String topologyId; // required
+  private String supervisorId; // required
   private int port; // required
-  private MetricInfo workerMetric; // required
-  private NettyMetric nettyMetric; // required
-  private Map<Integer,MetricInfo> taskMetric; // required
+  private MetricInfo allMetrics; // required
 
   /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
   public enum _Fields implements org.apache.thrift.TFieldIdEnum {
-    TOPOLOGY_ID((short)1, "topology_id"),
-    SUPERVISOR_ID((short)2, "supervisor_id"),
+    TOPOLOGY_ID((short)1, "topologyId"),
+    SUPERVISOR_ID((short)2, "supervisorId"),
     PORT((short)3, "port"),
-    WORKER_METRIC((short)4, "workerMetric"),
-    NETTY_METRIC((short)5, "nettyMetric"),
-    TASK_METRIC((short)6, "taskMetric");
+    ALL_METRICS((short)4, "allMetrics");
 
     private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();
 
@@ -86,12 +80,8 @@ public class WorkerUploadMetrics implements org.apache.thrift.TBase<WorkerUpload
           return SUPERVISOR_ID;
         case 3: // PORT
           return PORT;
-        case 4: // WORKER_METRIC
-          return WORKER_METRIC;
-        case 5: // NETTY_METRIC
-          return NETTY_METRIC;
-        case 6: // TASK_METRIC
-          return TASK_METRIC;
+        case 4: // ALL_METRICS
+          return ALL_METRICS;
         default:
           return null;
       }
@@ -137,20 +127,14 @@ public class WorkerUploadMetrics implements org.apache.thrift.TBase<WorkerUpload
   public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
   static {
     Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
-    tmpMap.put(_Fields.TOPOLOGY_ID, new org.apache.thrift.meta_data.FieldMetaData("topology_id", org.apache.thrift.TFieldRequirementType.REQUIRED, 
+    tmpMap.put(_Fields.TOPOLOGY_ID, new org.apache.thrift.meta_data.FieldMetaData("topologyId", org.apache.thrift.TFieldRequirementType.REQUIRED, 
         new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)));
-    tmpMap.put(_Fields.SUPERVISOR_ID, new org.apache.thrift.meta_data.FieldMetaData("supervisor_id", org.apache.thrift.TFieldRequirementType.REQUIRED, 
+    tmpMap.put(_Fields.SUPERVISOR_ID, new org.apache.thrift.meta_data.FieldMetaData("supervisorId", org.apache.thrift.TFieldRequirementType.REQUIRED, 
         new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)));
     tmpMap.put(_Fields.PORT, new org.apache.thrift.meta_data.FieldMetaData("port", org.apache.thrift.TFieldRequirementType.REQUIRED, 
         new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.I32)));
-    tmpMap.put(_Fields.WORKER_METRIC, new org.apache.thrift.meta_data.FieldMetaData("workerMetric", org.apache.thrift.TFieldRequirementType.REQUIRED, 
+    tmpMap.put(_Fields.ALL_METRICS, new org.apache.thrift.meta_data.FieldMetaData("allMetrics", org.apache.thrift.TFieldRequirementType.REQUIRED, 
         new org.apache.thrift.meta_data.StructMetaData(org.apache.thrift.protocol.TType.STRUCT, MetricInfo.class)));
-    tmpMap.put(_Fields.NETTY_METRIC, new org.apache.thrift.meta_data.FieldMetaData("nettyMetric", org.apache.thrift.TFieldRequirementType.REQUIRED, 
-        new org.apache.thrift.meta_data.StructMetaData(org.apache.thrift.protocol.TType.STRUCT, NettyMetric.class)));
-    tmpMap.put(_Fields.TASK_METRIC, new org.apache.thrift.meta_data.FieldMetaData("taskMetric", org.apache.thrift.TFieldRequirementType.REQUIRED, 
-        new org.apache.thrift.meta_data.MapMetaData(org.apache.thrift.protocol.TType.MAP, 
-            new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.I32), 
-            new org.apache.thrift.meta_data.StructMetaData(org.apache.thrift.protocol.TType.STRUCT, MetricInfo.class))));
     metaDataMap = Collections.unmodifiableMap(tmpMap);
     org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(WorkerUploadMetrics.class, metaDataMap);
   }
@@ -159,21 +143,17 @@ public class WorkerUploadMetrics implements org.apache.thrift.TBase<WorkerUpload
   }
 
   public WorkerUploadMetrics(
-    String topology_id,
-    String supervisor_id,
+    String topologyId,
+    String supervisorId,
     int port,
-    MetricInfo workerMetric,
-    NettyMetric nettyMetric,
-    Map<Integer,MetricInfo> taskMetric)
+    MetricInfo allMetrics)
   {
     this();
-    this.topology_id = topology_id;
-    this.supervisor_id = supervisor_id;
+    this.topologyId = topologyId;
+    this.supervisorId = supervisorId;
     this.port = port;
     set_port_isSet(true);
-    this.workerMetric = workerMetric;
-    this.nettyMetric = nettyMetric;
-    this.taskMetric = taskMetric;
+    this.allMetrics = allMetrics;
   }
 
   /**
@@ -181,33 +161,15 @@ public class WorkerUploadMetrics implements org.apache.thrift.TBase<WorkerUpload
    */
   public WorkerUploadMetrics(WorkerUploadMetrics other) {
     __isset_bitfield = other.__isset_bitfield;
-    if (other.is_set_topology_id()) {
-      this.topology_id = other.topology_id;
+    if (other.is_set_topologyId()) {
+      this.topologyId = other.topologyId;
     }
-    if (other.is_set_supervisor_id()) {
-      this.supervisor_id = other.supervisor_id;
+    if (other.is_set_supervisorId()) {
+      this.supervisorId = other.supervisorId;
     }
     this.port = other.port;
-    if (other.is_set_workerMetric()) {
-      this.workerMetric = new MetricInfo(other.workerMetric);
-    }
-    if (other.is_set_nettyMetric()) {
-      this.nettyMetric = new NettyMetric(other.nettyMetric);
-    }
-    if (other.is_set_taskMetric()) {
-      Map<Integer,MetricInfo> __this__taskMetric = new HashMap<Integer,MetricInfo>(other.taskMetric.size());
-      for (Map.Entry<Integer, MetricInfo> other_element : other.taskMetric.entrySet()) {
-
-        Integer other_element_key = other_element.getKey();
-        MetricInfo other_element_value = other_element.getValue();
-
-        Integer __this__taskMetric_copy_key = other_element_key;
-
-        MetricInfo __this__taskMetric_copy_value = new MetricInfo(other_element_value);
-
-        __this__taskMetric.put(__this__taskMetric_copy_key, __this__taskMetric_copy_value);
-      }
-      this.taskMetric = __this__taskMetric;
+    if (other.is_set_allMetrics()) {
+      this.allMetrics = new MetricInfo(other.allMetrics);
     }
   }
 
@@ -217,58 +179,56 @@ public class WorkerUploadMetrics implements org.apache.thrift.TBase<WorkerUpload
 
   @Override
   public void clear() {
-    this.topology_id = null;
-    this.supervisor_id = null;
+    this.topologyId = null;
+    this.supervisorId = null;
     set_port_isSet(false);
     this.port = 0;
-    this.workerMetric = null;
-    this.nettyMetric = null;
-    this.taskMetric = null;
+    this.allMetrics = null;
   }
 
-  public String get_topology_id() {
-    return this.topology_id;
+  public String get_topologyId() {
+    return this.topologyId;
   }
 
-  public void set_topology_id(String topology_id) {
-    this.topology_id = topology_id;
+  public void set_topologyId(String topologyId) {
+    this.topologyId = topologyId;
   }
 
-  public void unset_topology_id() {
-    this.topology_id = null;
+  public void unset_topologyId() {
+    this.topologyId = null;
   }
 
-  /** Returns true if field topology_id is set (has been assigned a value) and false otherwise */
-  public boolean is_set_topology_id() {
-    return this.topology_id != null;
+  /** Returns true if field topologyId is set (has been assigned a value) and false otherwise */
+  public boolean is_set_topologyId() {
+    return this.topologyId != null;
   }
 
-  public void set_topology_id_isSet(boolean value) {
+  public void set_topologyId_isSet(boolean value) {
     if (!value) {
-      this.topology_id = null;
+      this.topologyId = null;
     }
   }
 
-  public String get_supervisor_id() {
-    return this.supervisor_id;
+  public String get_supervisorId() {
+    return this.supervisorId;
   }
 
-  public void set_supervisor_id(String supervisor_id) {
-    this.supervisor_id = supervisor_id;
+  public void set_supervisorId(String supervisorId) {
+    this.supervisorId = supervisorId;
   }
 
-  public void unset_supervisor_id() {
-    this.supervisor_id = null;
+  public void unset_supervisorId() {
+    this.supervisorId = null;
   }
 
-  /** Returns true if field supervisor_id is set (has been assigned a value) and false otherwise */
-  public boolean is_set_supervisor_id() {
-    return this.supervisor_id != null;
+  /** Returns true if field supervisorId is set (has been assigned a value) and false otherwise */
+  public boolean is_set_supervisorId() {
+    return this.supervisorId != null;
   }
 
-  public void set_supervisor_id_isSet(boolean value) {
+  public void set_supervisorId_isSet(boolean value) {
     if (!value) {
-      this.supervisor_id = null;
+      this.supervisorId = null;
     }
   }
 
@@ -294,83 +254,26 @@ public class WorkerUploadMetrics implements org.apache.thrift.TBase<WorkerUpload
     __isset_bitfield = EncodingUtils.setBit(__isset_bitfield, __PORT_ISSET_ID, value);
   }
 
-  public MetricInfo get_workerMetric() {
-    return this.workerMetric;
-  }
-
-  public void set_workerMetric(MetricInfo workerMetric) {
-    this.workerMetric = workerMetric;
-  }
-
-  public void unset_workerMetric() {
-    this.workerMetric = null;
-  }
-
-  /** Returns true if field workerMetric is set (has been assigned a value) and false otherwise */
-  public boolean is_set_workerMetric() {
-    return this.workerMetric != null;
-  }
-
-  public void set_workerMetric_isSet(boolean value) {
-    if (!value) {
-      this.workerMetric = null;
-    }
-  }
-
-  public NettyMetric get_nettyMetric() {
-    return this.nettyMetric;
-  }
-
-  public void set_nettyMetric(NettyMetric nettyMetric) {
-    this.nettyMetric = nettyMetric;
-  }
-
-  public void unset_nettyMetric() {
-    this.nettyMetric = null;
-  }
-
-  /** Returns true if field nettyMetric is set (has been assigned a value) and false otherwise */
-  public boolean is_set_nettyMetric() {
-    return this.nettyMetric != null;
-  }
-
-  public void set_nettyMetric_isSet(boolean value) {
-    if (!value) {
-      this.nettyMetric = null;
-    }
-  }
-
-  public int get_taskMetric_size() {
-    return (this.taskMetric == null) ? 0 : this.taskMetric.size();
-  }
-
-  public void put_to_taskMetric(int key, MetricInfo val) {
-    if (this.taskMetric == null) {
-      this.taskMetric = new HashMap<Integer,MetricInfo>();
-    }
-    this.taskMetric.put(key, val);
+  public MetricInfo get_allMetrics() {
+    return this.allMetrics;
   }
 
-  public Map<Integer,MetricInfo> get_taskMetric() {
-    return this.taskMetric;
+  public void set_allMetrics(MetricInfo allMetrics) {
+    this.allMetrics = allMetrics;
   }
 
-  public void set_taskMetric(Map<Integer,MetricInfo> taskMetric) {
-    this.taskMetric = taskMetric;
+  public void unset_allMetrics() {
+    this.allMetrics = null;
   }
 
-  public void unset_taskMetric() {
-    this.taskMetric = null;
+  /** Returns true if field allMetrics is set (has been assigned a value) and false otherwise */
+  public boolean is_set_allMetrics() {
+    return this.allMetrics != null;
   }
 
-  /** Returns true if field taskMetric is set (has been assigned a value) and false otherwise */
-  public boolean is_set_taskMetric() {
-    return this.taskMetric != null;
-  }
-
-  public void set_taskMetric_isSet(boolean value) {
+  public void set_allMetrics_isSet(boolean value) {
     if (!value) {
-      this.taskMetric = null;
+      this.allMetrics = null;
     }
   }
 
@@ -378,17 +281,17 @@ public class WorkerUploadMetrics implements org.apache.thrift.TBase<WorkerUpload
     switch (field) {
     case TOPOLOGY_ID:
       if (value == null) {
-        unset_topology_id();
+        unset_topologyId();
       } else {
-        set_topology_id((String)value);
+        set_topologyId((String)value);
       }
       break;
 
     case SUPERVISOR_ID:
       if (value == null) {
-        unset_supervisor_id();
+        unset_supervisorId();
       } else {
-        set_supervisor_id((String)value);
+        set_supervisorId((String)value);
       }
       break;
 
@@ -400,27 +303,11 @@ public class WorkerUploadMetrics implements org.apache.thrift.TBase<WorkerUpload
       }
       break;
 
-    case WORKER_METRIC:
-      if (value == null) {
-        unset_workerMetric();
-      } else {
-        set_workerMetric((MetricInfo)value);
-      }
-      break;
-
-    case NETTY_METRIC:
+    case ALL_METRICS:
       if (value == null) {
-        unset_nettyMetric();
+        unset_allMetrics();
       } else {
-        set_nettyMetric((NettyMetric)value);
-      }
-      break;
-
-    case TASK_METRIC:
-      if (value == null) {
-        unset_taskMetric();
-      } else {
-        set_taskMetric((Map<Integer,MetricInfo>)value);
+        set_allMetrics((MetricInfo)value);
       }
       break;
 
@@ -430,22 +317,16 @@ public class WorkerUploadMetrics implements org.apache.thrift.TBase<WorkerUpload
   public Object getFieldValue(_Fields field) {
     switch (field) {
     case TOPOLOGY_ID:
-      return get_topology_id();
+      return get_topologyId();
 
     case SUPERVISOR_ID:
-      return get_supervisor_id();
+      return get_supervisorId();
 
     case PORT:
       return Integer.valueOf(get_port());
 
-    case WORKER_METRIC:
-      return get_workerMetric();
-
-    case NETTY_METRIC:
-      return get_nettyMetric();
-
-    case TASK_METRIC:
-      return get_taskMetric();
+    case ALL_METRICS:
+      return get_allMetrics();
 
     }
     throw new IllegalStateException();
@@ -459,17 +340,13 @@ public class WorkerUploadMetrics implements org.apache.thrift.TBase<WorkerUpload
 
     switch (field) {
     case TOPOLOGY_ID:
-      return is_set_topology_id();
+      return is_set_topologyId();
     case SUPERVISOR_ID:
-      return is_set_supervisor_id();
+      return is_set_supervisorId();
     case PORT:
       return is_set_port();
-    case WORKER_METRIC:
-      return is_set_workerMetric();
-    case NETTY_METRIC:
-      return is_set_nettyMetric();
-    case TASK_METRIC:
-      return is_set_taskMetric();
+    case ALL_METRICS:
+      return is_set_allMetrics();
     }
     throw new IllegalStateException();
   }
@@ -487,21 +364,21 @@ public class WorkerUploadMetrics implements org.apache.thrift.TBase<WorkerUpload
     if (that == null)
       return false;
 
-    boolean this_present_topology_id = true && this.is_set_topology_id();
-    boolean that_present_topology_id = true && that.is_set_topology_id();
-    if (this_present_topology_id || that_present_topology_id) {
-      if (!(this_present_topology_id && that_present_topology_id))
+    boolean this_present_topologyId = true && this.is_set_topologyId();
+    boolean that_present_topologyId = true && that.is_set_topologyId();
+    if (this_present_topologyId || that_present_topologyId) {
+      if (!(this_present_topologyId && that_present_topologyId))
         return false;
-      if (!this.topology_id.equals(that.topology_id))
+      if (!this.topologyId.equals(that.topologyId))
         return false;
     }
 
-    boolean this_present_supervisor_id = true && this.is_set_supervisor_id();
-    boolean that_present_supervisor_id = true && that.is_set_supervisor_id();
-    if (this_present_supervisor_id || that_present_supervisor_id) {
-      if (!(this_present_supervisor_id && that_present_supervisor_id))
+    boolean this_present_supervisorId = true && this.is_set_supervisorId();
+    boolean that_present_supervisorId = true && that.is_set_supervisorId();
+    if (this_present_supervisorId || that_present_supervisorId) {
+      if (!(this_present_supervisorId && that_present_supervisorId))
         return false;
-      if (!this.supervisor_id.equals(that.supervisor_id))
+      if (!this.supervisorId.equals(that.supervisorId))
         return false;
     }
 
@@ -514,30 +391,12 @@ public class WorkerUploadMetrics implements org.apache.thrift.TBase<WorkerUpload
         return false;
     }
 
-    boolean this_present_workerMetric = true && this.is_set_workerMetric();
-    boolean that_present_workerMetric = true && that.is_set_workerMetric();
-    if (this_present_workerMetric || that_present_workerMetric) {
-      if (!(this_present_workerMetric && that_present_workerMetric))
+    boolean this_present_allMetrics = true && this.is_set_allMetrics();
+    boolean that_present_allMetrics = true && that.is_set_allMetrics();
+    if (this_present_allMetrics || that_present_allMetrics) {
+      if (!(this_present_allMetrics && that_present_allMetrics))
         return false;
-      if (!this.workerMetric.equals(that.workerMetric))
-        return false;
-    }
-
-    boolean this_present_nettyMetric = true && this.is_set_nettyMetric();
-    boolean that_present_nettyMetric = true && that.is_set_nettyMetric();
-    if (this_present_nettyMetric || that_present_nettyMetric) {
-      if (!(this_present_nettyMetric && that_present_nettyMetric))
-        return false;
-      if (!this.nettyMetric.equals(that.nettyMetric))
-        return false;
-    }
-
-    boolean this_present_taskMetric = true && this.is_set_taskMetric();
-    boolean that_present_taskMetric = true && that.is_set_taskMetric();
-    if (this_present_taskMetric || that_present_taskMetric) {
-      if (!(this_present_taskMetric && that_present_taskMetric))
-        return false;
-      if (!this.taskMetric.equals(that.taskMetric))
+      if (!this.allMetrics.equals(that.allMetrics))
         return false;
     }
 
@@ -548,35 +407,25 @@ public class WorkerUploadMetrics implements org.apache.thrift.TBase<WorkerUpload
   public int hashCode() {
     List<Object> list = new ArrayList<Object>();
 
-    boolean present_topology_id = true && (is_set_topology_id());
-    list.add(present_topology_id);
-    if (present_topology_id)
-      list.add(topology_id);
+    boolean present_topologyId = true && (is_set_topologyId());
+    list.add(present_topologyId);
+    if (present_topologyId)
+      list.add(topologyId);
 
-    boolean present_supervisor_id = true && (is_set_supervisor_id());
-    list.add(present_supervisor_id);
-    if (present_supervisor_id)
-      list.add(supervisor_id);
+    boolean present_supervisorId = true && (is_set_supervisorId());
+    list.add(present_supervisorId);
+    if (present_supervisorId)
+      list.add(supervisorId);
 
     boolean present_port = true;
     list.add(present_port);
     if (present_port)
       list.add(port);
 
-    boolean present_workerMetric = true && (is_set_workerMetric());
-    list.add(present_workerMetric);
-    if (present_workerMetric)
-      list.add(workerMetric);
-
-    boolean present_nettyMetric = true && (is_set_nettyMetric());
-    list.add(present_nettyMetric);
-    if (present_nettyMetric)
-      list.add(nettyMetric);
-
-    boolean present_taskMetric = true && (is_set_taskMetric());
-    list.add(present_taskMetric);
-    if (present_taskMetric)
-      list.add(taskMetric);
+    boolean present_allMetrics = true && (is_set_allMetrics());
+    list.add(present_allMetrics);
+    if (present_allMetrics)
+      list.add(allMetrics);
 
     return list.hashCode();
   }
@@ -589,22 +438,22 @@ public class WorkerUploadMetrics implements org.apache.thrift.TBase<WorkerUpload
 
     int lastComparison = 0;
 
-    lastComparison = Boolean.valueOf(is_set_topology_id()).compareTo(other.is_set_topology_id());
+    lastComparison = Boolean.valueOf(is_set_topologyId()).compareTo(other.is_set_topologyId());
     if (lastComparison != 0) {
       return lastComparison;
     }
-    if (is_set_topology_id()) {
-      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.topology_id, other.topology_id);
+    if (is_set_topologyId()) {
+      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.topologyId, other.topologyId);
       if (lastComparison != 0) {
         return lastComparison;
       }
     }
-    lastComparison = Boolean.valueOf(is_set_supervisor_id()).compareTo(other.is_set_supervisor_id());
+    lastComparison = Boolean.valueOf(is_set_supervisorId()).compareTo(other.is_set_supervisorId());
     if (lastComparison != 0) {
       return lastComparison;
     }
-    if (is_set_supervisor_id()) {
-      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.supervisor_id, other.supervisor_id);
+    if (is_set_supervisorId()) {
+      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.supervisorId, other.supervisorId);
       if (lastComparison != 0) {
         return lastComparison;
       }
@@ -619,32 +468,12 @@ public class WorkerUploadMetrics implements org.apache.thrift.TBase<WorkerUpload
         return lastComparison;
       }
     }
-    lastComparison = Boolean.valueOf(is_set_workerMetric()).compareTo(other.is_set_workerMetric());
-    if (lastComparison != 0) {
-      return lastComparison;
-    }
-    if (is_set_workerMetric()) {
-      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.workerMetric, other.workerMetric);
-      if (lastComparison != 0) {
-        return lastComparison;
-      }
-    }
-    lastComparison = Boolean.valueOf(is_set_nettyMetric()).compareTo(other.is_set_nettyMetric());
+    lastComparison = Boolean.valueOf(is_set_allMetrics()).compareTo(other.is_set_allMetrics());
     if (lastComparison != 0) {
       return lastComparison;
     }
-    if (is_set_nettyMetric()) {
-      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.nettyMetric, other.nettyMetric);
-      if (lastComparison != 0) {
-        return lastComparison;
-      }
-    }
-    lastComparison = Boolean.valueOf(is_set_taskMetric()).compareTo(other.is_set_taskMetric());
-    if (lastComparison != 0) {
-      return lastComparison;
-    }
-    if (is_set_taskMetric()) {
-      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.taskMetric, other.taskMetric);
+    if (is_set_allMetrics()) {
+      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.allMetrics, other.allMetrics);
       if (lastComparison != 0) {
         return lastComparison;
       }
@@ -656,11 +485,11 @@ public class WorkerUploadMetrics implements org.apache.thrift.TBase<WorkerUpload
     return _Fields.findByThriftId(fieldId);
   }
 
-  public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException {
+  public void read(org.apache.thrift.protocol.TProtocol iprot) throws TException {
     schemes.get(iprot.getScheme()).getScheme().read(iprot, this);
   }
 
-  public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {
+  public void write(org.apache.thrift.protocol.TProtocol oprot) throws TException {
     schemes.get(oprot.getScheme()).getScheme().write(oprot, this);
   }
 
@@ -669,19 +498,19 @@ public class WorkerUploadMetrics implements org.apache.thrift.TBase<WorkerUpload
     StringBuilder sb = new StringBuilder("WorkerUploadMetrics(");
     boolean first = true;
 
-    sb.append("topology_id:");
-    if (this.topology_id == null) {
+    sb.append("topologyId:");
+    if (this.topologyId == null) {
       sb.append("null");
     } else {
-      sb.append(this.topology_id);
+      sb.append(this.topologyId);
     }
     first = false;
     if (!first) sb.append(", ");
-    sb.append("supervisor_id:");
-    if (this.supervisor_id == null) {
+    sb.append("supervisorId:");
+    if (this.supervisorId == null) {
       sb.append("null");
     } else {
-      sb.append(this.supervisor_id);
+      sb.append(this.supervisorId);
     }
     first = false;
     if (!first) sb.append(", ");
@@ -689,72 +518,45 @@ public class WorkerUploadMetrics implements org.apache.thrift.TBase<WorkerUpload
     sb.append(this.port);
     first = false;
     if (!first) sb.append(", ");
-    sb.append("workerMetric:");
-    if (this.workerMetric == null) {
+    sb.append("allMetrics:");
+    if (this.allMetrics == null) {
       sb.append("null");
     } else {
-      sb.append(this.workerMetric);
-    }
-    first = false;
-    if (!first) sb.append(", ");
-    sb.append("nettyMetric:");
-    if (this.nettyMetric == null) {
-      sb.append("null");
-    } else {
-      sb.append(this.nettyMetric);
-    }
-    first = false;
-    if (!first) sb.append(", ");
-    sb.append("taskMetric:");
-    if (this.taskMetric == null) {
-      sb.append("null");
-    } else {
-      sb.append(this.taskMetric);
+      sb.append(this.allMetrics);
     }
     first = false;
     sb.append(")");
     return sb.toString();
   }
 
-  public void validate() throws org.apache.thrift.TException {
+  public void validate() throws TException {
     // check for required fields
-    if (!is_set_topology_id()) {
-      throw new org.apache.thrift.protocol.TProtocolException("Required field 'topology_id' is unset! Struct:" + toString());
+    if (!is_set_topologyId()) {
+      throw new TProtocolException("Required field 'topologyId' is unset! Struct:" + toString());
     }
 
-    if (!is_set_supervisor_id()) {
-      throw new org.apache.thrift.protocol.TProtocolException("Required field 'supervisor_id' is unset! Struct:" + toString());
+    if (!is_set_supervisorId()) {
+      throw new TProtocolException("Required field 'supervisorId' is unset! Struct:" + toString());
     }
 
     if (!is_set_port()) {
-      throw new org.apache.thrift.protocol.TProtocolException("Required field 'port' is unset! Struct:" + toString());
-    }
-
-    if (!is_set_workerMetric()) {
-      throw new org.apache.thrift.protocol.TProtocolException("Required field 'workerMetric' is unset! Struct:" + toString());
+      throw new TProtocolException("Required field 'port' is unset! Struct:" + toString());
     }
 
-    if (!is_set_nettyMetric()) {
-      throw new org.apache.thrift.protocol.TProtocolException("Required field 'nettyMetric' is unset! Struct:" + toString());
-    }
-
-    if (!is_set_taskMetric()) {
-      throw new org.apache.thrift.protocol.TProtocolException("Required field 'taskMetric' is unset! Struct:" + toString());
+    if (!is_set_allMetrics()) {
+      throw new TProtocolException("Required field 'allMetrics' is unset! Struct:" + toString());
     }
 
     // check for sub-struct validity
-    if (workerMetric != null) {
-      workerMetric.validate();
-    }
-    if (nettyMetric != null) {
-      nettyMetric.validate();
+    if (allMetrics != null) {
+      allMetrics.validate();
     }
   }
 
   private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
     try {
       write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out)));
-    } catch (org.apache.thrift.TException te) {
+    } catch (TException te) {
       throw new java.io.IOException(te);
     }
   }
@@ -764,7 +566,7 @@ public class WorkerUploadMetrics implements org.apache.thrift.TBase<WorkerUpload
       // it doesn't seem like you should have to do this, but java serialization is wacky, and doesn't call the default constructor.
       __isset_bitfield = 0;
       read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));
-    } catch (org.apache.thrift.TException te) {
+    } catch (TException te) {
       throw new java.io.IOException(te);
     }
   }
@@ -777,7 +579,7 @@ public class WorkerUploadMetrics implements org.apache.thrift.TBase<WorkerUpload
 
   private static class WorkerUploadMetricsStandardScheme extends StandardScheme<WorkerUploadMetrics> {
 
-    public void read(org.apache.thrift.protocol.TProtocol iprot, WorkerUploadMetrics struct) throws org.apache.thrift.TException {
+    public void read(org.apache.thrift.protocol.TProtocol iprot, WorkerUploadMetrics struct) throws TException {
       org.apache.thrift.protocol.TField schemeField;
       iprot.readStructBegin();
       while (true)
@@ -789,16 +591,16 @@ public class WorkerUploadMetrics implements org.apache.thrift.TBase<WorkerUpload
         switch (schemeField.id) {
           case 1: // TOPOLOGY_ID
             if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {
-              struct.topology_id = iprot.readString();
-              struct.set_topology_id_isSet(true);
+              struct.topologyId = iprot.readString();
+              struct.set_topologyId_isSet(true);
             } else { 
               org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
             }
             break;
           case 2: // SUPERVISOR_ID
             if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {
-              struct.supervisor_id = iprot.readString();
-              struct.set_supervisor_id_isSet(true);
+              struct.supervisorId = iprot.readString();
+              struct.set_supervisorId_isSet(true);
             } else { 
               org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
             }
@@ -811,41 +613,11 @@ public class WorkerUploadMetrics implements org.apache.thrift.TBase<WorkerUpload
               org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
             }
             break;
-          case 4: // WORKER_METRIC
+          case 4: // ALL_METRICS
             if (schemeField.type == org.apache.thrift.protocol.TType.STRUCT) {
-              struct.workerMetric = new MetricInfo();
-              struct.workerMetric.read(iprot);
-              struct.set_workerMetric_isSet(true);
-            } else { 
-              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
-            }
-            break;
-          case 5: // NETTY_METRIC
-            if (schemeField.type == org.apache.thrift.protocol.TType.STRUCT) {
-              struct.nettyMetric = new NettyMetric();
-              struct.nettyMetric.read(iprot);
-              struct.set_nettyMetric_isSet(true);
-            } else { 
-              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
-            }
-            break;
-          case 6: // TASK_METRIC
-            if (schemeField.type == org.apache.thrift.protocol.TType.MAP) {
-              {
-                org.apache.thrift.protocol.TMap _map284 = iprot.readMapBegin();
-                struct.taskMetric = new HashMap<Integer,MetricInfo>(2*_map284.size);
-                int _key285;
-                MetricInfo _val286;
-                for (int _i287 = 0; _i287 < _map284.size; ++_i287)
-                {
-                  _key285 = iprot.readI32();
-                  _val286 = new MetricInfo();
-                  _val286.read(iprot);
-                  struct.taskMetric.put(_key285, _val286);
-                }
-                iprot.readMapEnd();
-              }
-              struct.set_taskMetric_isSet(true);
+              struct.allMetrics = new MetricInfo();
+              struct.allMetrics.read(iprot);
+              struct.set_allMetrics_isSet(true);
             } else { 
               org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
             }
@@ -859,44 +631,26 @@ public class WorkerUploadMetrics implements org.apache.thrift.TBase<WorkerUpload
       struct.validate();
     }
 
-    public void write(org.apache.thrift.protocol.TProtocol oprot, WorkerUploadMetrics struct) throws org.apache.thrift.TException {
+    public void write(org.apache.thrift.protocol.TProtocol oprot, WorkerUploadMetrics struct) throws TException {
       struct.validate();
 
       oprot.writeStructBegin(STRUCT_DESC);
-      if (struct.topology_id != null) {
+      if (struct.topologyId != null) {
         oprot.writeFieldBegin(TOPOLOGY_ID_FIELD_DESC);
-        oprot.writeString(struct.topology_id);
+        oprot.writeString(struct.topologyId);
         oprot.writeFieldEnd();
       }
-      if (struct.supervisor_id != null) {
+      if (struct.supervisorId != null) {
         oprot.writeFieldBegin(SUPERVISOR_ID_FIELD_DESC);
-        oprot.writeString(struct.supervisor_id);
+        oprot.writeString(struct.supervisorId);
         oprot.writeFieldEnd();
       }
       oprot.writeFieldBegin(PORT_FIELD_DESC);
       oprot.writeI32(struct.port);
       oprot.writeFieldEnd();
-      if (struct.workerMetric != null) {
-        oprot.writeFieldBegin(WORKER_METRIC_FIELD_DESC);
-        struct.workerMetric.write(oprot);
-        oprot.writeFieldEnd();
-      }
-      if (struct.nettyMetric != null) {
-        oprot.writeFieldBegin(NETTY_METRIC_FIELD_DESC);
-        struct.nettyMetric.write(oprot);
-        oprot.writeFieldEnd();
-      }
-      if (struct.taskMetric != null) {
-        oprot.writeFieldBegin(TASK_METRIC_FIELD_DESC);
-        {
-          oprot.writeMapBegin(new org.apache.thrift.protocol.TMap(org.apache.thrift.protocol.TType.I32, org.apache.thrift.protocol.TType.STRUCT, struct.taskMetric.size()));
-          for (Map.Entry<Integer, MetricInfo> _iter288 : struct.taskMetric.entrySet())
-          {
-            oprot.writeI32(_iter288.getKey());
-            _iter288.getValue().write(oprot);
-          }
-          oprot.writeMapEnd();
-        }
+      if (struct.allMetrics != null) {
+        oprot.writeFieldBegin(ALL_METRICS_FIELD_DESC);
+        struct.allMetrics.write(oprot);
         oprot.writeFieldEnd();
       }
       oprot.writeFieldStop();
@@ -914,52 +668,26 @@ public class WorkerUploadMetrics implements org.apache.thrift.TBase<WorkerUpload
   private static class WorkerUploadMetricsTupleScheme extends TupleScheme<WorkerUploadMetrics> {
 
     @Override
-    public void write(org.apache.thrift.protocol.TProtocol prot, WorkerUploadMetrics struct) throws org.apache.thrift.TException {
+    public void write(org.apache.thrift.protocol.TProtocol prot, WorkerUploadMetrics struct) throws TException {
       TTupleProtocol oprot = (TTupleProtocol) prot;
-      oprot.writeString(struct.topology_id);
-      oprot.writeString(struct.supervisor_id);
+      oprot.writeString(struct.topologyId);
+      oprot.writeString(struct.supervisorId);
       oprot.writeI32(struct.port);
-      struct.workerMetric.write(oprot);
-      struct.nettyMetric.write(oprot);
-      {
-        oprot.writeI32(struct.taskMetric.size());
-        for (Map.Entry<Integer, MetricInfo> _iter289 : struct.taskMetric.entrySet())
-        {
-          oprot.writeI32(_iter289.getKey());
-          _iter289.getValue().write(oprot);
-        }
-      }
+      struct.allMetrics.write(oprot);
     }
 
     @Override
-    public void read(org.apache.thrift.protocol.TProtocol prot, WorkerUploadMetrics struct) throws org.apache.thrift.TException {
+    public void read(org.apache.thrift.protocol.TProtocol prot, WorkerUploadMetrics struct) throws TException {
       TTupleProtocol iprot = (TTupleProtocol) prot;
-      struct.topology_id = iprot.readString();
-      struct.set_topology_id_isSet(true);
-      struct.supervisor_id = iprot.readString();
-      struct.set_supervisor_id_isSet(true);
+      struct.topologyId = iprot.readString();
+      struct.set_topologyId_isSet(true);
+      struct.supervisorId = iprot.readString();
+      struct.set_supervisorId_isSet(true);
       struct.port = iprot.readI32();
       struct.set_port_isSet(true);
-      struct.workerMetric = new MetricInfo();
-      struct.workerMetric.read(iprot);
-      struct.set_workerMetric_isSet(true);
-      struct.nettyMetric = new NettyMetric();
-      struct.nettyMetric.read(iprot);
-      struct.set_nettyMetric_isSet(true);
-      {
-        org.apache.thrift.protocol.TMap _map290 = new org.apache.thrift.protocol.TMap(org.apache.thrift.protocol.TType.I32, org.apache.thrift.protocol.TType.STRUCT, iprot.readI32());
-        struct.taskMetric = new HashMap<Integer,MetricInfo>(2*_map290.size);
-        int _key291;
-        MetricInfo _val292;
-        for (int _i293 = 0; _i293 < _map290.size; ++_i293)
-        {
-          _key291 = iprot.readI32();
-          _val292 = new MetricInfo();
-          _val292.read(iprot);
-          struct.taskMetric.put(_key291, _val292);
-        }
-      }
-      struct.set_taskMetric_isSet(true);
+      struct.allMetrics = new MetricInfo();
+      struct.allMetrics.read(iprot);
+      struct.set_allMetrics_isSet(true);
     }
   }
 

http://git-wip-us.apache.org/repos/asf/storm/blob/7eaf0651/jstorm-core/src/main/java/backtype/storm/grouping/CustomStreamGrouping.java
----------------------------------------------------------------------
diff --git a/jstorm-core/src/main/java/backtype/storm/grouping/CustomStreamGrouping.java b/jstorm-core/src/main/java/backtype/storm/grouping/CustomStreamGrouping.java
index 0e599f5..d1ce29d 100755
--- a/jstorm-core/src/main/java/backtype/storm/grouping/CustomStreamGrouping.java
+++ b/jstorm-core/src/main/java/backtype/storm/grouping/CustomStreamGrouping.java
@@ -23,21 +23,19 @@ import java.io.Serializable;
 import java.util.List;
 
 public interface CustomStreamGrouping extends Serializable {
-    
-   /**
-     * Tells the stream grouping at runtime the tasks in the target bolt.
-     * This information should be used in chooseTasks to determine the target tasks.
+
+    /**
+     * Tells the stream grouping at runtime the tasks in the target bolt. This information should be used in chooseTasks to determine the target tasks.
      * 
      * It also tells the grouping the metadata on the stream this grouping will be used on.
      */
-   void prepare(WorkerTopologyContext context, GlobalStreamId stream, List<Integer> targetTasks);
-    
-   /**
-     * This function implements a custom stream grouping. It takes in as input
-     * the number of tasks in the target bolt in prepare and returns the
-     * tasks to send the tuples to.
+    void prepare(WorkerTopologyContext context, GlobalStreamId stream, List<Integer> targetTasks);
+
+    /**
+     * This function implements a custom stream grouping. It uses the target tasks provided to prepare and returns the tasks to send
+     * the tuples to.
      * 
      * @param values the values to group on
      */
-   List<Integer> chooseTasks(int taskId, List<Object> values); 
+    List<Integer> chooseTasks(int taskId, List<Object> values);
 }
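
For context on the interface reformatted above, here is a minimal sketch of a custom grouping. The class name ModuloGrouping and the choice of routing on the first field's hash are illustrative assumptions, not part of this commit; the two overridden methods are exactly those declared in the interface.

import java.util.Collections;
import java.util.List;

import backtype.storm.generated.GlobalStreamId;
import backtype.storm.grouping.CustomStreamGrouping;
import backtype.storm.task.WorkerTopologyContext;

public class ModuloGrouping implements CustomStreamGrouping {
    private List<Integer> targetTasks;

    @Override
    public void prepare(WorkerTopologyContext context, GlobalStreamId stream, List<Integer> targetTasks) {
        // Remember the candidate tasks; chooseTasks picks among them per tuple.
        this.targetTasks = targetTasks;
    }

    @Override
    public List<Integer> chooseTasks(int taskId, List<Object> values) {
        // Route on the hash of the first field; masking keeps the index non-negative.
        Object key = values.isEmpty() ? null : values.get(0);
        int hash = (key == null) ? 0 : key.hashCode();
        int idx = (hash & Integer.MAX_VALUE) % targetTasks.size();
        return Collections.singletonList(targetTasks.get(idx));
    }
}

A production grouping would also need to handle richer keys and skewed loads, which is what PartialKeyGrouping below addresses.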

http://git-wip-us.apache.org/repos/asf/storm/blob/7eaf0651/jstorm-core/src/main/java/backtype/storm/grouping/PartialKeyGrouping.java
----------------------------------------------------------------------
diff --git a/jstorm-core/src/main/java/backtype/storm/grouping/PartialKeyGrouping.java b/jstorm-core/src/main/java/backtype/storm/grouping/PartialKeyGrouping.java
index d1f534b..2cc936c 100755
--- a/jstorm-core/src/main/java/backtype/storm/grouping/PartialKeyGrouping.java
+++ b/jstorm-core/src/main/java/backtype/storm/grouping/PartialKeyGrouping.java
@@ -20,6 +20,7 @@ package backtype.storm.grouping;
 import java.io.Serializable;
 import java.nio.ByteBuffer;
 import java.util.ArrayList;
+import java.util.Arrays;
 import java.util.List;
 
 import backtype.storm.generated.GlobalStreamId;
@@ -40,7 +41,7 @@ public class PartialKeyGrouping implements CustomStreamGrouping, Serializable {
     private Fields outFields = null;
 
     public PartialKeyGrouping() {
-        //Empty
+        // Empty
     }
 
     public PartialKeyGrouping(Fields fields) {
@@ -65,12 +66,37 @@ public class PartialKeyGrouping implements CustomStreamGrouping, Serializable {
                 List<Object> selectedFields = outFields.select(fields, values);
                 ByteBuffer out = ByteBuffer.allocate(selectedFields.size() * 4);
                 for (Object o: selectedFields) {
-                    out.putInt(o.hashCode());
+                    if (o instanceof List) {
+                        out.putInt(Arrays.deepHashCode(((List) o).toArray()));
+                    } else if (o instanceof Object[]) {
+                        out.putInt(Arrays.deepHashCode((Object[])o));
+                    } else if (o instanceof byte[]) {
+                        out.putInt(Arrays.hashCode((byte[]) o));
+                    } else if (o instanceof short[]) {
+                        out.putInt(Arrays.hashCode((short[]) o));
+                    } else if (o instanceof int[]) {
+                        out.putInt(Arrays.hashCode((int[]) o));
+                    } else if (o instanceof long[]) {
+                        out.putInt(Arrays.hashCode((long[]) o));
+                    } else if (o instanceof char[]) {
+                        out.putInt(Arrays.hashCode((char[]) o));
+                    } else if (o instanceof float[]) {
+                        out.putInt(Arrays.hashCode((float[]) o));
+                    } else if (o instanceof double[]) {
+                        out.putInt(Arrays.hashCode((double[]) o));
+                    } else if (o instanceof boolean[]) {
+                        out.putInt(Arrays.hashCode((boolean[]) o));
+                    } else if (o != null) {
+                        out.putInt(o.hashCode());
+                    } else {
+                        out.putInt(0);
+                    }
                 }
                 raw = out.array();
             } else {
                 raw = values.get(0).toString().getBytes(); // assume key is the first field
             }
+
             int firstChoice = (int) (Math.abs(h1.hashBytes(raw).asLong()) % this.targetTasks.size());
             int secondChoice = (int) (Math.abs(h2.hashBytes(raw).asLong()) % this.targetTasks.size());
             int selected = targetTaskStats[firstChoice] > targetTaskStats[secondChoice] ? secondChoice : firstChoice;
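
The unchanged lines at the end of this hunk are the "power of two choices" selection that PartialKeyGrouping relies on: two independent hashes nominate candidate tasks and the less-loaded one wins. A standalone sketch of that idea follows; the Guava murmur3 seeds, task ids, and sample keys are illustrative assumptions, while targetTaskStats mirrors the per-candidate counters used above.

import java.util.List;

import com.google.common.collect.ImmutableList;
import com.google.common.hash.HashFunction;
import com.google.common.hash.Hashing;

public class TwoChoicesDemo {
    public static void main(String[] args) {
        List<Integer> targetTasks = ImmutableList.of(3, 4, 5, 6);   // hypothetical task ids
        long[] targetTaskStats = new long[targetTasks.size()];      // tuples sent per candidate so far

        HashFunction h1 = Hashing.murmur3_128(13);                  // seeds are illustrative
        HashFunction h2 = Hashing.murmur3_128(17);

        for (String key : new String[] { "apple", "banana", "apple", "cherry", "apple" }) {
            byte[] raw = key.getBytes();
            int firstChoice = (int) (Math.abs(h1.hashBytes(raw).asLong()) % targetTasks.size());
            int secondChoice = (int) (Math.abs(h2.hashBytes(raw).asLong()) % targetTasks.size());
            // Prefer whichever candidate has received fewer tuples so far.
            int selected = targetTaskStats[firstChoice] > targetTaskStats[secondChoice] ? secondChoice : firstChoice;
            targetTaskStats[selected]++;
            System.out.println(key + " -> task " + targetTasks.get(selected));
        }
    }
}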

http://git-wip-us.apache.org/repos/asf/storm/blob/7eaf0651/jstorm-core/src/main/java/backtype/storm/hooks/BaseTaskHook.java
----------------------------------------------------------------------
diff --git a/jstorm-core/src/main/java/backtype/storm/hooks/BaseTaskHook.java b/jstorm-core/src/main/java/backtype/storm/hooks/BaseTaskHook.java
index 12386d5..2f51576 100755
--- a/jstorm-core/src/main/java/backtype/storm/hooks/BaseTaskHook.java
+++ b/jstorm-core/src/main/java/backtype/storm/hooks/BaseTaskHook.java
@@ -33,7 +33,7 @@ public class BaseTaskHook implements ITaskHook {
 
     @Override
     public void cleanup() {
-    }    
+    }
 
     @Override
     public void emit(EmitInfo info) {

http://git-wip-us.apache.org/repos/asf/storm/blob/7eaf0651/jstorm-core/src/main/java/backtype/storm/hooks/ITaskHook.java
----------------------------------------------------------------------
diff --git a/jstorm-core/src/main/java/backtype/storm/hooks/ITaskHook.java b/jstorm-core/src/main/java/backtype/storm/hooks/ITaskHook.java
index c2833ca..ab19d63 100755
--- a/jstorm-core/src/main/java/backtype/storm/hooks/ITaskHook.java
+++ b/jstorm-core/src/main/java/backtype/storm/hooks/ITaskHook.java
@@ -28,11 +28,18 @@ import java.util.Map;
 
 public interface ITaskHook {
     void prepare(Map conf, TopologyContext context);
+
     void cleanup();
+
     void emit(EmitInfo info);
+
     void spoutAck(SpoutAckInfo info);
+
     void spoutFail(SpoutFailInfo info);
+
     void boltExecute(BoltExecuteInfo info);
+
     void boltAck(BoltAckInfo info);
+
     void boltFail(BoltFailInfo info);
 }
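
ITaskHook above exposes one callback per task lifecycle event; in practice a hook usually extends BaseTaskHook (also touched in this commit) and overrides only what it needs. A minimal sketch follows; the class name is hypothetical, and registration would typically happen via TopologyContext.addTaskHook in a component's prepare/open or through the topology.auto.task.hooks config.

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import backtype.storm.hooks.BaseTaskHook;
import backtype.storm.hooks.info.BoltAckInfo;
import backtype.storm.hooks.info.EmitInfo;

public class LoggingTaskHook extends BaseTaskHook {
    private static final Logger LOG = LoggerFactory.getLogger(LoggingTaskHook.class);

    @Override
    public void emit(EmitInfo info) {
        // EmitInfo fields (see the hunk below): values, stream, taskId, outTasks.
        LOG.debug("task {} emitted {} on stream {} to tasks {}", info.taskId, info.values, info.stream, info.outTasks);
    }

    @Override
    public void boltAck(BoltAckInfo info) {
        // processLatencyMs is null when the tuple wasn't sampled.
        LOG.debug("task {} acked a tuple, latency {} ms", info.ackingTaskId, info.processLatencyMs);
    }
}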

http://git-wip-us.apache.org/repos/asf/storm/blob/7eaf0651/jstorm-core/src/main/java/backtype/storm/hooks/info/BoltAckInfo.java
----------------------------------------------------------------------
diff --git a/jstorm-core/src/main/java/backtype/storm/hooks/info/BoltAckInfo.java b/jstorm-core/src/main/java/backtype/storm/hooks/info/BoltAckInfo.java
index 769a37c..228da94 100755
--- a/jstorm-core/src/main/java/backtype/storm/hooks/info/BoltAckInfo.java
+++ b/jstorm-core/src/main/java/backtype/storm/hooks/info/BoltAckInfo.java
@@ -23,7 +23,7 @@ public class BoltAckInfo {
     public Tuple tuple;
     public int ackingTaskId;
     public Long processLatencyMs; // null if it wasn't sampled
-    
+
     public BoltAckInfo(Tuple tuple, int ackingTaskId, Long processLatencyMs) {
         this.tuple = tuple;
         this.ackingTaskId = ackingTaskId;

http://git-wip-us.apache.org/repos/asf/storm/blob/7eaf0651/jstorm-core/src/main/java/backtype/storm/hooks/info/BoltExecuteInfo.java
----------------------------------------------------------------------
diff --git a/jstorm-core/src/main/java/backtype/storm/hooks/info/BoltExecuteInfo.java b/jstorm-core/src/main/java/backtype/storm/hooks/info/BoltExecuteInfo.java
index 52e2c70..d666322 100755
--- a/jstorm-core/src/main/java/backtype/storm/hooks/info/BoltExecuteInfo.java
+++ b/jstorm-core/src/main/java/backtype/storm/hooks/info/BoltExecuteInfo.java
@@ -23,7 +23,7 @@ public class BoltExecuteInfo {
     public Tuple tuple;
     public int executingTaskId;
     public Long executeLatencyMs; // null if it wasn't sampled
-    
+
     public BoltExecuteInfo(Tuple tuple, int executingTaskId, Long executeLatencyMs) {
         this.tuple = tuple;
         this.executingTaskId = executingTaskId;

http://git-wip-us.apache.org/repos/asf/storm/blob/7eaf0651/jstorm-core/src/main/java/backtype/storm/hooks/info/BoltFailInfo.java
----------------------------------------------------------------------
diff --git a/jstorm-core/src/main/java/backtype/storm/hooks/info/BoltFailInfo.java b/jstorm-core/src/main/java/backtype/storm/hooks/info/BoltFailInfo.java
index 7dc930d..d32416c 100755
--- a/jstorm-core/src/main/java/backtype/storm/hooks/info/BoltFailInfo.java
+++ b/jstorm-core/src/main/java/backtype/storm/hooks/info/BoltFailInfo.java
@@ -23,7 +23,7 @@ public class BoltFailInfo {
     public Tuple tuple;
     public int failingTaskId;
     public Long failLatencyMs; // null if it wasn't sampled
-    
+
     public BoltFailInfo(Tuple tuple, int failingTaskId, Long failLatencyMs) {
         this.tuple = tuple;
         this.failingTaskId = failingTaskId;

http://git-wip-us.apache.org/repos/asf/storm/blob/7eaf0651/jstorm-core/src/main/java/backtype/storm/hooks/info/EmitInfo.java
----------------------------------------------------------------------
diff --git a/jstorm-core/src/main/java/backtype/storm/hooks/info/EmitInfo.java b/jstorm-core/src/main/java/backtype/storm/hooks/info/EmitInfo.java
index 59c01fa..0e7b369 100755
--- a/jstorm-core/src/main/java/backtype/storm/hooks/info/EmitInfo.java
+++ b/jstorm-core/src/main/java/backtype/storm/hooks/info/EmitInfo.java
@@ -25,7 +25,7 @@ public class EmitInfo {
     public String stream;
     public int taskId;
     public Collection<Integer> outTasks;
-    
+
     public EmitInfo(List<Object> values, String stream, int taskId, Collection<Integer> outTasks) {
         this.values = values;
         this.stream = stream;

http://git-wip-us.apache.org/repos/asf/storm/blob/7eaf0651/jstorm-core/src/main/java/backtype/storm/hooks/info/SpoutAckInfo.java
----------------------------------------------------------------------
diff --git a/jstorm-core/src/main/java/backtype/storm/hooks/info/SpoutAckInfo.java b/jstorm-core/src/main/java/backtype/storm/hooks/info/SpoutAckInfo.java
index 962f998..9e2735b 100755
--- a/jstorm-core/src/main/java/backtype/storm/hooks/info/SpoutAckInfo.java
+++ b/jstorm-core/src/main/java/backtype/storm/hooks/info/SpoutAckInfo.java
@@ -21,7 +21,7 @@ public class SpoutAckInfo {
     public Object messageId;
     public int spoutTaskId;
     public Long completeLatencyMs; // null if it wasn't sampled
-    
+
     public SpoutAckInfo(Object messageId, int spoutTaskId, Long completeLatencyMs) {
         this.messageId = messageId;
         this.spoutTaskId = spoutTaskId;

http://git-wip-us.apache.org/repos/asf/storm/blob/7eaf0651/jstorm-core/src/main/java/backtype/storm/hooks/info/SpoutFailInfo.java
----------------------------------------------------------------------
diff --git a/jstorm-core/src/main/java/backtype/storm/hooks/info/SpoutFailInfo.java b/jstorm-core/src/main/java/backtype/storm/hooks/info/SpoutFailInfo.java
index 493d1e4..76ad7d1 100755
--- a/jstorm-core/src/main/java/backtype/storm/hooks/info/SpoutFailInfo.java
+++ b/jstorm-core/src/main/java/backtype/storm/hooks/info/SpoutFailInfo.java
@@ -21,7 +21,7 @@ public class SpoutFailInfo {
     public Object messageId;
     public int spoutTaskId;
     public Long failLatencyMs; // null if it wasn't sampled
-    
+
     public SpoutFailInfo(Object messageId, int spoutTaskId, Long failLatencyMs) {
         this.messageId = messageId;
         this.spoutTaskId = spoutTaskId;

http://git-wip-us.apache.org/repos/asf/storm/blob/7eaf0651/jstorm-core/src/main/java/backtype/storm/messaging/ConnectionWithStatus.java
----------------------------------------------------------------------
diff --git a/jstorm-core/src/main/java/backtype/storm/messaging/ConnectionWithStatus.java b/jstorm-core/src/main/java/backtype/storm/messaging/ConnectionWithStatus.java
index 37981ca..96bbb90 100644
--- a/jstorm-core/src/main/java/backtype/storm/messaging/ConnectionWithStatus.java
+++ b/jstorm-core/src/main/java/backtype/storm/messaging/ConnectionWithStatus.java
@@ -19,31 +19,28 @@ package backtype.storm.messaging;
 
 public abstract class ConnectionWithStatus implements IConnection {
 
-  public static enum Status {
+    public static enum Status {
 
-    /**
-     * we are establishing a active connection with target host. The new data
-     * sending request can be buffered for future sending, or dropped(cases like
-     * there is no enough memory). It varies with difference IConnection
-     * implementations.
-     */
-    Connecting,
+        /**
+         * We are establishing an active connection with the target host. New data sending requests can be buffered for future sending, or dropped (in cases
+         * where there is not enough memory). This varies across IConnection implementations.
+         */
+        Connecting,
 
-    /**
-     * We have a alive connection channel, which can be used to transfer data.
-     */
-    Ready,
+        /**
+         * We have a live connection channel that can be used to transfer data.
+         */
+        Ready,
+
+        /**
+         * The connection channel is closed or being closed. We don't accept further data sending or receiving. All data sending requests will be dropped.
+         */
+        Closed
+    };
 
     /**
-     * The connection channel is closed or being closed. We don't accept further
-     * data sending or receiving. All data sending request will be dropped.
+     * whether this connection is available to transfer data
      */
-    Closed
-  };
-
-  /**
-   * whether this connection is available to transfer data
-   */
-  public abstract Status status();
+    public abstract Status status();
 
 }
\ No newline at end of file
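
The Status enum above is what callers poll to decide whether a connection can carry data yet. A small illustrative helper, not part of this commit, that blocks until a connection reports Ready or a timeout elapses:

import backtype.storm.messaging.ConnectionWithStatus;
import backtype.storm.messaging.ConnectionWithStatus.Status;

public class ConnectionUtil {
    /**
     * Polls conn.status() until it becomes Ready, or returns false after timeoutMs.
     */
    public static boolean waitUntilReady(ConnectionWithStatus conn, long timeoutMs) throws InterruptedException {
        long deadline = System.currentTimeMillis() + timeoutMs;
        while (System.currentTimeMillis() < deadline) {
            if (conn.status() == Status.Ready) {
                return true;
            }
            Thread.sleep(10);  // back off briefly between polls
        }
        return false;
    }
}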

http://git-wip-us.apache.org/repos/asf/storm/blob/7eaf0651/jstorm-core/src/main/java/backtype/storm/messaging/IConnection.java
----------------------------------------------------------------------
diff --git a/jstorm-core/src/main/java/backtype/storm/messaging/IConnection.java b/jstorm-core/src/main/java/backtype/storm/messaging/IConnection.java
index 24c404f..19c0cdc 100644
--- a/jstorm-core/src/main/java/backtype/storm/messaging/IConnection.java
+++ b/jstorm-core/src/main/java/backtype/storm/messaging/IConnection.java
@@ -32,8 +32,7 @@ public interface IConnection {
     public Object recv(Integer taskId, int flags);
 
     /**
-     * In the new design, receive flow is through registerQueue, then push
-     * message into queue
+     * In the new design, the receive flow goes through registerQueue, which then pushes messages into the queue
      * 
      * @param recvQueu
      */
@@ -45,6 +44,8 @@ public interface IConnection {
 
     public void send(TaskMessage message);
 
+    public boolean available();
+
     /**
      * close this connection
      */

http://git-wip-us.apache.org/repos/asf/storm/blob/7eaf0651/jstorm-core/src/main/java/backtype/storm/messaging/IContext.java
----------------------------------------------------------------------
diff --git a/jstorm-core/src/main/java/backtype/storm/messaging/IContext.java b/jstorm-core/src/main/java/backtype/storm/messaging/IContext.java
index 2611366..2241faa 100644
--- a/jstorm-core/src/main/java/backtype/storm/messaging/IContext.java
+++ b/jstorm-core/src/main/java/backtype/storm/messaging/IContext.java
@@ -25,11 +25,9 @@ import backtype.storm.utils.DisruptorQueue;
 /**
  * This interface needs to be implemented for messaging plugin.
  * 
- * Messaging plugin is specified via Storm config parameter,
- * storm.messaging.transport.
+ * The messaging plugin is specified via the Storm config parameter storm.messaging.transport.
  * 
- * A messaging plugin should have a default constructor and implements IContext
- * interface. Upon construction, we will invoke IContext::prepare(storm_conf) to
+ * A messaging plugin should have a default constructor and implement the IContext interface. Upon construction, we will invoke IContext::prepare(storm_conf) to
  * enable context to be configured according to storm configuration.
  */
 public interface IContext {

http://git-wip-us.apache.org/repos/asf/storm/blob/7eaf0651/jstorm-core/src/main/java/backtype/storm/messaging/TaskMessage.java
----------------------------------------------------------------------
diff --git a/jstorm-core/src/main/java/backtype/storm/messaging/TaskMessage.java b/jstorm-core/src/main/java/backtype/storm/messaging/TaskMessage.java
index fd95f5d..5895e76 100755
--- a/jstorm-core/src/main/java/backtype/storm/messaging/TaskMessage.java
+++ b/jstorm-core/src/main/java/backtype/storm/messaging/TaskMessage.java
@@ -22,20 +22,20 @@ import java.nio.ByteBuffer;
 public class TaskMessage {
     private int _task;
     private byte[] _message;
-    
+
     public TaskMessage(int task, byte[] message) {
         _task = task;
         _message = message;
     }
-    
+
     public int task() {
         return _task;
     }
-    
+
     public byte[] message() {
         return _message;
     }
-    
+
     public static boolean isEmpty(TaskMessage message) {
         if (message == null) {
             return true;
@@ -44,10 +44,10 @@ public class TaskMessage {
         } else if (message.message().length == 0) {
             return true;
         }
-        
+
         return false;
     }
-    
+
     @Deprecated
     public ByteBuffer serialize() {
         ByteBuffer bb = ByteBuffer.allocate(_message.length + 2);
@@ -55,7 +55,7 @@ public class TaskMessage {
         bb.put(_message);
         return bb;
     }
-    
+
     @Deprecated
     public void deserialize(ByteBuffer packet) {
         if (packet == null)
@@ -64,5 +64,5 @@ public class TaskMessage {
         _message = new byte[packet.limit() - 2];
         packet.get(_message);
     }
-    
+
 }

http://git-wip-us.apache.org/repos/asf/storm/blob/7eaf0651/jstorm-core/src/main/java/backtype/storm/messaging/TransportFactory.java
----------------------------------------------------------------------
diff --git a/jstorm-core/src/main/java/backtype/storm/messaging/TransportFactory.java b/jstorm-core/src/main/java/backtype/storm/messaging/TransportFactory.java
index 4eddf4b..4cda654 100755
--- a/jstorm-core/src/main/java/backtype/storm/messaging/TransportFactory.java
+++ b/jstorm-core/src/main/java/backtype/storm/messaging/TransportFactory.java
@@ -28,13 +28,13 @@ import backtype.storm.Config;
 
 public class TransportFactory {
     public static final Logger LOG = LoggerFactory.getLogger(TransportFactory.class);
-    
+
     public static IContext makeContext(Map storm_conf) {
-        
+
         // get factory class name
         String transport_plugin_klassName = (String) storm_conf.get(Config.STORM_MESSAGING_TRANSPORT);
         LOG.info("JStorm peer transport plugin:" + transport_plugin_klassName);
-        
+
         IContext transport = null;
         try {
             // create a factory class
@@ -64,5 +64,5 @@ public class TransportFactory {
         }
         return transport;
     }
-    
+
 }
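
TransportFactory.makeContext above reflectively instantiates whatever class storm.messaging.transport names. A hedged usage sketch, assuming Utils.readStormConfig() is available to load defaults.yaml plus storm.yaml; the transport class itself comes from configuration rather than being hard-coded here:

import java.util.Map;

import backtype.storm.messaging.IContext;
import backtype.storm.messaging.TransportFactory;
import backtype.storm.utils.Utils;

public class TransportBootstrap {
    public static void main(String[] args) {
        // Load defaults.yaml + storm.yaml; storm.messaging.transport must name the plugin class.
        Map conf = Utils.readStormConfig();
        IContext context = TransportFactory.makeContext(conf);
        System.out.println("messaging context: " + context.getClass().getName());
    }
}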