Posted to commits@storm.apache.org by kn...@apache.org on 2015/11/23 22:07:40 UTC

[01/37] storm git commit: PACEMAKER OPEN SOURCE!

Repository: storm
Updated Branches:
  refs/heads/master a8d253a9b -> 45792ddd3


http://git-wip-us.apache.org/repos/asf/storm/blob/444ec05e/storm-core/src/py/storm/ttypes.py
----------------------------------------------------------------------
diff --git a/storm-core/src/py/storm/ttypes.py b/storm-core/src/py/storm/ttypes.py
index f91442f..f3a2d85 100644
--- a/storm-core/src/py/storm/ttypes.py
+++ b/storm-core/src/py/storm/ttypes.py
@@ -116,6 +116,71 @@ class LogLevelAction:
     "REMOVE": 3,
   }
 
+class HBServerMessageType:
+  CREATE_PATH = 0
+  CREATE_PATH_RESPONSE = 1
+  EXISTS = 2
+  EXISTS_RESPONSE = 3
+  SEND_PULSE = 4
+  SEND_PULSE_RESPONSE = 5
+  GET_ALL_PULSE_FOR_PATH = 6
+  GET_ALL_PULSE_FOR_PATH_RESPONSE = 7
+  GET_ALL_NODES_FOR_PATH = 8
+  GET_ALL_NODES_FOR_PATH_RESPONSE = 9
+  GET_PULSE = 10
+  GET_PULSE_RESPONSE = 11
+  DELETE_PATH = 12
+  DELETE_PATH_RESPONSE = 13
+  DELETE_PULSE_ID = 14
+  DELETE_PULSE_ID_RESPONSE = 15
+  CONTROL_MESSAGE = 16
+  SASL_MESSAGE_TOKEN = 17
+  NOT_AUTHORIZED = 18
+
+  _VALUES_TO_NAMES = {
+    0: "CREATE_PATH",
+    1: "CREATE_PATH_RESPONSE",
+    2: "EXISTS",
+    3: "EXISTS_RESPONSE",
+    4: "SEND_PULSE",
+    5: "SEND_PULSE_RESPONSE",
+    6: "GET_ALL_PULSE_FOR_PATH",
+    7: "GET_ALL_PULSE_FOR_PATH_RESPONSE",
+    8: "GET_ALL_NODES_FOR_PATH",
+    9: "GET_ALL_NODES_FOR_PATH_RESPONSE",
+    10: "GET_PULSE",
+    11: "GET_PULSE_RESPONSE",
+    12: "DELETE_PATH",
+    13: "DELETE_PATH_RESPONSE",
+    14: "DELETE_PULSE_ID",
+    15: "DELETE_PULSE_ID_RESPONSE",
+    16: "CONTROL_MESSAGE",
+    17: "SASL_MESSAGE_TOKEN",
+    18: "NOT_AUTHORIZED",
+  }
+
+  _NAMES_TO_VALUES = {
+    "CREATE_PATH": 0,
+    "CREATE_PATH_RESPONSE": 1,
+    "EXISTS": 2,
+    "EXISTS_RESPONSE": 3,
+    "SEND_PULSE": 4,
+    "SEND_PULSE_RESPONSE": 5,
+    "GET_ALL_PULSE_FOR_PATH": 6,
+    "GET_ALL_PULSE_FOR_PATH_RESPONSE": 7,
+    "GET_ALL_NODES_FOR_PATH": 8,
+    "GET_ALL_NODES_FOR_PATH_RESPONSE": 9,
+    "GET_PULSE": 10,
+    "GET_PULSE_RESPONSE": 11,
+    "DELETE_PATH": 12,
+    "DELETE_PATH_RESPONSE": 13,
+    "DELETE_PULSE_ID": 14,
+    "DELETE_PULSE_ID_RESPONSE": 15,
+    "CONTROL_MESSAGE": 16,
+    "SASL_MESSAGE_TOKEN": 17,
+    "NOT_AUTHORIZED": 18,
+  }
+
 
 class JavaObjectArg:
   """
@@ -8216,3 +8281,596 @@ class DRPCExecutionException(TException):
 
   def __ne__(self, other):
     return not (self == other)
+
+class HBMessageData:
+  """
+  Attributes:
+   - path
+   - pulse
+   - boolval
+   - records
+   - nodes
+   - message_blob
+  """
+
+  thrift_spec = (
+    None, # 0
+    (1, TType.STRING, 'path', None, None, ), # 1
+    (2, TType.STRUCT, 'pulse', (HBPulse, HBPulse.thrift_spec), None, ), # 2
+    (3, TType.BOOL, 'boolval', None, None, ), # 3
+    (4, TType.STRUCT, 'records', (HBRecords, HBRecords.thrift_spec), None, ), # 4
+    (5, TType.STRUCT, 'nodes', (HBNodes, HBNodes.thrift_spec), None, ), # 5
+    None, # 6
+    (7, TType.STRING, 'message_blob', None, None, ), # 7
+  )
+
+  def __init__(self, path=None, pulse=None, boolval=None, records=None, nodes=None, message_blob=None,):
+    self.path = path
+    self.pulse = pulse
+    self.boolval = boolval
+    self.records = records
+    self.nodes = nodes
+    self.message_blob = message_blob
+
+  def read(self, iprot):
+    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
+      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
+      return
+    iprot.readStructBegin()
+    while True:
+      (fname, ftype, fid) = iprot.readFieldBegin()
+      if ftype == TType.STOP:
+        break
+      if fid == 1:
+        if ftype == TType.STRING:
+          self.path = iprot.readString().decode('utf-8')
+        else:
+          iprot.skip(ftype)
+      elif fid == 2:
+        if ftype == TType.STRUCT:
+          self.pulse = HBPulse()
+          self.pulse.read(iprot)
+        else:
+          iprot.skip(ftype)
+      elif fid == 3:
+        if ftype == TType.BOOL:
+          self.boolval = iprot.readBool();
+        else:
+          iprot.skip(ftype)
+      elif fid == 4:
+        if ftype == TType.STRUCT:
+          self.records = HBRecords()
+          self.records.read(iprot)
+        else:
+          iprot.skip(ftype)
+      elif fid == 5:
+        if ftype == TType.STRUCT:
+          self.nodes = HBNodes()
+          self.nodes.read(iprot)
+        else:
+          iprot.skip(ftype)
+      elif fid == 7:
+        if ftype == TType.STRING:
+          self.message_blob = iprot.readString();
+        else:
+          iprot.skip(ftype)
+      else:
+        iprot.skip(ftype)
+      iprot.readFieldEnd()
+    iprot.readStructEnd()
+
+  def write(self, oprot):
+    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
+      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
+      return
+    oprot.writeStructBegin('HBMessageData')
+    if self.path is not None:
+      oprot.writeFieldBegin('path', TType.STRING, 1)
+      oprot.writeString(self.path.encode('utf-8'))
+      oprot.writeFieldEnd()
+    if self.pulse is not None:
+      oprot.writeFieldBegin('pulse', TType.STRUCT, 2)
+      self.pulse.write(oprot)
+      oprot.writeFieldEnd()
+    if self.boolval is not None:
+      oprot.writeFieldBegin('boolval', TType.BOOL, 3)
+      oprot.writeBool(self.boolval)
+      oprot.writeFieldEnd()
+    if self.records is not None:
+      oprot.writeFieldBegin('records', TType.STRUCT, 4)
+      self.records.write(oprot)
+      oprot.writeFieldEnd()
+    if self.nodes is not None:
+      oprot.writeFieldBegin('nodes', TType.STRUCT, 5)
+      self.nodes.write(oprot)
+      oprot.writeFieldEnd()
+    if self.message_blob is not None:
+      oprot.writeFieldBegin('message_blob', TType.STRING, 7)
+      oprot.writeString(self.message_blob)
+      oprot.writeFieldEnd()
+    oprot.writeFieldStop()
+    oprot.writeStructEnd()
+
+  def validate(self):
+    return
+
+
+  def __hash__(self):
+    value = 17
+    value = (value * 31) ^ hash(self.path)
+    value = (value * 31) ^ hash(self.pulse)
+    value = (value * 31) ^ hash(self.boolval)
+    value = (value * 31) ^ hash(self.records)
+    value = (value * 31) ^ hash(self.nodes)
+    value = (value * 31) ^ hash(self.message_blob)
+    return value
+
+  def __repr__(self):
+    L = ['%s=%r' % (key, value)
+      for key, value in self.__dict__.iteritems()]
+    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
+
+  def __eq__(self, other):
+    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
+
+  def __ne__(self, other):
+    return not (self == other)
+
+class HBMessage:
+  """
+  Attributes:
+   - type
+   - data
+   - message_id
+  """
+
+  thrift_spec = (
+    None, # 0
+    (1, TType.I32, 'type', None, None, ), # 1
+    (2, TType.STRUCT, 'data', (HBMessageData, HBMessageData.thrift_spec), None, ), # 2
+    (3, TType.I32, 'message_id', None, -1, ), # 3
+  )
+
+  def __init__(self, type=None, data=None, message_id=thrift_spec[3][4],):
+    self.type = type
+    self.data = data
+    self.message_id = message_id
+
+  def read(self, iprot):
+    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
+      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
+      return
+    iprot.readStructBegin()
+    while True:
+      (fname, ftype, fid) = iprot.readFieldBegin()
+      if ftype == TType.STOP:
+        break
+      if fid == 1:
+        if ftype == TType.I32:
+          self.type = iprot.readI32();
+        else:
+          iprot.skip(ftype)
+      elif fid == 2:
+        if ftype == TType.STRUCT:
+          self.data = HBMessageData()
+          self.data.read(iprot)
+        else:
+          iprot.skip(ftype)
+      elif fid == 3:
+        if ftype == TType.I32:
+          self.message_id = iprot.readI32();
+        else:
+          iprot.skip(ftype)
+      else:
+        iprot.skip(ftype)
+      iprot.readFieldEnd()
+    iprot.readStructEnd()
+
+  def write(self, oprot):
+    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
+      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
+      return
+    oprot.writeStructBegin('HBMessage')
+    if self.type is not None:
+      oprot.writeFieldBegin('type', TType.I32, 1)
+      oprot.writeI32(self.type)
+      oprot.writeFieldEnd()
+    if self.data is not None:
+      oprot.writeFieldBegin('data', TType.STRUCT, 2)
+      self.data.write(oprot)
+      oprot.writeFieldEnd()
+    if self.message_id is not None:
+      oprot.writeFieldBegin('message_id', TType.I32, 3)
+      oprot.writeI32(self.message_id)
+      oprot.writeFieldEnd()
+    oprot.writeFieldStop()
+    oprot.writeStructEnd()
+
+  def validate(self):
+    return
+
+
+  def __hash__(self):
+    value = 17
+    value = (value * 31) ^ hash(self.type)
+    value = (value * 31) ^ hash(self.data)
+    value = (value * 31) ^ hash(self.message_id)
+    return value
+
+  def __repr__(self):
+    L = ['%s=%r' % (key, value)
+      for key, value in self.__dict__.iteritems()]
+    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
+
+  def __eq__(self, other):
+    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
+
+  def __ne__(self, other):
+    return not (self == other)
+
+class HBAuthorizationException(TException):
+  """
+  Attributes:
+   - msg
+  """
+
+  thrift_spec = (
+    None, # 0
+    (1, TType.STRING, 'msg', None, None, ), # 1
+  )
+
+  def __init__(self, msg=None,):
+    self.msg = msg
+
+  def read(self, iprot):
+    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
+      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
+      return
+    iprot.readStructBegin()
+    while True:
+      (fname, ftype, fid) = iprot.readFieldBegin()
+      if ftype == TType.STOP:
+        break
+      if fid == 1:
+        if ftype == TType.STRING:
+          self.msg = iprot.readString().decode('utf-8')
+        else:
+          iprot.skip(ftype)
+      else:
+        iprot.skip(ftype)
+      iprot.readFieldEnd()
+    iprot.readStructEnd()
+
+  def write(self, oprot):
+    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
+      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
+      return
+    oprot.writeStructBegin('HBAuthorizationException')
+    if self.msg is not None:
+      oprot.writeFieldBegin('msg', TType.STRING, 1)
+      oprot.writeString(self.msg.encode('utf-8'))
+      oprot.writeFieldEnd()
+    oprot.writeFieldStop()
+    oprot.writeStructEnd()
+
+  def validate(self):
+    if self.msg is None:
+      raise TProtocol.TProtocolException(message='Required field msg is unset!')
+    return
+
+
+  def __str__(self):
+    return repr(self)
+
+  def __hash__(self):
+    value = 17
+    value = (value * 31) ^ hash(self.msg)
+    return value
+
+  def __repr__(self):
+    L = ['%s=%r' % (key, value)
+      for key, value in self.__dict__.iteritems()]
+    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
+
+  def __eq__(self, other):
+    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
+
+  def __ne__(self, other):
+    return not (self == other)
+
+class HBExecutionException(TException):
+  """
+  Attributes:
+   - msg
+  """
+
+  thrift_spec = (
+    None, # 0
+    (1, TType.STRING, 'msg', None, None, ), # 1
+  )
+
+  def __init__(self, msg=None,):
+    self.msg = msg
+
+  def read(self, iprot):
+    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
+      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
+      return
+    iprot.readStructBegin()
+    while True:
+      (fname, ftype, fid) = iprot.readFieldBegin()
+      if ftype == TType.STOP:
+        break
+      if fid == 1:
+        if ftype == TType.STRING:
+          self.msg = iprot.readString().decode('utf-8')
+        else:
+          iprot.skip(ftype)
+      else:
+        iprot.skip(ftype)
+      iprot.readFieldEnd()
+    iprot.readStructEnd()
+
+  def write(self, oprot):
+    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
+      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
+      return
+    oprot.writeStructBegin('HBExecutionException')
+    if self.msg is not None:
+      oprot.writeFieldBegin('msg', TType.STRING, 1)
+      oprot.writeString(self.msg.encode('utf-8'))
+      oprot.writeFieldEnd()
+    oprot.writeFieldStop()
+    oprot.writeStructEnd()
+
+  def validate(self):
+    if self.msg is None:
+      raise TProtocol.TProtocolException(message='Required field msg is unset!')
+    return
+
+
+  def __str__(self):
+    return repr(self)
+
+  def __hash__(self):
+    value = 17
+    value = (value * 31) ^ hash(self.msg)
+    return value
+
+  def __repr__(self):
+    L = ['%s=%r' % (key, value)
+      for key, value in self.__dict__.iteritems()]
+    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
+
+  def __eq__(self, other):
+    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
+
+  def __ne__(self, other):
+    return not (self == other)
+
+class HBPulse:
+  """
+  Attributes:
+   - id
+   - details
+  """
+
+  thrift_spec = (
+    None, # 0
+    (1, TType.STRING, 'id', None, None, ), # 1
+    (2, TType.STRING, 'details', None, None, ), # 2
+  )
+
+  def __init__(self, id=None, details=None,):
+    self.id = id
+    self.details = details
+
+  def read(self, iprot):
+    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
+      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
+      return
+    iprot.readStructBegin()
+    while True:
+      (fname, ftype, fid) = iprot.readFieldBegin()
+      if ftype == TType.STOP:
+        break
+      if fid == 1:
+        if ftype == TType.STRING:
+          self.id = iprot.readString().decode('utf-8')
+        else:
+          iprot.skip(ftype)
+      elif fid == 2:
+        if ftype == TType.STRING:
+          self.details = iprot.readString();
+        else:
+          iprot.skip(ftype)
+      else:
+        iprot.skip(ftype)
+      iprot.readFieldEnd()
+    iprot.readStructEnd()
+
+  def write(self, oprot):
+    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
+      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
+      return
+    oprot.writeStructBegin('HBPulse')
+    if self.id is not None:
+      oprot.writeFieldBegin('id', TType.STRING, 1)
+      oprot.writeString(self.id.encode('utf-8'))
+      oprot.writeFieldEnd()
+    if self.details is not None:
+      oprot.writeFieldBegin('details', TType.STRING, 2)
+      oprot.writeString(self.details)
+      oprot.writeFieldEnd()
+    oprot.writeFieldStop()
+    oprot.writeStructEnd()
+
+  def validate(self):
+    if self.id is None:
+      raise TProtocol.TProtocolException(message='Required field id is unset!')
+    return
+
+
+  def __hash__(self):
+    value = 17
+    value = (value * 31) ^ hash(self.id)
+    value = (value * 31) ^ hash(self.details)
+    return value
+
+  def __repr__(self):
+    L = ['%s=%r' % (key, value)
+      for key, value in self.__dict__.iteritems()]
+    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
+
+  def __eq__(self, other):
+    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
+
+  def __ne__(self, other):
+    return not (self == other)
+
+class HBRecords:
+  """
+  Attributes:
+   - pulses
+  """
+
+  thrift_spec = (
+    None, # 0
+    (1, TType.LIST, 'pulses', (TType.STRUCT,(HBPulse, HBPulse.thrift_spec)), None, ), # 1
+  )
+
+  def __init__(self, pulses=None,):
+    self.pulses = pulses
+
+  def read(self, iprot):
+    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
+      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
+      return
+    iprot.readStructBegin()
+    while True:
+      (fname, ftype, fid) = iprot.readFieldBegin()
+      if ftype == TType.STOP:
+        break
+      if fid == 1:
+        if ftype == TType.LIST:
+          self.pulses = []
+          (_etype599, _size596) = iprot.readListBegin()
+          for _i600 in xrange(_size596):
+            _elem601 = HBPulse()
+            _elem601.read(iprot)
+            self.pulses.append(_elem601)
+          iprot.readListEnd()
+        else:
+          iprot.skip(ftype)
+      else:
+        iprot.skip(ftype)
+      iprot.readFieldEnd()
+    iprot.readStructEnd()
+
+  def write(self, oprot):
+    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
+      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
+      return
+    oprot.writeStructBegin('HBRecords')
+    if self.pulses is not None:
+      oprot.writeFieldBegin('pulses', TType.LIST, 1)
+      oprot.writeListBegin(TType.STRUCT, len(self.pulses))
+      for iter602 in self.pulses:
+        iter602.write(oprot)
+      oprot.writeListEnd()
+      oprot.writeFieldEnd()
+    oprot.writeFieldStop()
+    oprot.writeStructEnd()
+
+  def validate(self):
+    return
+
+
+  def __hash__(self):
+    value = 17
+    value = (value * 31) ^ hash(self.pulses)
+    return value
+
+  def __repr__(self):
+    L = ['%s=%r' % (key, value)
+      for key, value in self.__dict__.iteritems()]
+    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
+
+  def __eq__(self, other):
+    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
+
+  def __ne__(self, other):
+    return not (self == other)
+
+class HBNodes:
+  """
+  Attributes:
+   - pulseIds
+  """
+
+  thrift_spec = (
+    None, # 0
+    (1, TType.LIST, 'pulseIds', (TType.STRING,None), None, ), # 1
+  )
+
+  def __init__(self, pulseIds=None,):
+    self.pulseIds = pulseIds
+
+  def read(self, iprot):
+    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
+      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
+      return
+    iprot.readStructBegin()
+    while True:
+      (fname, ftype, fid) = iprot.readFieldBegin()
+      if ftype == TType.STOP:
+        break
+      if fid == 1:
+        if ftype == TType.LIST:
+          self.pulseIds = []
+          (_etype606, _size603) = iprot.readListBegin()
+          for _i607 in xrange(_size603):
+            _elem608 = iprot.readString().decode('utf-8')
+            self.pulseIds.append(_elem608)
+          iprot.readListEnd()
+        else:
+          iprot.skip(ftype)
+      else:
+        iprot.skip(ftype)
+      iprot.readFieldEnd()
+    iprot.readStructEnd()
+
+  def write(self, oprot):
+    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
+      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
+      return
+    oprot.writeStructBegin('HBNodes')
+    if self.pulseIds is not None:
+      oprot.writeFieldBegin('pulseIds', TType.LIST, 1)
+      oprot.writeListBegin(TType.STRING, len(self.pulseIds))
+      for iter609 in self.pulseIds:
+        oprot.writeString(iter609.encode('utf-8'))
+      oprot.writeListEnd()
+      oprot.writeFieldEnd()
+    oprot.writeFieldStop()
+    oprot.writeStructEnd()
+
+  def validate(self):
+    return
+
+
+  def __hash__(self):
+    value = 17
+    value = (value * 31) ^ hash(self.pulseIds)
+    return value
+
+  def __repr__(self):
+    L = ['%s=%r' % (key, value)
+      for key, value in self.__dict__.iteritems()]
+    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
+
+  def __eq__(self, other):
+    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
+
+  def __ne__(self, other):
+    return not (self == other)

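The generated classes above are ordinary Thrift structs, so they round-trip through any Thrift protocol. A minimal sketch of that round trip (Python 2, assuming the thrift package is installed and the module is importable as storm.ttypes per its location under storm-core/src/py):

    from thrift.transport import TTransport
    from thrift.protocol import TBinaryProtocol
    from storm.ttypes import HBMessage, HBMessageData, HBServerMessageType

    # Build a CREATE_PATH request; message_id defaults to -1 per thrift_spec.
    msg = HBMessage(type=HBServerMessageType.CREATE_PATH,
                    data=HBMessageData(path=u'/testpath'))

    # Serialize into an in-memory buffer ...
    out = TTransport.TMemoryBuffer()
    msg.write(TBinaryProtocol.TBinaryProtocol(out))
    blob = out.getvalue()

    # ... and decode it back from the raw bytes.
    decoded = HBMessage()
    decoded.read(TBinaryProtocol.TBinaryProtocol(TTransport.TMemoryBuffer(blob)))
    assert HBServerMessageType._VALUES_TO_NAMES[decoded.type] == 'CREATE_PATH'
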
http://git-wip-us.apache.org/repos/asf/storm/blob/444ec05e/storm-core/src/storm.thrift
----------------------------------------------------------------------
diff --git a/storm-core/src/storm.thrift b/storm-core/src/storm.thrift
index aae851e..2587760 100644
--- a/storm-core/src/storm.thrift
+++ b/storm-core/src/storm.thrift
@@ -555,3 +555,62 @@ service DistributedRPCInvocations {
   DRPCRequest fetchRequest(1: string functionName) throws (1: AuthorizationException aze);
   void failRequest(1: string id) throws (1: AuthorizationException aze);  
 }
+
+enum HBServerMessageType {
+  CREATE_PATH,
+  CREATE_PATH_RESPONSE,
+  EXISTS,
+  EXISTS_RESPONSE,
+  SEND_PULSE,
+  SEND_PULSE_RESPONSE,
+  GET_ALL_PULSE_FOR_PATH,
+  GET_ALL_PULSE_FOR_PATH_RESPONSE,
+  GET_ALL_NODES_FOR_PATH,
+  GET_ALL_NODES_FOR_PATH_RESPONSE,
+  GET_PULSE,
+  GET_PULSE_RESPONSE,
+  DELETE_PATH,
+  DELETE_PATH_RESPONSE,
+  DELETE_PULSE_ID,
+  DELETE_PULSE_ID_RESPONSE,
+  CONTROL_MESSAGE,
+  SASL_MESSAGE_TOKEN,
+  NOT_AUTHORIZED
+}
+
+union HBMessageData {
+  1: string path,
+  2: HBPulse pulse,
+  3: bool boolval,
+  4: HBRecords records,
+  5: HBNodes nodes,
+  7: optional binary message_blob;
+}
+
+struct HBMessage {
+  1: HBServerMessageType type,
+  2: HBMessageData data,
+  3: optional i32 message_id = -1,
+}
+
+
+exception HBAuthorizationException {
+  1: required string msg;
+}
+
+exception HBExecutionException {
+  1: required string msg;
+}
+
+struct HBPulse {
+  1: required string id;
+  2: binary details;
+}
+
+struct HBRecords {
+  1: list<HBPulse> pulses;
+}
+
+struct HBNodes {
+  1: list<string> pulseIds;
+}
\ No newline at end of file

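Note that HBMessageData is declared as a Thrift union: a well-formed message sets exactly one branch, and field id 6 is deliberately skipped, which is why the generated Python thrift_spec above carries a None placeholder in slot 6. The generated Python class does not enforce the single-branch rule, so callers must take care to populate only one field. A small sketch under the same storm.ttypes import assumption:

    from storm.ttypes import HBMessageData, HBPulse

    # Populate exactly one branch of the union; every other branch stays None.
    data = HBMessageData(pulse=HBPulse(id='/foo', details='pulse payload'))
    assert data.path is None and data.boolval is None
    assert data.records is None and data.nodes is None
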
http://git-wip-us.apache.org/repos/asf/storm/blob/444ec05e/storm-core/test/clj/backtype/storm/cluster_test.clj
----------------------------------------------------------------------
diff --git a/storm-core/test/clj/backtype/storm/cluster_test.clj b/storm-core/test/clj/backtype/storm/cluster_test.clj
index 198222e..eea4637 100644
--- a/storm-core/test/clj/backtype/storm/cluster_test.clj
+++ b/storm-core/test/clj/backtype/storm/cluster_test.clj
@@ -23,6 +23,7 @@
   (:import [org.mockito.exceptions.base MockitoAssertionError])
   (:import [org.apache.curator.framework CuratorFramework CuratorFrameworkFactory CuratorFrameworkFactory$Builder])
   (:import [backtype.storm.utils Utils TestUtils ZookeeperAuthInfo])
+  (:import [backtype.storm.cluster ClusterState])
   (:require [backtype.storm [zookeeper :as zk]])
   (:require [conjure.core])
   (:use [conjure core])
@@ -312,9 +313,9 @@
       (mk-distributed-cluster-state {})
       (verify-call-times-for zk/mkdirs 1)
       (verify-first-call-args-for-indices zk/mkdirs [2] nil))
-    (stubbing [mk-distributed-cluster-state nil
-               register nil
-               mkdirs nil]
+    (stubbing [mk-distributed-cluster-state (reify ClusterState
+                                              (register [this callback] nil)
+                                              (mkdirs [this path acls] nil))]
       (mk-storm-cluster-state {})
       (verify-call-times-for mk-distributed-cluster-state 1)
       (verify-first-call-args-for-indices mk-distributed-cluster-state [4] nil))))

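The reworked stub above swaps three loose function stubs for a single object that genuinely implements the ClusterState interface, so the code under test receives a type-correct double. The same test-double idea, sketched in Python with hypothetical names:

    class FakeClusterState(object):
        """Minimal stand-in exposing just the methods the code under test calls."""
        def register(self, callback):
            return None

        def mkdirs(self, path, acls):
            return None
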
http://git-wip-us.apache.org/repos/asf/storm/blob/444ec05e/storm-core/test/clj/backtype/storm/messaging/netty_unit_test.clj
----------------------------------------------------------------------
diff --git a/storm-core/test/clj/backtype/storm/messaging/netty_unit_test.clj b/storm-core/test/clj/backtype/storm/messaging/netty_unit_test.clj
index f7d3802..c323c49 100644
--- a/storm-core/test/clj/backtype/storm/messaging/netty_unit_test.clj
+++ b/storm-core/test/clj/backtype/storm/messaging/netty_unit_test.clj
@@ -54,7 +54,10 @@
                     STORM-MESSAGING-NETTY-MAX-SLEEP-MS 5000
                     STORM-MESSAGING-NETTY-SERVER-WORKER-THREADS 1
                     STORM-MESSAGING-NETTY-CLIENT-WORKER-THREADS 1
-                    }
+                    TOPOLOGY-KRYO-FACTORY "backtype.storm.serialization.DefaultKryoFactory"
+                    TOPOLOGY-TUPLE-SERIALIZER "backtype.storm.serialization.types.ListDelegateSerializer"
+                    TOPOLOGY-FALL-BACK-ON-JAVA-SERIALIZATION false
+                    TOPOLOGY-SKIP-MISSING-KRYO-REGISTRATIONS false}
         context (TransportFactory/makeContext storm-conf)
         server (.bind context nil port)
         client (.connect context nil "localhost" port)
@@ -79,7 +82,10 @@
                     STORM-MESSAGING-NETTY-MAX-SLEEP-MS 5000
                     STORM-MESSAGING-NETTY-SERVER-WORKER-THREADS 1
                     STORM-MESSAGING-NETTY-CLIENT-WORKER-THREADS 1
-                    }
+                    TOPOLOGY-KRYO-FACTORY "backtype.storm.serialization.DefaultKryoFactory"
+                    TOPOLOGY-TUPLE-SERIALIZER "backtype.storm.serialization.types.ListDelegateSerializer"
+                    TOPOLOGY-FALL-BACK-ON-JAVA-SERIALIZATION false
+                    TOPOLOGY-SKIP-MISSING-KRYO-REGISTRATIONS false}
         context (TransportFactory/makeContext storm-conf)
         server (.bind context nil port)
         client (.connect context nil "localhost" port)
@@ -104,7 +110,10 @@
                     STORM-MESSAGING-NETTY-MAX-SLEEP-MS 5000
                     STORM-MESSAGING-NETTY-SERVER-WORKER-THREADS 1
                     STORM-MESSAGING-NETTY-CLIENT-WORKER-THREADS 1
-                    }
+                    TOPOLOGY-KRYO-FACTORY "backtype.storm.serialization.DefaultKryoFactory"
+                    TOPOLOGY-TUPLE-SERIALIZER "backtype.storm.serialization.types.ListDelegateSerializer"
+                    TOPOLOGY-FALL-BACK-ON-JAVA-SERIALIZATION false
+                    TOPOLOGY-SKIP-MISSING-KRYO-REGISTRATIONS false}
         _ (log-message "Should send and receive many messages (testing with " num-messages " messages)")
         context (TransportFactory/makeContext storm-conf)
         server (.bind context nil port)

http://git-wip-us.apache.org/repos/asf/storm/blob/444ec05e/storm-core/test/clj/org/apache/storm/pacemaker_state_factory_test.clj
----------------------------------------------------------------------
diff --git a/storm-core/test/clj/org/apache/storm/pacemaker_state_factory_test.clj b/storm-core/test/clj/org/apache/storm/pacemaker_state_factory_test.clj
new file mode 100644
index 0000000..5024922
--- /dev/null
+++ b/storm-core/test/clj/org/apache/storm/pacemaker_state_factory_test.clj
@@ -0,0 +1,135 @@
+(ns org.apache.storm.pacemaker-state-factory-test
+  (:require [clojure.test :refer :all]
+            [conjure.core :refer :all]
+            [org.apache.storm.pacemaker [pacemaker-state-factory :as psf]])
+  (:import [backtype.storm.generated
+            HBExecutionException HBNodes HBRecords
+            HBServerMessageType HBMessage HBMessageData HBPulse]
+           [backtype.storm.cluster ClusterStateContext]
+           [org.mockito Mockito Matchers]))
+
+(defn- string-to-bytes [string]
+  (byte-array (map int string)))
+
+(defn- bytes-to-string [bytez]
+  (apply str (map char bytez)))
+
+(defprotocol send-capture
+  (send [this something])
+  (check-captured [this]))
+
+(defn- make-send-capture [response]
+  (let [captured (atom nil)]
+    (reify send-capture
+      (send [this something] (reset! captured something) response)
+      (check-captured [this] @captured))))
+
+(defmacro with-mock-pacemaker-client-and-state [client state response & body]
+  `(let [~client (make-send-capture ~response)]
+     (stubbing [psf/makeZKState nil
+                psf/makeClient ~client]
+               (let [~state (psf/-mkState nil nil nil nil (ClusterStateContext.))]
+                 ~@body))))
+
+
+(deftest pacemaker_state_set_worker_hb
+  (testing "set_worker_hb"
+    (with-mock-pacemaker-client-and-state
+      client state
+      (HBMessage. HBServerMessageType/SEND_PULSE_RESPONSE nil)
+
+      (.set_worker_hb state "/foo" (string-to-bytes "data") nil)
+      (let [sent (.check-captured client)
+            pulse (.get_pulse (.get_data sent))]
+        (is (= (.get_type sent) HBServerMessageType/SEND_PULSE))
+        (is (= (.get_id pulse) "/foo"))
+        (is (= (bytes-to-string (.get_details pulse)) "data")))))
+
+  (testing "set_worker_hb"
+    (with-mock-pacemaker-client-and-state
+      client state
+      (HBMessage. HBServerMessageType/SEND_PULSE nil)
+
+      (is (thrown? HBExecutionException      
+                   (.set_worker_hb state "/foo" (string-to-bytes "data") nil))))))
+
+
+
+(deftest pacemaker_state_delete_worker_hb
+  (testing "delete_worker_hb"
+    (with-mock-pacemaker-client-and-state
+      client state
+      (HBMessage. HBServerMessageType/DELETE_PATH_RESPONSE nil)
+
+      (.delete_worker_hb state "/foo/bar")
+      (let [sent (.check-captured client)]
+        (is (= (.get_type sent) HBServerMessageType/DELETE_PATH))
+        (is (= (.get_path (.get_data sent)) "/foo/bar")))))
+
+    (testing "delete_worker_hb"
+      (with-mock-pacemaker-client-and-state
+        client state
+        (HBMessage. HBServerMessageType/DELETE_PATH nil)
+        
+        (is (thrown? HBExecutionException
+                     (.delete_worker_hb state "/foo/bar"))))))
+
+(deftest pacemaker_state_get_worker_hb
+  (testing "get_worker_hb"
+    (with-mock-pacemaker-client-and-state
+      client state
+      (HBMessage. HBServerMessageType/GET_PULSE_RESPONSE
+                (HBMessageData/pulse
+                 (doto (HBPulse.)
+                   (.set_id "/foo")
+                   (.set_details (string-to-bytes "some data")))))
+
+      (.get_worker_hb state "/foo" false)
+      (let [sent (.check-captured client)]
+        (is (= (.get_type sent) HBServerMessageType/GET_PULSE))
+        (is (= (.get_path (.get_data sent)) "/foo")))))
+
+  (testing "get_worker_hb - fail (bad response)"
+    (with-mock-pacemaker-client-and-state
+      client state
+      (HBMessage. HBServerMessageType/GET_PULSE nil)
+      
+      (is (thrown? HBExecutionException
+                   (.get_worker_hb state "/foo" false)))))
+  
+  (testing "get_worker_hb - fail (bad data)"
+    (with-mock-pacemaker-client-and-state
+      client state
+      (HBMessage. HBServerMessageType/GET_PULSE_RESPONSE nil)
+      
+      (is (thrown? HBExecutionException
+                   (.get_worker_hb state "/foo" false))))))
+
+(deftest pacemaker_state_get_worker_hb_children
+  (testing "get_worker_hb_children"
+    (with-mock-pacemaker-client-and-state
+      client state
+      (HBMessage. HBServerMessageType/GET_ALL_NODES_FOR_PATH_RESPONSE
+                (HBMessageData/nodes
+                 (HBNodes. [])))
+
+      (.get_worker_hb_children state "/foo" false)
+      (let [sent (.check-captured client)]
+        (is (= (.get_type sent) HBServerMessageType/GET_ALL_NODES_FOR_PATH))
+        (is (= (.get_path (.get_data sent)) "/foo")))))
+
+  (testing "get_worker_hb_children - fail (bad response)"
+    (with-mock-pacemaker-client-and-state
+      client state
+      (HBMessage. HBServerMessageType/DELETE_PATH nil)
+
+      (is (thrown? HBExecutionException
+                   (.get_worker_hb_children state "/foo" false)))))
+
+    (testing "get_worker_hb_children - fail (bad data)"
+    (with-mock-pacemaker-client-and-state
+      client state
+      (HBMessage. HBServerMessageType/GET_ALL_NODES_FOR_PATH_RESPONSE nil)
+      
+      (is (thrown? HBExecutionException
+                   (.get_worker_hb_children state "/foo" false))))))

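The send-capture protocol above is a small capture-and-reply test double: the fake client records the last message handed to it and returns whatever canned response the test chose, so each assertion can inspect exactly what the state implementation sent. A rough Python equivalent of the same pattern (names are illustrative only):

    class SendCaptureClient(object):
        """Records the last message sent and replies with a canned response."""
        def __init__(self, response):
            self.response = response
            self.captured = None

        def send(self, message):
            self.captured = message
            return self.response

Usage mirrors the macro above: stub the client factory to return a SendCaptureClient, drive the state object, then assert on client.captured.
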
http://git-wip-us.apache.org/repos/asf/storm/blob/444ec05e/storm-core/test/clj/org/apache/storm/pacemaker_test.clj
----------------------------------------------------------------------
diff --git a/storm-core/test/clj/org/apache/storm/pacemaker_test.clj b/storm-core/test/clj/org/apache/storm/pacemaker_test.clj
new file mode 100644
index 0000000..98c21de
--- /dev/null
+++ b/storm-core/test/clj/org/apache/storm/pacemaker_test.clj
@@ -0,0 +1,227 @@
+(ns org.apache.storm.pacemaker-test
+  (:require [clojure.test :refer :all]
+            [org.apache.storm.pacemaker [pacemaker :as pacemaker]]
+            [conjure.core :as conjure])
+  (:import [backtype.storm.generated
+            HBExecutionException HBNodes HBRecords
+            HBServerMessageType HBMessage HBMessageData HBPulse]))
+
+(defn- message-with-rand-id [type data]
+  (let [mid (rand-int 1000)
+        message (HBMessage. type data)]
+    (.set_message_id message mid)
+    [message mid]))
+
+(defn- string-to-bytes [string]
+  (byte-array (map int string)))
+
+(defn- bytes-to-string [bytez]
+  (apply str (map char bytez)))
+
+(defn- makenode [handler path]
+  (.handleMessage handler
+                  (HBMessage.
+                   HBServerMessageType/SEND_PULSE
+                   (HBMessageData/pulse
+                    (doto (HBPulse.)
+                      (.set_id path)
+                      (.set_details (string-to-bytes "nothing")))))
+                  true))
+
+(deftest pacemaker-server-create-path
+  (conjure/stubbing
+   [pacemaker/register nil]
+   (let [handler (pacemaker/mk-handler {})]
+     (testing "CREATE_PATH"
+       (let [[message mid] (message-with-rand-id
+                            HBServerMessageType/CREATE_PATH
+                            (HBMessageData/path "/testpath"))
+             response (.handleMessage handler message true)]
+         (is (= (.get_message_id response) mid))
+         (is (= (.get_type response) HBServerMessageType/CREATE_PATH_RESPONSE))
+         (is (= (.get_data response) nil)))))))
+
+(deftest pacemaker-server-exists
+  (conjure/stubbing
+   [pacemaker/register nil]
+   (let [handler (pacemaker/mk-handler {})]
+     (testing "EXISTS - false"
+       (let [[message mid] (message-with-rand-id HBServerMessageType/EXISTS
+                                                 (HBMessageData/path "/testpath"))
+             bad-response (.handleMessage handler message false)
+             good-response (.handleMessage handler message true)]
+         (is (= (.get_message_id bad-response) mid))
+         (is (= (.get_type bad-response) HBServerMessageType/NOT_AUTHORIZED))
+
+         (is (= (.get_message_id good-response) mid))
+         (is (= (.get_type good-response) HBServerMessageType/EXISTS_RESPONSE))
+         (is (= (.get_boolval (.get_data good-response)) false))))
+
+     (testing "EXISTS - true"
+       (let [path "/exists_path"
+             data-string "pulse data"]
+         (let [[send _] (message-with-rand-id
+                         HBServerMessageType/SEND_PULSE
+                         (HBMessageData/pulse
+                          (doto (HBPulse.)
+                            (.set_id path)
+                            (.set_details (string-to-bytes data-string)))))
+               _ (.handleMessage handler send true)
+               [message mid] (message-with-rand-id HBServerMessageType/EXISTS
+                                                   (HBMessageData/path path))
+               bad-response (.handleMessage handler message false)
+               good-response (.handleMessage handler message true)]
+           (is (= (.get_message_id bad-response) mid))
+          (is (= (.get_type bad-response) HBServerMessageType/NOT_AUTHORIZED))
+
+          (is (= (.get_message_id good-response) mid))
+          (is (= (.get_type good-response) HBServerMessageType/EXISTS_RESPONSE))
+          (is (= (.get_boolval (.get_data good-response)) true))))))))
+
+(deftest pacemaker-server-send-pulse-get-pulse
+  (conjure/stubbing
+   [pacemaker/register nil]
+   (let [handler (pacemaker/mk-handler {})]
+     (testing "SEND_PULSE - GET_PULSE"
+       (let [path "/pulsepath"
+             data-string "pulse data"]
+         (let [[message mid] (message-with-rand-id
+                              HBServerMessageType/SEND_PULSE
+                              (HBMessageData/pulse
+                               (doto (HBPulse.)
+                                 (.set_id path)
+                                 (.set_details (string-to-bytes data-string)))))
+               response (.handleMessage handler message true)]
+           (is (= (.get_message_id response) mid))
+           (is (= (.get_type response) HBServerMessageType/SEND_PULSE_RESPONSE))
+           (is (= (.get_data response) nil)))
+         (let [[message mid] (message-with-rand-id
+                              HBServerMessageType/GET_PULSE
+                              (HBMessageData/path path))
+               response (.handleMessage handler message true)]
+           (is (= (.get_message_id response) mid))
+           (is (= (.get_type response) HBServerMessageType/GET_PULSE_RESPONSE))
+           (is (= (bytes-to-string (.get_details (.get_pulse (.get_data response)))) data-string))))))))
+
+(deftest pacemaker-server-get-all-pulse-for-path
+  (conjure/stubbing
+   [pacemaker/register nil]
+   (let [handler (pacemaker/mk-handler {})]
+     (testing "GET_ALL_PULSE_FOR_PATH"
+       (let [[message mid] (message-with-rand-id HBServerMessageType/GET_ALL_PULSE_FOR_PATH
+                                                 (HBMessageData/path "/testpath"))
+             bad-response (.handleMessage handler message false)
+             good-response (.handleMessage handler message true)]
+         (is (= (.get_message_id bad-response) mid))
+         (is (= (.get_type bad-response) HBServerMessageType/NOT_AUTHORIZED))
+
+         (is (= (.get_message_id good-response) mid))
+         (is (= (.get_type good-response) HBServerMessageType/GET_ALL_PULSE_FOR_PATH_RESPONSE))
+         (is (= (.get_data good-response) nil)))))))
+
+(deftest pacemaker-server-get-all-nodes-for-path
+  (conjure/stubbing
+   [pacemaker/register nil]
+   (let [handler (pacemaker/mk-handler {})]
+     (testing "GET_ALL_NODES_FOR_PATH"
+       (makenode handler "/some-root-path/foo")
+       (makenode handler "/some-root-path/bar")
+       (makenode handler "/some-root-path/baz")
+       (makenode handler "/some-root-path/boo")
+       (let [[message mid] (message-with-rand-id HBServerMessageType/GET_ALL_NODES_FOR_PATH
+                                                 (HBMessageData/path "/some-root-path"))
+             bad-response (.handleMessage handler message false)
+             good-response (.handleMessage handler message true)
+             ids (into #{} (.get_pulseIds (.get_nodes (.get_data good-response))))]
+         (is (= (.get_message_id bad-response) mid))
+         (is (= (.get_type bad-response) HBServerMessageType/NOT_AUTHORIZED))
+
+         (is (= (.get_message_id good-response) mid))
+         (is (= (.get_type good-response) HBServerMessageType/GET_ALL_NODES_FOR_PATH_RESPONSE))
+         (is (contains? ids "foo"))
+         (is (contains? ids "bar"))
+         (is (contains? ids "baz"))
+         (is (contains? ids "boo")))
+
+       (makenode handler "/some/deeper/path/foo")
+       (makenode handler "/some/deeper/path/bar")
+       (makenode handler "/some/deeper/path/baz")
+       (let [[message mid] (message-with-rand-id HBServerMessageType/GET_ALL_NODES_FOR_PATH
+                                                 (HBMessageData/path "/some/deeper/path"))
+             bad-response (.handleMessage handler message false)
+             good-response (.handleMessage handler message true)
+             ids (into #{} (.get_pulseIds (.get_nodes (.get_data good-response))))]
+         (is (= (.get_message_id bad-response) mid))
+         (is (= (.get_type bad-response) HBServerMessageType/NOT_AUTHORIZED))
+
+         (is (= (.get_message_id good-response) mid))
+         (is (= (.get_type good-response) HBServerMessageType/GET_ALL_NODES_FOR_PATH_RESPONSE))
+         (is (contains? ids "foo"))
+         (is (contains? ids "bar"))
+         (is (contains? ids "baz")))))))
+
+(deftest pacemaker-server-get-pulse
+  (conjure/stubbing
+   [pacemaker/register nil]
+   (let [handler (pacemaker/mk-handler {})]
+     (testing "GET_PULSE"
+       (makenode handler "/some-root/GET_PULSE")
+       (let [[message mid] (message-with-rand-id HBServerMessageType/GET_PULSE
+                                                 (HBMessageData/path "/some-root/GET_PULSE"))
+             bad-response (.handleMessage handler message false)
+             good-response (.handleMessage handler message true)
+             good-pulse (.get_pulse (.get_data good-response))]
+         (is (= (.get_message_id bad-response) mid))
+         (is (= (.get_type bad-response) HBServerMessageType/NOT_AUTHORIZED))
+         (is (= (.get_data bad-response) nil))
+
+         (is (= (.get_message_id good-response) mid))
+         (is (= (.get_type good-response) HBServerMessageType/GET_PULSE_RESPONSE))
+         (is (= (.get_id good-pulse) "/some-root/GET_PULSE"))
+         (is (= (bytes-to-string (.get_details good-pulse)) "nothing")))))))
+
+(deftest pacemaker-server-delete-path
+  (conjure/stubbing
+   [pacemaker/register nil]
+   (let [handler (pacemaker/mk-handler {})]
+     (testing "DELETE_PATH"
+       (makenode handler "/some-root/DELETE_PATH/foo")
+       (makenode handler "/some-root/DELETE_PATH/bar")
+       (makenode handler "/some-root/DELETE_PATH/baz")
+       (makenode handler "/some-root/DELETE_PATH/boo")
+       (let [[message mid] (message-with-rand-id HBServerMessageType/DELETE_PATH
+                                                 (HBMessageData/path "/some-root/DELETE_PATH"))
+             response (.handleMessage handler message true)]
+         (is (= (.get_message_id response) mid))
+         (is (= (.get_type response) HBServerMessageType/DELETE_PATH_RESPONSE))
+         (is (= (.get_data response) nil)))
+       (let [[message mid] (message-with-rand-id HBServerMessageType/GET_ALL_NODES_FOR_PATH
+                                                 (HBMessageData/path "/some-root/DELETE_PATH"))
+             response (.handleMessage handler message true)
+             ids (into #{} (.get_pulseIds (.get_nodes (.get_data response))))]
+         (is (= (.get_message_id response) mid))
+         (is (= (.get_type response) HBServerMessageType/GET_ALL_NODES_FOR_PATH_RESPONSE))
+         (is (empty? ids)))))))
+
+(deftest pacemaker-server-delete-pulse-id
+  (conjure/stubbing
+   [pacemaker/register nil]
+   (let [handler (pacemaker/mk-handler {})]
+     (testing "DELETE_PULSE_ID"
+       (makenode handler "/some-root/DELETE_PULSE_ID/foo")
+       (makenode handler "/some-root/DELETE_PULSE_ID/bar")
+       (makenode handler "/some-root/DELETE_PULSE_ID/baz")
+       (makenode handler "/some-root/DELETE_PULSE_ID/boo")
+       (let [[message mid] (message-with-rand-id HBServerMessageType/DELETE_PULSE_ID
+                                                 (HBMessageData/path "/some-root/DELETE_PULSE_ID/foo"))
+             response (.handleMessage handler message true)]
+         (is (= (.get_message_id response) mid))
+         (is (= (.get_type response) HBServerMessageType/DELETE_PULSE_ID_RESPONSE))
+         (is (= (.get_data response) nil)))
+       (let [[message mid] (message-with-rand-id HBServerMessageType/GET_ALL_NODES_FOR_PATH
+                                                 (HBMessageData/path "/some-root/DELETE_PULSE_ID"))
+             response (.handleMessage handler message true)
+             ids (into #{} (.get_pulseIds (.get_nodes (.get_data response))))]
+         (is (= (.get_message_id response) mid))
+         (is (= (.get_type response) HBServerMessageType/GET_ALL_NODES_FOR_PATH_RESPONSE))
+         (is (not (contains? ids "foo"))))))))


[11/37] storm git commit: Fixing spacing in pacemaker_test

Posted by kn...@apache.org.
Fixing spacing in pacemaker_test


Project: http://git-wip-us.apache.org/repos/asf/storm/repo
Commit: http://git-wip-us.apache.org/repos/asf/storm/commit/d59e936a
Tree: http://git-wip-us.apache.org/repos/asf/storm/tree/d59e936a
Diff: http://git-wip-us.apache.org/repos/asf/storm/diff/d59e936a

Branch: refs/heads/master
Commit: d59e936af6ed797026ad5b6a86b3b4b28f5660ff
Parents: 33903de
Author: Kyle Nusbaum <Ky...@gmail.com>
Authored: Tue Nov 3 14:31:51 2015 -0600
Committer: Kyle Nusbaum <Ky...@gmail.com>
Committed: Tue Nov 3 14:31:51 2015 -0600

----------------------------------------------------------------------
 storm-core/test/clj/org/apache/storm/pacemaker_test.clj | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/storm/blob/d59e936a/storm-core/test/clj/org/apache/storm/pacemaker_test.clj
----------------------------------------------------------------------
diff --git a/storm-core/test/clj/org/apache/storm/pacemaker_test.clj b/storm-core/test/clj/org/apache/storm/pacemaker_test.clj
index 98c21de..ca7c693 100644
--- a/storm-core/test/clj/org/apache/storm/pacemaker_test.clj
+++ b/storm-core/test/clj/org/apache/storm/pacemaker_test.clj
@@ -72,11 +72,11 @@
                bad-response (.handleMessage handler message false)
                good-response (.handleMessage handler message true)]
            (is (= (.get_message_id bad-response) mid))
-          (is (= (.get_type bad-response) HBServerMessageType/NOT_AUTHORIZED))
+           (is (= (.get_type bad-response) HBServerMessageType/NOT_AUTHORIZED))
 
-          (is (= (.get_message_id good-response) mid))
-          (is (= (.get_type good-response) HBServerMessageType/EXISTS_RESPONSE))
-          (is (= (.get_boolval (.get_data good-response)) true))))))))
+           (is (= (.get_message_id good-response) mid))
+           (is (= (.get_type good-response) HBServerMessageType/EXISTS_RESPONSE))
+           (is (= (.get_boolval (.get_data good-response)) true))))))))
 
 (deftest pacemaker-server-send-pulse-get-pulse
   (conjure/stubbing


[20/37] storm git commit: Minor tweaks to documentation.

Posted by kn...@apache.org.
Minor tweaks to documentation.


Project: http://git-wip-us.apache.org/repos/asf/storm/repo
Commit: http://git-wip-us.apache.org/repos/asf/storm/commit/c93a0ff6
Tree: http://git-wip-us.apache.org/repos/asf/storm/tree/c93a0ff6
Diff: http://git-wip-us.apache.org/repos/asf/storm/diff/c93a0ff6

Branch: refs/heads/master
Commit: c93a0ff66f75c75ba3a36db64099f710e0a9d8f0
Parents: 0d98bbe
Author: Kyle Nusbaum <Ky...@gmail.com>
Authored: Tue Nov 17 15:22:29 2015 -0600
Committer: Kyle Nusbaum <Ky...@gmail.com>
Committed: Tue Nov 17 15:22:29 2015 -0600

----------------------------------------------------------------------
 docs/documentation/Pacemaker.md | 7 +++----
 1 file changed, 3 insertions(+), 4 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/storm/blob/c93a0ff6/docs/documentation/Pacemaker.md
----------------------------------------------------------------------
diff --git a/docs/documentation/Pacemaker.md b/docs/documentation/Pacemaker.md
index 827c139..cd7a0a6 100644
--- a/docs/documentation/Pacemaker.md
+++ b/docs/documentation/Pacemaker.md
@@ -53,7 +53,7 @@ PacemakerDigest {
 ```
 
 Any node with these settings configured will be able to read from Pacemaker.
-Worker nodes need not have these configs set, and may keep `pacemaker.auth.method: NONE` set, since they do not need to read from Pacemaker daemon.
+Worker nodes need not have these configs set, and may keep `pacemaker.auth.method: NONE` set, since they do not need to read from the Pacemaker daemon.
 
 #### Kerberos
 To configure Kerberos authentication, set `pacemaker.auth.method: KERBEROS` in the cluster config on the nodes hosting Nimbus and Pacemaker.
@@ -85,6 +85,5 @@ PacemakerServer {
 };
 ```
 
-The client's user principal in the `PacemakerClient` section on the Nimbus host must match the `nimbus.daemon.user` storm cluster config value.
-The client's `serviceName` value must match the server's user principal in the `PacemakerServer` section on the Pacemaker host.
-
+ - The client's user principal in the `PacemakerClient` section on the Nimbus host must match the `nimbus.daemon.user` storm cluster config value.
+ - The client's `serviceName` value must match the server's user principal in the `PacemakerServer` section on the Pacemaker host.


[29/37] storm git commit: Adding a bit to docs.

Posted by kn...@apache.org.
Adding a bit to docs.


Project: http://git-wip-us.apache.org/repos/asf/storm/repo
Commit: http://git-wip-us.apache.org/repos/asf/storm/commit/387232c6
Tree: http://git-wip-us.apache.org/repos/asf/storm/tree/387232c6
Diff: http://git-wip-us.apache.org/repos/asf/storm/diff/387232c6

Branch: refs/heads/master
Commit: 387232c68ae88c317a1607af20a0ad2a21ee62cf
Parents: ee5265d
Author: Kyle Nusbaum <Ky...@gmail.com>
Authored: Thu Nov 19 11:00:47 2015 -0600
Committer: Kyle Nusbaum <Ky...@gmail.com>
Committed: Thu Nov 19 11:00:47 2015 -0600

----------------------------------------------------------------------
 docs/documentation/Pacemaker.md | 19 +++++++++++++++++++
 1 file changed, 19 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/storm/blob/387232c6/docs/documentation/Pacemaker.md
----------------------------------------------------------------------
diff --git a/docs/documentation/Pacemaker.md b/docs/documentation/Pacemaker.md
index e877541..f82f23d 100644
--- a/docs/documentation/Pacemaker.md
+++ b/docs/documentation/Pacemaker.md
@@ -87,3 +87,22 @@ PacemakerServer {
 
  - The client's user principal in the `PacemakerClient` section on the Nimbus host must match the `nimbus.daemon.user` storm cluster config value.
  - The client's `serviceName` value must match the server's user principal in the `PacemakerServer` section on the Pacemaker host.
+
+
+### Fault Tolerance
+
+Pacemaker currently runs as a single daemon instance, which makes it a potential single point of failure.
+
+If Pacemaker becomes unreachable from Nimbus, whether through a crash or a network partition, the workers will continue to run and Nimbus will repeatedly attempt to reconnect. Nimbus functionality will be disrupted, but the topologies themselves will continue to run.
+If the cluster partitions such that Nimbus and Pacemaker end up on the same side, the workers on the other side will not be able to heartbeat, and Nimbus will reschedule their tasks elsewhere. This is probably what we want to happen anyway.
+
+
+### ZooKeeper Comparison
+Compared to ZooKeeper, Pacemaker uses less CPU, less memory, and no disk for the same load, since it avoids the overhead of maintaining consistency between nodes.
+On Gigabit networking, there is a theoretical limit of about 6000 nodes. However, the real limit is likely around 2000-3000 nodes. These limits have not yet been tested.
+On a 270-supervisor cluster, fully scheduled with topologies, Pacemaker resource utilization was 70% of one core and nearly 1GiB of RAM on a machine with 4 `Intel(R) Xeon(R) CPU E5530 @ 2.40GHz` and 24GiB of RAM.
+
+
+There is an easy route to HA for Pacemaker. Pacemaker should be able to scale horizontally without overhead, whereas adding nodes to a ZooKeeper ensemble yields diminishing returns.
+
+In short, a single Pacemaker node should be able to handle many times the load that a ZooKeeper cluster can, and future HA work allowing horizontal scaling will increase that even further.


[16/37] storm git commit: Merge remote-tracking branch 'asf/master' into STORM-855

Posted by kn...@apache.org.
Merge remote-tracking branch 'asf/master' into STORM-855


Project: http://git-wip-us.apache.org/repos/asf/storm/repo
Commit: http://git-wip-us.apache.org/repos/asf/storm/commit/62d725a8
Tree: http://git-wip-us.apache.org/repos/asf/storm/tree/62d725a8
Diff: http://git-wip-us.apache.org/repos/asf/storm/diff/62d725a8

Branch: refs/heads/master
Commit: 62d725a85e7869290805cdbe55f1f3bce1f905de
Parents: 80c60d8 2b6884b
Author: Kyle Nusbaum <Ky...@gmail.com>
Authored: Tue Nov 10 11:11:47 2015 -0600
Committer: Kyle Nusbaum <Ky...@gmail.com>
Committed: Tue Nov 10 11:11:47 2015 -0600

----------------------------------------------------------------------
 CHANGELOG.md                                    |   26 +
 DISCLAIMER                                      |   10 -
 LICENSE                                         |   12 +
 README.markdown                                 |    3 +-
 STORM-UI-REST-API.md                            |  735 --
 bin/flight.bash                                 |  154 +
 bin/storm-config.cmd                            |   10 +-
 bin/storm.py                                    |   14 +-
 conf/defaults.yaml                              |    6 +-
 conf/storm.yaml.example                         |    2 +-
 dev-tools/travis/ratprint.py                    |   26 +
 dev-tools/travis/travis-install.sh              |    5 +
 dev-tools/travis/travis-script.sh               |    6 +
 docs/DYNAMIC_LOG_LEVEL_SETTINGS.md              |   41 -
 docs/documentation/Documentation.md             |    4 +
 docs/documentation/Log-Search.md                |   14 +
 .../Message-passing-implementation.md           |   34 +-
 .../documentation/dynamic-log-level-settings.md |   41 +
 docs/documentation/dynamic-worker-profiling.md  |   29 +
 .../images/dynamic_log_level_settings_1.png     |  Bin 0 -> 93689 bytes
 .../images/dynamic_log_level_settings_2.png     |  Bin 0 -> 78785 bytes
 .../images/dynamic_profiling_debugging_1.png    |  Bin 0 -> 93635 bytes
 .../images/dynamic_profiling_debugging_2.png    |  Bin 0 -> 138120 bytes
 .../images/dynamic_profiling_debugging_3.png    |  Bin 0 -> 96974 bytes
 docs/documentation/images/search-a-topology.png |  Bin 0 -> 671031 bytes
 .../images/search-for-a-single-worker-log.png   |  Bin 0 -> 736579 bytes
 .../storm-metrics-profiling-internal-actions.md |   70 +
 docs/documentation/ui-rest-api.md               |  984 ++
 docs/images/dynamic_log_level_settings_1.png    |  Bin 93689 -> 0 bytes
 docs/images/dynamic_log_level_settings_2.png    |  Bin 78785 -> 0 bytes
 docs/images/viewing_metrics_with_VisualVM.png   |  Bin 0 -> 225100 bytes
 .../starter/ResourceAwareExampleTopology.java   |   20 +-
 external/flux/README.md                         |    1 +
 .../main/java/org/apache/storm/flux/Flux.java   |    3 +-
 .../java/org/apache/storm/flux/FluxBuilder.java |   13 +
 .../org/apache/storm/flux/test/TestBolt.java    |    4 +
 .../resources/configs/config-methods-test.yaml  |    1 +
 external/storm-hdfs/README.md                   |   33 +
 .../storm/hdfs/bolt/AvroGenericRecordBolt.java  |   17 +
 .../ha/codedistributor/HDFSCodeDistributor.java |   17 +
 .../hdfs/bolt/AvroGenericRecordBoltTest.java    |   17 +
 .../storm/hdfs/bolt/TestSequenceFileBolt.java   |  186 +
 .../storm/hdfs/trident/HdfsStateTest.java       |   17 +
 .../storm/jdbc/bolt/AbstractJdbcBolt.java       |    2 +
 .../apache/storm/jdbc/bolt/JdbcInsertBolt.java  |    9 +
 .../apache/storm/jdbc/bolt/JdbcLookupBolt.java  |    5 +
 .../jdbc/mapper/SimpleJdbcLookupMapper.java     |    3 +
 .../storm/jdbc/mapper/SimpleJdbcMapper.java     |    5 +
 .../storm/jdbc/bolt/JdbcInsertBoltTest.java     |   71 +
 .../storm/jdbc/bolt/JdbcLookupBoltTest.java     |   59 +
 external/storm-kafka/README.md                  |   11 +
 .../jvm/storm/kafka/DynamicBrokersReader.java   |   97 +-
 .../kafka/DynamicPartitionConnections.java      |   20 +-
 .../src/jvm/storm/kafka/KafkaSpout.java         |    2 +-
 .../src/jvm/storm/kafka/KafkaUtils.java         |   85 +-
 .../src/jvm/storm/kafka/Partition.java          |   26 +-
 .../src/jvm/storm/kafka/PartitionManager.java   |   18 +-
 .../src/jvm/storm/kafka/StaticCoordinator.java  |   11 +-
 .../storm/kafka/StringMultiSchemeWithTopic.java |   57 +
 .../src/jvm/storm/kafka/ZkCoordinator.java      |    2 +-
 .../jvm/storm/kafka/trident/Coordinator.java    |    7 +-
 .../trident/GlobalPartitionInformation.java     |   26 +-
 .../jvm/storm/kafka/trident/IBrokerReader.java  |    7 +-
 .../kafka/trident/OpaqueTridentKafkaSpout.java  |    5 +-
 .../storm/kafka/trident/StaticBrokerReader.java |   23 +-
 .../kafka/trident/TridentKafkaEmitter.java      |   36 +-
 .../storm/kafka/trident/TridentKafkaState.java  |   25 +-
 .../jvm/storm/kafka/trident/ZkBrokerReader.java |   20 +-
 .../storm/kafka/DynamicBrokersReaderTest.java   |  114 +-
 .../src/test/storm/kafka/KafkaUtilsTest.java    |   58 +-
 .../src/test/storm/kafka/TestUtils.java         |   20 +-
 .../test/storm/kafka/TridentKafkaTopology.java  |    8 +-
 .../src/test/storm/kafka/ZkCoordinatorTest.java |    8 +-
 .../test/storm/kafka/bolt/KafkaBoltTest.java    |    4 +-
 external/storm-solr/pom.xml                     |   17 +
 pom.xml                                         |  132 +-
 storm-core/pom.xml                              |   47 +
 storm-core/src/clj/backtype/storm/cluster.clj   |   57 +-
 storm-core/src/clj/backtype/storm/config.clj    |   23 +
 storm-core/src/clj/backtype/storm/converter.clj |   19 +-
 .../src/clj/backtype/storm/daemon/common.clj    |    5 +-
 .../src/clj/backtype/storm/daemon/drpc.clj      |   18 +-
 .../src/clj/backtype/storm/daemon/executor.clj  |   54 +-
 .../src/clj/backtype/storm/daemon/logviewer.clj |  532 +-
 .../src/clj/backtype/storm/daemon/nimbus.clj    |  194 +-
 .../clj/backtype/storm/daemon/supervisor.clj    |  169 +-
 .../src/clj/backtype/storm/daemon/task.clj      |    4 +-
 .../src/clj/backtype/storm/daemon/worker.clj    |   35 +-
 .../src/clj/backtype/storm/local_state.clj      |   24 +
 .../src/clj/backtype/storm/messaging/local.clj  |   34 +-
 storm-core/src/clj/backtype/storm/stats.clj     |    9 +
 storm-core/src/clj/backtype/storm/timer.clj     |   20 +-
 storm-core/src/clj/backtype/storm/ui/core.clj   |  246 +-
 .../src/clj/backtype/storm/ui/helpers.clj       |   16 +-
 storm-core/src/clj/backtype/storm/util.clj      |   12 +-
 .../src/dev/logviewer-search-context-tests.log  |    1 +
 .../dev/logviewer-search-context-tests.log.gz   |  Bin 0 -> 72 bytes
 storm-core/src/dev/small-worker.log             |    1 +
 storm-core/src/dev/test-3072.log                |    3 +
 storm-core/src/dev/test-worker.log              |  380 +
 storm-core/src/jvm/backtype/storm/Config.java   |   64 +-
 .../src/jvm/backtype/storm/LogWriter.java       |    2 +-
 .../src/jvm/backtype/storm/StormSubmitter.java  |   24 +-
 .../storm/codedistributor/ICodeDistributor.java |   17 +
 .../LocalFileSystemCodeDistributor.java         |   17 +
 .../storm/coordination/BatchBoltExecutor.java   |    4 +-
 .../storm/coordination/CoordinatedBolt.java     |   14 +-
 .../storm/drpc/DRPCInvocationsClient.java       |    5 +-
 .../src/jvm/backtype/storm/drpc/DRPCSpout.java  |   10 +-
 .../src/jvm/backtype/storm/drpc/JoinResult.java |    8 +-
 .../storm/generated/AlreadyAliveException.java  |    7 +-
 .../storm/generated/ClusterSummary.java         |  111 +-
 .../backtype/storm/generated/LSTopoHistory.java |  805 ++
 .../storm/generated/LSTopoHistoryList.java      |  460 +
 .../jvm/backtype/storm/generated/Nimbus.java    | 9689 ++++++++++++------
 .../backtype/storm/generated/ProfileAction.java |   74 +
 .../storm/generated/ProfileRequest.java         |  631 ++
 .../storm/generated/TopologyHistoryInfo.java    |  461 +
 .../src/jvm/backtype/storm/grouping/Load.java   |   77 +
 .../grouping/LoadAwareCustomStreamGrouping.java |   24 +
 .../grouping/LoadAwareShuffleGrouping.java      |   76 +
 .../backtype/storm/grouping/LoadMapping.java    |   64 +
 .../storm/grouping/PartialKeyGrouping.java      |    5 +-
 .../storm/grouping/ShuffleGrouping.java         |   65 +
 .../storm/messaging/ConnectionWithStatus.java   |    4 +-
 .../backtype/storm/messaging/IConnection.java   |   16 +
 .../jvm/backtype/storm/messaging/IContext.java  |    2 +-
 .../storm/messaging/TransportFactory.java       |    2 +-
 .../backtype/storm/messaging/netty/Client.java  |   35 +-
 .../backtype/storm/messaging/netty/Context.java |    8 +-
 .../storm/messaging/netty/ControlMessage.java   |    5 +-
 .../storm/messaging/netty/MessageBatch.java     |   14 +-
 .../storm/messaging/netty/MessageDecoder.java   |    7 +-
 .../storm/messaging/netty/SaslMessageToken.java |    3 +-
 .../storm/messaging/netty/SaslNettyClient.java  |    6 +-
 .../messaging/netty/SaslStormClientHandler.java |    4 +-
 .../messaging/netty/SaslStormServerHandler.java |   11 +-
 .../storm/messaging/netty/SaslUtils.java        |   11 +-
 .../backtype/storm/messaging/netty/Server.java  |   51 +-
 .../messaging/netty/StormClientHandler.java     |   26 +-
 .../backtype/storm/metric/EventLoggerBolt.java  |   25 +-
 .../storm/metric/FileBasedEventLogger.java      |   19 +-
 .../metric/HttpForwardingMetricsConsumer.java   |    1 -
 .../metric/HttpForwardingMetricsServer.java     |    1 -
 .../jvm/backtype/storm/metric/IEventLogger.java |   25 +-
 .../storm/metric/LoggingMetricsConsumer.java    |    1 -
 .../storm/metric/MetricsConsumerBolt.java       |    1 -
 .../jvm/backtype/storm/metric/SystemBolt.java   |    5 -
 .../backtype/storm/metric/api/CountMetric.java  |    2 -
 .../backtype/storm/metric/api/MeanReducer.java  |    4 +-
 .../storm/metric/api/MultiCountMetric.java      |    2 +-
 .../storm/metric/api/MultiReducedMetric.java    |    2 +-
 .../storm/metric/api/rpc/CountShellMetric.java  |    3 +-
 .../AbstractDNSToSwitchMapping.java             |    2 +-
 .../DefaultRackDNSToSwitchMapping.java          |   21 +-
 .../backtype/storm/nimbus/ILeaderElector.java   |   23 +-
 .../jvm/backtype/storm/nimbus/NimbusInfo.java   |   21 +-
 .../jvm/backtype/storm/scheduler/Cluster.java   |   69 +-
 .../scheduler/SchedulerAssignmentImpl.java      |   15 +-
 .../storm/scheduler/SupervisorDetails.java      |    6 +-
 .../backtype/storm/scheduler/Topologies.java    |   12 +-
 .../storm/scheduler/TopologyDetails.java        |   30 +-
 .../scheduler/multitenant/DefaultPool.java      |   22 +-
 .../storm/scheduler/multitenant/FreePool.java   |    6 +-
 .../scheduler/multitenant/IsolatedPool.java     |   32 +-
 .../multitenant/MultitenantScheduler.java       |    6 +-
 .../storm/scheduler/multitenant/Node.java       |   17 +-
 .../storm/scheduler/multitenant/NodePool.java   |   16 +-
 .../strategies/ResourceAwareStrategy.java       |   69 +-
 .../backtype/storm/security/auth/AuthUtils.java |   27 +-
 .../auth/DefaultHttpCredentialsPlugin.java      |    6 +-
 .../security/auth/DefaultPrincipalToLocal.java  |    1 -
 .../storm/security/auth/IAuthorizer.java        |    4 +-
 .../security/auth/ICredentialsRenewer.java      |    3 +-
 .../security/auth/IHttpCredentialsPlugin.java   |    2 -
 .../storm/security/auth/IPrincipalToLocal.java  |    2 +-
 .../storm/security/auth/ITransportPlugin.java   |    4 -
 .../security/auth/KerberosPrincipalToLocal.java |    2 +-
 .../storm/security/auth/ReqContext.java         |   11 +-
 .../security/auth/SaslTransportPlugin.java      |   12 +-
 .../security/auth/ShellBasedGroupsMapping.java  |   10 +-
 .../security/auth/SimpleTransportPlugin.java    |    6 +-
 .../security/auth/SingleUserPrincipal.java      |    5 +-
 .../storm/security/auth/TBackoffConnect.java    |    1 -
 .../storm/security/auth/ThriftClient.java       |   10 +-
 .../storm/security/auth/ThriftServer.java       |    6 +-
 .../auth/authorizer/DRPCAuthorizerBase.java     |    2 +-
 .../authorizer/DRPCSimpleACLAuthorizer.java     |   19 +-
 .../auth/authorizer/DenyAuthorizer.java         |   11 +-
 .../authorizer/ImpersonationAuthorizer.java     |   17 +-
 .../auth/authorizer/NoopAuthorizer.java         |    7 +-
 .../auth/authorizer/SimpleACLAuthorizer.java    |   26 +-
 .../authorizer/SimpleWhitelistAuthorizer.java   |   11 +-
 .../auth/digest/ClientCallbackHandler.java      |    2 -
 .../auth/digest/DigestSaslTransportPlugin.java  |    2 -
 .../auth/digest/ServerCallbackHandler.java      |    5 +-
 .../storm/security/auth/kerberos/AutoTGT.java   |   10 +-
 .../security/auth/kerberos/NoOpTTrasport.java   |   20 +-
 .../serialization/BlowfishTupleSerializer.java  |    6 +-
 .../GzipThriftSerializationDelegate.java        |    1 -
 .../storm/serialization/ITupleDeserializer.java |    1 -
 .../serialization/KryoTupleDeserializer.java    |    3 -
 .../serialization/KryoValuesDeserializer.java   |    3 +-
 .../serialization/SerializationFactory.java     |   23 +-
 .../jvm/backtype/storm/spout/ShellSpout.java    |    4 +-
 .../storm/task/GeneralTopologyContext.java      |   15 +-
 .../src/jvm/backtype/storm/task/ShellBolt.java  |   42 +-
 .../backtype/storm/task/TopologyContext.java    |    9 +-
 .../AlternateRackDNSToSwitchMapping.java        |   17 +
 .../storm/testing/MemoryTransactionalSpout.java |    9 +-
 .../testing/OpaqueMemoryTransactionalSpout.java |    8 +-
 .../storm/testing/TupleCaptureBolt.java         |    4 +-
 .../storm/topology/BasicBoltExecutor.java       |    2 +-
 .../storm/topology/OutputFieldsGetter.java      |    2 +-
 .../storm/topology/TopologyBuilder.java         |   16 +-
 .../storm/topology/base/BaseBatchBolt.java      |    1 -
 .../topology/base/BaseTransactionalSpout.java   |    1 -
 .../TransactionalSpoutBatchExecutor.java        |    4 +-
 .../TransactionalSpoutCoordinator.java          |    2 +-
 ...uePartitionedTransactionalSpoutExecutor.java |   13 +-
 .../PartitionedTransactionalSpoutExecutor.java  |    2 +-
 .../src/jvm/backtype/storm/tuple/Fields.java    |   10 +-
 .../src/jvm/backtype/storm/tuple/MessageId.java |   10 +-
 .../src/jvm/backtype/storm/tuple/Tuple.java     |    1 -
 .../src/jvm/backtype/storm/tuple/TupleImpl.java |   10 +-
 .../jvm/backtype/storm/utils/DRPCClient.java    |    1 -
 .../backtype/storm/utils/DisruptorQueue.java    |    5 +-
 .../backtype/storm/utils/InprocMessaging.java   |    4 +-
 .../storm/utils/KeyedRoundRobinQueue.java       |    6 +-
 .../jvm/backtype/storm/utils/ListDelegate.java  |    6 +-
 .../jvm/backtype/storm/utils/LocalState.java    |   22 +-
 .../src/jvm/backtype/storm/utils/Monitor.java   |    3 +-
 .../jvm/backtype/storm/utils/NimbusClient.java  |   10 +-
 .../storm/utils/RegisteredGlobalState.java      |    6 +-
 .../jvm/backtype/storm/utils/RotatingMap.java   |    2 +-
 .../backtype/storm/utils/ServiceRegistry.java   |    2 +-
 .../jvm/backtype/storm/utils/ShellProcess.java  |    6 +-
 .../jvm/backtype/storm/utils/ShellUtils.java    |    2 +-
 .../StormBoundedExponentialBackoffRetry.java    |    3 +-
 .../src/jvm/backtype/storm/utils/Time.java      |    4 +-
 .../backtype/storm/utils/TransferDrainer.java   |   17 +-
 .../src/jvm/backtype/storm/utils/Utils.java     |   19 +-
 .../jvm/backtype/storm/utils/VersionInfo.java   |    2 +-
 .../storm/validation/ConfigValidation.java      |  113 +-
 .../src/jvm/storm/trident/TridentTopology.java  |   87 +-
 .../trident/drpc/ReturnResultsReducer.java      |    4 +-
 .../fluent/ChainedAggregatorDeclarer.java       |    8 +-
 .../jvm/storm/trident/graph/GraphGrouper.java   |   13 +-
 .../src/jvm/storm/trident/graph/Group.java      |   23 +-
 .../trident/operation/builtin/SnapshotGet.java  |    4 +-
 .../operation/builtin/TupleCollectionGet.java   |    6 +-
 .../storm/trident/partition/GlobalGrouping.java |    5 +-
 .../trident/partition/IdentityGrouping.java     |    8 +-
 .../src/jvm/storm/trident/planner/Node.java     |    5 +-
 .../storm/trident/planner/PartitionNode.java    |    2 -
 .../storm/trident/planner/SubtopologyBolt.java  |   19 +-
 .../processor/MultiReducerProcessor.java        |    2 +-
 .../OpaquePartitionedTridentSpoutExecutor.java  |   10 +-
 .../trident/spout/TridentSpoutExecutor.java     |    4 +-
 .../trident/topology/TridentBoltExecutor.java   |    6 +-
 .../topology/TridentTopologyBuilder.java        |   23 +-
 .../storm/trident/tuple/TridentTupleView.java   |   18 +-
 .../src/native/worker-launcher/impl/main.c      |   10 +
 .../worker-launcher/impl/worker-launcher.c      |   49 +-
 .../worker-launcher/impl/worker-launcher.h      |    2 +
 storm-core/src/py/storm/Nimbus-remote           |   21 +
 storm-core/src/py/storm/Nimbus.py               |  595 ++
 storm-core/src/py/storm/ttypes.py               |  433 +-
 storm-core/src/storm.thrift                     |   38 +
 storm-core/src/ui/public/component.html         |  167 +-
 .../src/ui/public/deep_search_result.html       |  155 +
 storm-core/src/ui/public/images/search.png      |  Bin 0 -> 2354 bytes
 .../src/ui/public/js/typeahead.jquery.min.js    |    7 +
 storm-core/src/ui/public/logviewer_search.html  |   65 +
 storm-core/src/ui/public/search_result.html     |  100 +
 .../templates/component-page-template.html      |   53 +
 .../deep-search-result-page-template.html       |   66 +
 .../logviewer-search-page-template.html         |   44 +
 .../templates/search-result-page-template.html  |   60 +
 .../templates/topology-page-template.html       |   11 +
 .../src/ui/public/templates/user-template.html  |   17 +-
 storm-core/src/ui/public/topology.html          |    8 +-
 .../test/clj/backtype/storm/grouping_test.clj   |   90 +-
 .../clj/backtype/storm/integration_test.clj     |    4 +-
 .../test/clj/backtype/storm/logviewer_test.clj  |  418 +
 .../storm/messaging/netty_integration_test.clj  |    3 +-
 .../storm/messaging/netty_unit_test.clj         |  217 +-
 .../test/clj/backtype/storm/messaging_test.clj  |    3 +-
 .../test/clj/backtype/storm/nimbus_test.clj     |    3 +-
 .../storm/pacemaker_state_factory_test.clj      |   15 +
 .../clj/org/apache/storm/pacemaker_test.clj     |   15 +
 .../storm/utils/DisruptorQueueTest.java         |    9 +-
 storm-dist/binary/src/main/assembly/binary.xml  |    5 -
 293 files changed, 16992 insertions(+), 5462 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/storm/blob/62d725a8/bin/storm.py
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/storm/blob/62d725a8/conf/defaults.yaml
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/storm/blob/62d725a8/pom.xml
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/storm/blob/62d725a8/storm-core/pom.xml
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/storm/blob/62d725a8/storm-core/src/clj/backtype/storm/cluster.clj
----------------------------------------------------------------------
diff --cc storm-core/src/clj/backtype/storm/cluster.clj
index b1eb0d0,cf1ece6..35aa8c8
--- a/storm-core/src/clj/backtype/storm/cluster.clj
+++ b/storm-core/src/clj/backtype/storm/cluster.clj
@@@ -17,12 -17,12 +17,12 @@@
  (ns backtype.storm.cluster
    (:import [org.apache.zookeeper.data Stat ACL Id]
             [backtype.storm.generated SupervisorInfo Assignment StormBase ClusterWorkerHeartbeat ErrorInfo Credentials NimbusSummary
-             LogConfig]
+             LogConfig ProfileAction ProfileRequest NodeInfo]
             [java.io Serializable])
    (:import [org.apache.zookeeper KeeperException KeeperException$NoNodeException ZooDefs ZooDefs$Ids ZooDefs$Perms])
 -  (:import [org.apache.curator.framework.state ConnectionStateListener ConnectionState])
    (:import [org.apache.curator.framework CuratorFramework])
    (:import [backtype.storm.utils Utils])
 +  (:import [backtype.storm.cluster ClusterState ClusterStateContext ClusterStateListener ConnectionState])
    (:import [java.security MessageDigest])
    (:import [org.apache.zookeeper.server.auth DigestAuthenticationProvider])
    (:import [backtype.storm.nimbus NimbusInfo])
@@@ -376,6 -509,48 +388,48 @@@
          [this storm-id log-config]
          (.set_data cluster-state (log-config-path storm-id) (Utils/serialize log-config) acls))
  
+       (set-worker-profile-request
+         [this storm-id profile-request]
+         (let [request-type (.get_action profile-request)
+               host (.get_node (.get_nodeInfo profile-request))
+               port (first (.get_port (.get_nodeInfo profile-request)))]
+           (.set_data cluster-state
+                      (profiler-config-path storm-id host port request-type)
+                      (Utils/serialize profile-request)
+                      acls)))
+ 
+       (get-topology-profile-requests
+         [this storm-id thrift?]
+         (let [path (profiler-config-path storm-id)
 -              requests (if (exists-node? cluster-state path false)
++              requests (if (.node_exists cluster-state path false)
+                          (dofor [c (.get_children cluster-state path false)]
+                                 (let [raw (.get_data cluster-state (str path "/" c) false)
+                                       request (maybe-deserialize raw ProfileRequest)]
+                                       (if thrift?
+                                         request
+                                         (clojurify-profile-request request)))))]
+           requests))
+ 
+       (delete-topology-profile-requests
+         [this storm-id profile-request]
+         (let [profile-request-inst (thriftify-profile-request profile-request)
+               action (:action profile-request)
+               host (:host profile-request)
+               port (:port profile-request)]
+           (.delete_node cluster-state
+            (profiler-config-path storm-id host port action))))
+           
+       (get-worker-profile-requests
+         [this storm-id node-info thrift?]
+         (let [host (:host node-info)
+               port (:port node-info)
+               profile-requests (get-topology-profile-requests this storm-id thrift?)]
+           (if thrift?
+             (filter #(and (= host (.get_node (.get_nodeInfo %))) (= port (first (.get_port (.get_nodeInfo  %)))))
+                     profile-requests)
+             (filter #(and (= host (:host %)) (= port (:port %)))
+                     profile-requests))))
+       
        (worker-heartbeat!
          [this storm-id node port info]
          (let [thrift-worker-hb (thriftify-zk-worker-hb info)]
@@@ -482,10 -657,11 +536,11 @@@
  
        (remove-storm!
          [this storm-id]
 -        (delete-node cluster-state (assignment-path storm-id))
 -        (delete-node cluster-state (code-distributor-path storm-id))
 -        (delete-node cluster-state (credentials-path storm-id))
 -        (delete-node cluster-state (log-config-path storm-id))
 -        (delete-node cluster-state (profiler-config-path storm-id))
 +        (.delete_node cluster-state (assignment-path storm-id))
 +        (.delete_node cluster-state (code-distributor-path storm-id))
 +        (.delete_node cluster-state (credentials-path storm-id))
 +        (.delete_node cluster-state (log-config-path storm-id))
++        (.delete_node cluster-state (profiler-config-path storm-id))
          (remove-storm-base! this storm-id))
  
        (set-credentials!

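The profiler functions added above persist each ProfileRequest under a per-worker
node keyed by topology id, host, port, and action, serialized with Utils/serialize.
A minimal Java sketch of the same write path, assuming a hypothetical
profilerConfigPath helper (the canonical layout is the profiler-config-path
function in cluster.clj):

import java.util.List;
import org.apache.zookeeper.data.ACL;
import backtype.storm.cluster.ClusterState;
import backtype.storm.generated.ProfileRequest;
import backtype.storm.utils.Utils;

final class ProfileRequestStoreSketch {
    // Hypothetical path builder; the real layout is defined in cluster.clj.
    static String profilerConfigPath(String stormId, String host, long port, Object action) {
        return "/profilerconfigs/" + stormId + "/" + host + "_" + port + "_" + action;
    }

    // Mirrors set-worker-profile-request: pull host/port out of the request's
    // NodeInfo and store the serialized request under the per-worker node.
    static void setWorkerProfileRequest(ClusterState state, List<ACL> acls,
                                        String stormId, ProfileRequest request) {
        String host = request.get_nodeInfo().get_node();
        long port = request.get_nodeInfo().get_port().iterator().next();
        state.set_data(profilerConfigPath(stormId, host, port, request.get_action()),
                       Utils.serialize(request), acls);
    }
}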
http://git-wip-us.apache.org/repos/asf/storm/blob/62d725a8/storm-core/src/clj/backtype/storm/config.clj
----------------------------------------------------------------------
diff --cc storm-core/src/clj/backtype/storm/config.clj
index 89b4a30,1a5cb51..0700a37
--- a/storm-core/src/clj/backtype/storm/config.clj
+++ b/storm-core/src/clj/backtype/storm/config.clj
@@@ -265,7 -273,16 +273,22 @@@
    [conf id]
    (LocalState. (worker-heartbeats-root conf id)))
  
 +(defn override-login-config-with-system-property [conf]
 +  (if-let [login_conf_file (System/getProperty "java.security.auth.login.config")]
 +    (assoc conf "java.security.auth.login.config" login_conf_file)
 +    conf))
++
+ (defn get-topo-logs-users
+   [topology-conf]
+   (sort (distinct (remove nil?
+                     (concat
+                       (topology-conf LOGS-USERS)
+                       (topology-conf TOPOLOGY-USERS))))))
+ 
+ (defn get-topo-logs-groups
+   [topology-conf]
+   (sort (distinct (remove nil?
+                     (concat
+                       (topology-conf LOGS-GROUPS)
+                       (topology-conf TOPOLOGY-GROUPS))))))
++

http://git-wip-us.apache.org/repos/asf/storm/blob/62d725a8/storm-core/src/clj/backtype/storm/daemon/worker.clj
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/storm/blob/62d725a8/storm-core/src/clj/backtype/storm/util.clj
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/storm/blob/62d725a8/storm-core/src/jvm/backtype/storm/Config.java
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/storm/blob/62d725a8/storm-core/src/jvm/backtype/storm/messaging/netty/Client.java
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/storm/blob/62d725a8/storm-core/src/jvm/backtype/storm/messaging/netty/ControlMessage.java
----------------------------------------------------------------------
diff --cc storm-core/src/jvm/backtype/storm/messaging/netty/ControlMessage.java
index bffd953,e4507f5..769d010
--- a/storm-core/src/jvm/backtype/storm/messaging/netty/ControlMessage.java
+++ b/storm-core/src/jvm/backtype/storm/messaging/netty/ControlMessage.java
@@@ -39,11 -39,10 +39,10 @@@ public enum ControlMessage implements I
      }
  
      /**
-      * Return a control message per an encoded status code
-      * @param encoded
-      * @return
+      * @param encoded status code
+      * @return a control message per an encoded status code
       */
 -    static ControlMessage mkMessage(short encoded) {
 +    public static ControlMessage mkMessage(short encoded) {
          for(ControlMessage cm: ControlMessage.values()) {
            if(encoded == cm.code) return cm;
          }
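
The javadoc rework above documents mkMessage, which scans the enum's values for
one whose status code matches. A small usage sketch, assuming Netty 3's
ChannelBuffer as the source of the encoded short (a null result when no
constant matches is an assumption based on the loop falling through):

import org.jboss.netty.buffer.ChannelBuffer;

// Assumes this sketch lives in backtype.storm.messaging.netty, next to ControlMessage.
final class ControlMessageDecodeSketch {
    static ControlMessage decode(ChannelBuffer buffer) {
        short encoded = buffer.readShort();        // status code as written by the encoder
        return ControlMessage.mkMessage(encoded);  // assumed null when no code matches
    }
}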

http://git-wip-us.apache.org/repos/asf/storm/blob/62d725a8/storm-core/src/jvm/backtype/storm/messaging/netty/MessageDecoder.java
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/storm/blob/62d725a8/storm-core/src/jvm/backtype/storm/messaging/netty/SaslMessageToken.java
----------------------------------------------------------------------
diff --cc storm-core/src/jvm/backtype/storm/messaging/netty/SaslMessageToken.java
index 2fe5c2d,d7a86d1..70e7089
--- a/storm-core/src/jvm/backtype/storm/messaging/netty/SaslMessageToken.java
+++ b/storm-core/src/jvm/backtype/storm/messaging/netty/SaslMessageToken.java
@@@ -27,14 -24,7 +27,13 @@@ import org.slf4j.LoggerFactory
  /**
   * Send and receive SASL tokens.
   */
- 
 -public class SaslMessageToken {
 +public class SaslMessageToken implements INettySerializable {
 +    public static final short IDENTIFIER = -500;
 +
 +    /** Class logger */
 +    private static final Logger LOG = LoggerFactory
 +            .getLogger(SaslMessageToken.class);
 +
      /** Used for client or server's token to send or receive from each other. */
      private byte[] token;
  
@@@ -92,9 -82,8 +91,9 @@@
          if (token != null)
              payload_len = token.length;
  
- 
 -        bout.writeShort(identifier);
 -        bout.writeInt(payload_len);
 +        bout.writeShort(IDENTIFIER);
 +        bout.writeInt((int) payload_len);
++
          if (payload_len > 0) {
              bout.write(token);
          }
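
The write path above frames a SASL token as a short identifier (IDENTIFIER, -500),
an int payload length, and then the raw token bytes. A sketch of the matching
read side under that framing, assuming a Netty 3 ChannelBuffer (the real
decoding lives in MessageDecoder):

import org.jboss.netty.buffer.ChannelBuffer;

final class SaslTokenReadSketch {
    static byte[] readSaslToken(ChannelBuffer in) {
        short identifier = in.readShort();   // expected to be SaslMessageToken.IDENTIFIER (-500)
        if (identifier != -500) {
            throw new IllegalStateException("not a SASL message token: " + identifier);
        }
        int payloadLen = in.readInt();       // length of the token that follows
        byte[] token = new byte[payloadLen];
        if (payloadLen > 0) {
            in.readBytes(token);
        }
        return token;
    }
}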

http://git-wip-us.apache.org/repos/asf/storm/blob/62d725a8/storm-core/src/jvm/backtype/storm/messaging/netty/SaslNettyClient.java
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/storm/blob/62d725a8/storm-core/src/jvm/backtype/storm/messaging/netty/SaslStormServerHandler.java
----------------------------------------------------------------------
diff --cc storm-core/src/jvm/backtype/storm/messaging/netty/SaslStormServerHandler.java
index 15fe9fb,5ce90a3..2836e80
--- a/storm-core/src/jvm/backtype/storm/messaging/netty/SaslStormServerHandler.java
+++ b/storm-core/src/jvm/backtype/storm/messaging/netty/SaslStormServerHandler.java
@@@ -121,9 -121,7 +119,8 @@@ public class SaslStormServerHandler ext
                  LOG.debug("Removing SaslServerHandler from pipeline since SASL "
                          + "authentication is complete.");
                  ctx.getPipeline().remove(this);
 +                server.authenticated(channel);
              }
-             return;
          } else {
              // Client should not be sending other-than-SASL messages before
              // SaslServerHandler has removed itself from the pipeline. Such

http://git-wip-us.apache.org/repos/asf/storm/blob/62d725a8/storm-core/src/jvm/backtype/storm/messaging/netty/SaslUtils.java
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/storm/blob/62d725a8/storm-core/src/jvm/backtype/storm/messaging/netty/Server.java
----------------------------------------------------------------------
diff --cc storm-core/src/jvm/backtype/storm/messaging/netty/Server.java
index 719c84c,32c2bd7..5f23064
--- a/storm-core/src/jvm/backtype/storm/messaging/netty/Server.java
+++ b/storm-core/src/jvm/backtype/storm/messaging/netty/Server.java
@@@ -17,20 -17,6 +17,21 @@@
   */
  package backtype.storm.messaging.netty;
  
 +import backtype.storm.Config;
++import backtype.storm.grouping.Load;
 +import backtype.storm.messaging.TaskMessage;
 +import backtype.storm.metric.api.IStatefulObject;
 +import backtype.storm.serialization.KryoValuesSerializer;
 +import backtype.storm.utils.Utils;
 +import org.jboss.netty.bootstrap.ServerBootstrap;
 +import org.jboss.netty.channel.Channel;
 +import org.jboss.netty.channel.ChannelFactory;
 +import org.jboss.netty.channel.group.ChannelGroup;
 +import org.jboss.netty.channel.group.DefaultChannelGroup;
 +import org.jboss.netty.channel.socket.nio.NioServerSocketChannelFactory;
 +import org.slf4j.Logger;
 +import org.slf4j.LoggerFactory;
 +
  import java.net.InetSocketAddress;
  import java.util.ArrayList;
  import java.util.Arrays;
@@@ -46,7 -32,23 +47,8 @@@ import java.util.concurrent.LinkedBlock
  import java.util.concurrent.ThreadFactory;
  import java.io.IOException;
  
 -import org.jboss.netty.bootstrap.ServerBootstrap;
 -import org.jboss.netty.channel.Channel;
 -import org.jboss.netty.channel.ChannelFactory;
 -import org.jboss.netty.channel.group.ChannelGroup;
 -import org.jboss.netty.channel.group.DefaultChannelGroup;
 -import org.jboss.netty.channel.socket.nio.NioServerSocketChannelFactory;
 -
 -import org.slf4j.Logger;
 -import org.slf4j.LoggerFactory;
+ 
 -import backtype.storm.Config;
 -import backtype.storm.grouping.Load;
  import backtype.storm.messaging.ConnectionWithStatus;
 -import backtype.storm.messaging.TaskMessage;
 -import backtype.storm.metric.api.IStatefulObject;
 -import backtype.storm.serialization.KryoValuesSerializer;
 -import backtype.storm.utils.Utils;
  
  class Server extends ConnectionWithStatus implements IStatefulObject, ISaslServer {
  
@@@ -54,10 -56,10 +56,10 @@@
      @SuppressWarnings("rawtypes")
      Map storm_conf;
      int port;
-     private final ConcurrentHashMap<String, AtomicInteger> messagesEnqueued = new ConcurrentHashMap<String, AtomicInteger>();
+     private final ConcurrentHashMap<String, AtomicInteger> messagesEnqueued = new ConcurrentHashMap<>();
      private final AtomicInteger messagesDequeued = new AtomicInteger(0);
      private final AtomicInteger[] pendingMessages;
 -    
 +
      // Create multiple queues for incoming messages. The size equals the number of receiver threads.
     // Messages sent to the same task are stored in the same queue, preserving per-task message order.
      private LinkedBlockingQueue<ArrayList<TaskMessage>>[] message_queue;
@@@ -79,18 -81,18 +81,18 @@@
          this.storm_conf = storm_conf;
          this.port = port;
          _ser = new KryoValuesSerializer(storm_conf);
 -        
 +
          queueCount = Utils.getInt(storm_conf.get(Config.WORKER_RECEIVER_THREAD_COUNT), 1);
          roundRobinQueueId = 0;
-         taskToQueueId = new HashMap<Integer, Integer>();
+         taskToQueueId = new HashMap<>();
 -    
 +
          message_queue = new LinkedBlockingQueue[queueCount];
          pendingMessages = new AtomicInteger[queueCount];
          for (int i = 0; i < queueCount; i++) {
-             message_queue[i] = new LinkedBlockingQueue<ArrayList<TaskMessage>>();
+             message_queue[i] = new LinkedBlockingQueue<>();
              pendingMessages[i] = new AtomicInteger(0);
          }
 -        
 +
          // Configure the server.
          int buffer_size = Utils.getInt(storm_conf.get(Config.STORM_MESSAGING_NETTY_BUFFER_SIZE));
          int backlog = Utils.getInt(storm_conf.get(Config.STORM_MESSAGING_NETTY_SOCKET_BACKLOG), 500);
@@@ -237,18 -236,17 +236,17 @@@
          }
          return null;
      }
 -   
 +
      /**
       * register a newly created channel
-      * @param channel
+      * @param channel newly created channel
       */
      protected void addChannel(Channel channel) {
          allChannels.add(channel);
      }
 -    
 +
      /**
-      * close a channel
-      * @param channel
+      * @param channel channel to close
       */
      public void closeChannel(Channel channel) {
          channel.close().awaitUninterruptibly();
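
The comment in the first hunk above explains the receiver-queue design: one
queue per receiver thread, with every message for a given task landing in the
same queue so per-task ordering is preserved. A hedged reconstruction of that
assignment (the actual logic is in Server.java and may differ in detail):

import java.util.HashMap;
import java.util.Map;

final class TaskQueueAssignmentSketch {
    private final Map<Integer, Integer> taskToQueueId = new HashMap<>();
    private final int queueCount;
    private int roundRobinQueueId = 0;

    TaskQueueAssignmentSketch(int queueCount) {
        this.queueCount = queueCount;
    }

    // Pin each task to a single queue; new tasks are spread round-robin.
    int getQueueId(int task) {
        Integer queueId = taskToQueueId.get(task);
        if (queueId == null) {
            queueId = roundRobinQueueId;
            taskToQueueId.put(task, queueId);
            roundRobinQueueId = (roundRobinQueueId + 1) % queueCount;
        }
        return queueId;
    }
}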

http://git-wip-us.apache.org/repos/asf/storm/blob/62d725a8/storm-core/src/jvm/backtype/storm/messaging/netty/StormClientHandler.java
----------------------------------------------------------------------
diff --cc storm-core/src/jvm/backtype/storm/messaging/netty/StormClientHandler.java
index 2c8eae9,877b6d8..696a2fc
--- a/storm-core/src/jvm/backtype/storm/messaging/netty/StormClientHandler.java
+++ b/storm-core/src/jvm/backtype/storm/messaging/netty/StormClientHandler.java
@@@ -51,15 -51,31 +51,31 @@@ public class StormClientHandler extend
          //examine the response message from server
          Object message = event.getMessage();
          if (message instanceof ControlMessage) {
-           ControlMessage msg = (ControlMessage)message;
-           if (msg==ControlMessage.FAILURE_RESPONSE)
-               LOG.info("failure response:{}", msg);
- 
+             ControlMessage msg = (ControlMessage)message;
+             if (msg==ControlMessage.FAILURE_RESPONSE) {
+                 LOG.info("failure response:{}", msg);
+             }
+         } else if (message instanceof List) {
+             try {
+                 //This should be the metrics, and there should only be one of them
+                 List<TaskMessage> list = (List<TaskMessage>)message;
+                 if (list.size() < 1) throw new RuntimeException("Didn't see enough load metrics ("+client.getDstAddress()+") "+list);
+                 if (list.size() != 1) LOG.warn("Messages are not being delivered fast enough, got "+list.size()+" metrics messages at once("+client.getDstAddress()+")");
+                 TaskMessage tm = ((List<TaskMessage>)message).get(list.size() - 1);
+                 if (tm.task() != -1) throw new RuntimeException("Metrics messages are sent to the system task ("+client.getDstAddress()+") "+tm);
+                 List metrics = _des.deserialize(tm.message());
+                 if (metrics.size() < 1) throw new RuntimeException("No metrics data in the metrics message ("+client.getDstAddress()+") "+metrics);
+                 if (!(metrics.get(0) instanceof Map)) throw new RuntimeException("The metrics did not have a map in the first slot ("+client.getDstAddress()+") "+metrics);
+                 client.setLoadMetrics((Map<Integer, Double>)metrics.get(0));
+             } catch (IOException e) {
+                 throw new RuntimeException(e);
+             }
          } else {
-           throw new RuntimeException("Don't know how to handle a message of type "+message+" ("+client.getDstAddress()+")");
+             throw new RuntimeException("Don't know how to handle a message of type "
+                                        + message + " (" + client.getDstAddress() + ")");
          }
      }
 -
 +        
      @Override
      public void channelInterestChanged(ChannelHandlerContext ctx, ChannelStateEvent e) throws Exception {
          client.notifyInterestChanged(e.getChannel());
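
The new branch above unpacks a load-metrics message: a single TaskMessage
addressed to the system task (-1) whose payload deserializes to a list whose
first element is a Map of task id to load. A sketch of building such a message
on the sending side, assuming the server uses KryoValuesSerializer for this
payload (the load values are illustrative):

import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import backtype.storm.messaging.TaskMessage;
import backtype.storm.serialization.KryoValuesSerializer;

final class LoadMetricsMessageSketch {
    static TaskMessage build(KryoValuesSerializer ser, Map<Integer, Double> taskLoads) {
        List<Object> metrics = new ArrayList<>();
        metrics.add(taskLoads);                             // first slot: task id -> load map
        return new TaskMessage(-1, ser.serialize(metrics)); // -1 targets the system task
    }
}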

http://git-wip-us.apache.org/repos/asf/storm/blob/62d725a8/storm-core/src/jvm/backtype/storm/security/auth/AuthUtils.java
----------------------------------------------------------------------
diff --cc storm-core/src/jvm/backtype/storm/security/auth/AuthUtils.java
index cd79f4e,8062b4e..943199c
--- a/storm-core/src/jvm/backtype/storm/security/auth/AuthUtils.java
+++ b/storm-core/src/jvm/backtype/storm/security/auth/AuthUtils.java
@@@ -34,11 -32,7 +32,10 @@@ import java.net.URI
  import java.util.Collection;
  import java.util.Set;
  import java.util.HashSet;
 +import java.util.HashMap;
  import java.util.Map;
 +import java.util.SortedMap;
 +import java.util.TreeMap;
- import java.util.concurrent.ExecutorService;
  
  public class AuthUtils {
      private static final Logger LOG = LoggerFactory.getLogger(AuthUtils.class);
@@@ -77,39 -68,8 +74,39 @@@
      }
  
      /**
 +     * Pull the option keys out of one entry of a JAAS Configuration.
 +     * @param conf The config to pull the key/value pairs out of.
 +     * @param conf_entry The app configuration entry name to get the options from.
 +     * @return a sorted map of the entry's option keys to their values.
 +     */
 +    public static SortedMap<String, ?> PullConfig(Configuration conf,
 +                                            String conf_entry) throws IOException {
 +        if(conf == null) {
 +            return null;
 +        }
 +        AppConfigurationEntry configurationEntries[] = conf.getAppConfigurationEntry(conf_entry);
 +        if(configurationEntries == null) {
 +            String errorMessage = "Could not find a '" + conf_entry
 +                + "' entry in this configuration: Client cannot start.";
 +            throw new IOException(errorMessage);
 +        }
 +
 +        TreeMap<String, Object> results = new TreeMap<>();
 +        
 +
 +        for(AppConfigurationEntry entry: configurationEntries) {
 +            Map<String, ?> options = entry.getOptions();
 +            for(String key : options.keySet()) {
 +                results.put(key, options.get(key));
 +            }
 +        }
 +        return results;
 +    }
 +
 +    /**
       * Construct a principal to local plugin
-      * @param conf storm configuration
+      * @param storm_conf storm configuration
       * @return the plugin
       */
      public static IPrincipalToLocal GetPrincipalToLocalPlugin(Map storm_conf) {
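
A usage sketch for the new PullConfig above: read the options of one JAAS
app-configuration entry ("StormClient" is an illustrative entry name, not
something PullConfig requires):

import java.io.IOException;
import java.util.Map;
import java.util.SortedMap;
import javax.security.auth.login.Configuration;
import backtype.storm.security.auth.AuthUtils;

final class PullConfigUsageSketch {
    static void dumpEntryOptions() throws IOException {
        Configuration loginConf = Configuration.getConfiguration();
        SortedMap<String, ?> options = AuthUtils.PullConfig(loginConf, "StormClient");
        if (options != null) {                  // PullConfig returns null for a null conf
            for (Map.Entry<String, ?> e : options.entrySet()) {
                System.out.println(e.getKey() + " = " + e.getValue());
            }
        }
    }
}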

http://git-wip-us.apache.org/repos/asf/storm/blob/62d725a8/storm-core/src/jvm/backtype/storm/utils/Utils.java
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/storm/blob/62d725a8/storm-core/src/py/storm/ttypes.py
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/storm/blob/62d725a8/storm-core/src/storm.thrift
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/storm/blob/62d725a8/storm-core/test/clj/org/apache/storm/pacemaker_state_factory_test.clj
----------------------------------------------------------------------
diff --cc storm-core/test/clj/org/apache/storm/pacemaker_state_factory_test.clj
index 5024922,0000000..813ae84
mode 100644,000000..100644
--- a/storm-core/test/clj/org/apache/storm/pacemaker_state_factory_test.clj
+++ b/storm-core/test/clj/org/apache/storm/pacemaker_state_factory_test.clj
@@@ -1,135 -1,0 +1,150 @@@
++;; Licensed to the Apache Software Foundation (ASF) under one
++;; or more contributor license agreements.  See the NOTICE file
++;; distributed with this work for additional information
++;; regarding copyright ownership.  The ASF licenses this file
++;; to you under the Apache License, Version 2.0 (the
++;; "License"); you may not use this file except in compliance
++;; with the License.  You may obtain a copy of the License at
++;;
++;; http://www.apache.org/licenses/LICENSE-2.0
++;;
++;; Unless required by applicable law or agreed to in writing, software
++;; distributed under the License is distributed on an "AS IS" BASIS,
++;; WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
++;; See the License for the specific language governing permissions and
++;; limitations under the License.
 +(ns org.apache.storm.pacemaker-state-factory-test
 +  (:require [clojure.test :refer :all]
 +            [conjure.core :refer :all]
 +            [org.apache.storm.pacemaker [pacemaker-state-factory :as psf]])
 +  (:import [backtype.storm.generated
 +            HBExecutionException HBNodes HBRecords
 +            HBServerMessageType HBMessage HBMessageData HBPulse]
 +           [backtype.storm.cluster ClusterStateContext]
 +           [org.mockito Mockito Matchers]))
 +
 +(defn- string-to-bytes [string]
 +  (byte-array (map int string)))
 +
 +(defn- bytes-to-string [bytez]
 +  (apply str (map char bytez)))
 +
 +(defprotocol send-capture
 +  (send [this something])
 +  (check-captured [this]))
 +
 +(defn- make-send-capture [response]
 +  (let [captured (atom nil)]
 +    (reify send-capture
 +      (send [this something] (reset! captured something) response)
 +      (check-captured [this] @captured))))
 +
 +(defmacro with-mock-pacemaker-client-and-state [client state response & body]
 +  `(let [~client (make-send-capture ~response)]
 +     (stubbing [psf/makeZKState nil
 +                psf/makeClient ~client]
 +               (let [~state (psf/-mkState nil nil nil nil (ClusterStateContext.))]
 +                 ~@body))))
 +
 +
 +(deftest pacemaker_state_set_worker_hb
 +  (testing "set_worker_hb"
 +    (with-mock-pacemaker-client-and-state
 +      client state
 +      (HBMessage. HBServerMessageType/SEND_PULSE_RESPONSE nil)
 +
 +      (.set_worker_hb state "/foo" (string-to-bytes "data") nil)
 +      (let [sent (.check-captured client)
 +            pulse (.get_pulse (.get_data sent))]
 +        (is (= (.get_type sent) HBServerMessageType/SEND_PULSE))
 +        (is (= (.get_id pulse) "/foo"))
 +        (is (= (bytes-to-string (.get_details pulse)) "data")))))
 +
 +  (testing "set_worker_hb"
 +    (with-mock-pacemaker-client-and-state
 +      client state
 +      (HBMessage. HBServerMessageType/SEND_PULSE nil)
 +
 +      (is (thrown? HBExecutionException      
 +                   (.set_worker_hb state "/foo" (string-to-bytes "data") nil))))))
 +
 +      
 +
 +(deftest pacemaker_state_delete_worker_hb
 +  (testing "delete_worker_hb"
 +    (with-mock-pacemaker-client-and-state
 +      client state
 +      (HBMessage. HBServerMessageType/DELETE_PATH_RESPONSE nil)
 +
 +      (.delete_worker_hb state "/foo/bar")
 +      (let [sent (.check-captured client)]
 +        (is (= (.get_type sent) HBServerMessageType/DELETE_PATH))
 +        (is (= (.get_path (.get_data sent)) "/foo/bar")))))
 +
 +    (testing "delete_worker_hb"
 +      (with-mock-pacemaker-client-and-state
 +        client state
 +        (HBMessage. HBServerMessageType/DELETE_PATH nil)
 +        
 +        (is (thrown? HBExecutionException
 +                     (.delete_worker_hb state "/foo/bar"))))))
 +
 +(deftest pacemaker_state_get_worker_hb
 +  (testing "get_worker_hb"
 +    (with-mock-pacemaker-client-and-state
 +      client state
 +      (HBMessage. HBServerMessageType/GET_PULSE_RESPONSE
 +                (HBMessageData/pulse
 +                 (doto (HBPulse.)
 +                   (.set_id "/foo")
 +                   (.set_details (string-to-bytes "some data")))))
 +
 +      (.get_worker_hb state "/foo" false)
 +      (let [sent (.check-captured client)]
 +        (is (= (.get_type sent) HBServerMessageType/GET_PULSE))
 +        (is (= (.get_path (.get_data sent)) "/foo")))))
 +
 +  (testing "get_worker_hb - fail (bad response)"
 +    (with-mock-pacemaker-client-and-state
 +      client state
 +      (HBMessage. HBServerMessageType/GET_PULSE nil)
 +      
 +      (is (thrown? HBExecutionException
 +                   (.get_worker_hb state "/foo" false)))))
 +  
 +  (testing "get_worker_hb - fail (bad data)"
 +    (with-mock-pacemaker-client-and-state
 +      client state
 +      (HBMessage. HBServerMessageType/GET_PULSE_RESPONSE nil)
 +      
 +      (is (thrown? HBExecutionException
 +                   (.get_worker_hb state "/foo" false))))))
 +
 +(deftest pacemaker_state_get_worker_hb_children
 +  (testing "get_worker_hb_children"
 +    (with-mock-pacemaker-client-and-state
 +      client state
 +      (HBMessage. HBServerMessageType/GET_ALL_NODES_FOR_PATH_RESPONSE
 +                (HBMessageData/nodes
 +                 (HBNodes. [])))
 +
 +      (.get_worker_hb_children state "/foo" false)
 +      (let [sent (.check-captured client)]
 +        (is (= (.get_type sent) HBServerMessageType/GET_ALL_NODES_FOR_PATH))
 +        (is (= (.get_path (.get_data sent)) "/foo")))))
 +
 +  (testing "get_worker_hb_children - fail (bad response)"
 +    (with-mock-pacemaker-client-and-state
 +      client state
 +      (HBMessage. HBServerMessageType/DELETE_PATH nil)
 +
 +      (is (thrown? HBExecutionException
 +                   (.get_worker_hb_children state "/foo" false)))))
 +
 +    (testing "get_worker_hb_children - fail (bad data)"
 +    (with-mock-pacemaker-client-and-state
 +      client state
 +      (HBMessage. HBServerMessageType/GET_ALL_NODES_FOR_PATH_RESPONSE nil)
 +      
 +      (is (thrown? HBExecutionException
 +                   (.get_worker_hb_children state "/foo" false))))))
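
These tests pin down the request/response contract between the pacemaker-backed
ClusterState and the server: each operation sends an HBMessage of a request type
and requires the matching *_RESPONSE type back, otherwise it raises
HBExecutionException. A hedged Java sketch of one round trip (the send method
and the exception's string constructor are assumptions based on the stubs above):

import backtype.storm.generated.*;
import org.apache.storm.pacemaker.PacemakerClient;

final class PacemakerRoundTripSketch {
    static void sendPulse(PacemakerClient client) throws HBExecutionException {
        HBPulse pulse = new HBPulse();
        pulse.set_id("/foo");
        pulse.set_details("data".getBytes());

        HBMessage request = new HBMessage(HBServerMessageType.SEND_PULSE,
                                          HBMessageData.pulse(pulse));
        HBMessage response = client.send(request);
        if (response.get_type() != HBServerMessageType.SEND_PULSE_RESPONSE) {
            throw new HBExecutionException("expected SEND_PULSE_RESPONSE, got "
                                           + response.get_type());
        }
    }
}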

http://git-wip-us.apache.org/repos/asf/storm/blob/62d725a8/storm-core/test/clj/org/apache/storm/pacemaker_test.clj
----------------------------------------------------------------------
diff --cc storm-core/test/clj/org/apache/storm/pacemaker_test.clj
index ca7c693,0000000..4c5359f
mode 100644,000000..100644
--- a/storm-core/test/clj/org/apache/storm/pacemaker_test.clj
+++ b/storm-core/test/clj/org/apache/storm/pacemaker_test.clj
@@@ -1,227 -1,0 +1,242 @@@
++;; Licensed to the Apache Software Foundation (ASF) under one
++;; or more contributor license agreements.  See the NOTICE file
++;; distributed with this work for additional information
++;; regarding copyright ownership.  The ASF licenses this file
++;; to you under the Apache License, Version 2.0 (the
++;; "License"); you may not use this file except in compliance
++;; with the License.  You may obtain a copy of the License at
++;;
++;; http://www.apache.org/licenses/LICENSE-2.0
++;;
++;; Unless required by applicable law or agreed to in writing, software
++;; distributed under the License is distributed on an "AS IS" BASIS,
++;; WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
++;; See the License for the specific language governing permissions and
++;; limitations under the License.
 +(ns org.apache.storm.pacemaker-test
 +  (:require [clojure.test :refer :all]
 +            [org.apache.storm.pacemaker [pacemaker :as pacemaker]]
 +            [conjure.core :as conjure])
 +  (:import [backtype.storm.generated
 +            HBExecutionException HBNodes HBRecords
 +            HBServerMessageType HBMessage HBMessageData HBPulse]))
 +
 +(defn- message-with-rand-id [type data]
 +  (let [mid (rand-int 1000)
 +        message (HBMessage. type data)]
 +    (.set_message_id message mid)
 +    [message mid]))
 +
 +(defn- string-to-bytes [string]
 +  (byte-array (map int string)))
 +
 +(defn- bytes-to-string [bytez]
 +  (apply str (map char bytez)))
 +
 +(defn- makenode [handler path]
 +  (.handleMessage handler
 +                  (HBMessage.
 +                   HBServerMessageType/SEND_PULSE
 +                   (HBMessageData/pulse
 +                    (doto (HBPulse.)
 +                      (.set_id path)
 +                      (.set_details (string-to-bytes "nothing")))))
 +                  true))
 +
 +(deftest pacemaker-server-create-path
 +  (conjure/stubbing
 +   [pacemaker/register nil]
 +   (let [handler (pacemaker/mk-handler {})]
 +     (testing "CREATE_PATH"
 +       (let [[message mid] (message-with-rand-id
 +                            HBServerMessageType/CREATE_PATH
 +                            (HBMessageData/path "/testpath"))
 +             response (.handleMessage handler message true)]
 +         (is (= (.get_message_id response) mid))
 +         (is (= (.get_type response) HBServerMessageType/CREATE_PATH_RESPONSE))
 +         (is (= (.get_data response) nil)))))))
 +
 +(deftest pacemaker-server-exists
 +  (conjure/stubbing
 +   [pacemaker/register nil]
 +   (let [handler (pacemaker/mk-handler {})]
 +     (testing "EXISTS - false"
 +       (let [[message mid] (message-with-rand-id HBServerMessageType/EXISTS
 +                                                 (HBMessageData/path "/testpath"))
 +             bad-response (.handleMessage handler message false)
 +             good-response (.handleMessage handler message true)]
 +         (is (= (.get_message_id bad-response) mid))
 +         (is (= (.get_type bad-response) HBServerMessageType/NOT_AUTHORIZED))
 +
 +         (is (= (.get_message_id good-response) mid))
 +         (is (= (.get_type good-response) HBServerMessageType/EXISTS_RESPONSE))
 +         (is (= (.get_boolval (.get_data good-response)) false))))
 +
 +     (testing "EXISTS - true"
 +       (let [path "/exists_path"
 +             data-string "pulse data"]
 +         (let [[send _] (message-with-rand-id
 +                         HBServerMessageType/SEND_PULSE
 +                         (HBMessageData/pulse
 +                          (doto (HBPulse.)
 +                            (.set_id path)
 +                            (.set_details (string-to-bytes data-string)))))
 +               _ (.handleMessage handler send true)
 +               [message mid] (message-with-rand-id HBServerMessageType/EXISTS
 +                                                   (HBMessageData/path path))
 +               bad-response (.handleMessage handler message false)
 +               good-response (.handleMessage handler message true)]
 +           (is (= (.get_message_id bad-response) mid))
 +           (is (= (.get_type bad-response) HBServerMessageType/NOT_AUTHORIZED))
 +
 +           (is (= (.get_message_id good-response) mid))
 +           (is (= (.get_type good-response) HBServerMessageType/EXISTS_RESPONSE))
 +           (is (= (.get_boolval (.get_data good-response)) true))))))))
 +
 +(deftest pacemaker-server-send-pulse-get-pulse
 +  (conjure/stubbing
 +   [pacemaker/register nil]
 +   (let [handler (pacemaker/mk-handler {})]
 +     (testing "SEND_PULSE - GET_PULSE"
 +       (let [path "/pulsepath"
 +             data-string "pulse data"]
 +         (let [[message mid] (message-with-rand-id
 +                              HBServerMessageType/SEND_PULSE
 +                              (HBMessageData/pulse
 +                               (doto (HBPulse.)
 +                                 (.set_id path)
 +                                 (.set_details (string-to-bytes data-string)))))
 +               response (.handleMessage handler message true)]
 +           (is (= (.get_message_id response) mid))
 +           (is (= (.get_type response) HBServerMessageType/SEND_PULSE_RESPONSE))
 +           (is (= (.get_data response) nil)))
 +         (let [[message mid] (message-with-rand-id
 +                              HBServerMessageType/GET_PULSE
 +                              (HBMessageData/path path))
 +               response (.handleMessage handler message true)]
 +           (is (= (.get_message_id response) mid))
 +           (is (= (.get_type response) HBServerMessageType/GET_PULSE_RESPONSE))
 +           (is (= (bytes-to-string (.get_details (.get_pulse (.get_data response)))) data-string))))))))
 +
 +(deftest pacemaker-server-get-all-pulse-for-path
 +  (conjure/stubbing
 +   [pacemaker/register nil]
 +   (let [handler (pacemaker/mk-handler {})]
 +     (testing "GET_ALL_PULSE_FOR_PATH"
 +       (let [[message mid] (message-with-rand-id HBServerMessageType/GET_ALL_PULSE_FOR_PATH
 +                                                 (HBMessageData/path "/testpath"))
 +             bad-response (.handleMessage handler message false)
 +             good-response (.handleMessage handler message true)]
 +         (is (= (.get_message_id bad-response) mid))
 +         (is (= (.get_type bad-response) HBServerMessageType/NOT_AUTHORIZED))
 +
 +         (is (= (.get_message_id good-response) mid))
 +         (is (= (.get_type good-response) HBServerMessageType/GET_ALL_PULSE_FOR_PATH_RESPONSE))
 +         (is (= (.get_data good-response) nil)))))))
 +
 +(deftest pacemaker-server-get-all-nodes-for-path
 +  (conjure/stubbing
 +   [pacemaker/register nil]
 +   (let [handler (pacemaker/mk-handler {})]
 +     (testing "GET_ALL_NODES_FOR_PATH"
 +       (makenode handler "/some-root-path/foo")
 +       (makenode handler "/some-root-path/bar")
 +       (makenode handler "/some-root-path/baz")
 +       (makenode handler "/some-root-path/boo")
 +       (let [[message mid] (message-with-rand-id HBServerMessageType/GET_ALL_NODES_FOR_PATH
 +                                                 (HBMessageData/path "/some-root-path"))
 +             bad-response (.handleMessage handler message false)
 +             good-response (.handleMessage handler message true)
 +             ids (into #{} (.get_pulseIds (.get_nodes (.get_data good-response))))]
 +         (is (= (.get_message_id bad-response) mid))
 +         (is (= (.get_type bad-response) HBServerMessageType/NOT_AUTHORIZED))
 +
 +         (is (= (.get_message_id good-response) mid))
 +         (is (= (.get_type good-response) HBServerMessageType/GET_ALL_NODES_FOR_PATH_RESPONSE))
 +         (is (contains? ids "foo"))
 +         (is (contains? ids "bar"))
 +         (is (contains? ids "baz"))
 +         (is (contains? ids "boo")))
 +
 +       (makenode handler "/some/deeper/path/foo")
 +       (makenode handler "/some/deeper/path/bar")
 +       (makenode handler "/some/deeper/path/baz")
 +       (let [[message mid] (message-with-rand-id HBServerMessageType/GET_ALL_NODES_FOR_PATH
 +                                                 (HBMessageData/path "/some/deeper/path"))
 +             bad-response (.handleMessage handler message false)
 +             good-response (.handleMessage handler message true)
 +             ids (into #{} (.get_pulseIds (.get_nodes (.get_data good-response))))]
 +         (is (= (.get_message_id bad-response) mid))
 +         (is (= (.get_type bad-response) HBServerMessageType/NOT_AUTHORIZED))
 +
 +         (is (= (.get_message_id good-response) mid))
 +         (is (= (.get_type good-response) HBServerMessageType/GET_ALL_NODES_FOR_PATH_RESPONSE))
 +         (is (contains? ids "foo"))
 +         (is (contains? ids "bar"))
 +         (is (contains? ids "baz")))))))
 +
 +(deftest pacemaker-server-get-pulse
 +  (conjure/stubbing
 +   [pacemaker/register nil]
 +   (let [handler (pacemaker/mk-handler {})]
 +     (testing "GET_PULSE"
 +       (makenode handler "/some-root/GET_PULSE")
 +       (let [[message mid] (message-with-rand-id HBServerMessageType/GET_PULSE
 +                                                 (HBMessageData/path "/some-root/GET_PULSE"))
 +             bad-response (.handleMessage handler message false)
 +             good-response (.handleMessage handler message true)
 +             good-pulse (.get_pulse (.get_data good-response))]
 +         (is (= (.get_message_id bad-response) mid))
 +         (is (= (.get_type bad-response) HBServerMessageType/NOT_AUTHORIZED))
 +         (is (= (.get_data bad-response) nil))
 +
 +         (is (= (.get_message_id good-response) mid))
 +         (is (= (.get_type good-response) HBServerMessageType/GET_PULSE_RESPONSE))
 +         (is (= (.get_id good-pulse) "/some-root/GET_PULSE"))
 +         (is (= (bytes-to-string (.get_details good-pulse)) "nothing")))))))
 +
 +(deftest pacemaker-server-delete-path
 +  (conjure/stubbing
 +   [pacemaker/register nil]
 +   (let [handler (pacemaker/mk-handler {})]
 +     (testing "DELETE_PATH"
 +       (makenode handler "/some-root/DELETE_PATH/foo")
 +       (makenode handler "/some-root/DELETE_PATH/bar")
 +       (makenode handler "/some-root/DELETE_PATH/baz")
 +       (makenode handler "/some-root/DELETE_PATH/boo")
 +       (let [[message mid] (message-with-rand-id HBServerMessageType/DELETE_PATH
 +                                                 (HBMessageData/path "/some-root/DELETE_PATH"))
 +             response (.handleMessage handler message true)]
 +         (is (= (.get_message_id response) mid))
 +         (is (= (.get_type response) HBServerMessageType/DELETE_PATH_RESPONSE))
 +         (is (= (.get_data response) nil)))
 +       (let [[message mid] (message-with-rand-id HBServerMessageType/GET_ALL_NODES_FOR_PATH
 +                                                 (HBMessageData/path "/some-root/DELETE_PATH"))
 +             response (.handleMessage handler message true)
 +             ids (into #{} (.get_pulseIds (.get_nodes (.get_data response))))]
 +         (is (= (.get_message_id response) mid))
 +         (is (= (.get_type response) HBServerMessageType/GET_ALL_NODES_FOR_PATH_RESPONSE))
 +         (is (empty? ids)))))))
 +
 +(deftest pacemaker-server-delete-pulse-id
 +  (conjure/stubbing
 +   [pacemaker/register nil]
 +   (let [handler (pacemaker/mk-handler {})]
 +     (testing "DELETE_PULSE_ID"
 +       (makenode handler "/some-root/DELETE_PULSE_ID/foo")
 +       (makenode handler "/some-root/DELETE_PULSE_ID/bar")
 +       (makenode handler "/some-root/DELETE_PULSE_ID/baz")
 +       (makenode handler "/some-root/DELETE_PULSE_ID/boo")
 +       (let [[message mid] (message-with-rand-id HBServerMessageType/DELETE_PULSE_ID
 +                                                 (HBMessageData/path "/some-root/DELETE_PULSE_ID/foo"))
 +             response (.handleMessage handler message true)]
 +         (is (= (.get_message_id response) mid))
 +         (is (= (.get_type response) HBServerMessageType/DELETE_PULSE_ID_RESPONSE))
 +         (is (= (.get_data response) nil)))
 +       (let [[message mid] (message-with-rand-id HBServerMessageType/GET_ALL_NODES_FOR_PATH
 +                                                 (HBMessageData/path "/some-root/DELETE_PULSE_ID"))
 +             response (.handleMessage handler message true)
 +             ids (into #{} (.get_pulseIds (.get_nodes (.get_data response))))]
 +         (is (= (.get_message_id response) mid))
 +         (is (= (.get_type response) HBServerMessageType/GET_ALL_NODES_FOR_PATH_RESPONSE))
 +         (is (not (contains? ids "foo"))))))))
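
On the server side, the tests above drive handleMessage directly with an
authenticated flag, and unauthenticated callers get NOT_AUTHORIZED regardless of
request type. A minimal Java sketch of that check (the handler type returned by
pacemaker/mk-handler is left abstract here as an assumption):

HBMessage request = new HBMessage(HBServerMessageType.GET_PULSE,
                                  HBMessageData.path("/some-root/GET_PULSE"));

HBMessage denied = handler.handleMessage(request, false);    // not authenticated
assert denied.get_type() == HBServerMessageType.NOT_AUTHORIZED;

HBMessage allowed = handler.handleMessage(request, true);    // authenticated
assert allowed.get_type() == HBServerMessageType.GET_PULSE_RESPONSE;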


[31/37] storm git commit: Addressing PR comments.

Posted by kn...@apache.org.
Addressing PR comments.


Project: http://git-wip-us.apache.org/repos/asf/storm/repo
Commit: http://git-wip-us.apache.org/repos/asf/storm/commit/aee48646
Tree: http://git-wip-us.apache.org/repos/asf/storm/tree/aee48646
Diff: http://git-wip-us.apache.org/repos/asf/storm/diff/aee48646

Branch: refs/heads/master
Commit: aee486467788de1a60d8d0194719829c2e313d14
Parents: 711e36f
Author: Kyle Nusbaum <Ky...@gmail.com>
Authored: Fri Nov 20 17:16:52 2015 -0600
Committer: Kyle Nusbaum <Ky...@gmail.com>
Committed: Fri Nov 20 17:16:52 2015 -0600

----------------------------------------------------------------------
 bin/storm.py                                    |  3 +-
 .../cluster_state/zookeeper_state_factory.clj   |  4 +-
 storm-core/src/clj/backtype/storm/util.clj      |  8 +--
 .../org/apache/storm/pacemaker/pacemaker.clj    | 62 ++++++++--------
 .../storm/pacemaker/pacemaker_state_factory.clj |  4 +-
 .../backtype/storm/cluster/ClusterState.java    |  4 +-
 .../storm/cluster/ClusterStateFactory.java      |  2 +-
 .../backtype/storm/messaging/netty/Client.java  | 74 ++++++++++----------
 .../storm/messaging/netty/ControlMessage.java   |  2 +-
 .../messaging/netty/INettySerializable.java     |  4 +-
 .../netty/KerberosSaslNettyClient.java          |  1 -
 .../netty/KerberosSaslNettyServer.java          | 17 +----
 .../netty/KerberosSaslServerHandler.java        | 14 ++--
 .../storm/messaging/netty/SaslMessageToken.java | 11 ++-
 .../storm/messaging/netty/SaslNettyServer.java  |  1 -
 .../messaging/netty/SaslStormServerHandler.java |  4 +-
 .../backtype/storm/messaging/netty/Server.java  | 36 ++++------
 .../messaging/netty/StormClientHandler.java     |  2 +-
 .../messaging/netty/StormServerHandler.java     |  4 +-
 .../backtype/storm/security/auth/AuthUtils.java | 22 ++++--
 .../apache/storm/pacemaker/PacemakerClient.java | 31 ++++----
 .../apache/storm/pacemaker/PacemakerServer.java | 25 ++++---
 .../storm/pacemaker/codec/ThriftDecoder.java    |  5 +-
 .../storm/pacemaker/codec/ThriftEncoder.java    |  6 +-
 .../clj/org/apache/storm/pacemaker_test.clj     |  4 +-
 25 files changed, 173 insertions(+), 177 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/storm/blob/aee48646/bin/storm.py
----------------------------------------------------------------------
diff --git a/bin/storm.py b/bin/storm.py
index 497691d..52e5843 100755
--- a/bin/storm.py
+++ b/bin/storm.py
@@ -266,7 +266,8 @@ def upload_credentials(*args):
 def heartbeats(*args):
     """Syntax: [storm heartbeats [cmd]]
 
-    list [KEY...] - lists heartbeats nodes under KEY currently in pacemaker.
+    list PATH - lists heartbeat nodes under PATH currently in the ClusterState.
+    get  PATH - gets the heartbeat data at PATH.
     """
     exec_storm_class(
         "backtype.storm.command.heartbeats",

http://git-wip-us.apache.org/repos/asf/storm/blob/aee48646/storm-core/src/clj/backtype/storm/cluster_state/zookeeper_state_factory.clj
----------------------------------------------------------------------
diff --git a/storm-core/src/clj/backtype/storm/cluster_state/zookeeper_state_factory.clj b/storm-core/src/clj/backtype/storm/cluster_state/zookeeper_state_factory.clj
index ca45ec4..ff942db 100644
--- a/storm-core/src/clj/backtype/storm/cluster_state/zookeeper_state_factory.clj
+++ b/storm-core/src/clj/backtype/storm/cluster_state/zookeeper_state_factory.clj
@@ -16,8 +16,8 @@
 
 (ns backtype.storm.cluster-state.zookeeper-state-factory
   (:import [org.apache.curator.framework.state ConnectionStateListener])
-  (:import [org.apache.zookeeper KeeperException KeeperException$NoNodeException ZooDefs ZooDefs$Ids ZooDefs$Perms]
-           [backtype.storm.cluster ClusterState ClusterStateContext DaemonType])
+  (:import [org.apache.zookeeper KeeperException$NoNodeException]
+           [backtype.storm.cluster ClusterState DaemonType])
   (:use [backtype.storm cluster config log util])
   (:require [backtype.storm [zookeeper :as zk]])
   (:gen-class

http://git-wip-us.apache.org/repos/asf/storm/blob/aee48646/storm-core/src/clj/backtype/storm/util.clj
----------------------------------------------------------------------
diff --git a/storm-core/src/clj/backtype/storm/util.clj b/storm-core/src/clj/backtype/storm/util.clj
index 0b21309..0fab3e7 100644
--- a/storm-core/src/clj/backtype/storm/util.clj
+++ b/storm-core/src/clj/backtype/storm/util.clj
@@ -1064,16 +1064,16 @@
 
 (defn retry-on-exception
   "Retries specific function on exception based on retries count"
-  [tries task-description f & args]
+  [retries task-description f & args]
   (let [res (try {:value (apply f args)}
               (catch Exception e
-                (if (= 0 tries)
+                (if (= 0 retries)
                   (throw e)
                   {:exception e})))]
     (if (:exception res)
       (do 
-        (log-error (:exception res) (str "Failed to " task-description ". Will make [" tries "] more attempts."))
-        (recur (dec tries) task-description f args))
+        (log-error (:exception res) (str "Failed to " task-description ". Will make [" retries "] more attempts."))
+        (recur (dec retries) task-description f args))
       (do 
         (log-debug (str "Successful " task-description "."))
         (:value res)))))
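
For readers following along: retry-on-exception calls f, and on failure either rethrows (when no attempts remain) or logs and retries with a decremented count. A minimal Java sketch of the same retry shape (RetryUtil, the Callable-based signature, and the log line are illustrative, not part of this patch):

    import java.util.concurrent.Callable;

    public final class RetryUtil {
        // Mirror of retry-on-exception: try the task, and on failure either
        // rethrow (no attempts left) or log and loop with a decremented count.
        public static <T> T retryOnException(int retries, String taskDescription,
                                             Callable<T> task) throws Exception {
            while (true) {
                try {
                    return task.call();
                } catch (Exception e) {
                    if (retries == 0) {
                        throw e;
                    }
                    System.err.println("Failed to " + taskDescription
                            + ". Will make [" + retries + "] more attempts.");
                    retries--;
                }
            }
        }
    }

A caller might write, e.g., RetryUtil.retryOnException(3, "connect to pacemaker", () -> openConnection()).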

http://git-wip-us.apache.org/repos/asf/storm/blob/aee48646/storm-core/src/clj/org/apache/storm/pacemaker/pacemaker.clj
----------------------------------------------------------------------
diff --git a/storm-core/src/clj/org/apache/storm/pacemaker/pacemaker.clj b/storm-core/src/clj/org/apache/storm/pacemaker/pacemaker.clj
index cd48eb4..588f6aa 100644
--- a/storm-core/src/clj/org/apache/storm/pacemaker/pacemaker.clj
+++ b/storm-core/src/clj/org/apache/storm/pacemaker/pacemaker.clj
@@ -16,11 +16,9 @@
 
 (ns org.apache.storm.pacemaker.pacemaker
   (:import [org.apache.storm.pacemaker PacemakerServer IServerMessageHandler]
-           [java.util.concurrent ConcurrentHashMap ThreadPoolExecutor TimeUnit LinkedBlockingDeque]
+           [java.util.concurrent ConcurrentHashMap]
            [java.util.concurrent.atomic AtomicInteger]
-           [java.util Date]
-           [backtype.storm.generated
-            HBAuthorizationException HBExecutionException HBNodes HBRecords
+           [backtype.storm.generated HBNodes
             HBServerMessageType HBMessage HBMessageData HBPulse])
   (:use [clojure.string :only [replace-first split]]
         [backtype.storm log config util])
@@ -64,41 +62,41 @@
 
 (defn- report-stats [heartbeats stats last-five-s]
   (loop []
-      (let [send-count (.getAndSet (:send-pulse-count stats) 0)
-            received-size (.getAndSet (:total-received-size stats) 0)
-            get-count (.getAndSet (:get-pulse-count stats) 0)
-            sent-size (.getAndSet (:total-sent-size stats) 0)
-            largest (.getAndSet (:largest-heartbeat-size stats) 0)
-            average (.getAndSet (:average-heartbeat-size stats) 0)
-            total-keys (.size heartbeats)]
-        (log-debug "\nReceived " send-count " heartbeats totaling " received-size " bytes,\n"
-                   "Sent " get-count " heartbeats totaling " sent-size " bytes,\n"
-                   "The largest heartbeat was " largest " bytes,\n"
-                   "The average heartbeat was " average " bytes,\n"
-                   "Pacemaker contained " total-keys " total keys\n"
-                   "in the last " sleep-seconds " second(s)")
-        (dosync (ref-set last-five-s
-                         {:send-pulse-count send-count
-                          :total-received-size received-size
-                          :get-pulse-count get-count
-                          :total-sent-size sent-size
-                          :largest-heartbeat-size largest
-                          :average-heartbeat-size average
-                          :total-keys total-keys})))
-      (Thread/sleep (* 1000 sleep-seconds))
-      (recur)))
+    (let [send-count (.getAndSet (:send-pulse-count stats) 0)
+          received-size (.getAndSet (:total-received-size stats) 0)
+          get-count (.getAndSet (:get-pulse-count stats) 0)
+          sent-size (.getAndSet (:total-sent-size stats) 0)
+          largest (.getAndSet (:largest-heartbeat-size stats) 0)
+          average (.getAndSet (:average-heartbeat-size stats) 0)
+          total-keys (.size heartbeats)]
+      (log-debug "\nReceived " send-count " heartbeats totaling " received-size " bytes,\n"
+                 "Sent " get-count " heartbeats totaling " sent-size " bytes,\n"
+                 "The largest heartbeat was " largest " bytes,\n"
+                 "The average heartbeat was " average " bytes,\n"
+                 "Pacemaker contained " total-keys " total keys\n"
+                 "in the last " sleep-seconds " second(s)")
+      (dosync (ref-set last-five-s
+                       {:send-pulse-count send-count
+                        :total-received-size received-size
+                        :get-pulse-count get-count
+                        :total-sent-size sent-size
+                        :largest-heartbeat-size largest
+                        :average-heartbeat-size average
+                        :total-keys total-keys})))
+    (Thread/sleep (* 1000 sleep-seconds))
+    (recur)))
 
 ;; JMX stuff
 (defn register [last-five-s]
   (jmx/register-mbean
-   (jmx/create-bean
-    last-five-s)
-   "org.apache.storm.pacemaker.pacemaker:stats=Stats_Last_5_Seconds"))
+    (jmx/create-bean
+      last-five-s)
+    "org.apache.storm.pacemaker.pacemaker:stats=Stats_Last_5_Seconds"))
 
 
 ;; Pacemaker Functions
 
-(defn hb-data [conf]
+(defn hb-data []
   (ConcurrentHashMap.))
 
 (defn create-path [^String path heartbeats]
@@ -168,7 +166,7 @@
   (HBMessage. HBServerMessageType/NOT_AUTHORIZED nil))
 
 (defn mk-handler [conf]
-  (let [heartbeats ^ConcurrentHashMap (hb-data conf)
+  (let [heartbeats ^ConcurrentHashMap (hb-data)
         pacemaker-stats {:send-pulse-count (AtomicInteger.)
                          :total-received-size (AtomicInteger.)
                          :get-pulse-count (AtomicInteger.)
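
Since hb-data no longer takes conf, the heartbeat store is just a bare ConcurrentHashMap keyed by path. A rough Java sketch of that store's shape (class and method names are illustrative; the real handler also tracks stats and wraps results in HBMessage responses):

    import java.util.Map;
    import java.util.concurrent.ConcurrentHashMap;

    class HeartbeatStore {
        // path -> serialized heartbeat, matching the ConcurrentHashMap from hb-data
        private final Map<String, byte[]> heartbeats = new ConcurrentHashMap<>();

        void sendPulse(String path, byte[] details) {
            heartbeats.put(path, details);   // SEND_PULSE
        }

        byte[] getPulse(String path) {
            return heartbeats.get(path);     // GET_PULSE
        }

        void deletePulseId(String path) {
            heartbeats.remove(path);         // DELETE_PULSE_ID
        }
    }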

http://git-wip-us.apache.org/repos/asf/storm/blob/aee48646/storm-core/src/clj/org/apache/storm/pacemaker/pacemaker_state_factory.clj
----------------------------------------------------------------------
diff --git a/storm-core/src/clj/org/apache/storm/pacemaker/pacemaker_state_factory.clj b/storm-core/src/clj/org/apache/storm/pacemaker/pacemaker_state_factory.clj
index 9dc7809..537397a 100644
--- a/storm-core/src/clj/org/apache/storm/pacemaker/pacemaker_state_factory.clj
+++ b/storm-core/src/clj/org/apache/storm/pacemaker/pacemaker_state_factory.clj
@@ -23,8 +23,8 @@
              [log :refer :all]
              [util :as util]])
   (:import [backtype.storm.generated
-            HBExecutionException HBNodes HBRecords
-            HBServerMessageType HBMessage HBMessageData HBPulse]
+            HBExecutionException HBServerMessageType HBMessage
+            HBMessageData HBPulse]
            [backtype.storm.cluster_state zookeeper_state_factory]
            [backtype.storm.cluster ClusterState]
            [org.apache.storm.pacemaker PacemakerClient])

http://git-wip-us.apache.org/repos/asf/storm/blob/aee48646/storm-core/src/jvm/backtype/storm/cluster/ClusterState.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/backtype/storm/cluster/ClusterState.java b/storm-core/src/jvm/backtype/storm/cluster/ClusterState.java
index e0ddc06..bca085f 100644
--- a/storm-core/src/jvm/backtype/storm/cluster/ClusterState.java
+++ b/storm-core/src/jvm/backtype/storm/cluster/ClusterState.java
@@ -89,7 +89,7 @@ public interface ClusterState {
      * @param acls The acls to apply to the path. May be null.
      * @return path
      */
-    void set_ephemeral_node(String path, byte[] data, List<ACL> acls);
+    String set_ephemeral_node(String path, byte[] data, List<ACL> acls);
 
     /**
      * Gets the 'version' of the node at a path. Optionally sets a watch
@@ -203,7 +203,7 @@ public interface ClusterState {
     /**
      * Force consistency on a path. Any writes committed on the path before
      * this call will be completely propagated when it returns.
-     * @param The path to synchronize.
+     * @param path The path to synchronize.
      */
     void sync_path(String path);
 }
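
The set_ephemeral_node change returns the created path instead of void, in line with the "@return path" javadoc above it. A hypothetical caller under the new contract (clusterState, data, acls, and LOG are assumed to be in scope):

    // With the new signature the caller can log or reuse the node actually created:
    String created = clusterState.set_ephemeral_node("/heartbeats/node-1", data, acls);
    LOG.debug("registered ephemeral node at {}", created);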

http://git-wip-us.apache.org/repos/asf/storm/blob/aee48646/storm-core/src/jvm/backtype/storm/cluster/ClusterStateFactory.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/backtype/storm/cluster/ClusterStateFactory.java b/storm-core/src/jvm/backtype/storm/cluster/ClusterStateFactory.java
index d33646b..1f946ee 100644
--- a/storm-core/src/jvm/backtype/storm/cluster/ClusterStateFactory.java
+++ b/storm-core/src/jvm/backtype/storm/cluster/ClusterStateFactory.java
@@ -23,6 +23,6 @@ import org.apache.zookeeper.data.ACL;
 
 public interface ClusterStateFactory {
     
-    public ClusterState mkState(APersistentMap config, APersistentMap auth_conf, List<ACL> acls, ClusterStateContext context);
+    ClusterState mkState(APersistentMap config, APersistentMap auth_conf, List<ACL> acls, ClusterStateContext context);
 
 }

http://git-wip-us.apache.org/repos/asf/storm/blob/aee48646/storm-core/src/jvm/backtype/storm/messaging/netty/Client.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/backtype/storm/messaging/netty/Client.java b/storm-core/src/jvm/backtype/storm/messaging/netty/Client.java
index 23d8124..4f813ba 100644
--- a/storm-core/src/jvm/backtype/storm/messaging/netty/Client.java
+++ b/storm-core/src/jvm/backtype/storm/messaging/netty/Client.java
@@ -243,7 +243,7 @@ public class Client extends ConnectionWithStatus implements IStatefulObject, ISa
         if (closing) {
             int numMessages = iteratorSize(msgs);
             LOG.error("discarding {} messages because the Netty client to {} is being closed", numMessages,
-                      dstAddressPrefixedName);
+                    dstAddressPrefixedName);
             return;
         }
 
@@ -347,20 +347,20 @@ public class Client extends ConnectionWithStatus implements IStatefulObject, ISa
 
         ChannelFuture future = channel.write(batch);
         future.addListener(new ChannelFutureListener() {
-                public void operationComplete(ChannelFuture future) throws Exception {
-                    pendingMessages.addAndGet(0 - numMessages);
-                    if (future.isSuccess()) {
-                        LOG.debug("sent {} messages to {}", numMessages, dstAddressPrefixedName);
-                        messagesSent.getAndAdd(batch.size());
-                    } else {
-                        LOG.error("failed to send {} messages to {}: {}", numMessages, dstAddressPrefixedName,
-                                  future.getCause());
-                        closeChannelAndReconnect(future.getChannel());
-                        messagesLost.getAndAdd(numMessages);
-                    }
+            public void operationComplete(ChannelFuture future) throws Exception {
+                pendingMessages.addAndGet(0 - numMessages);
+                if (future.isSuccess()) {
+                    LOG.debug("sent {} messages to {}", numMessages, dstAddressPrefixedName);
+                    messagesSent.getAndAdd(batch.size());
+                } else {
+                    LOG.error("failed to send {} messages to {}: {}", numMessages, dstAddressPrefixedName,
+                            future.getCause());
+                    closeChannelAndReconnect(future.getChannel());
+                    messagesLost.getAndAdd(numMessages);
                 }
+            }
 
-            });
+        });
     }
 
     /**
@@ -397,7 +397,7 @@ public class Client extends ConnectionWithStatus implements IStatefulObject, ISa
 
     private void waitForPendingMessagesToBeSent() {
         LOG.info("waiting up to {} ms to send {} pending messages to {}",
-                 PENDING_MESSAGES_FLUSH_TIMEOUT_MS, pendingMessages.get(), dstAddressPrefixedName);
+                PENDING_MESSAGES_FLUSH_TIMEOUT_MS, pendingMessages.get(), dstAddressPrefixedName);
         long totalPendingMsgs = pendingMessages.get();
         long startMs = System.currentTimeMillis();
         while (pendingMessages.get() != 0) {
@@ -405,7 +405,7 @@ public class Client extends ConnectionWithStatus implements IStatefulObject, ISa
                 long deltaMs = System.currentTimeMillis() - startMs;
                 if (deltaMs > PENDING_MESSAGES_FLUSH_TIMEOUT_MS) {
                     LOG.error("failed to send all pending messages to {} within timeout, {} of {} messages were not " +
-                              "sent", dstAddressPrefixedName, pendingMessages.get(), totalPendingMsgs);
+                            "sent", dstAddressPrefixedName, pendingMessages.get(), totalPendingMsgs);
                     break;
                 }
                 Thread.sleep(PENDING_MESSAGES_FLUSH_INTERVAL_MS);
@@ -528,7 +528,7 @@ public class Client extends ConnectionWithStatus implements IStatefulObject, ISa
 
         private void reschedule(Throwable t) {
             String baseMsg = String.format("connection attempt %s to %s failed", connectionAttempts,
-                                           dstAddressPrefixedName);
+                    dstAddressPrefixedName);
             String failureMsg = (t == null) ? baseMsg : baseMsg + ": " + t.toString();
             LOG.error(failureMsg);
             long nextDelayMs = retryPolicy.getSleepTimeMs(connectionAttempts.get(), 0);
@@ -545,32 +545,32 @@ public class Client extends ConnectionWithStatus implements IStatefulObject, ISa
                 LOG.debug("connecting to {} [attempt {}]", address.toString(), connectionAttempt);
                 ChannelFuture future = bootstrap.connect(address);
                 future.addListener(new ChannelFutureListener() {
-                        @Override
-                        public void operationComplete(ChannelFuture future) throws Exception {
-                            // This call returns immediately
-                            Channel newChannel = future.getChannel();
-
-                            if (future.isSuccess() && connectionEstablished(newChannel)) {
-                                boolean setChannel = channelRef.compareAndSet(null, newChannel);
-                                checkState(setChannel);
-                                LOG.debug("successfully connected to {}, {} [attempt {}]", address.toString(), newChannel.toString(),
-                                          connectionAttempt);
-                                if (messagesLost.get() > 0) {
-                                    LOG.warn("Re-connection to {} was successful but {} messages has been lost so far", address.toString(), messagesLost.get());
-                                }
-                            } else {
-                                Throwable cause = future.getCause();
-                                reschedule(cause);
-                                if (newChannel != null) {
-                                    newChannel.close();
-                                }
+                    @Override
+                    public void operationComplete(ChannelFuture future) throws Exception {
+                        // This call returns immediately
+                        Channel newChannel = future.getChannel();
+
+                        if (future.isSuccess() && connectionEstablished(newChannel)) {
+                            boolean setChannel = channelRef.compareAndSet(null, newChannel);
+                            checkState(setChannel);
+                            LOG.debug("successfully connected to {}, {} [attempt {}]", address.toString(), newChannel.toString(),
+                                    connectionAttempt);
+                            if (messagesLost.get() > 0) {
+                                LOG.warn("Re-connection to {} was successful but {} messages have been lost so far", address.toString(), messagesLost.get());
+                            }
+                        } else {
+                            Throwable cause = future.getCause();
+                            reschedule(cause);
+                            if (newChannel != null) {
+                                newChannel.close();
                             }
                         }
-                    });
+                    }
+                });
             } else {
                 close();
                 throw new RuntimeException("Giving up to scheduleConnect to " + dstAddressPrefixedName + " after " +
-                                           connectionAttempts + " failed attempts. " + messagesLost.get() + " messages were lost");
+                        connectionAttempts + " failed attempts. " + messagesLost.get() + " messages were lost");
 
             }
         }

http://git-wip-us.apache.org/repos/asf/storm/blob/aee48646/storm-core/src/jvm/backtype/storm/messaging/netty/ControlMessage.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/backtype/storm/messaging/netty/ControlMessage.java b/storm-core/src/jvm/backtype/storm/messaging/netty/ControlMessage.java
index 769d010..7f60078 100644
--- a/storm-core/src/jvm/backtype/storm/messaging/netty/ControlMessage.java
+++ b/storm-core/src/jvm/backtype/storm/messaging/netty/ControlMessage.java
@@ -55,7 +55,7 @@ public enum ControlMessage implements INettySerializable {
     
     /**
      * encode the current Control Message into a channel buffer
-     * @throws Exception
+     * @throws IOException
      */
     public ChannelBuffer buffer() throws IOException {
         ChannelBufferOutputStream bout = new ChannelBufferOutputStream(ChannelBuffers.directBuffer(encodeLength()));      

http://git-wip-us.apache.org/repos/asf/storm/blob/aee48646/storm-core/src/jvm/backtype/storm/messaging/netty/INettySerializable.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/backtype/storm/messaging/netty/INettySerializable.java b/storm-core/src/jvm/backtype/storm/messaging/netty/INettySerializable.java
index 945e6e9..6cc6de2 100644
--- a/storm-core/src/jvm/backtype/storm/messaging/netty/INettySerializable.java
+++ b/storm-core/src/jvm/backtype/storm/messaging/netty/INettySerializable.java
@@ -21,6 +21,6 @@ import java.io.IOException;
 import org.jboss.netty.buffer.ChannelBuffer;
 
 public interface INettySerializable {
-    public ChannelBuffer buffer() throws IOException;
-    public int encodeLength();
+    ChannelBuffer buffer() throws IOException;
+    int encodeLength();
 }

http://git-wip-us.apache.org/repos/asf/storm/blob/aee48646/storm-core/src/jvm/backtype/storm/messaging/netty/KerberosSaslNettyClient.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/backtype/storm/messaging/netty/KerberosSaslNettyClient.java b/storm-core/src/jvm/backtype/storm/messaging/netty/KerberosSaslNettyClient.java
index 1295394..90dc4b6 100644
--- a/storm-core/src/jvm/backtype/storm/messaging/netty/KerberosSaslNettyClient.java
+++ b/storm-core/src/jvm/backtype/storm/messaging/netty/KerberosSaslNettyClient.java
@@ -113,7 +113,6 @@ public class KerberosSaslNettyClient {
         try {
             Principal principal = (Principal)subject.getPrincipals().toArray()[0];
             final String fPrincipalName = principal.getName();
-            KerberosName kerbName = new KerberosName(principal.getName());
             final String fHost = (String)storm_conf.get(Config.PACEMAKER_HOST);
             final String fServiceName = serviceName;
             final CallbackHandler fch = ch;

http://git-wip-us.apache.org/repos/asf/storm/blob/aee48646/storm-core/src/jvm/backtype/storm/messaging/netty/KerberosSaslNettyServer.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/backtype/storm/messaging/netty/KerberosSaslNettyServer.java b/storm-core/src/jvm/backtype/storm/messaging/netty/KerberosSaslNettyServer.java
index a935608..a16c01b 100644
--- a/storm-core/src/jvm/backtype/storm/messaging/netty/KerberosSaslNettyServer.java
+++ b/storm-core/src/jvm/backtype/storm/messaging/netty/KerberosSaslNettyServer.java
@@ -17,7 +17,6 @@
  */
 package backtype.storm.messaging.netty;
 
-import backtype.storm.Config;
 import backtype.storm.security.auth.AuthUtils;
 import backtype.storm.security.auth.KerberosPrincipalToLocal;
 import java.io.IOException;
@@ -53,7 +52,6 @@ class KerberosSaslNettyServer {
 
     private SaslServer saslServer;
     private Subject subject;
-    private String jaas_section;
     private List<String> authorizedUsers;
 
     KerberosSaslNettyServer(Map storm_conf, String jaas_section, List<String> authorizedUsers) {
@@ -70,7 +68,7 @@ class KerberosSaslNettyServer {
 
         LOG.debug("KerberosSaslNettyServer: authmethod {}", SaslUtils.KERBEROS);
 
-        KerberosSaslCallbackHandler ch = new KerberosSaslNettyServer.KerberosSaslCallbackHandler(storm_conf, authorizedUsers);
+        KerberosSaslCallbackHandler ch = new KerberosSaslNettyServer.KerberosSaslCallbackHandler(authorizedUsers);
 
         //login our principal
         subject = null;
@@ -138,25 +136,14 @@ class KerberosSaslNettyServer {
         return saslServer.getAuthorizationID();
     }
 
-    private String getPrincipal(Subject subject) {
-        Set<Principal> principals = (Set<Principal>)subject.getPrincipals();
-        if (principals==null || principals.size()<1) {
-            LOG.info("No principal found in login subject");
-            return null;
-        }
-        return ((Principal)(principals.toArray()[0])).getName();
-    }
-
     /** CallbackHandler for SASL DIGEST-MD5 mechanism */
     public static class KerberosSaslCallbackHandler implements CallbackHandler {
 
         /** Used to authenticate the clients */
-        private Map config;
         private List<String> authorizedUsers;
 
-        public KerberosSaslCallbackHandler(Map config, List<String> authorizedUsers) {
+        public KerberosSaslCallbackHandler(List<String> authorizedUsers) {
             LOG.debug("KerberosSaslCallback: Creating KerberosSaslCallback handler.");
-            this.config = config;
             this.authorizedUsers = authorizedUsers;
         }
 

http://git-wip-us.apache.org/repos/asf/storm/blob/aee48646/storm-core/src/jvm/backtype/storm/messaging/netty/KerberosSaslServerHandler.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/backtype/storm/messaging/netty/KerberosSaslServerHandler.java b/storm-core/src/jvm/backtype/storm/messaging/netty/KerberosSaslServerHandler.java
index e4a6e29..c31da17 100644
--- a/storm-core/src/jvm/backtype/storm/messaging/netty/KerberosSaslServerHandler.java
+++ b/storm-core/src/jvm/backtype/storm/messaging/netty/KerberosSaslServerHandler.java
@@ -51,8 +51,9 @@ public class KerberosSaslServerHandler extends SimpleChannelUpstreamHandler {
     public void messageReceived(ChannelHandlerContext ctx, MessageEvent e)
         throws Exception {
         Object msg = e.getMessage();
-        if (msg == null)
+        if (msg == null) {
             return;
+        }
 
         Channel channel = ctx.getChannel();
 
@@ -70,14 +71,13 @@ public class KerberosSaslServerHandler extends SimpleChannelUpstreamHandler {
                     LOG.debug("No saslNettyServer for {}  yet; creating now, with topology token: ", channel);
                     try {
                         saslNettyServer = new KerberosSaslNettyServer(storm_conf, jaas_section, authorizedUsers);
+                        KerberosSaslNettyServerState.getKerberosSaslNettyServer.set(channel,
+                                                                                    saslNettyServer);
                     } catch (RuntimeException ioe) {
                         LOG.error("Error occurred while creating saslNettyServer on server {} for client {}",
                                   channel.getLocalAddress(), channel.getRemoteAddress());
-                        saslNettyServer = null;
+                        throw ioe;
                     }
-
-                    KerberosSaslNettyServerState.getKerberosSaslNettyServer.set(channel,
-                                                                                saslNettyServer);
                 } else {
                     LOG.debug("Found existing saslNettyServer on server: {} for client {}",
                               channel.getLocalAddress(), channel.getRemoteAddress());
@@ -125,7 +125,9 @@ public class KerberosSaslServerHandler extends SimpleChannelUpstreamHandler {
 
     @Override
     public void exceptionCaught(ChannelHandlerContext ctx, ExceptionEvent e) {
-        if(server != null) server.closeChannel(e.getChannel());
+        if(server != null) {
+            server.closeChannel(e.getChannel());
+        }
     }
 
 }

http://git-wip-us.apache.org/repos/asf/storm/blob/aee48646/storm-core/src/jvm/backtype/storm/messaging/netty/SaslMessageToken.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/backtype/storm/messaging/netty/SaslMessageToken.java b/storm-core/src/jvm/backtype/storm/messaging/netty/SaslMessageToken.java
index 70e7089..8a133d1 100644
--- a/storm-core/src/jvm/backtype/storm/messaging/netty/SaslMessageToken.java
+++ b/storm-core/src/jvm/backtype/storm/messaging/netty/SaslMessageToken.java
@@ -72,17 +72,16 @@ public class SaslMessageToken implements INettySerializable {
         this.token = token;
     }
 
-
     public int encodeLength() {
         return 2 + 4 + token.length;
     }
 
     /**
      * encode the current SaslToken Message into a channel buffer
-     * SaslTokenMessageRequest is encoded as: identifier .... short(2) always it
-     * is -500 payload length .... int payload .... byte[]
+     * SaslTokenMessageRequest is encoded as:
+     *   identifier ....... short (2 bytes)
+     *   payload length ... int (4 bytes)
+     *   payload .......... byte[]
      * 
-     * @throws Exception
+     * @throws IOException
      */
     public ChannelBuffer buffer() throws IOException {
         ChannelBufferOutputStream bout = new ChannelBufferOutputStream(
@@ -92,7 +91,7 @@ public class SaslMessageToken implements INettySerializable {
             payload_len = token.length;
 
         bout.writeShort(IDENTIFIER);
-        bout.writeInt((int) payload_len);
+        bout.writeInt(payload_len);
 
         if (payload_len > 0) {
             bout.write(token);
@@ -105,7 +104,7 @@ public class SaslMessageToken implements INettySerializable {
         ChannelBuffer sm_buffer = ChannelBuffers.copiedBuffer(serial);
         short identifier = sm_buffer.readShort();
         int payload_len = sm_buffer.readInt();
-        if(identifier != -500) {
+        if(identifier != IDENTIFIER) {
             return null;
         }
         byte token[] = new byte[payload_len];
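
The javadoc above pins down the token frame: a 2-byte identifier (-500, per the read() check), a 4-byte payload length, then the payload. A self-contained sketch of that framing using plain java.io streams in place of Netty's ChannelBuffer classes:

    import java.io.ByteArrayInputStream;
    import java.io.ByteArrayOutputStream;
    import java.io.DataInputStream;
    import java.io.DataOutputStream;
    import java.io.IOException;

    final class SaslFrame {
        static final short IDENTIFIER = -500;    // same marker the token frame uses

        // identifier (short) | payload length (int) | payload bytes
        static byte[] encode(byte[] token) throws IOException {
            ByteArrayOutputStream bout = new ByteArrayOutputStream(2 + 4 + token.length);
            DataOutputStream out = new DataOutputStream(bout);
            out.writeShort(IDENTIFIER);
            out.writeInt(token.length);
            out.write(token);
            return bout.toByteArray();
        }

        static byte[] decode(byte[] frame) throws IOException {
            DataInputStream in = new DataInputStream(new ByteArrayInputStream(frame));
            if (in.readShort() != IDENTIFIER) {
                return null;                      // not a SASL token frame
            }
            byte[] token = new byte[in.readInt()];
            in.readFully(token);
            return token;
        }
    }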

http://git-wip-us.apache.org/repos/asf/storm/blob/aee48646/storm-core/src/jvm/backtype/storm/messaging/netty/SaslNettyServer.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/backtype/storm/messaging/netty/SaslNettyServer.java b/storm-core/src/jvm/backtype/storm/messaging/netty/SaslNettyServer.java
index 0c4b24e..887a6c6 100644
--- a/storm-core/src/jvm/backtype/storm/messaging/netty/SaslNettyServer.java
+++ b/storm-core/src/jvm/backtype/storm/messaging/netty/SaslNettyServer.java
@@ -35,7 +35,6 @@ import org.slf4j.LoggerFactory;
 
 class SaslNettyServer {
 
-
     private static final Logger LOG = LoggerFactory
         .getLogger(SaslNettyServer.class);
 

http://git-wip-us.apache.org/repos/asf/storm/blob/aee48646/storm-core/src/jvm/backtype/storm/messaging/netty/SaslStormServerHandler.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/backtype/storm/messaging/netty/SaslStormServerHandler.java b/storm-core/src/jvm/backtype/storm/messaging/netty/SaslStormServerHandler.java
index 2836e80..a8f9978 100644
--- a/storm-core/src/jvm/backtype/storm/messaging/netty/SaslStormServerHandler.java
+++ b/storm-core/src/jvm/backtype/storm/messaging/netty/SaslStormServerHandler.java
@@ -147,7 +147,7 @@ public class SaslStormServerHandler extends SimpleChannelUpstreamHandler {
             token = secretKey.getBytes();
         }
 
-        LOG.debug("SASL credentials for storm topology " + topologyName
-                  + " is " + secretKey);
+        LOG.debug("SASL credentials for storm topology {} is {}",
+                  topologyName, secretKey);
     }
 }

http://git-wip-us.apache.org/repos/asf/storm/blob/aee48646/storm-core/src/jvm/backtype/storm/messaging/netty/Server.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/backtype/storm/messaging/netty/Server.java b/storm-core/src/jvm/backtype/storm/messaging/netty/Server.java
index 684e9ea..4990f63 100644
--- a/storm-core/src/jvm/backtype/storm/messaging/netty/Server.java
+++ b/storm-core/src/jvm/backtype/storm/messaging/netty/Server.java
@@ -19,42 +19,32 @@ package backtype.storm.messaging.netty;
 
 import backtype.storm.Config;
 import backtype.storm.grouping.Load;
+import backtype.storm.messaging.ConnectionWithStatus;
+import backtype.storm.messaging.IConnectionCallback;
 import backtype.storm.messaging.TaskMessage;
 import backtype.storm.metric.api.IStatefulObject;
 import backtype.storm.serialization.KryoValuesSerializer;
 import backtype.storm.utils.Utils;
-import org.jboss.netty.bootstrap.ServerBootstrap;
-import org.jboss.netty.channel.Channel;
-import org.jboss.netty.channel.ChannelFactory;
-import org.jboss.netty.channel.group.ChannelGroup;
-import org.jboss.netty.channel.group.DefaultChannelGroup;
-import org.jboss.netty.channel.socket.nio.NioServerSocketChannelFactory;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
+import java.io.IOException;
 import java.net.InetSocketAddress;
-import java.util.ArrayList;
 import java.util.Arrays;
+import java.util.Collection;
 import java.util.HashMap;
 import java.util.Iterator;
 import java.util.List;
 import java.util.Map;
-import java.util.Collection;
-import java.util.concurrent.atomic.AtomicInteger;
 import java.util.concurrent.ConcurrentHashMap;
 import java.util.concurrent.Executors;
-import java.util.concurrent.LinkedBlockingQueue;
 import java.util.concurrent.ThreadFactory;
-import java.io.IOException;
-
-
-import backtype.storm.messaging.ConnectionWithStatus;
-import backtype.storm.messaging.IConnection;
-import backtype.storm.messaging.IConnectionCallback;
-import backtype.storm.messaging.TaskMessage;
-import backtype.storm.metric.api.IStatefulObject;
-import backtype.storm.serialization.KryoValuesSerializer;
-import backtype.storm.utils.Utils;
+import java.util.concurrent.atomic.AtomicInteger;
+import org.jboss.netty.bootstrap.ServerBootstrap;
+import org.jboss.netty.channel.Channel;
+import org.jboss.netty.channel.ChannelFactory;
+import org.jboss.netty.channel.group.ChannelGroup;
+import org.jboss.netty.channel.group.DefaultChannelGroup;
+import org.jboss.netty.channel.socket.nio.NioServerSocketChannelFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 class Server extends ConnectionWithStatus implements IStatefulObject, ISaslServer {
 

http://git-wip-us.apache.org/repos/asf/storm/blob/aee48646/storm-core/src/jvm/backtype/storm/messaging/netty/StormClientHandler.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/backtype/storm/messaging/netty/StormClientHandler.java b/storm-core/src/jvm/backtype/storm/messaging/netty/StormClientHandler.java
index 696a2fc..877b6d8 100644
--- a/storm-core/src/jvm/backtype/storm/messaging/netty/StormClientHandler.java
+++ b/storm-core/src/jvm/backtype/storm/messaging/netty/StormClientHandler.java
@@ -75,7 +75,7 @@ public class StormClientHandler extends SimpleChannelUpstreamHandler  {
                                        + message + " (" + client.getDstAddress() + ")");
         }
     }
-        
+
     @Override
     public void channelInterestChanged(ChannelHandlerContext ctx, ChannelStateEvent e) throws Exception {
         client.notifyInterestChanged(e.getChannel());

http://git-wip-us.apache.org/repos/asf/storm/blob/aee48646/storm-core/src/jvm/backtype/storm/messaging/netty/StormServerHandler.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/backtype/storm/messaging/netty/StormServerHandler.java b/storm-core/src/jvm/backtype/storm/messaging/netty/StormServerHandler.java
index 45f6542..8140df6 100644
--- a/storm-core/src/jvm/backtype/storm/messaging/netty/StormServerHandler.java
+++ b/storm-core/src/jvm/backtype/storm/messaging/netty/StormServerHandler.java
@@ -43,8 +43,8 @@ public class StormServerHandler extends SimpleChannelUpstreamHandler  {
     public void channelConnected(ChannelHandlerContext ctx, ChannelStateEvent e) {
         server.channelConnected(e.getChannel());
         if(channel != null) {
-            LOG.debug("Replacing channel with new channel: "
-                      + channel.toString() + " -> " + e.getChannel().toString());
+            LOG.debug("Replacing channel with new channel: {} -> ",
+                      channel, e.getChannel());
         }
         channel = e.getChannel();
         server.channelConnected(channel);

http://git-wip-us.apache.org/repos/asf/storm/blob/aee48646/storm-core/src/jvm/backtype/storm/security/auth/AuthUtils.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/backtype/storm/security/auth/AuthUtils.java b/storm-core/src/jvm/backtype/storm/security/auth/AuthUtils.java
index 943199c..429c712 100644
--- a/storm-core/src/jvm/backtype/storm/security/auth/AuthUtils.java
+++ b/storm-core/src/jvm/backtype/storm/security/auth/AuthUtils.java
@@ -22,6 +22,7 @@ import javax.security.auth.login.Configuration;
 import javax.security.auth.login.AppConfigurationEntry;
 import javax.security.auth.Subject;
 import java.security.URIParameter;
+import java.security.MessageDigest;
 
 import backtype.storm.security.INimbusCredentialPlugin;
 import org.slf4j.Logger;
@@ -32,10 +33,10 @@ import java.net.URI;
 import java.util.Collection;
 import java.util.Set;
 import java.util.HashSet;
-import java.util.HashMap;
 import java.util.Map;
 import java.util.SortedMap;
 import java.util.TreeMap;
+import java.lang.StringBuilder;
 
 public class AuthUtils {
     private static final Logger LOG = LoggerFactory.getLogger(AuthUtils.class);
@@ -75,10 +76,9 @@ public class AuthUtils {
 
     /**
      * Pull a set of keys out of a Configuration.
-     * @param configs_to_pull A set of config keys that you want the values of.
      * @param conf The config to pull the key/value pairs out of.
      * @param conf_entry The app configuration entry name to get stuff from.
-     * @return Return a map of the configs in configs_to_pull to their values.
+     * @return A map of the configs pulled from conf_entry in conf.
      */
     public static SortedMap<String, ?> PullConfig(Configuration conf,
                                             String conf_entry) throws IOException {
@@ -93,7 +93,6 @@ public class AuthUtils {
         }
 
         TreeMap<String, Object> results = new TreeMap<>();
-        
 
         for(AppConfigurationEntry entry: configurationEntries) {
             Map<String, ?> options = entry.getOptions();
@@ -333,6 +332,19 @@ public class AuthUtils {
             return null;
         }
 
-        return username + ":" + password;
+        try {
+            MessageDigest digest = MessageDigest.getInstance("SHA-512");
+            byte[] output = digest.digest((username + ":" + password).getBytes());
+
+            StringBuilder builder = new StringBuilder();
+            for(byte b : output) {
+                builder.append(String.format("%02x", b));
+            }
+            return builder.toString();
+        }
+        catch(java.security.NoSuchAlgorithmException e) {
+            LOG.error("Cant run SHA-512 digest. Algorithm not available.", e);
+            throw new RuntimeException(e);
+        }
     }
 }
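
makeDigestPayload now ships a SHA-512 hex digest of "username:password" rather than the raw pair. A standalone sketch of that hashing step (the class and method names are illustrative):

    import java.security.MessageDigest;
    import java.security.NoSuchAlgorithmException;

    final class DigestPayload {
        // SHA-512 over "username:password", rendered as lowercase hex.
        static String make(String username, String password) {
            try {
                MessageDigest digest = MessageDigest.getInstance("SHA-512");
                byte[] output = digest.digest((username + ":" + password).getBytes());
                StringBuilder builder = new StringBuilder();
                for (byte b : output) {
                    builder.append(String.format("%02x", b));
                }
                return builder.toString();
            } catch (NoSuchAlgorithmException e) {
                throw new RuntimeException(e);   // SHA-512 is mandated by the JDK spec
            }
        }
    }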

http://git-wip-us.apache.org/repos/asf/storm/blob/aee48646/storm-core/src/jvm/org/apache/storm/pacemaker/PacemakerClient.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/org/apache/storm/pacemaker/PacemakerClient.java b/storm-core/src/jvm/org/apache/storm/pacemaker/PacemakerClient.java
index c144982..4f4a737 100644
--- a/storm-core/src/jvm/org/apache/storm/pacemaker/PacemakerClient.java
+++ b/storm-core/src/jvm/org/apache/storm/pacemaker/PacemakerClient.java
@@ -19,7 +19,6 @@ package org.apache.storm.pacemaker;
 
 import backtype.storm.Config;
 import backtype.storm.generated.HBMessage;
-import backtype.storm.messaging.netty.Client;
 import backtype.storm.messaging.netty.ISaslClient;
 import backtype.storm.messaging.netty.NettyRenameThreadFactory;
 import backtype.storm.security.auth.AuthUtils;
@@ -60,7 +59,7 @@ public class PacemakerClient implements ISaslClient {
 
     private StormBoundedExponentialBackoffRetry backoff = new StormBoundedExponentialBackoffRetry(100, 5000, 20);
     private int retryTimes = 0;
-    
+
     public PacemakerClient(Map config) {
 
         String host = (String)config.get(Config.PACEMAKER_HOST);
@@ -73,8 +72,9 @@ public class PacemakerClient implements ISaslClient {
         String auth = (String)config.get(Config.PACEMAKER_AUTH_METHOD);
         ThriftNettyClientCodec.AuthMethod authMethod;
 
-        if(auth.equals("DIGEST")) {
+        switch(auth) {
 
+        case "DIGEST":
             Configuration login_conf = AuthUtils.GetConfiguration(config);
             authMethod = ThriftNettyClientCodec.AuthMethod.DIGEST;
             secret = AuthUtils.makeDigestPayload(login_conf, AuthUtils.LOGIN_CONTEXT_PACEMAKER_DIGEST);
@@ -82,17 +82,20 @@ public class PacemakerClient implements ISaslClient {
                 LOG.error("Can't start pacemaker server without digest secret.");
                 throw new RuntimeException("Can't start pacemaker server without digest secret.");
             }
+            break;
 
-        }
-        else if(auth.equals("KERBEROS")) {
+        case "KERBEROS":
             authMethod = ThriftNettyClientCodec.AuthMethod.KERBEROS;
-        }
-        else {
-            if(!auth.equals("NONE")) {
-                LOG.warn("Invalid auth scheme: '{}'. Falling back to 'NONE'", auth);
-            }
-            
+            break;
+
+        case "NONE":
+            authMethod = ThriftNettyClientCodec.AuthMethod.NONE;
+            break;
+
+        default:
             authMethod = ThriftNettyClientCodec.AuthMethod.NONE;
+            LOG.warn("Invalid auth scheme: '{}'. Falling back to 'NONE'", auth);
+            break;
         }
 
         closing = new AtomicBoolean(false);
@@ -199,10 +202,10 @@ public class PacemakerClient implements ISaslClient {
     public void gotMessage(HBMessage m) {
         int message_id = m.get_message_id();
         if(message_id >=0 && message_id < maxPending) {
-            
-            LOG.debug("Pacemaker Client got message: {}", m.toString());
+
+            LOG.debug("Pacemaker client got message: {}", m.toString());
             HBMessage request = messages[message_id];
-            
+
             if(request == null) {
                 LOG.debug("No message for slot: {}", Integer.toString(message_id));
             }
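
gotMessage resolves a response against a fixed-size array of outstanding requests indexed by message_id. A rough sketch of that slot-based correlation (names are illustrative; the real client additionally handles slot contention and blocking):

    import java.util.concurrent.atomic.AtomicInteger;

    class SlotCorrelator<T> {
        private final T[] pending;
        private final AtomicInteger next = new AtomicInteger(0);

        @SuppressWarnings("unchecked")
        SlotCorrelator(int maxPending) {
            pending = (T[]) new Object[maxPending];
        }

        // Reserve a slot for an outgoing request and return its message id.
        int register(T request) {
            int id = next.getAndIncrement() % pending.length;
            pending[id] = request;
            return id;
        }

        // Look up (and clear) the request a response belongs to, as gotMessage does.
        T complete(int messageId) {
            if (messageId < 0 || messageId >= pending.length) {
                return null;                 // out-of-range id: nothing to match
            }
            T request = pending[messageId];
            pending[messageId] = null;
            return request;
        }
    }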

http://git-wip-us.apache.org/repos/asf/storm/blob/aee48646/storm-core/src/jvm/org/apache/storm/pacemaker/PacemakerServer.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/org/apache/storm/pacemaker/PacemakerServer.java b/storm-core/src/jvm/org/apache/storm/pacemaker/PacemakerServer.java
index adba9ab..0422dba 100644
--- a/storm-core/src/jvm/org/apache/storm/pacemaker/PacemakerServer.java
+++ b/storm-core/src/jvm/org/apache/storm/pacemaker/PacemakerServer.java
@@ -61,7 +61,9 @@ class PacemakerServer implements ISaslServer {
         this.topo_name = "pacemaker_server";
 
         String auth = (String)config.get(Config.PACEMAKER_AUTH_METHOD);
-        if(auth.equals("DIGEST")) {
+        switch(auth) {
+
+        case "DIGEST":
             Configuration login_conf = AuthUtils.GetConfiguration(config);
             authMethod = ThriftNettyServerCodec.AuthMethod.DIGEST;
             this.secret = AuthUtils.makeDigestPayload(login_conf, AuthUtils.LOGIN_CONTEXT_PACEMAKER_DIGEST);
@@ -69,14 +71,17 @@ class PacemakerServer implements ISaslServer {
                 LOG.error("Can't start pacemaker server without digest secret.");
                 throw new RuntimeException("Can't start pacemaker server without digest secret.");
             }
-        }
-        else if(auth.equals("KERBEROS")) {
+            break;
+
+        case "KERBEROS":
             authMethod = ThriftNettyServerCodec.AuthMethod.KERBEROS;
-        }
-        else if(auth.equals("NONE")) {
+            break;
+
+        case "NONE":
             authMethod = ThriftNettyServerCodec.AuthMethod.NONE;
-        }
-        else {
+            break;
+
+        default:
             LOG.error("Can't start pacemaker server without proper PACEMAKER_AUTH_METHOD.");
             throw new RuntimeException("Can't start pacemaker server without proper PACEMAKER_AUTH_METHOD.");
         }
@@ -115,7 +120,7 @@ class PacemakerServer implements ISaslServer {
 
     public void cleanPipeline(Channel channel) {
         boolean authenticated = authenticated_channels.contains(channel);
-        if(!authenticated) {       
+        if(!authenticated) {
             if(channel.getPipeline().get(ThriftNettyServerCodec.SASL_HANDLER) != null) {
                 channel.getPipeline().remove(ThriftNettyServerCodec.SASL_HANDLER);
             }
@@ -124,10 +129,10 @@ class PacemakerServer implements ISaslServer {
             }
         }
     }
-    
+
     public void received(Object mesg, String remote, Channel channel) throws InterruptedException {
         cleanPipeline(channel);
-        
+
         boolean authenticated = (authMethod == ThriftNettyServerCodec.AuthMethod.NONE) || authenticated_channels.contains(channel);
         HBMessage m = (HBMessage)mesg;
         LOG.debug("received message. Passing to handler. {} : {} : {}",

http://git-wip-us.apache.org/repos/asf/storm/blob/aee48646/storm-core/src/jvm/org/apache/storm/pacemaker/codec/ThriftDecoder.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/org/apache/storm/pacemaker/codec/ThriftDecoder.java b/storm-core/src/jvm/org/apache/storm/pacemaker/codec/ThriftDecoder.java
index c964df2..113594a 100644
--- a/storm-core/src/jvm/org/apache/storm/pacemaker/codec/ThriftDecoder.java
+++ b/storm-core/src/jvm/org/apache/storm/pacemaker/codec/ThriftDecoder.java
@@ -21,7 +21,6 @@ import org.jboss.netty.handler.codec.frame.FrameDecoder;
 import org.jboss.netty.channel.ChannelHandlerContext;
 import org.jboss.netty.channel.Channel;
 import backtype.storm.generated.HBMessage;
-import backtype.storm.generated.HBMessageData;
 import backtype.storm.generated.HBServerMessageType;
 import org.jboss.netty.buffer.ChannelBuffer;
 import backtype.storm.utils.Utils;
@@ -53,13 +52,13 @@ public class ThriftDecoder extends FrameDecoder {
 
         HBMessage m;
         if(buf.hasArray()) {
-            m = (HBMessage)Utils.thriftDeserialize(HBMessage.class, buf.array(), 0, thriftLen);
+            m = Utils.thriftDeserialize(HBMessage.class, buf.array(), 0, thriftLen);
             buf.readerIndex(buf.readerIndex() + thriftLen);
         }
         else {
             byte serialized[] = new byte[thriftLen];
             buf.readBytes(serialized, 0, thriftLen);
-            m = (HBMessage)Utils.thriftDeserialize(HBMessage.class, serialized);
+            m = Utils.thriftDeserialize(HBMessage.class, serialized);
         }
 
         if(m.get_type() == HBServerMessageType.CONTROL_MESSAGE) {

http://git-wip-us.apache.org/repos/asf/storm/blob/aee48646/storm-core/src/jvm/org/apache/storm/pacemaker/codec/ThriftEncoder.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/org/apache/storm/pacemaker/codec/ThriftEncoder.java b/storm-core/src/jvm/org/apache/storm/pacemaker/codec/ThriftEncoder.java
index fb2c5be..a6912f2 100644
--- a/storm-core/src/jvm/org/apache/storm/pacemaker/codec/ThriftEncoder.java
+++ b/storm-core/src/jvm/org/apache/storm/pacemaker/codec/ThriftEncoder.java
@@ -66,7 +66,9 @@ public class ThriftEncoder extends OneToOneEncoder {
 
     @Override
     protected Object encode(ChannelHandlerContext ctx, Channel channel, Object msg) {
-        if(msg == null) return null;
+        if(msg == null) {
+            return null;
+        }
 
         LOG.debug("Trying to encode: " + msg.getClass().toString() + " : " + msg.toString());
 
@@ -92,7 +94,7 @@ public class ThriftEncoder extends OneToOneEncoder {
         }
 
         try {
-            byte serialized[] = Utils.thriftSerialize((TBase)m);
+            byte serialized[] = Utils.thriftSerialize(m);
             ChannelBuffer ret = ChannelBuffers.directBuffer(serialized.length + 4);
 
             ret.writeInt(serialized.length);
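
Encoder and decoder agree on a simple frame: a 4-byte length followed by the serialized HBMessage. A sketch of the FrameDecoder contract the decoder follows, written against java.nio.ByteBuffer instead of Netty's ChannelBuffer (returning null signals "wait for more bytes"):

    import java.nio.ByteBuffer;

    final class FrameSketch {
        // Return the serialized message once a whole frame is buffered, else null.
        static byte[] tryDecode(ByteBuffer buf) {
            if (buf.remaining() < 4) {
                return null;                  // length prefix not complete yet
            }
            buf.mark();
            int thriftLen = buf.getInt();
            if (buf.remaining() < thriftLen) {
                buf.reset();                  // rewind; wait for the rest of the frame
                return null;
            }
            byte[] serialized = new byte[thriftLen];
            buf.get(serialized);
            return serialized;                // ready for Utils.thriftDeserialize(...)
        }
    }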

http://git-wip-us.apache.org/repos/asf/storm/blob/aee48646/storm-core/test/clj/org/apache/storm/pacemaker_test.clj
----------------------------------------------------------------------
diff --git a/storm-core/test/clj/org/apache/storm/pacemaker_test.clj b/storm-core/test/clj/org/apache/storm/pacemaker_test.clj
index 4c5359f..0987923 100644
--- a/storm-core/test/clj/org/apache/storm/pacemaker_test.clj
+++ b/storm-core/test/clj/org/apache/storm/pacemaker_test.clj
@@ -18,8 +18,8 @@
             [org.apache.storm.pacemaker [pacemaker :as pacemaker]]
             [conjure.core :as conjure])
   (:import [backtype.storm.generated
-            HBExecutionException HBNodes HBRecords
-            HBServerMessageType HBMessage HBMessageData HBPulse]))
+            HBExecutionException HBServerMessageType
+            HBMessage HBMessageData HBPulse]))
 
 (defn- message-with-rand-id [type data]
   (let [mid (rand-int 1000)


[02/37] storm git commit: PACEMAKER OPEN SOURCE!

Posted by kn...@apache.org.
http://git-wip-us.apache.org/repos/asf/storm/blob/444ec05e/storm-core/src/jvm/backtype/storm/messaging/netty/SaslNettyClient.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/backtype/storm/messaging/netty/SaslNettyClient.java b/storm-core/src/jvm/backtype/storm/messaging/netty/SaslNettyClient.java
index 023e950..a1cfbb5 100644
--- a/storm-core/src/jvm/backtype/storm/messaging/netty/SaslNettyClient.java
+++ b/storm-core/src/jvm/backtype/storm/messaging/netty/SaslNettyClient.java
@@ -52,9 +52,8 @@ public class SaslNettyClient {
      */
     public SaslNettyClient(String topologyName, byte[] token) {
         try {
-            LOG.debug("SaslNettyClient: Creating SASL "
-                    + SaslUtils.AUTH_DIGEST_MD5
-                    + " client to authenticate to server ");
+            LOG.debug("SaslNettyClient: Creating SASL {} client to authenticate to server ",
+                      SaslUtils.AUTH_DIGEST_MD5);
 
             saslClient = Sasl.createSaslClient(
                     new String[] { SaslUtils.AUTH_DIGEST_MD5 }, null, null,
@@ -141,26 +140,19 @@ public class SaslNettyClient {
                 }
             }
             if (nc != null) {
-                if (LOG.isDebugEnabled()) {
-                    LOG.debug("handle: SASL client callback: setting username: "
-                            + userName);
-                }
+                LOG.debug("handle: SASL client callback: setting username: {}",
+                          userName);
                 nc.setName(userName);
             }
             if (pc != null) {
-                if (LOG.isDebugEnabled()) {
-                    LOG.debug("handle: SASL client callback: setting userPassword");
-                }
+                LOG.debug("handle: SASL client callback: setting userPassword");
                 pc.setPassword(userPassword);
             }
             if (rc != null) {
-                if (LOG.isDebugEnabled()) {
-                    LOG.debug("handle: SASL client callback: setting realm: "
-                            + rc.getDefaultText());
-                }
+                LOG.debug("handle: SASL client callback: setting realm: {}",
+                        rc.getDefaultText());
                 rc.setText(rc.getDefaultText());
             }
         }
     }
-
 }
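
For context on what these callback handlers feed into: DIGEST-MD5 under the JDK SASL API is a three-leg exchange (server challenge, client response, server rspauth). A loopback sketch of that handshake; the literal user/secret, the "storm"/"localhost" protocol and server names, and the single-JVM pairing are illustrative assumptions, not Storm's Netty wiring:

    import javax.security.auth.callback.Callback;
    import javax.security.auth.callback.CallbackHandler;
    import javax.security.auth.callback.NameCallback;
    import javax.security.auth.callback.PasswordCallback;
    import javax.security.sasl.AuthorizeCallback;
    import javax.security.sasl.RealmCallback;
    import javax.security.sasl.Sasl;
    import javax.security.sasl.SaslClient;
    import javax.security.sasl.SaslServer;

    // Loopback sketch only; Storm runs the two sides on opposite ends of a channel.
    public final class DigestMd5Loopback {
        public static void main(String[] args) throws Exception {
            final String user = "topology-1";          // illustrative credentials
            final char[] secret = "shared-token".toCharArray();

            CallbackHandler clientHandler = callbacks -> {
                for (Callback cb : callbacks) {
                    if (cb instanceof NameCallback) {
                        ((NameCallback) cb).setName(user);
                    } else if (cb instanceof PasswordCallback) {
                        ((PasswordCallback) cb).setPassword(secret);
                    } else if (cb instanceof RealmCallback) {
                        RealmCallback rc = (RealmCallback) cb;
                        rc.setText(rc.getDefaultText());
                    }
                }
            };
            CallbackHandler serverHandler = callbacks -> {
                for (Callback cb : callbacks) {
                    if (cb instanceof NameCallback) {
                        ((NameCallback) cb).setName(user);
                    } else if (cb instanceof PasswordCallback) {
                        ((PasswordCallback) cb).setPassword(secret);
                    } else if (cb instanceof AuthorizeCallback) {
                        AuthorizeCallback ac = (AuthorizeCallback) cb;
                        ac.setAuthorized(ac.getAuthenticationID().equals(ac.getAuthorizationID()));
                        if (ac.isAuthorized()) {
                            ac.setAuthorizedID(ac.getAuthorizationID());
                        }
                    }
                }
            };

            SaslServer server = Sasl.createSaslServer("DIGEST-MD5", "storm", "localhost",
                                                      null, serverHandler);
            SaslClient client = Sasl.createSaslClient(new String[] {"DIGEST-MD5"}, null,
                                                      "storm", "localhost", null, clientHandler);

            byte[] challenge = server.evaluateResponse(new byte[0]);  // leg 1: server challenge
            byte[] response = client.evaluateChallenge(challenge);    // leg 2: client digest response
            byte[] rspauth = server.evaluateResponse(response);       // leg 3: server rspauth
            if (rspauth != null) {
                client.evaluateChallenge(rspauth);                    // client verifies rspauth
            }
            System.out.println("authenticated: " + server.isComplete()
                    + " as " + server.getAuthorizationID());
        }
    }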

http://git-wip-us.apache.org/repos/asf/storm/blob/444ec05e/storm-core/src/jvm/backtype/storm/messaging/netty/SaslNettyServer.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/backtype/storm/messaging/netty/SaslNettyServer.java b/storm-core/src/jvm/backtype/storm/messaging/netty/SaslNettyServer.java
index 2cb47d9..0c4b24e 100644
--- a/storm-core/src/jvm/backtype/storm/messaging/netty/SaslNettyServer.java
+++ b/storm-core/src/jvm/backtype/storm/messaging/netty/SaslNettyServer.java
@@ -18,9 +18,6 @@
 package backtype.storm.messaging.netty;
 
 import java.io.IOException;
-import java.nio.charset.Charset;
-import java.util.HashMap;
-import java.util.Map;
 
 import javax.security.auth.callback.Callback;
 import javax.security.auth.callback.CallbackHandler;
@@ -33,133 +30,129 @@ import javax.security.sasl.Sasl;
 import javax.security.sasl.SaslException;
 import javax.security.sasl.SaslServer;
 
-import org.apache.commons.codec.binary.Base64;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 class SaslNettyServer {
 
-	private static final Logger LOG = LoggerFactory
-			.getLogger(SaslNettyServer.class);
-
-	private SaslServer saslServer;
-
-	SaslNettyServer(String topologyName, byte[] token) throws IOException {
-		LOG.debug("SaslNettyServer: Topology token is: " + topologyName
-				+ " with authmethod " + SaslUtils.AUTH_DIGEST_MD5);
-
-		try {
-
-			SaslDigestCallbackHandler ch = new SaslNettyServer.SaslDigestCallbackHandler(
-					topologyName, token);
-
-			saslServer = Sasl.createSaslServer(SaslUtils.AUTH_DIGEST_MD5, null,
-					SaslUtils.DEFAULT_REALM, SaslUtils.getSaslProps(), ch);
-
-		} catch (SaslException e) {
-			LOG.error("SaslNettyServer: Could not create SaslServer: " + e);
-		}
-
-	}
-
-	public boolean isComplete() {
-		return saslServer.isComplete();
-	}
-
-	public String getUserName() {
-		return saslServer.getAuthorizationID();
-	}
-
-	/** CallbackHandler for SASL DIGEST-MD5 mechanism */
-	public static class SaslDigestCallbackHandler implements CallbackHandler {
-
-		/** Used to authenticate the clients */
-		private byte[] userPassword;
-		private String userName;
-
-		public SaslDigestCallbackHandler(String topologyName, byte[] token) {
-			LOG.debug("SaslDigestCallback: Creating SaslDigestCallback handler "
-					+ "with topology token: " + topologyName);
-			this.userName = topologyName;
-			this.userPassword = token;
-		}
-
-		@Override
-		public void handle(Callback[] callbacks) throws IOException,
-				UnsupportedCallbackException {
-			NameCallback nc = null;
-			PasswordCallback pc = null;
-			AuthorizeCallback ac = null;
-
-			for (Callback callback : callbacks) {
-				if (callback instanceof AuthorizeCallback) {
-					ac = (AuthorizeCallback) callback;
-				} else if (callback instanceof NameCallback) {
-					nc = (NameCallback) callback;
-				} else if (callback instanceof PasswordCallback) {
-					pc = (PasswordCallback) callback;
-				} else if (callback instanceof RealmCallback) {
-					continue; // realm is ignored
-				} else {
-					throw new UnsupportedCallbackException(callback,
-							"handle: Unrecognized SASL DIGEST-MD5 Callback");
-				}
-			}
-
-			if (nc != null) {
-				LOG.debug("handle: SASL server DIGEST-MD5 callback: setting "
-						+ "username for client: " + userName);
-
-				nc.setName(userName);
-			}
-
-			if (pc != null) {
-				char[] password = SaslUtils.encodePassword(userPassword);
-
-				LOG.debug("handle: SASL server DIGEST-MD5 callback: setting "
-						+ "password for client: " + userPassword);
-
-				pc.setPassword(password);
-			}
-			if (ac != null) {
-
-				String authid = ac.getAuthenticationID();
-				String authzid = ac.getAuthorizationID();
-
-				if (authid.equals(authzid)) {
-					ac.setAuthorized(true);
-				} else {
-					ac.setAuthorized(false);
-				}
-
-				if (ac.isAuthorized()) {
-					LOG.debug("handle: SASL server DIGEST-MD5 callback: setting "
-							+ "canonicalized client ID: " + userName);
-					ac.setAuthorizedID(authzid);
-				}
-			}
-		}
-	}
-
-	/**
-	 * Used by SaslTokenMessage::processToken() to respond to server SASL
-	 * tokens.
-	 * 
-	 * @param token
-	 *            Server's SASL token
-	 * @return token to send back to the server.
-	 */
-	public byte[] response(byte[] token) {
-		try {
-			LOG.debug("response: Responding to input token of length: "
-					+ token.length);
-			byte[] retval = saslServer.evaluateResponse(token);
-			LOG.debug("response: Response token length: " + retval.length);
-			return retval;
-		} catch (SaslException e) {
-			LOG.error("response: Failed to evaluate client token of length: "
-					+ token.length + " : " + e);
-			return null;
-		}
-	}
-}
\ No newline at end of file
+
+    private static final Logger LOG = LoggerFactory
+        .getLogger(SaslNettyServer.class);
+
+    private SaslServer saslServer;
+
+    SaslNettyServer(String topologyName, byte[] token) throws IOException {
+        LOG.debug("SaslNettyServer: Topology token is: {} with authmethod {}",
+                  topologyName, SaslUtils.AUTH_DIGEST_MD5);
+
+        try {
+            SaslDigestCallbackHandler ch = new SaslNettyServer.SaslDigestCallbackHandler(
+                topologyName, token);
+
+            saslServer = Sasl.createSaslServer(SaslUtils.AUTH_DIGEST_MD5, null,
+                                               SaslUtils.DEFAULT_REALM,
+                                               SaslUtils.getSaslProps(), ch);
+        } catch (SaslException e) {
+            LOG.error("SaslNettyServer: Could not create SaslServer: ", e);
+        }
+    }
+
+    public boolean isComplete() {
+        return saslServer.isComplete();
+    }
+
+    public String getUserName() {
+        return saslServer.getAuthorizationID();
+    }
+
+    /** CallbackHandler for SASL DIGEST-MD5 mechanism */
+    public static class SaslDigestCallbackHandler implements CallbackHandler {
+
+        /** Used to authenticate the clients */
+        private byte[] userPassword;
+        private String userName;
+
+        public SaslDigestCallbackHandler(String topologyName, byte[] token) {
+            LOG.debug("SaslDigestCallback: Creating SaslDigestCallback handler with topology token: {}", topologyName);
+            this.userName = topologyName;
+            this.userPassword = token;
+        }
+
+        @Override
+        public void handle(Callback[] callbacks) throws IOException,
+            UnsupportedCallbackException {
+            NameCallback nc = null;
+            PasswordCallback pc = null;
+            AuthorizeCallback ac = null;
+
+            for (Callback callback : callbacks) {
+                if (callback instanceof AuthorizeCallback) {
+                    ac = (AuthorizeCallback) callback;
+                } else if (callback instanceof NameCallback) {
+                    nc = (NameCallback) callback;
+                } else if (callback instanceof PasswordCallback) {
+                    pc = (PasswordCallback) callback;
+                } else if (callback instanceof RealmCallback) {
+                    continue; // realm is ignored
+                } else {
+                    throw new UnsupportedCallbackException(callback,
+                                                           "handle: Unrecognized SASL DIGEST-MD5 Callback");
+                }
+            }
+
+            if (nc != null) {
+                LOG.debug("handle: SASL server DIGEST-MD5 callback: setting username for client: {}",
+                          userName);
+                nc.setName(userName);
+            }
+
+            if (pc != null) {
+                char[] password = SaslUtils.encodePassword(userPassword);
+
+                LOG.debug("handle: SASL server DIGEST-MD5 callback: setting password for client: {}",
+                          userPassword);
+
+                pc.setPassword(password);
+            }
+            if (ac != null) {
+
+                String authid = ac.getAuthenticationID();
+                String authzid = ac.getAuthorizationID();
+
+                ac.setAuthorized(authid.equals(authzid));
+
+                if (ac.isAuthorized()) {
+                    LOG.debug("handle: SASL server DIGEST-MD5 callback: setting canonicalized client ID: {}",
+                              userName);
+                    ac.setAuthorizedID(authzid);
+                }
+            }
+        }
+    }
+
+    /**
+     * Used by SaslTokenMessage::processToken() to respond to server SASL
+     * tokens.
+     *
+     * @param token
+     *            Server's SASL token
+     * @return token to send back to the server.
+     */
+    public byte[] response(byte[] token) {
+        try {
+            LOG.debug("response: Responding to input token of length: {}",
+                      token.length);
+            byte[] retval = saslServer.evaluateResponse(token);
+            LOG.debug("response: Response token length: {}", retval.length);
+            return retval;
+        } catch (SaslException e) {
+            LOG.error("response: Failed to evaluate client token of length: {} : {}",
+                      token.length, e);
+            return null;
+        }
+    }
+}
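
For reference, the new SaslNettyServer above (and its SaslNettyClient
counterpart) drives the stock javax.security.sasl DIGEST-MD5 state
machine. The standalone sketch below runs the same challenge/response
loop in a single process; the user name and secret are made-up values
(in the patch they come from the topology name and the SASL secret
key), so treat it as a debugging aid, not patch code.

    import java.util.HashMap;
    import javax.security.auth.callback.*;
    import javax.security.sasl.*;

    public class DigestMd5HandshakeSketch {
        public static void main(String[] args) throws Exception {
            final String user = "demo-topology";              // hypothetical
            final char[] pass = "demo-secret".toCharArray();  // hypothetical

            // Server side: hand back the expected password and authorize the
            // client when authentication and authorization IDs match.
            CallbackHandler serverSide = callbacks -> {
                for (Callback cb : callbacks) {
                    if (cb instanceof NameCallback) ((NameCallback) cb).setName(user);
                    else if (cb instanceof PasswordCallback) ((PasswordCallback) cb).setPassword(pass);
                    else if (cb instanceof AuthorizeCallback) {
                        AuthorizeCallback ac = (AuthorizeCallback) cb;
                        ac.setAuthorized(ac.getAuthenticationID().equals(ac.getAuthorizationID()));
                    }
                }
            };
            // Client side: supply name and password, accept the default realm.
            CallbackHandler clientSide = callbacks -> {
                for (Callback cb : callbacks) {
                    if (cb instanceof NameCallback) ((NameCallback) cb).setName(user);
                    else if (cb instanceof PasswordCallback) ((PasswordCallback) cb).setPassword(pass);
                    else if (cb instanceof RealmCallback) {
                        RealmCallback rc = (RealmCallback) cb;
                        rc.setText(rc.getDefaultText());
                    }
                }
            };

            SaslServer server = Sasl.createSaslServer(
                "DIGEST-MD5", null, "default", new HashMap<String, Object>(), serverSide);
            SaslClient client = Sasl.createSaslClient(
                new String[] {"DIGEST-MD5"}, null, null, "default",
                new HashMap<String, Object>(), clientSide);

            // DIGEST-MD5 has no initial client response: the server speaks first.
            byte[] token = new byte[0];
            while (!server.isComplete()) {
                token = server.evaluateResponse(token);       // challenge, then rspauth
                if (token != null && !client.isComplete()) {
                    token = client.evaluateChallenge(token);  // client response
                }
            }
            System.out.println("Authorized as: " + server.getAuthorizationID());
        }
    }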

http://git-wip-us.apache.org/repos/asf/storm/blob/444ec05e/storm-core/src/jvm/backtype/storm/messaging/netty/SaslNettyServerState.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/backtype/storm/messaging/netty/SaslNettyServerState.java b/storm-core/src/jvm/backtype/storm/messaging/netty/SaslNettyServerState.java
index 9800959..4264e51 100644
--- a/storm-core/src/jvm/backtype/storm/messaging/netty/SaslNettyServerState.java
+++ b/storm-core/src/jvm/backtype/storm/messaging/netty/SaslNettyServerState.java
@@ -22,10 +22,9 @@ import org.jboss.netty.channel.ChannelLocal;
 
 final class SaslNettyServerState {
 
-	public static final ChannelLocal<SaslNettyServer> getSaslNettyServer = new ChannelLocal<SaslNettyServer>() {
-		protected SaslNettyServer initialValue(Channel channel) {
-			return null;
-		}
-	};
-
-}
\ No newline at end of file
+    public static final ChannelLocal<SaslNettyServer> getSaslNettyServer = new ChannelLocal<SaslNettyServer>() {
+        protected SaslNettyServer initialValue(Channel channel) {
+            return null;
+        }
+    };
+}
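
ChannelLocal is Netty 3's per-channel attachment point; the class above
declares one slot so any handler that sees a given channel can fetch the
SaslNettyServer bound to it. A get-or-create sketch of the intended use,
mirroring the client-side pattern visible in SaslStormClientHandler
below (topologyName and token would come from the server's name() and
secretKey()):

    import org.jboss.netty.channel.Channel;

    final class SaslServerLookup {
        static SaslNettyServer getOrCreate(Channel channel, String topologyName,
                                           byte[] token) throws java.io.IOException {
            SaslNettyServer s = SaslNettyServerState.getSaslNettyServer.get(channel);
            if (s == null) {
                // First token from this channel: set up the SASL state machine
                // and pin it to the channel for later handler invocations.
                s = new SaslNettyServer(topologyName, token);
                SaslNettyServerState.getSaslNettyServer.set(channel, s);
            }
            return s;
        }
    }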

http://git-wip-us.apache.org/repos/asf/storm/blob/444ec05e/storm-core/src/jvm/backtype/storm/messaging/netty/SaslStormClientHandler.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/backtype/storm/messaging/netty/SaslStormClientHandler.java b/storm-core/src/jvm/backtype/storm/messaging/netty/SaslStormClientHandler.java
index 12b466c..2a5ae99 100644
--- a/storm-core/src/jvm/backtype/storm/messaging/netty/SaslStormClientHandler.java
+++ b/storm-core/src/jvm/backtype/storm/messaging/netty/SaslStormClientHandler.java
@@ -28,19 +28,18 @@ import org.jboss.netty.channel.SimpleChannelUpstreamHandler;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import backtype.storm.Config;
-
 public class SaslStormClientHandler extends SimpleChannelUpstreamHandler {
 
     private static final Logger LOG = LoggerFactory
             .getLogger(SaslStormClientHandler.class);
-    private Client client;
+
+    private ISaslClient client;
     long start_time;
     /** Used for client or server's token to send or receive from each other. */
     private byte[] token;
-    private String topologyName;
+    private String name;
 
-    public SaslStormClientHandler(Client client) throws IOException {
+    public SaslStormClientHandler(ISaslClient client) throws IOException {
         this.client = client;
         start_time = System.currentTimeMillis();
         getSASLCredentials();
@@ -51,9 +50,7 @@ public class SaslStormClientHandler extends SimpleChannelUpstreamHandler {
             ChannelStateEvent event) {
         // register the newly established channel
         Channel channel = ctx.getChannel();
-
-        LOG.info("Connection established from " + channel.getLocalAddress()
-                + " to " + channel.getRemoteAddress());
+        client.channelConnected(channel);
 
         try {
             SaslNettyClient saslNettyClient = SaslNettyClientState.getSaslNettyClient
@@ -62,10 +59,11 @@ public class SaslStormClientHandler extends SimpleChannelUpstreamHandler {
             if (saslNettyClient == null) {
                 LOG.debug("Creating saslNettyClient now " + "for channel: "
                         + channel);
-                saslNettyClient = new SaslNettyClient(topologyName, token);
+                saslNettyClient = new SaslNettyClient(name, token);
                 SaslNettyClientState.getSaslNettyClient.set(channel,
                         saslNettyClient);
             }
+            LOG.debug("Sending SASL_TOKEN_MESSAGE_REQUEST");
             channel.write(ControlMessage.SASL_TOKEN_MESSAGE_REQUEST);
         } catch (Exception e) {
             LOG.error("Failed to authenticate with server " + "due to error: ",
@@ -96,7 +94,7 @@ public class SaslStormClientHandler extends SimpleChannelUpstreamHandler {
             ControlMessage msg = (ControlMessage) event.getMessage();
             if (msg == ControlMessage.SASL_COMPLETE_REQUEST) {
                 LOG.debug("Server has sent us the SaslComplete "
-                        + "message. Allowing normal work to proceed.");
+                          + "message. Allowing normal work to proceed.");
 
                 if (!saslNettyClient.isComplete()) {
                     LOG.error("Server returned a Sasl-complete message, "
@@ -106,6 +104,8 @@ public class SaslStormClientHandler extends SimpleChannelUpstreamHandler {
                             + "we can tell, we are not authenticated yet.");
                 }
                 ctx.getPipeline().remove(this);
+                this.client.channelReady();
+
                 // We call fireMessageReceived since the client is allowed to
                 // perform this request. The client's request will now proceed
                 // to the next pipeline component namely StormClientHandler.
@@ -116,7 +116,7 @@ public class SaslStormClientHandler extends SimpleChannelUpstreamHandler {
         SaslMessageToken saslTokenMessage = (SaslMessageToken) event
                 .getMessage();
         LOG.debug("Responding to server's token of length: "
-                + saslTokenMessage.getSaslToken().length);
+                  + saslTokenMessage.getSaslToken().length);
 
         // Generate SASL response (but we only actually send the response if
         // it's non-null.
@@ -127,17 +127,18 @@ public class SaslStormClientHandler extends SimpleChannelUpstreamHandler {
             // (if not, warn), and return without sending a response back to the
             // server.
             LOG.debug("Response to server is null: "
-                    + "authentication should now be complete.");
+                      + "authentication should now be complete.");
             if (!saslNettyClient.isComplete()) {
                 LOG.warn("Generated a null response, "
                         + "but authentication is not complete.");
                 throw new Exception("Server response is null, but as far as "
                         + "we can tell, we are not authenticated yet.");
             }
+            this.client.channelReady();
             return;
         } else {
             LOG.debug("Response to server token has length:"
-                    + responseToServer.length);
+                      + responseToServer.length);
         }
         // Construct a message containing the SASL response and send it to the
         // server.
@@ -146,12 +147,14 @@ public class SaslStormClientHandler extends SimpleChannelUpstreamHandler {
     }
 
     private void getSASLCredentials() throws IOException {
-        topologyName = (String) this.client.getStormConf().get(Config.TOPOLOGY_NAME);
-        String secretKey = SaslUtils.getSecretKey(this.client.getStormConf());
+        name = client.name();
+        String secretKey = client.secretKey();
+
         if (secretKey != null) {
             token = secretKey.getBytes();
         }
-        LOG.debug("SASL credentials for storm topology " + topologyName
+        LOG.debug("SASL credentials for storm topology " + name
                 + " is " + secretKey);
     }
-}
\ No newline at end of file
+}
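
The ISaslClient interface itself ships elsewhere in this commit; from
the call sites above (channelConnected(), channelReady(), name(),
secretKey()) it presumably amounts to the following sketch, which is
what lets both the worker-to-worker Client and the new PacemakerClient
share this handler:

    import org.jboss.netty.channel.Channel;

    // Inferred sketch only -- see the real interface in this commit.
    public interface ISaslClient {
        void channelConnected(Channel channel);  // channel is up, SASL may start
        void channelReady();                     // SASL finished; safe to send
        String name();                           // login name, e.g. topology name
        String secretKey();                      // shared secret for DIGEST-MD5
    }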

http://git-wip-us.apache.org/repos/asf/storm/blob/444ec05e/storm-core/src/jvm/backtype/storm/messaging/netty/SaslStormServerAuthorizeHandler.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/backtype/storm/messaging/netty/SaslStormServerAuthorizeHandler.java b/storm-core/src/jvm/backtype/storm/messaging/netty/SaslStormServerAuthorizeHandler.java
index 04cd66e..8b3d1c0 100644
--- a/storm-core/src/jvm/backtype/storm/messaging/netty/SaslStormServerAuthorizeHandler.java
+++ b/storm-core/src/jvm/backtype/storm/messaging/netty/SaslStormServerAuthorizeHandler.java
@@ -80,4 +80,4 @@ public class SaslStormServerAuthorizeHandler extends SimpleChannelUpstreamHandle
 		// pipeline component.
 		Channels.fireMessageReceived(ctx, msg);
 	}
-}
\ No newline at end of file
+}

http://git-wip-us.apache.org/repos/asf/storm/blob/444ec05e/storm-core/src/jvm/backtype/storm/messaging/netty/SaslStormServerHandler.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/backtype/storm/messaging/netty/SaslStormServerHandler.java b/storm-core/src/jvm/backtype/storm/messaging/netty/SaslStormServerHandler.java
index 02448e2..15fe9fb 100644
--- a/storm-core/src/jvm/backtype/storm/messaging/netty/SaslStormServerHandler.java
+++ b/storm-core/src/jvm/backtype/storm/messaging/netty/SaslStormServerHandler.java
@@ -28,11 +28,9 @@ import org.jboss.netty.channel.SimpleChannelUpstreamHandler;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import backtype.storm.Config;
-
 public class SaslStormServerHandler extends SimpleChannelUpstreamHandler {
 
-    Server server;
+    ISaslServer server;
     /** Used for client or server's token to send or receive from each other. */
     private byte[] token;
     private String topologyName;
@@ -40,7 +38,7 @@ public class SaslStormServerHandler extends SimpleChannelUpstreamHandler {
     private static final Logger LOG = LoggerFactory
             .getLogger(SaslStormServerHandler.class);
 
-    public SaslStormServerHandler(Server server) throws IOException {
+    public SaslStormServerHandler(ISaslServer server) throws IOException {
         this.server = server;
         getSASLCredentials();
     }
@@ -123,6 +121,7 @@ public class SaslStormServerHandler extends SimpleChannelUpstreamHandler {
                 LOG.debug("Removing SaslServerHandler from pipeline since SASL "
                         + "authentication is complete.");
                 ctx.getPipeline().remove(this);
+                server.authenticated(channel);
             }
             return;
         } else {
@@ -139,17 +138,19 @@ public class SaslStormServerHandler extends SimpleChannelUpstreamHandler {
 
     @Override
     public void exceptionCaught(ChannelHandlerContext ctx, ExceptionEvent e) {
-        server.closeChannel(e.getChannel());
+        if(server != null) server.closeChannel(e.getChannel());
     }
 
     private void getSASLCredentials() throws IOException {
-        topologyName = (String) this.server.storm_conf
-                .get(Config.TOPOLOGY_NAME);
-        String secretKey = SaslUtils.getSecretKey(this.server.storm_conf);
+        topologyName = server.name();
+        String secretKey = server.secretKey();
+
         if (secretKey != null) {
             token = secretKey.getBytes();
         }
+
         LOG.debug("SASL credentials for storm topology " + topologyName
-                + " is " + secretKey);
+                  + " is " + secretKey);
     }
 }
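
Likewise, ISaslServer ships elsewhere in this commit; judging from the
calls above plus the Server implementation below (name(), secretKey(),
authenticated(), closeChannel(), channelConnected(), received()), it
presumably splits along these lines:

    import org.jboss.netty.channel.Channel;

    // Inferred sketch only -- see the real interfaces in this commit.
    public interface IServer {
        void channelConnected(Channel channel);
        void received(Object message, String remote, Channel channel)
            throws InterruptedException;
        void closeChannel(Channel channel);
    }

    public interface ISaslServer extends IServer {
        String name();                        // login name, e.g. topology name
        String secretKey();                   // shared secret for DIGEST-MD5
        void authenticated(Channel channel);  // channel passed SASL auth
    }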

http://git-wip-us.apache.org/repos/asf/storm/blob/444ec05e/storm-core/src/jvm/backtype/storm/messaging/netty/SaslUtils.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/backtype/storm/messaging/netty/SaslUtils.java b/storm-core/src/jvm/backtype/storm/messaging/netty/SaslUtils.java
index a2d0b26..d9980c5 100644
--- a/storm-core/src/jvm/backtype/storm/messaging/netty/SaslUtils.java
+++ b/storm-core/src/jvm/backtype/storm/messaging/netty/SaslUtils.java
@@ -29,6 +29,7 @@ import org.apache.commons.io.Charsets;
 import backtype.storm.Config;
 
 class SaslUtils {
+    public static final String KERBEROS = "GSSAPI";
     public static final String AUTH_DIGEST_MD5 = "DIGEST-MD5";
     public static final String DEFAULT_REALM = "default";
 

http://git-wip-us.apache.org/repos/asf/storm/blob/444ec05e/storm-core/src/jvm/backtype/storm/messaging/netty/Server.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/backtype/storm/messaging/netty/Server.java b/storm-core/src/jvm/backtype/storm/messaging/netty/Server.java
index e984144..719c84c 100644
--- a/storm-core/src/jvm/backtype/storm/messaging/netty/Server.java
+++ b/storm-core/src/jvm/backtype/storm/messaging/netty/Server.java
@@ -17,6 +17,20 @@
  */
 package backtype.storm.messaging.netty;
 
+import backtype.storm.Config;
+import backtype.storm.messaging.TaskMessage;
+import backtype.storm.metric.api.IStatefulObject;
+import backtype.storm.serialization.KryoValuesSerializer;
+import backtype.storm.utils.Utils;
+import org.jboss.netty.bootstrap.ServerBootstrap;
+import org.jboss.netty.channel.Channel;
+import org.jboss.netty.channel.ChannelFactory;
+import org.jboss.netty.channel.group.ChannelGroup;
+import org.jboss.netty.channel.group.DefaultChannelGroup;
+import org.jboss.netty.channel.socket.nio.NioServerSocketChannelFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
 import java.net.InetSocketAddress;
 import java.util.ArrayList;
 import java.util.Arrays;
@@ -24,29 +38,17 @@ import java.util.HashMap;
 import java.util.Iterator;
 import java.util.List;
 import java.util.Map;
+import java.util.Collection;
 import java.util.concurrent.atomic.AtomicInteger;
 import java.util.concurrent.ConcurrentHashMap;
 import java.util.concurrent.Executors;
 import java.util.concurrent.LinkedBlockingQueue;
 import java.util.concurrent.ThreadFactory;
+import java.io.IOException;
 
-import org.jboss.netty.bootstrap.ServerBootstrap;
-import org.jboss.netty.channel.Channel;
-import org.jboss.netty.channel.ChannelFactory;
-import org.jboss.netty.channel.group.ChannelGroup;
-import org.jboss.netty.channel.group.DefaultChannelGroup;
-import org.jboss.netty.channel.socket.nio.NioServerSocketChannelFactory;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import backtype.storm.Config;
 import backtype.storm.messaging.ConnectionWithStatus;
-import backtype.storm.messaging.IConnection;
-import backtype.storm.messaging.TaskMessage;
-import backtype.storm.metric.api.IStatefulObject;
-import backtype.storm.utils.Utils;
 
-class Server extends ConnectionWithStatus implements IStatefulObject {
+class Server extends ConnectionWithStatus implements IStatefulObject, ISaslServer {
 
     private static final Logger LOG = LoggerFactory.getLogger(Server.class);
     @SuppressWarnings("rawtypes")
@@ -55,58 +57,58 @@ class Server extends ConnectionWithStatus implements IStatefulObject {
     private final ConcurrentHashMap<String, AtomicInteger> messagesEnqueued = new ConcurrentHashMap<String, AtomicInteger>();
     private final AtomicInteger messagesDequeued = new AtomicInteger(0);
     private final AtomicInteger[] pendingMessages;
-    
-    
+
     // Create multiple queues for incoming messages. The size equals the number of receiver threads.
     // For message which is sent to same task, it will be stored in the same queue to preserve the message order.
     private LinkedBlockingQueue<ArrayList<TaskMessage>>[] message_queue;
-    
+
     volatile ChannelGroup allChannels = new DefaultChannelGroup("storm-server");
     final ChannelFactory factory;
     final ServerBootstrap bootstrap;
-    
+
     private int queueCount;
     private volatile HashMap<Integer, Integer> taskToQueueId = null;
     int roundRobinQueueId;
-	
+
     private volatile boolean closing = false;
     List<TaskMessage> closeMessage = Arrays.asList(new TaskMessage(-1, null));
-    
-    
+    private KryoValuesSerializer _ser;
+
     @SuppressWarnings("rawtypes")
     Server(Map storm_conf, int port) {
         this.storm_conf = storm_conf;
         this.port = port;
-        
+        _ser = new KryoValuesSerializer(storm_conf);
+
         queueCount = Utils.getInt(storm_conf.get(Config.WORKER_RECEIVER_THREAD_COUNT), 1);
         roundRobinQueueId = 0;
         taskToQueueId = new HashMap<Integer, Integer>();
-    
+
         message_queue = new LinkedBlockingQueue[queueCount];
         pendingMessages = new AtomicInteger[queueCount];
         for (int i = 0; i < queueCount; i++) {
             message_queue[i] = new LinkedBlockingQueue<ArrayList<TaskMessage>>();
             pendingMessages[i] = new AtomicInteger(0);
         }
-        
+
         // Configure the server.
         int buffer_size = Utils.getInt(storm_conf.get(Config.STORM_MESSAGING_NETTY_BUFFER_SIZE));
         int backlog = Utils.getInt(storm_conf.get(Config.STORM_MESSAGING_NETTY_SOCKET_BACKLOG), 500);
         int maxWorkers = Utils.getInt(storm_conf.get(Config.STORM_MESSAGING_NETTY_SERVER_WORKER_THREADS));
 
-        ThreadFactory bossFactory = new NettyRenameThreadFactory(name() + "-boss");
-        ThreadFactory workerFactory = new NettyRenameThreadFactory(name() + "-worker");
-        
+        ThreadFactory bossFactory = new NettyRenameThreadFactory(netty_name() + "-boss");
+        ThreadFactory workerFactory = new NettyRenameThreadFactory(netty_name() + "-worker");
+
         if (maxWorkers > 0) {
-            factory = new NioServerSocketChannelFactory(Executors.newCachedThreadPool(bossFactory), 
+            factory = new NioServerSocketChannelFactory(Executors.newCachedThreadPool(bossFactory),
                 Executors.newCachedThreadPool(workerFactory), maxWorkers);
         } else {
-            factory = new NioServerSocketChannelFactory(Executors.newCachedThreadPool(bossFactory), 
+            factory = new NioServerSocketChannelFactory(Executors.newCachedThreadPool(bossFactory),
                 Executors.newCachedThreadPool(workerFactory));
         }
-        
-        LOG.info("Create Netty Server " + name() + ", buffer_size: " + buffer_size + ", maxWorkers: " + maxWorkers);
-        
+
+        LOG.info("Create Netty Server " + netty_name() + ", buffer_size: " + buffer_size + ", maxWorkers: " + maxWorkers);
+
         bootstrap = new ServerBootstrap(factory);
         bootstrap.setOption("child.tcpNoDelay", true);
         bootstrap.setOption("child.receiveBufferSize", buffer_size);
@@ -120,7 +122,7 @@ class Server extends ConnectionWithStatus implements IStatefulObject {
         Channel channel = bootstrap.bind(new InetSocketAddress(port));
         allChannels.add(channel);
     }
-    
+
     private ArrayList<TaskMessage>[] groupMessages(List<TaskMessage> msgs) {
         ArrayList<TaskMessage> messageGroups[] = new ArrayList[queueCount];
 
@@ -142,7 +144,7 @@ class Server extends ConnectionWithStatus implements IStatefulObject {
         }
         return messageGroups;
     }
-    
+
     private Integer getMessageQueueId(int task) {
         // try to construct the map from taskId -> queueId in round robin manner.
         Integer queueId = taskToQueueId.get(task);
@@ -181,9 +183,8 @@ class Server extends ConnectionWithStatus implements IStatefulObject {
         }
     }
 
-
     /**
-     * enqueue a received message 
+     * enqueue a received message
      * @throws InterruptedException
      */
     protected void enqueue(List<TaskMessage> msgs, String from) throws InterruptedException {
@@ -206,7 +207,7 @@ class Server extends ConnectionWithStatus implements IStatefulObject {
         }
     }
 
-    public Iterator<TaskMessage> recv(int flags, int receiverId) {
+    public Iterator<TaskMessage> recv(int flags, int receiverId)  {
         if (closing) {
             return closeMessage.iterator();
         }
@@ -236,7 +237,7 @@ class Server extends ConnectionWithStatus implements IStatefulObject {
         }
         return null;
     }
-   
+
     /**
      * register a newly created channel
      * @param channel
@@ -244,12 +245,12 @@ class Server extends ConnectionWithStatus implements IStatefulObject {
     protected void addChannel(Channel channel) {
         allChannels.add(channel);
     }
-    
+
     /**
      * close a channel
      * @param channel
      */
-    protected void closeChannel(Channel channel) {
+    public void closeChannel(Channel channel) {
         channel.close().awaitUninterruptibly();
         allChannels.remove(channel);
     }
@@ -265,15 +266,17 @@ class Server extends ConnectionWithStatus implements IStatefulObject {
         }
     }
 
+    @Override
     public void send(int task, byte[] message) {
         throw new UnsupportedOperationException("Server connection should not send any messages");
     }
-    
+
+    @Override
     public void send(Iterator<TaskMessage> msgs) {
       throw new UnsupportedOperationException("Server connection should not send any messages");
     }
-	
-    public String name() {
+
+    public String netty_name() {
       return "Netty-server-localhost-" + port;
     }
 
@@ -306,7 +309,7 @@ class Server extends ConnectionWithStatus implements IStatefulObject {
     }
 
     public Object getState() {
-        LOG.info("Getting metrics for server on port {}", port);
+        LOG.debug("Getting metrics for server on port {}", port);
         HashMap<String, Object> ret = new HashMap<String, Object>();
         ret.put("dequeuedMessages", messagesDequeued.getAndSet(0));
         ArrayList<Integer> pending = new ArrayList<Integer>(pendingMessages.length);
@@ -330,8 +333,31 @@ class Server extends ConnectionWithStatus implements IStatefulObject {
         return ret;
     }
 
-    @Override public String toString() {
-       return String.format("Netty server listening on port %s", port);
+    /** Implementing IServer and ISaslServer. **/
+    public void channelConnected(Channel c) {
+        addChannel(c);
+    }
+
+    public void received(Object message, String remote, Channel channel) throws InterruptedException {
+        List<TaskMessage> msgs = (List<TaskMessage>) message;
+        enqueue(msgs, remote);
+    }
+
+    public String name() {
+        return (String)storm_conf.get(Config.TOPOLOGY_NAME);
+    }
+
+    public String secretKey() {
+        return SaslUtils.getSecretKey(storm_conf);
+    }
+
+    public void authenticated(Channel c) {
+        return;
+    }
+
+    @Override
+    public String toString() {
+        return String.format("Netty server listening on port %s", port);
     }
 
 }
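
The task-to-queue mapping that getMessageQueueId() maintains (context
above) is a pin-once round robin: the first message seen from a task
picks the next queue in rotation, and every later message for that task
reuses the same queue, which is what preserves per-task message order
across receiver threads. A minimal standalone sketch of the same policy:

    import java.util.HashMap;
    import java.util.Map;

    public class RoundRobinTaskAssigner {
        private final int queueCount;
        private final Map<Integer, Integer> taskToQueueId = new HashMap<>();
        private int nextQueueId = 0;

        public RoundRobinTaskAssigner(int queueCount) {
            this.queueCount = queueCount;
        }

        // Pin a task to the next queue on first sight; reuse the pin afterwards.
        public synchronized int queueIdFor(int task) {
            Integer queueId = taskToQueueId.get(task);
            if (queueId == null) {
                queueId = nextQueueId;
                nextQueueId = (nextQueueId + 1) % queueCount;
                taskToQueueId.put(task, queueId);
            }
            return queueId;
        }
    }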

http://git-wip-us.apache.org/repos/asf/storm/blob/444ec05e/storm-core/src/jvm/backtype/storm/messaging/netty/StormClientHandler.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/backtype/storm/messaging/netty/StormClientHandler.java b/storm-core/src/jvm/backtype/storm/messaging/netty/StormClientHandler.java
index 2d25001..2c8eae9 100644
--- a/storm-core/src/jvm/backtype/storm/messaging/netty/StormClientHandler.java
+++ b/storm-core/src/jvm/backtype/storm/messaging/netty/StormClientHandler.java
@@ -17,7 +17,20 @@
  */
 package backtype.storm.messaging.netty;
 
+import backtype.storm.messaging.TaskMessage;
+import backtype.storm.serialization.KryoValuesDeserializer;
+
 import java.net.ConnectException;
+import java.util.Map;
+import java.util.List;
+import java.io.IOException;
+
+import org.jboss.netty.channel.Channel;
+import org.jboss.netty.channel.ChannelHandlerContext;
+import org.jboss.netty.channel.ChannelStateEvent;
+import org.jboss.netty.channel.ExceptionEvent;
+import org.jboss.netty.channel.MessageEvent;
+import org.jboss.netty.channel.SimpleChannelUpstreamHandler;
 
 import org.jboss.netty.channel.*;
 import org.slf4j.Logger;
@@ -26,12 +39,28 @@ import org.slf4j.LoggerFactory;
 public class StormClientHandler extends SimpleChannelUpstreamHandler  {
     private static final Logger LOG = LoggerFactory.getLogger(StormClientHandler.class);
     private Client client;
-    
-    StormClientHandler(Client client) {
+    private KryoValuesDeserializer _des;
+
+    StormClientHandler(Client client, Map conf) {
         this.client = client;
+        _des = new KryoValuesDeserializer(conf);
     }
 
     @Override
+    public void messageReceived(ChannelHandlerContext ctx, MessageEvent event) {
+        // examine the response message from the server
+        Object message = event.getMessage();
+        if (message instanceof ControlMessage) {
+            ControlMessage msg = (ControlMessage) message;
+            if (msg == ControlMessage.FAILURE_RESPONSE) {
+                LOG.info("failure response:{}", msg);
+            }
+        } else {
+            throw new RuntimeException("Don't know how to handle a message of type "
+                                       + message + " (" + client.getDstAddress() + ")");
+        }
+    }
+
+    @Override
     public void channelInterestChanged(ChannelHandlerContext ctx, ChannelStateEvent e) throws Exception {
         client.notifyInterestChanged(e.getChannel());
     }
@@ -40,7 +69,7 @@ public class StormClientHandler extends SimpleChannelUpstreamHandler  {
     public void exceptionCaught(ChannelHandlerContext ctx, ExceptionEvent event) {
         Throwable cause = event.getCause();
         if (!(cause instanceof ConnectException)) {
-            LOG.info("Connection failed " + client.dstAddressPrefixedName, cause);
+            LOG.info("Connection to "+client.getDstAddress()+" failed:", cause);
         }
     }
 }

http://git-wip-us.apache.org/repos/asf/storm/blob/444ec05e/storm-core/src/jvm/backtype/storm/messaging/netty/StormClientPipelineFactory.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/backtype/storm/messaging/netty/StormClientPipelineFactory.java b/storm-core/src/jvm/backtype/storm/messaging/netty/StormClientPipelineFactory.java
index 4be06cd..6158eef 100644
--- a/storm-core/src/jvm/backtype/storm/messaging/netty/StormClientPipelineFactory.java
+++ b/storm-core/src/jvm/backtype/storm/messaging/netty/StormClientPipelineFactory.java
@@ -22,12 +22,15 @@ import org.jboss.netty.channel.ChannelPipelineFactory;
 import org.jboss.netty.channel.Channels;
 
 import backtype.storm.Config;
+import java.util.Map;
 
 class StormClientPipelineFactory implements ChannelPipelineFactory {
     private Client client;
+    private Map conf;
 
-    StormClientPipelineFactory(Client client) {
+    StormClientPipelineFactory(Client client, Map conf) {
         this.client = client;
+        this.conf = conf;
     }
 
     public ChannelPipeline getPipeline() throws Exception {
@@ -39,15 +42,15 @@ class StormClientPipelineFactory implements ChannelPipelineFactory {
         // Encoder
         pipeline.addLast("encoder", new MessageEncoder());
 
-        boolean isNettyAuth = (Boolean) this.client.getStormConf().get(Config.STORM_MESSAGING_NETTY_AUTHENTICATION);
+        boolean isNettyAuth = (Boolean) conf
+                .get(Config.STORM_MESSAGING_NETTY_AUTHENTICATION);
         if (isNettyAuth) {
             // Authenticate: Removed after authentication completes
             pipeline.addLast("saslClientHandler", new SaslStormClientHandler(
                     client));
         }
         // business logic.
-        pipeline.addLast("handler", new StormClientHandler(client));
-
+        pipeline.addLast("handler", new StormClientHandler(client, conf));
         return pipeline;
     }
 }

http://git-wip-us.apache.org/repos/asf/storm/blob/444ec05e/storm-core/src/jvm/backtype/storm/messaging/netty/StormServerHandler.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/backtype/storm/messaging/netty/StormServerHandler.java b/storm-core/src/jvm/backtype/storm/messaging/netty/StormServerHandler.java
index 6b71171..45f6542 100644
--- a/storm-core/src/jvm/backtype/storm/messaging/netty/StormServerHandler.java
+++ b/storm-core/src/jvm/backtype/storm/messaging/netty/StormServerHandler.java
@@ -17,7 +17,8 @@
  */
 package backtype.storm.messaging.netty;
 
-import backtype.storm.messaging.TaskMessage;
+import backtype.storm.utils.Utils;
+import org.jboss.netty.channel.Channel;
 import org.jboss.netty.channel.ChannelHandlerContext;
 import org.jboss.netty.channel.ChannelStateEvent;
 import org.jboss.netty.channel.ExceptionEvent;
@@ -25,33 +26,39 @@ import org.jboss.netty.channel.MessageEvent;
 import org.jboss.netty.channel.SimpleChannelUpstreamHandler;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
-import java.util.List;
 import java.util.concurrent.atomic.AtomicInteger;
 
-class StormServerHandler extends SimpleChannelUpstreamHandler  {
+public class StormServerHandler extends SimpleChannelUpstreamHandler  {
     private static final Logger LOG = LoggerFactory.getLogger(StormServerHandler.class);
-    Server server;
+    IServer server;
     private AtomicInteger failure_count; 
+    private Channel channel;
     
-    StormServerHandler(Server server) {
+    public StormServerHandler(IServer server) {
         this.server = server;
         failure_count = new AtomicInteger(0);
     }
     
     @Override
     public void channelConnected(ChannelHandlerContext ctx, ChannelStateEvent e) {
-        server.addChannel(e.getChannel());
+        if (channel != null) {
+            LOG.debug("Replacing channel with new channel: "
+                      + channel.toString() + " -> " + e.getChannel().toString());
+        }
+        channel = e.getChannel();
+        server.channelConnected(channel);
     }
     
     @Override
     public void messageReceived(ChannelHandlerContext ctx, MessageEvent e) {
-      List<TaskMessage> msgs = (List<TaskMessage>) e.getMessage();
+      Object msgs = e.getMessage();
       if (msgs == null) {
         return;
       }
       
       try {
-        server.enqueue(msgs, e.getRemoteAddress().toString());
+        server.received(msgs, e.getRemoteAddress().toString(), channel);
       } catch (InterruptedException e1) {
         LOG.info("failed to enqueue a request message", e1);
         failure_count.incrementAndGet();
@@ -61,6 +68,7 @@ class StormServerHandler extends SimpleChannelUpstreamHandler  {
     @Override
     public void exceptionCaught(ChannelHandlerContext ctx, ExceptionEvent e) {
         LOG.error("server errors in handling the request", e.getCause());
+        Utils.handleUncaughtException(e.getCause());
         server.closeChannel(e.getChannel());
     }
 }

http://git-wip-us.apache.org/repos/asf/storm/blob/444ec05e/storm-core/src/jvm/backtype/storm/security/auth/AuthUtils.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/backtype/storm/security/auth/AuthUtils.java b/storm-core/src/jvm/backtype/storm/security/auth/AuthUtils.java
index ac3fb53..cd79f4e 100644
--- a/storm-core/src/jvm/backtype/storm/security/auth/AuthUtils.java
+++ b/storm-core/src/jvm/backtype/storm/security/auth/AuthUtils.java
@@ -34,13 +34,19 @@ import java.net.URI;
 import java.util.Collection;
 import java.util.Set;
 import java.util.HashSet;
+import java.util.HashMap;
 import java.util.Map;
+import java.util.SortedMap;
+import java.util.TreeMap;
 import java.util.concurrent.ExecutorService;
 
 public class AuthUtils {
     private static final Logger LOG = LoggerFactory.getLogger(AuthUtils.class);
     public static final String LOGIN_CONTEXT_SERVER = "StormServer";
     public static final String LOGIN_CONTEXT_CLIENT = "StormClient";
+    public static final String LOGIN_CONTEXT_PACEMAKER_DIGEST = "PacemakerDigest";
+    public static final String LOGIN_CONTEXT_PACEMAKER_SERVER = "PacemakerServer";
+    public static final String LOGIN_CONTEXT_PACEMAKER_CLIENT = "PacemakerClient";
     public static final String SERVICE = "storm_thrift_server";
 
     /**
@@ -71,6 +77,37 @@ public class AuthUtils {
     }
 
     /**
+     * Pull a set of keys out of a Configuration.
+     * @param configs_to_pull A set of config keys that you want the values of.
+     * @param conf The config to pull the key/value pairs out of.
+     * @param conf_entry The app configuration entry name to get stuff from.
+     * @return Return a map of the configs in configs_to_pull to their values.
+     */
+    public static SortedMap<String, ?> PullConfig(Configuration conf,
+                                            String conf_entry) throws IOException {
+        if(conf == null) {
+            return null;
+        }
+        AppConfigurationEntry[] configurationEntries = conf.getAppConfigurationEntry(conf_entry);
+        if(configurationEntries == null) {
+            String errorMessage = "Could not find a '" + conf_entry
+                + "' entry in this configuration: Client cannot start.";
+            throw new IOException(errorMessage);
+        }
+
+        TreeMap<String, Object> results = new TreeMap<>();
+
+        for(AppConfigurationEntry entry: configurationEntries) {
+            results.putAll(entry.getOptions());
+        }
+        return results;
+    }
+
+    /**
      * Construct a principal to local plugin
      * @param conf storm configuration
      * @return the plugin
@@ -281,4 +318,26 @@ public class AuthUtils {
         }
         return null;
     }
+
+    private static final String USERNAME = "username";
+    private static final String PASSWORD = "password";
+
+    public static String makeDigestPayload(Configuration login_config, String config_section) {
+        String username = null;
+        String password = null;
+        try {
+            Map<String, ?> results = AuthUtils.PullConfig(login_config, config_section);
+            username = (String)results.get(USERNAME);
+            password = (String)results.get(PASSWORD);
+        }
+        catch (Exception e) {
+            LOG.error("Failed to pull username/password out of jaas conf", e);
+        }
+
+        if(username == null || password == null) {
+            return null;
+        }
+
+        return username + ":" + password;
+    }
 }
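
PullConfig() flattens every option key/value pair found under one JAAS
entry, and makeDigestPayload() reduces that to "username:password". A
hypothetical JAAS file and caller follow; the section name matches
LOGIN_CONTEXT_PACEMAKER_DIGEST, and the login module class is only a
placeholder, since PullConfig() reads the options map and never
instantiates the module.

    // storm_jaas.conf (hypothetical):
    //
    //   PacemakerDigest {
    //       org.apache.zookeeper.server.auth.DigestLoginModule required
    //       username="pacemaker_user"
    //       password="pacemaker_pass";
    //   };

    import java.util.Map;
    import javax.security.auth.login.Configuration;
    import backtype.storm.security.auth.AuthUtils;

    public class DigestPayloadDemo {
        // stormConf: any storm config whose java.security.auth.login.config
        // property points at the JAAS file above.
        public static String payloadFor(Map stormConf) {
            Configuration loginConf = AuthUtils.GetConfiguration(stormConf);
            return AuthUtils.makeDigestPayload(
                loginConf, AuthUtils.LOGIN_CONTEXT_PACEMAKER_DIGEST);
            // => "pacemaker_user:pacemaker_pass", or null if either is missing
        }
    }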

http://git-wip-us.apache.org/repos/asf/storm/blob/444ec05e/storm-core/src/jvm/backtype/storm/utils/Utils.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/backtype/storm/utils/Utils.java b/storm-core/src/jvm/backtype/storm/utils/Utils.java
index 8660739..c3de9ba 100644
--- a/storm-core/src/jvm/backtype/storm/utils/Utils.java
+++ b/storm-core/src/jvm/backtype/storm/utils/Utils.java
@@ -74,9 +74,15 @@ import java.util.zip.GZIPOutputStream;
 import java.util.zip.ZipEntry;
 import java.util.zip.ZipFile;
 
+import org.apache.thrift.TBase;
+import org.apache.thrift.TDeserializer;
+import org.apache.thrift.TSerializer;
+
 public class Utils {
     private static final Logger LOG = LoggerFactory.getLogger(Utils.class);
     public static final String DEFAULT_STREAM_ID = "default";
+    private static ThreadLocal<TSerializer> threadSer = new ThreadLocal<TSerializer>();
+    private static ThreadLocal<TDeserializer> threadDes = new ThreadLocal<TDeserializer>();
 
     private static SerializationDelegate serializationDelegate;
 
@@ -102,6 +108,51 @@ public class Utils {
         return serializationDelegate.deserialize(serialized, clazz);
     }
 
+    public static byte[] thriftSerialize(TBase t) {
+        try {
+            TSerializer ser = threadSer.get();
+            if (ser == null) {
+                ser = new TSerializer();
+                threadSer.set(ser);
+            } 
+            return ser.serialize(t);
+        } catch (TException e) {
+            throw new RuntimeException(e);
+        }
+    }
+
+    private static TDeserializer getDes() {
+        TDeserializer des = threadDes.get();
+        if(des == null) {
+            des = new TDeserializer();
+            threadDes.set(des);
+        }
+        return des;
+    }
+
+    public static <T> T thriftDeserialize(Class<T> c, byte[] b, int offset, int length) {
+        try {
+            T ret = c.newInstance();
+            TDeserializer des = getDes();
+            des.deserialize((TBase) ret, b, offset, length);
+            return ret;
+        } catch (Exception e) {
+            throw new RuntimeException(e);
+        }
+    }
+
+    public static <T> T thriftDeserialize(Class<T> c, byte[] b) {
+        try {
+            T ret = c.newInstance();
+            TDeserializer des = getDes();
+            des.deserialize((TBase) ret, b);
+            return ret;
+        } catch (Exception e) {
+            throw new RuntimeException(e);
+        }
+    }
+
     public static byte[] javaSerialize(Object obj) {
         try {
             ByteArrayOutputStream bos = new ByteArrayOutputStream();
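
The new thriftSerialize()/thriftDeserialize() helpers above cache one
TSerializer/TDeserializer per thread and round-trip any thrift struct.
A small usage sketch with HBMessage, the Pacemaker heartbeat struct this
commit adds (set_message_id()/get_message_id() are the accessors the new
PacemakerClient uses later in this patch):

    import backtype.storm.generated.HBMessage;
    import backtype.storm.utils.Utils;

    public class ThriftRoundTripDemo {
        public static void main(String[] args) {
            HBMessage original = new HBMessage();
            original.set_message_id(42);

            byte[] wire = Utils.thriftSerialize(original);            // thread-cached TSerializer
            HBMessage copy = Utils.thriftDeserialize(HBMessage.class, wire);

            System.out.println(copy.get_message_id());                // prints 42
        }
    }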

http://git-wip-us.apache.org/repos/asf/storm/blob/444ec05e/storm-core/src/jvm/org/apache/storm/pacemaker/IServerMessageHandler.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/org/apache/storm/pacemaker/IServerMessageHandler.java b/storm-core/src/jvm/org/apache/storm/pacemaker/IServerMessageHandler.java
new file mode 100644
index 0000000..14dd4ad
--- /dev/null
+++ b/storm-core/src/jvm/org/apache/storm/pacemaker/IServerMessageHandler.java
@@ -0,0 +1,25 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.storm.pacemaker;
+
+import backtype.storm.generated.HBMessage;
+
+public interface IServerMessageHandler {
+
+    public HBMessage handleMessage(HBMessage m, boolean authenticated);
+}
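
A hypothetical implementation, to show the contract: Pacemaker hands
every decoded HBMessage here together with whether the channel has
passed SASL. This sketch assumes the thrift-generated HBMessage(type,
data) constructor and the get_type()/get_message_id() accessors used
elsewhere in this commit.

    import backtype.storm.generated.HBMessage;
    import backtype.storm.generated.HBServerMessageType;

    public class PulseOnlyHandler implements IServerMessageHandler {
        @Override
        public HBMessage handleMessage(HBMessage m, boolean authenticated) {
            // Refuse unauthenticated channels and anything but pulses.
            if (!authenticated || m.get_type() != HBServerMessageType.SEND_PULSE) {
                return new HBMessage(HBServerMessageType.NOT_AUTHORIZED, null);
            }
            HBMessage response =
                new HBMessage(HBServerMessageType.SEND_PULSE_RESPONSE, null);
            response.set_message_id(m.get_message_id());  // echo the slot id back
            return response;
        }
    }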

http://git-wip-us.apache.org/repos/asf/storm/blob/444ec05e/storm-core/src/jvm/org/apache/storm/pacemaker/PacemakerClient.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/org/apache/storm/pacemaker/PacemakerClient.java b/storm-core/src/jvm/org/apache/storm/pacemaker/PacemakerClient.java
new file mode 100644
index 0000000..c144982
--- /dev/null
+++ b/storm-core/src/jvm/org/apache/storm/pacemaker/PacemakerClient.java
@@ -0,0 +1,252 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.storm.pacemaker;
+
+import backtype.storm.Config;
+import backtype.storm.generated.HBMessage;
+import backtype.storm.messaging.netty.Client;
+import backtype.storm.messaging.netty.ISaslClient;
+import backtype.storm.messaging.netty.NettyRenameThreadFactory;
+import backtype.storm.security.auth.AuthUtils;
+import backtype.storm.utils.StormBoundedExponentialBackoffRetry;
+import java.net.InetSocketAddress;
+import java.util.Map;
+import java.util.Timer;
+import java.util.TimerTask;
+import java.util.concurrent.Executors;
+import java.util.concurrent.LinkedBlockingQueue;
+import java.util.concurrent.ThreadFactory;
+import java.util.concurrent.atomic.AtomicBoolean;
+import java.util.concurrent.atomic.AtomicReference;
+import javax.security.auth.login.Configuration;
+import org.apache.storm.pacemaker.codec.ThriftNettyClientCodec;
+import org.jboss.netty.bootstrap.ClientBootstrap;
+import org.jboss.netty.channel.Channel;
+import org.jboss.netty.channel.ChannelPipelineFactory;
+import org.jboss.netty.channel.socket.nio.NioClientSocketChannelFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+public class PacemakerClient implements ISaslClient {
+
+    private static final Logger LOG = LoggerFactory.getLogger(PacemakerClient.class);
+
+    private String topo_name;
+    private String secret;
+    private boolean ready = false;
+    private final ClientBootstrap bootstrap;
+    private AtomicReference<Channel> channelRef;
+    private AtomicBoolean closing;
+    private InetSocketAddress remote_addr;
+    private int maxPending = 100;
+    private HBMessage[] messages;
+    private LinkedBlockingQueue<Integer> availableMessageSlots;
+    private ThriftNettyClientCodec.AuthMethod authMethod;
+
+    private StormBoundedExponentialBackoffRetry backoff = new StormBoundedExponentialBackoffRetry(100, 5000, 20);
+    private int retryTimes = 0;
+    
+    public PacemakerClient(Map config) {
+
+        String host = (String)config.get(Config.PACEMAKER_HOST);
+        int port = (int)config.get(Config.PACEMAKER_PORT);
+        topo_name = (String)config.get(Config.TOPOLOGY_NAME);
+        if(topo_name == null) {
+            topo_name = "pacemaker-client";
+        }
+
+        String auth = (String)config.get(Config.PACEMAKER_AUTH_METHOD);
+
+        if(auth.equals("DIGEST")) {
+
+            Configuration login_conf = AuthUtils.GetConfiguration(config);
+            authMethod = ThriftNettyClientCodec.AuthMethod.DIGEST;
+            secret = AuthUtils.makeDigestPayload(login_conf, AuthUtils.LOGIN_CONTEXT_PACEMAKER_DIGEST);
+            if(secret == null) {
+                LOG.error("Can't start pacemaker client without digest secret.");
+                throw new RuntimeException("Can't start pacemaker client without digest secret.");
+            }
+
+        }
+        else if(auth.equals("KERBEROS")) {
+            authMethod = ThriftNettyClientCodec.AuthMethod.KERBEROS;
+        }
+        else {
+            if(!auth.equals("NONE")) {
+                LOG.warn("Invalid auth scheme: '{}'. Falling back to 'NONE'", auth);
+            }
+            
+            authMethod = ThriftNettyClientCodec.AuthMethod.NONE;
+        }
+
+        closing = new AtomicBoolean(false);
+        channelRef = new AtomicReference<Channel>(null);
+        setupMessaging();
+
+        ThreadFactory bossFactory = new NettyRenameThreadFactory("client-boss");
+        ThreadFactory workerFactory = new NettyRenameThreadFactory("client-worker");
+        NioClientSocketChannelFactory factory =
+            new NioClientSocketChannelFactory(Executors.newCachedThreadPool(bossFactory),
+                                              Executors.newCachedThreadPool(workerFactory));
+        bootstrap = new ClientBootstrap(factory);
+        bootstrap.setOption("tcpNoDelay", true);
+        bootstrap.setOption("sendBufferSize", 5242880);
+        bootstrap.setOption("keepAlive", true);
+
+        remote_addr = new InetSocketAddress(host, port);
+        ChannelPipelineFactory pipelineFactory = new ThriftNettyClientCodec(this, config, authMethod).pipelineFactory();
+        bootstrap.setPipelineFactory(pipelineFactory);
+        bootstrap.connect(remote_addr);
+    }
+
+    private void setupMessaging() {
+        messages = new HBMessage[maxPending];
+        availableMessageSlots = new LinkedBlockingQueue<Integer>();
+        for(int i = 0; i < maxPending; i++) {
+            availableMessageSlots.add(i);
+        }
+    }
+
+    public synchronized void channelConnected(Channel channel) {
+        LOG.debug("Channel is connected: {}", channel.toString());
+        channelRef.set(channel);
+
+        //If we're not going to authenticate, we can begin sending.
+        if(authMethod == ThriftNettyClientCodec.AuthMethod.NONE) {
+            ready = true;
+            this.notifyAll();
+        }
+        retryTimes = 0;
+    }
+
+    public synchronized void channelReady() {
+        LOG.debug("Channel is ready.");
+        ready = true;
+        this.notifyAll();
+    }
+
+    public String name() {
+        return topo_name;
+    }
+
+    public String secretKey() {
+        return secret;
+    }
+
+    public HBMessage send(HBMessage m) {
+        // Wait for 'ready' (channel connected and maybe authentication)
+        if(!ready) {
+            synchronized(this) {
+                if(!ready) {
+                    LOG.debug("Waiting for netty channel to be ready.");
+                    try {
+                        this.wait(1000);
+                        if(!ready) {
+                            throw new RuntimeException("Timed out waiting for channel ready.");
+                        }
+                    } catch (java.lang.InterruptedException e) {
+                        throw new RuntimeException(e);
+                    }
+                }
+            }
+        }
+
+        LOG.debug("Sending message: {}", m.toString());
+        try {
+
+            int next = availableMessageSlots.take();
+            synchronized (m) {
+                m.set_message_id(next);
+                messages[next] = m;
+                LOG.debug("Put message in slot: {}", Integer.toString(next));
+                do {
+                    channelRef.get().write(m);
+                    m.wait(1000);
+                } while (messages[next] == m);
+            }
+
+            HBMessage ret = messages[next];
+            if(ret == null) {
+                // This can happen if we lost the connection and subsequently reconnected or timed out.
+                // Retry, and return the retried response rather than falling through with null.
+                return send(m);
+            }
+            messages[next] = null;
+            LOG.debug("Got Response: {}", ret);
+            return ret;
+        }
+        catch (InterruptedException e) {
+            LOG.error("PacemakerClient send interrupted: ", e);
+            throw new RuntimeException(e);
+        }
+    }
+
+    public void gotMessage(HBMessage m) {
+        int message_id = m.get_message_id();
+        if(message_id >=0 && message_id < maxPending) {
+            
+            LOG.debug("Pacemaker Client got message: {}", m.toString());
+            HBMessage request = messages[message_id];
+            
+            if(request == null) {
+                LOG.debug("No message for slot: {}", Integer.toString(message_id));
+            }
+            else {
+                synchronized(request) {
+                    messages[message_id] = m;
+                    request.notifyAll();
+                    availableMessageSlots.add(message_id);
+                }
+            }
+        }
+        else {
+            LOG.error("Got Message with bad id: {}", m.toString());
+        }
+    }
+
+    public void reconnect() {
+        final PacemakerClient client = this;
+        Timer t = new Timer(true);
+        t.schedule(new TimerTask() {
+                public void run() {
+                    client.doReconnect();
+                }
+            },
+            backoff.getSleepTimeMs(retryTimes++, 0));
+        ready = false;
+        setupMessaging();
+    }
+
+    public synchronized void doReconnect() {
+        close_channel();
+        if(closing.get()) return;
+        bootstrap.connect(remote_addr);
+    }
+
+    synchronized void close_channel() {
+        if (channelRef.get() != null) {
+            channelRef.get().close();
+            LOG.debug("channel {} closed", remote_addr);
+            channelRef.set(null);
+        }
+    }
+
+    public void close() {
+        close_channel();
+    }
+}
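
Usage sketch for the client above: send() parks the caller on a free
slot until gotMessage() swaps the response (matched by message_id) into
that slot and wakes it. This is a hypothetical caller; it assumes the
thrift-generated HBMessageData.path(...) union factory and get_type()
accessor naming used elsewhere in this commit, plus a config with
pacemaker.host/pacemaker.port set.

    import backtype.storm.generated.HBMessage;
    import backtype.storm.generated.HBMessageData;
    import backtype.storm.generated.HBServerMessageType;
    import backtype.storm.utils.Utils;
    import java.util.Map;

    public class PacemakerPingDemo {
        public static void main(String[] args) {
            Map conf = Utils.readStormConfig();
            PacemakerClient client = new PacemakerClient(conf);

            HBMessage request = new HBMessage(HBServerMessageType.EXISTS,
                                              HBMessageData.path("/some-path"));
            HBMessage reply = client.send(request);   // blocks on the slot table
            System.out.println(reply.get_type());     // e.g. EXISTS_RESPONSE
            client.close();
        }
    }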

http://git-wip-us.apache.org/repos/asf/storm/blob/444ec05e/storm-core/src/jvm/org/apache/storm/pacemaker/PacemakerClientHandler.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/org/apache/storm/pacemaker/PacemakerClientHandler.java b/storm-core/src/jvm/org/apache/storm/pacemaker/PacemakerClientHandler.java
new file mode 100644
index 0000000..02cfde6
--- /dev/null
+++ b/storm-core/src/jvm/org/apache/storm/pacemaker/PacemakerClientHandler.java
@@ -0,0 +1,75 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.storm.pacemaker;
+
+import org.jboss.netty.channel.SimpleChannelUpstreamHandler;
+import org.jboss.netty.channel.ChannelHandlerContext;
+import org.jboss.netty.channel.ChannelStateEvent;
+import org.jboss.netty.channel.ExceptionEvent;
+import org.jboss.netty.channel.MessageEvent;
+import org.jboss.netty.channel.Channel;
+import backtype.storm.generated.HBMessage;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import backtype.storm.messaging.netty.ControlMessage;
+
+public class PacemakerClientHandler extends SimpleChannelUpstreamHandler {
+    private static final Logger LOG = LoggerFactory.getLogger(PacemakerClientHandler.class);
+
+    private PacemakerClient client;
+
+    public PacemakerClientHandler(PacemakerClient client) {
+        this.client = client;
+    }
+
+    @Override
+    public void channelConnected(ChannelHandlerContext ctx,
+                                 ChannelStateEvent event) {
+        // register the newly established channel
+        Channel channel = ctx.getChannel();
+        client.channelConnected(channel);
+
+        LOG.info("Connection established from {} to {}",
+                 channel.getLocalAddress(), channel.getRemoteAddress());
+    }
+
+    @Override
+    public void messageReceived(ChannelHandlerContext ctx, MessageEvent event) {
+        Object evm = event.getMessage();
+        LOG.debug("Got Message: {}", evm);
+
+        if(evm instanceof ControlMessage) {
+            LOG.debug("Got control message: {}", evm.toString());
+            return;
+        }
+        else if(evm instanceof HBMessage) {
+            client.gotMessage((HBMessage)evm);
+        }
+        else {
+            LOG.warn("Got unexpected message: {} from server.", evm);
+        }
+    }
+
+    @Override
+    public void exceptionCaught(ChannelHandlerContext ctx, ExceptionEvent event) {
+        LOG.error("Connection to pacemaker failed", event.getCause());
+        client.reconnect();
+    }
+}

http://git-wip-us.apache.org/repos/asf/storm/blob/444ec05e/storm-core/src/jvm/org/apache/storm/pacemaker/PacemakerServer.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/org/apache/storm/pacemaker/PacemakerServer.java b/storm-core/src/jvm/org/apache/storm/pacemaker/PacemakerServer.java
new file mode 100644
index 0000000..adba9ab
--- /dev/null
+++ b/storm-core/src/jvm/org/apache/storm/pacemaker/PacemakerServer.java
@@ -0,0 +1,158 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.storm.pacemaker;
+
+import backtype.storm.Config;
+import backtype.storm.generated.HBMessage;
+import backtype.storm.messaging.netty.ISaslServer;
+import backtype.storm.messaging.netty.NettyRenameThreadFactory;
+import backtype.storm.security.auth.AuthUtils;
+import java.lang.InterruptedException;
+import java.net.InetSocketAddress;
+import java.util.Map;
+import java.util.concurrent.ConcurrentSkipListSet;
+import java.util.concurrent.Executors;
+import java.util.concurrent.ThreadFactory;
+import javax.security.auth.login.Configuration;
+import org.apache.storm.pacemaker.codec.ThriftNettyServerCodec;
+import org.jboss.netty.bootstrap.ServerBootstrap;
+import org.jboss.netty.channel.Channel;
+import org.jboss.netty.channel.ChannelPipelineFactory;
+import org.jboss.netty.channel.group.ChannelGroup;
+import org.jboss.netty.channel.group.DefaultChannelGroup;
+import org.jboss.netty.channel.socket.nio.NioServerSocketChannelFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+class PacemakerServer implements ISaslServer {
+
+    private static final long FIVE_MB_IN_BYTES = 5 * 1024 * 1024;
+
+    private static final Logger LOG = LoggerFactory.getLogger(PacemakerServer.class);
+
+    private final ServerBootstrap bootstrap;
+    private int port;
+    private IServerMessageHandler handler;
+    private String secret;
+    private String topo_name;
+    private volatile ChannelGroup allChannels = new DefaultChannelGroup("storm-server");
+    private ConcurrentSkipListSet<Channel> authenticated_channels = new ConcurrentSkipListSet<Channel>();
+    private ThriftNettyServerCodec.AuthMethod authMethod;
+
+    public PacemakerServer(IServerMessageHandler handler, Map config){
+        int maxWorkers = (int)config.get(Config.PACEMAKER_MAX_THREADS);
+        this.port = (int)config.get(Config.PACEMAKER_PORT);
+        this.handler = handler;
+        this.topo_name = "pacemaker_server";
+
+        String auth = (String)config.get(Config.PACEMAKER_AUTH_METHOD);
+        if(auth.equals("DIGEST")) {
+            Configuration login_conf = AuthUtils.GetConfiguration(config);
+            authMethod = ThriftNettyServerCodec.AuthMethod.DIGEST;
+            this.secret = AuthUtils.makeDigestPayload(login_conf, AuthUtils.LOGIN_CONTEXT_PACEMAKER_DIGEST);
+            if(this.secret == null) {
+                LOG.error("Can't start pacemaker server without digest secret.");
+                throw new RuntimeException("Can't start pacemaker server without digest secret.");
+            }
+        }
+        else if(auth.equals("KERBEROS")) {
+            authMethod = ThriftNettyServerCodec.AuthMethod.KERBEROS;
+        }
+        else if(auth.equals("NONE")) {
+            authMethod = ThriftNettyServerCodec.AuthMethod.NONE;
+        }
+        else {
+            LOG.error("Can't start pacemaker server without proper PACEMAKER_AUTH_METHOD.");
+            throw new RuntimeException("Can't start pacemaker server without proper PACEMAKER_AUTH_METHOD.");
+        }
+
+        ThreadFactory bossFactory = new NettyRenameThreadFactory("server-boss");
+        ThreadFactory workerFactory = new NettyRenameThreadFactory("server-worker");
+        NioServerSocketChannelFactory factory;
+        if(maxWorkers > 0) {
+            factory =
+                new NioServerSocketChannelFactory(Executors.newCachedThreadPool(bossFactory),
+                                                  Executors.newCachedThreadPool(workerFactory),
+                                                  maxWorkers);
+        }
+        else {
+            factory =
+                new NioServerSocketChannelFactory(Executors.newCachedThreadPool(bossFactory),
+                                                  Executors.newCachedThreadPool(workerFactory));
+        }
+
+        bootstrap = new ServerBootstrap(factory);
+        bootstrap.setOption("tcpNoDelay", true);
+        bootstrap.setOption("sendBufferSize", FIVE_MB_IN_BYTES);
+        bootstrap.setOption("keepAlive", true);
+
+        ChannelPipelineFactory pipelineFactory = new ThriftNettyServerCodec(this, config, authMethod).pipelineFactory();
+        bootstrap.setPipelineFactory(pipelineFactory);
+        Channel channel = bootstrap.bind(new InetSocketAddress(port));
+        allChannels.add(channel);
+        LOG.info("Bound server to port: {}", Integer.toString(port));
+    }
+
+    /** Implementing IServer. **/
+    public void channelConnected(Channel c) {
+        allChannels.add(c);
+    }
+
+    public void cleanPipeline(Channel channel) {
+        boolean authenticated = authenticated_channels.contains(channel);
+        if(!authenticated) {
+            if(channel.getPipeline().get(ThriftNettyServerCodec.SASL_HANDLER) != null) {
+                channel.getPipeline().remove(ThriftNettyServerCodec.SASL_HANDLER);
+            }
+            else if(channel.getPipeline().get(ThriftNettyServerCodec.KERBEROS_HANDLER) != null) {
+                channel.getPipeline().remove(ThriftNettyServerCodec.KERBEROS_HANDLER);
+            }
+        }
+    }
+    
+    public void received(Object mesg, String remote, Channel channel) throws InterruptedException {
+        cleanPipeline(channel);
+        
+        boolean authenticated = (authMethod == ThriftNettyServerCodec.AuthMethod.NONE) || authenticated_channels.contains(channel);
+        HBMessage m = (HBMessage)mesg;
+        LOG.debug("Received message. Passing to handler. {} : {} : {}",
+                  handler, m, channel);
+        HBMessage response = handler.handleMessage(m, authenticated);
+        if(response != null) {
+            LOG.debug("Got response from handler: {}", response);
+            channel.write(response);
+        }
+    }
+
+    public void closeChannel(Channel c) {
+        c.close().awaitUninterruptibly();
+        allChannels.remove(c);
+        authenticated_channels.remove(c);
+    }
+
+    public String name() {
+        return topo_name;
+    }
+
+    public String secretKey() {
+        return secret;
+    }
+
+    public void authenticated(Channel c) {
+        LOG.debug("Pacemaker server authenticated channel: {}", c.toString());
+        authenticated_channels.add(c);
+    }
+}
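
The only contract PacemakerServer places on its handler is the handleMessage(HBMessage, boolean) call in received() above. A minimal sketch of an implementation (illustrative only; the actual Pacemaker daemon supplies the real handler elsewhere in this patch series):

    import backtype.storm.generated.HBMessage;
    import backtype.storm.generated.HBServerMessageType;

    public class MinimalMessageHandler implements IServerMessageHandler {
        @Override
        public HBMessage handleMessage(HBMessage m, boolean authenticated) {
            if(!authenticated) {
                // Refuse to do any work on unauthenticated channels.
                return new HBMessage(HBServerMessageType.NOT_AUTHORIZED, null);
            }
            // Echo the request back; a real handler dispatches on m.get_type().
            return m;
        }
    }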

http://git-wip-us.apache.org/repos/asf/storm/blob/444ec05e/storm-core/src/jvm/org/apache/storm/pacemaker/codec/ThriftDecoder.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/org/apache/storm/pacemaker/codec/ThriftDecoder.java b/storm-core/src/jvm/org/apache/storm/pacemaker/codec/ThriftDecoder.java
new file mode 100644
index 0000000..c964df2
--- /dev/null
+++ b/storm-core/src/jvm/org/apache/storm/pacemaker/codec/ThriftDecoder.java
@@ -0,0 +1,77 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.storm.pacemaker.codec;
+
+import org.jboss.netty.handler.codec.frame.FrameDecoder;
+import org.jboss.netty.channel.ChannelHandlerContext;
+import org.jboss.netty.channel.Channel;
+import backtype.storm.generated.HBMessage;
+import backtype.storm.generated.HBMessageData;
+import backtype.storm.generated.HBServerMessageType;
+import org.jboss.netty.buffer.ChannelBuffer;
+import backtype.storm.utils.Utils;
+import backtype.storm.messaging.netty.ControlMessage;
+import backtype.storm.messaging.netty.SaslMessageToken;
+
+public class ThriftDecoder extends FrameDecoder {
+
+    @Override
+    protected Object decode(ChannelHandlerContext ctx, Channel channel, ChannelBuffer buf) throws Exception {
+
+        long available = buf.readableBytes();
+        if(available < 2) {
+            return null;
+        }
+
+        buf.markReaderIndex();
+
+        int thriftLen = buf.readInt();
+        available -= 4;
+
+        if(available < thriftLen) {
+            // We haven't received the entire object yet, return and wait for more bytes.
+            buf.resetReaderIndex();
+            return null;
+        }
+
+        buf.discardReadBytes();
+
+        HBMessage m;
+        if(buf.hasArray()) {
+            m = (HBMessage)Utils.thriftDeserialize(HBMessage.class, buf.array(), 0, thriftLen);
+            buf.readerIndex(buf.readerIndex() + thriftLen);
+        }
+        else {
+            byte[] serialized = new byte[thriftLen];
+            buf.readBytes(serialized, 0, thriftLen);
+            m = (HBMessage)Utils.thriftDeserialize(HBMessage.class, serialized);
+        }
+
+        if(m.get_type() == HBServerMessageType.CONTROL_MESSAGE) {
+            ControlMessage cm = ControlMessage.read(m.get_data().get_message_blob());
+            return cm;
+        }
+        else if(m.get_type() == HBServerMessageType.SASL_MESSAGE_TOKEN) {
+            SaslMessageToken sm = SaslMessageToken.read(m.get_data().get_message_blob());
+            return sm;
+        }
+        else {
+            return m;
+        }
+    }
+}
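
The wire format the decoder expects is a 4-byte length prefix followed by a thrift-serialized HBMessage; CONTROL_MESSAGE and SASL_MESSAGE_TOKEN envelopes are unwrapped into their Netty-native types so the SASL handlers upstream can consume them. A hand-rolled frame for a test might look like this (sketch; mirrors what ThriftEncoder below emits, path value is illustrative):

    import backtype.storm.generated.HBMessage;
    import backtype.storm.generated.HBMessageData;
    import backtype.storm.generated.HBServerMessageType;
    import backtype.storm.utils.Utils;
    import org.apache.thrift.TBase;
    import org.jboss.netty.buffer.ChannelBuffer;
    import org.jboss.netty.buffer.ChannelBuffers;

    HBMessageData data = new HBMessageData();
    data.set_path("/some/path");
    HBMessage message = new HBMessage(HBServerMessageType.EXISTS, data);

    byte[] payload = Utils.thriftSerialize((TBase)message);
    ChannelBuffer frame = ChannelBuffers.buffer(4 + payload.length);
    frame.writeInt(payload.length);   // the length prefix decode() reads first
    frame.writeBytes(payload);        // the thrift body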

http://git-wip-us.apache.org/repos/asf/storm/blob/444ec05e/storm-core/src/jvm/org/apache/storm/pacemaker/codec/ThriftEncoder.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/org/apache/storm/pacemaker/codec/ThriftEncoder.java b/storm-core/src/jvm/org/apache/storm/pacemaker/codec/ThriftEncoder.java
new file mode 100644
index 0000000..fb2c5be
--- /dev/null
+++ b/storm-core/src/jvm/org/apache/storm/pacemaker/codec/ThriftEncoder.java
@@ -0,0 +1,108 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.storm.pacemaker.codec;
+
+import org.jboss.netty.handler.codec.oneone.OneToOneEncoder;
+import org.jboss.netty.channel.ChannelHandlerContext;
+import org.jboss.netty.channel.Channel;
+import backtype.storm.generated.HBMessage;
+import backtype.storm.generated.HBMessageData;
+import backtype.storm.generated.HBServerMessageType;
+import org.jboss.netty.buffer.ChannelBuffers;
+import org.jboss.netty.buffer.ChannelBuffer;
+import backtype.storm.utils.Utils;
+import backtype.storm.messaging.netty.ControlMessage;
+import backtype.storm.messaging.netty.SaslMessageToken;
+import backtype.storm.messaging.netty.INettySerializable;
+import java.io.IOException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.apache.thrift.TBase;
+
+public class ThriftEncoder extends OneToOneEncoder {
+
+    private static final Logger LOG = LoggerFactory
+        .getLogger(ThriftEncoder.class);
+
+    private HBMessage encodeNettySerializable(INettySerializable netty_message,
+                                              HBServerMessageType mType) {
+        
+        HBMessageData message_data = new HBMessageData();
+        HBMessage m = new HBMessage();
+        try {
+            ChannelBuffer cbuffer = netty_message.buffer();
+            if(cbuffer.hasArray()) {
+                message_data.set_message_blob(cbuffer.array());
+            }
+            else {
+                byte[] buff = new byte[netty_message.encodeLength()];
+                cbuffer.readBytes(buff, 0, netty_message.encodeLength());
+                message_data.set_message_blob(buff);
+            }
+            m.set_type(mType);
+            m.set_data(message_data);
+            return m;
+        }
+        catch (IOException e) {
+            LOG.error("Failed to encode NettySerializable: ", e);
+            throw new RuntimeException(e);
+        }
+    }
+
+    @Override
+    protected Object encode(ChannelHandlerContext ctx, Channel channel, Object msg) {
+        if(msg == null) return null;
+
+        LOG.debug("Trying to encode: " + msg.getClass().toString() + " : " + msg.toString());
+
+        HBMessage m;
+        if(msg instanceof INettySerializable) {
+            INettySerializable nettyMsg = (INettySerializable)msg;
+
+            HBServerMessageType type;
+            if(msg instanceof ControlMessage) {
+                type = HBServerMessageType.CONTROL_MESSAGE;
+            }
+            else if(msg instanceof SaslMessageToken) {
+                type = HBServerMessageType.SASL_MESSAGE_TOKEN;
+            }
+            else {
+                LOG.error("Didn't recognise INettySerializable: " + nettyMsg.toString());
+                throw new RuntimeException("Unrecognized INettySerializable.");
+            }
+            m = encodeNettySerializable(nettyMsg, type);
+        }
+        else {
+            m = (HBMessage)msg;
+        }
+
+        try {
+            byte[] serialized = Utils.thriftSerialize((TBase)m);
+            ChannelBuffer ret = ChannelBuffers.directBuffer(serialized.length + 4);
+
+            ret.writeInt(serialized.length);
+            ret.writeBytes(serialized);
+
+            return ret;
+        }
+        catch (RuntimeException e) {
+            LOG.error("Failed to serialize.", e);
+            throw e;
+        }
+    }
+}
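
Worth noting the design here: SASL and control frames are plain INettySerializable objects, so the encoder wraps them in an HBMessage envelope (CONTROL_MESSAGE / SASL_MESSAGE_TOKEN) and ThriftDecoder above unwraps them again, letting one codec pair carry both the auth handshake and normal heartbeat traffic. A quick round-trip sanity check using the same helpers (sketch; imports as in the decoder sketch above, path value is illustrative):

    HBMessageData data = new HBMessageData();
    data.set_path("/heartbeats/topo-1");
    HBMessage in = new HBMessage(HBServerMessageType.EXISTS, data);

    byte[] wire = Utils.thriftSerialize((TBase)in);
    HBMessage out = (HBMessage)Utils.thriftDeserialize(HBMessage.class, wire);
    assert in.equals(out);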

http://git-wip-us.apache.org/repos/asf/storm/blob/444ec05e/storm-core/src/jvm/org/apache/storm/pacemaker/codec/ThriftNettyClientCodec.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/org/apache/storm/pacemaker/codec/ThriftNettyClientCodec.java b/storm-core/src/jvm/org/apache/storm/pacemaker/codec/ThriftNettyClientCodec.java
new file mode 100644
index 0000000..5ba90fd
--- /dev/null
+++ b/storm-core/src/jvm/org/apache/storm/pacemaker/codec/ThriftNettyClientCodec.java
@@ -0,0 +1,94 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.storm.pacemaker.codec;
+
+import backtype.storm.messaging.netty.KerberosSaslClientHandler;
+import backtype.storm.messaging.netty.SaslStormClientHandler;
+import backtype.storm.security.auth.AuthUtils;
+import java.io.IOException;
+import java.util.Map;
+import org.apache.storm.pacemaker.PacemakerClient;
+import org.apache.storm.pacemaker.PacemakerClientHandler;
+import org.jboss.netty.channel.ChannelPipeline;
+import org.jboss.netty.channel.ChannelPipelineFactory;
+import org.jboss.netty.channel.Channels;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+public class ThriftNettyClientCodec {
+
+    public static final String SASL_HANDLER = "sasl-handler";
+    public static final String KERBEROS_HANDLER = "kerberos-handler";
+    
+    public enum AuthMethod {
+        DIGEST,
+        KERBEROS,
+        NONE
+    };
+
+    private static final Logger LOG = LoggerFactory
+        .getLogger(ThriftNettyClientCodec.class);
+
+    private PacemakerClient client;
+    private AuthMethod authMethod;
+    private Map storm_conf;
+
+    public ThriftNettyClientCodec(PacemakerClient pacemaker_client, Map storm_conf, AuthMethod authMethod) {
+        client = pacemaker_client;
+        this.authMethod = authMethod;
+        this.storm_conf = storm_conf;
+    }
+
+    public ChannelPipelineFactory pipelineFactory() {
+        return new ChannelPipelineFactory() {
+            public ChannelPipeline getPipeline() {
+                ChannelPipeline pipeline = Channels.pipeline();
+                pipeline.addLast("encoder", new ThriftEncoder());
+                pipeline.addLast("decoder", new ThriftDecoder());
+
+                if (authMethod == AuthMethod.KERBEROS) {
+                    try {
+                        LOG.debug("Adding KerberosSaslClientHandler to pacemaker client pipeline.");
+                        pipeline.addLast(KERBEROS_HANDLER,
+                                         new KerberosSaslClientHandler(client,
+                                                                       storm_conf,
+                                                                       AuthUtils.LOGIN_CONTEXT_PACEMAKER_CLIENT));
+                    }
+                    catch (IOException e) {
+                        throw new RuntimeException(e);
+                    }
+                }
+                else if(authMethod == AuthMethod.DIGEST) {
+                    try {
+                        LOG.debug("Adding SaslStormClientHandler to pacemaker client pipeline.");
+                        pipeline.addLast(SASL_HANDLER, new SaslStormClientHandler(client));
+                    }
+                    catch (IOException e) {
+                        throw new RuntimeException(e);
+                    }
+                }
+                else {
+                    client.channelReady();
+                }
+
+                pipeline.addLast("PacemakerClientHandler", new PacemakerClientHandler(client));
+                return pipeline;
+            }
+        };
+    }
+}
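
Consuming this factory follows the standard Netty 3 client pattern; a sketch (the real connection management lives in PacemakerClient, and the host/port below are illustrative):

    import java.net.InetSocketAddress;
    import java.util.concurrent.Executors;
    import org.jboss.netty.bootstrap.ClientBootstrap;
    import org.jboss.netty.channel.socket.nio.NioClientSocketChannelFactory;

    ClientBootstrap bootstrap = new ClientBootstrap(
        new NioClientSocketChannelFactory(Executors.newCachedThreadPool(),
                                          Executors.newCachedThreadPool()));
    bootstrap.setPipelineFactory(
        new ThriftNettyClientCodec(client, conf, ThriftNettyClientCodec.AuthMethod.NONE)
            .pipelineFactory());
    bootstrap.connect(new InetSocketAddress("pacemaker-host", 6699));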

http://git-wip-us.apache.org/repos/asf/storm/blob/444ec05e/storm-core/src/jvm/org/apache/storm/pacemaker/codec/ThriftNettyServerCodec.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/org/apache/storm/pacemaker/codec/ThriftNettyServerCodec.java b/storm-core/src/jvm/org/apache/storm/pacemaker/codec/ThriftNettyServerCodec.java
new file mode 100644
index 0000000..7d3018d
--- /dev/null
+++ b/storm-core/src/jvm/org/apache/storm/pacemaker/codec/ThriftNettyServerCodec.java
@@ -0,0 +1,97 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.storm.pacemaker.codec;
+
+import backtype.storm.Config;
+import backtype.storm.messaging.netty.ISaslServer;
+import backtype.storm.messaging.netty.IServer;
+import backtype.storm.messaging.netty.KerberosSaslServerHandler;
+import backtype.storm.messaging.netty.SaslStormServerHandler;
+import backtype.storm.messaging.netty.StormServerHandler;
+import backtype.storm.security.auth.AuthUtils;
+import java.io.IOException;
+import java.util.List;
+import java.util.Map;
+import org.jboss.netty.channel.ChannelPipeline;
+import org.jboss.netty.channel.ChannelPipelineFactory;
+import org.jboss.netty.channel.Channels;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+public class ThriftNettyServerCodec {
+
+    public static final String SASL_HANDLER = "sasl-handler";
+    public static final String KERBEROS_HANDLER = "kerberos-handler";
+    
+    public enum AuthMethod {
+        DIGEST,
+        KERBEROS,
+        NONE
+    };
+    
+    private IServer server;
+    private AuthMethod authMethod;
+    private Map storm_conf;
+    
+    private static final Logger LOG = LoggerFactory
+        .getLogger(ThriftNettyServerCodec.class);
+
+    public ThriftNettyServerCodec(IServer server, Map storm_conf, AuthMethod authMethod) {
+        this.server = server;
+        this.authMethod = authMethod;
+        this.storm_conf = storm_conf;
+    }
+
+    public ChannelPipelineFactory pipelineFactory() {
+        return new ChannelPipelineFactory() {
+            public ChannelPipeline getPipeline() {
+
+                ChannelPipeline pipeline = Channels.pipeline();
+                pipeline.addLast("encoder", new ThriftEncoder());
+                pipeline.addLast("decoder", new ThriftDecoder());
+                if(authMethod == AuthMethod.DIGEST) {
+                    try {
+                        LOG.debug("Adding SaslStormServerHandler to pacemaker server pipeline.");
+                        pipeline.addLast(SASL_HANDLER, new SaslStormServerHandler((ISaslServer)server));
+                    }
+                    catch (IOException e) {
+                        throw new RuntimeException(e);
+                    }
+                }
+                else if(authMethod == AuthMethod.KERBEROS) {
+                    try {
+                        LOG.debug("Adding KerberosSaslServerHandler to pacemaker server pipeline.");
+                        pipeline.addLast(KERBEROS_HANDLER, new KerberosSaslServerHandler((ISaslServer)server,
+                                                                                         storm_conf,
+                                                                                         AuthUtils.LOGIN_CONTEXT_PACEMAKER_SERVER,
+                                                                                         (List)storm_conf.get(Config.PACEMAKER_KERBEROS_USERS)));
+                    }
+                    catch (IOException e) {
+                        throw new RuntimeException(e);
+                    }
+                }
+                else if(authMethod == AuthMethod.NONE) {
+                    LOG.debug("Not authenticating any clients. AuthMethod is NONE");
+                }
+
+                pipeline.addLast("handler", new StormServerHandler(server));
+                return pipeline;
+            }
+        };
+    }
+}
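
This is the server-side mirror of the client codec above; PacemakerServer earlier in this patch consumes it in essentially this shape (sketch, with server and conf as the surrounding code provides them, and an illustrative port):

    ServerBootstrap bootstrap = new ServerBootstrap(
        new NioServerSocketChannelFactory(Executors.newCachedThreadPool(),
                                          Executors.newCachedThreadPool()));
    bootstrap.setPipelineFactory(
        new ThriftNettyServerCodec(server, conf, ThriftNettyServerCodec.AuthMethod.DIGEST)
            .pipelineFactory());
    bootstrap.bind(new InetSocketAddress(6699));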

http://git-wip-us.apache.org/repos/asf/storm/blob/444ec05e/storm-core/src/jvm/storm/trident/util/TridentUtils.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/storm/trident/util/TridentUtils.java b/storm-core/src/jvm/storm/trident/util/TridentUtils.java
index 0cdec0a..214bcf2 100644
--- a/storm-core/src/jvm/storm/trident/util/TridentUtils.java
+++ b/storm-core/src/jvm/storm/trident/util/TridentUtils.java
@@ -30,11 +30,10 @@ import java.util.Set;
 import java.util.logging.Level;
 import java.util.logging.Logger;
 import org.apache.thrift.TBase;
-import org.apache.thrift.TDeserializer;
-import org.apache.thrift.TException;
-import org.apache.thrift.TSerializer;
 import org.jgrapht.DirectedGraph;
 
+import backtype.storm.utils.Utils;
+
 public class TridentUtils {
     public static Fields fieldsUnion(Fields... fields) {
         Set<String> ret = new HashSet<String>();
@@ -108,35 +107,11 @@ public class TridentUtils {
         return parents.get(0);
     }
     
-    private static ThreadLocal<TSerializer> threadSer = new ThreadLocal<TSerializer>();
-    private static ThreadLocal<TDeserializer> threadDes = new ThreadLocal<TDeserializer>();
-    
     public static byte[] thriftSerialize(TBase t) {
-        try {
-            TSerializer ser = threadSer.get();
-            if (ser == null) {
-                ser = new TSerializer();
-                threadSer.set(ser);
-            } 
-            return ser.serialize(t);
-        } catch (TException e) {
-            throw new RuntimeException(e);
-        }
+        return Utils.thriftSerialize(t);
     }
 
     public static <T> T thriftDeserialize(Class c, byte[] b) {
-        try {
-            T ret = (T) c.newInstance();
-            TDeserializer des = threadDes.get();
-            if (des == null) {
-                des = new TDeserializer();
-                threadDes.set(des);
-            }
-            des.deserialize((TBase) ret, b);
-            return ret;
-        } catch (Exception e) {
-            throw new RuntimeException(e);
-        }
-        
+        return Utils.thriftDeserialize(c,b);
     }
 }
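
The refactor above is pure consolidation: both helpers now delegate to backtype.storm.utils.Utils, which presumably owns the thread-local TSerializer/TDeserializer caching that previously lived here. Call sites are unchanged, e.g.:

    import backtype.storm.generated.StormTopology;

    byte[] bytes = TridentUtils.thriftSerialize(topology); // topology is a StormTopology
    StormTopology copy = TridentUtils.thriftDeserialize(StormTopology.class, bytes);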


[14/37] storm git commit: More fixes.

Posted by kn...@apache.org.
More fixes.


Project: http://git-wip-us.apache.org/repos/asf/storm/repo
Commit: http://git-wip-us.apache.org/repos/asf/storm/commit/25aae7b9
Tree: http://git-wip-us.apache.org/repos/asf/storm/tree/25aae7b9
Diff: http://git-wip-us.apache.org/repos/asf/storm/diff/25aae7b9

Branch: refs/heads/master
Commit: 25aae7b9efa7943b924806e3d6b5b2bd97b92f51
Parents: 3147955
Author: Kyle Nusbaum <Ky...@gmail.com>
Authored: Mon Nov 9 16:53:42 2015 -0600
Committer: Kyle Nusbaum <Ky...@gmail.com>
Committed: Mon Nov 9 16:53:42 2015 -0600

----------------------------------------------------------------------
 storm-core/src/genthrift.sh                                  | 0
 .../storm/messaging/netty/KerberosSaslNettyClient.java       | 8 --------
 2 files changed, 8 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/storm/blob/25aae7b9/storm-core/src/genthrift.sh
----------------------------------------------------------------------
diff --git a/storm-core/src/genthrift.sh b/storm-core/src/genthrift.sh
old mode 100755
new mode 100644

http://git-wip-us.apache.org/repos/asf/storm/blob/25aae7b9/storm-core/src/jvm/backtype/storm/messaging/netty/KerberosSaslNettyClient.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/backtype/storm/messaging/netty/KerberosSaslNettyClient.java b/storm-core/src/jvm/backtype/storm/messaging/netty/KerberosSaslNettyClient.java
index e540a4c..1295394 100644
--- a/storm-core/src/jvm/backtype/storm/messaging/netty/KerberosSaslNettyClient.java
+++ b/storm-core/src/jvm/backtype/storm/messaging/netty/KerberosSaslNettyClient.java
@@ -187,14 +187,6 @@ public class KerberosSaslNettyClient {
     private static class SaslClientCallbackHandler implements CallbackHandler {
 
         /**
-         * Set private members using topology token.
-         *
-         * @param topologyToken
-         */
-        public SaslClientCallbackHandler() {
-        }
-
-        /**
          * Implementation used to respond to SASL tokens from server.
          *
          * @param callbacks


[15/37] storm git commit: More fixes.

Posted by kn...@apache.org.
More fixes.


Project: http://git-wip-us.apache.org/repos/asf/storm/repo
Commit: http://git-wip-us.apache.org/repos/asf/storm/commit/80c60d84
Tree: http://git-wip-us.apache.org/repos/asf/storm/tree/80c60d84
Diff: http://git-wip-us.apache.org/repos/asf/storm/diff/80c60d84

Branch: refs/heads/master
Commit: 80c60d84df16ce35f750f981526e57cfef18e48f
Parents: 25aae7b
Author: Kyle Nusbaum <Ky...@gmail.com>
Authored: Mon Nov 9 16:56:05 2015 -0600
Committer: Kyle Nusbaum <Ky...@gmail.com>
Committed: Mon Nov 9 16:56:05 2015 -0600

----------------------------------------------------------------------
 .../backtype/storm/messaging/netty/KerberosSaslClientHandler.java | 3 +--
 1 file changed, 1 insertion(+), 2 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/storm/blob/80c60d84/storm-core/src/jvm/backtype/storm/messaging/netty/KerberosSaslClientHandler.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/backtype/storm/messaging/netty/KerberosSaslClientHandler.java b/storm-core/src/jvm/backtype/storm/messaging/netty/KerberosSaslClientHandler.java
index ee0e41d..698800d 100644
--- a/storm-core/src/jvm/backtype/storm/messaging/netty/KerberosSaslClientHandler.java
+++ b/storm-core/src/jvm/backtype/storm/messaging/netty/KerberosSaslClientHandler.java
@@ -145,8 +145,7 @@ public class KerberosSaslClientHandler extends SimpleChannelUpstreamHandler {
             // server.
             SaslMessageToken saslResponse = new SaslMessageToken(responseToServer);
             channel.write(saslResponse);
-        }
-        else {
+        } else {
             LOG.error("Unexpected message from server: {}", event.getMessage());
         }
     }


[34/37] storm git commit: Adding config validation for pacemaker auth.

Posted by kn...@apache.org.
Adding config validation for pacemaker auth.


Project: http://git-wip-us.apache.org/repos/asf/storm/repo
Commit: http://git-wip-us.apache.org/repos/asf/storm/commit/9bfb26c8
Tree: http://git-wip-us.apache.org/repos/asf/storm/tree/9bfb26c8
Diff: http://git-wip-us.apache.org/repos/asf/storm/diff/9bfb26c8

Branch: refs/heads/master
Commit: 9bfb26c840b0b56bb12cd857645625b96e9102e7
Parents: c2f1da0
Author: Kyle Nusbaum <Ky...@gmail.com>
Authored: Mon Nov 23 13:52:10 2015 -0600
Committer: Kyle Nusbaum <Ky...@gmail.com>
Committed: Mon Nov 23 13:52:10 2015 -0600

----------------------------------------------------------------------
 storm-core/src/jvm/backtype/storm/Config.java   |  2 +-
 .../storm/validation/ConfigValidation.java      | 20 +++++++++++++++++++-
 .../jvm/backtype/storm/TestConfigValidate.java  | 18 ++++++++++++++++++
 3 files changed, 38 insertions(+), 2 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/storm/blob/9bfb26c8/storm-core/src/jvm/backtype/storm/Config.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/backtype/storm/Config.java b/storm-core/src/jvm/backtype/storm/Config.java
index ee7533b..13102b9 100644
--- a/storm-core/src/jvm/backtype/storm/Config.java
+++ b/storm-core/src/jvm/backtype/storm/Config.java
@@ -829,7 +829,7 @@ public class Config extends HashMap<String, Object> {
      * DIGEST or KERBEROS, the client can only write to the server (no reads).
      * This is intended to provide a primitive form of access-control.
      */
-    @isString
+    @CustomValidator(validatorClass=PacemakerAuthTypeValidator.class)
     public static final String PACEMAKER_AUTH_METHOD = "pacemaker.auth.method";
     
     /**

http://git-wip-us.apache.org/repos/asf/storm/blob/9bfb26c8/storm-core/src/jvm/backtype/storm/validation/ConfigValidation.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/backtype/storm/validation/ConfigValidation.java b/storm-core/src/jvm/backtype/storm/validation/ConfigValidation.java
index 4e4a4f4..44ec967 100644
--- a/storm-core/src/jvm/backtype/storm/validation/ConfigValidation.java
+++ b/storm-core/src/jvm/backtype/storm/validation/ConfigValidation.java
@@ -477,6 +477,24 @@ public class ConfigValidation {
         }
     }
 
+    public static class PacemakerAuthTypeValidator extends Validator {
+
+        @Override
+        public void validateField(String name, Object o) {
+            if(o == null) {
+                throw new IllegalArgumentException("Field " + name + " must be set.");
+            }
+
+            if(o instanceof String &&
+               (((String)o).equals("NONE") ||
+                ((String)o).equals("DIGEST") ||
+                ((String)o).equals("KERBEROS"))) {
+                return;
+            }
+            throw new IllegalArgumentException("Field " + name + " must be one of \"NONE\", \"DIGEST\", or \"KERBEROS\"");
+        }
+    }
+
     /**
      * Methods for validating confs
      */
@@ -625,4 +643,4 @@ public class ConfigValidation {
         }
         return true;
     }
-}
\ No newline at end of file
+}
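
One detail worth calling out: the validator compares with String.equals against upper-case literals, so the setting is case-sensitive. A quick sketch of the resulting behavior (the tests below exercise the same paths):

    Map<String, Object> conf = new HashMap<String, Object>();
    conf.put(Config.PACEMAKER_AUTH_METHOD, "KERBEROS");
    ConfigValidation.validateFields(conf);  // passes
    conf.put(Config.PACEMAKER_AUTH_METHOD, "kerberos");
    ConfigValidation.validateFields(conf);  // throws IllegalArgumentException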

http://git-wip-us.apache.org/repos/asf/storm/blob/9bfb26c8/storm-core/test/jvm/backtype/storm/TestConfigValidate.java
----------------------------------------------------------------------
diff --git a/storm-core/test/jvm/backtype/storm/TestConfigValidate.java b/storm-core/test/jvm/backtype/storm/TestConfigValidate.java
index 0f53634..7f193cc 100644
--- a/storm-core/test/jvm/backtype/storm/TestConfigValidate.java
+++ b/storm-core/test/jvm/backtype/storm/TestConfigValidate.java
@@ -41,6 +41,24 @@ public class TestConfigValidate {
     private static final Logger LOG = LoggerFactory.getLogger(TestConfigValidate.class);
 
     @Test
+    public void validPacemakerAuthTest() throws InstantiationException, IllegalAccessException, NoSuchFieldException, NoSuchMethodException, InvocationTargetException {
+        Map<String, Object> conf = new HashMap<String, Object>();
+        conf.put(Config.PACEMAKER_AUTH_METHOD, "NONE");
+        ConfigValidation.validateFields(conf);
+        conf.put(Config.PACEMAKER_AUTH_METHOD, "DIGEST");
+        ConfigValidation.validateFields(conf);
+        conf.put(Config.PACEMAKER_AUTH_METHOD, "KERBEROS");
+        ConfigValidation.validateFields(conf);
+    }
+
+    @Test(expected = IllegalArgumentException.class)
+    public void invalidPacemakerAuthTest() throws InstantiationException, IllegalAccessException, NoSuchFieldException, NoSuchMethodException, InvocationTargetException {
+        Map<String, Object> conf = new HashMap<String, Object>();
+        conf.put(Config.PACEMAKER_AUTH_METHOD, "invalid");
+        ConfigValidation.validateFields(conf);
+    }
+    
+    @Test
     public void validConfigTest() throws InstantiationException, IllegalAccessException, NoSuchFieldException, NoSuchMethodException, InvocationTargetException {
 
 


[26/37] storm git commit: Minor tweaks to documentation.

Posted by kn...@apache.org.
Minor tweaks to documentation.


Project: http://git-wip-us.apache.org/repos/asf/storm/repo
Commit: http://git-wip-us.apache.org/repos/asf/storm/commit/8c1ad3fb
Tree: http://git-wip-us.apache.org/repos/asf/storm/tree/8c1ad3fb
Diff: http://git-wip-us.apache.org/repos/asf/storm/diff/8c1ad3fb

Branch: refs/heads/master
Commit: 8c1ad3fb374f1f6311b9853c1c18820834e1f204
Parents: 37768ef
Author: Kyle Nusbaum <Ky...@gmail.com>
Authored: Tue Nov 17 15:50:38 2015 -0600
Committer: Kyle Nusbaum <Ky...@gmail.com>
Committed: Tue Nov 17 15:50:38 2015 -0600

----------------------------------------------------------------------
 docs/documentation/Pacemaker.md               | 2 +-
 storm-core/src/jvm/backtype/storm/Config.java | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/storm/blob/8c1ad3fb/docs/documentation/Pacemaker.md
----------------------------------------------------------------------
diff --git a/docs/documentation/Pacemaker.md b/docs/documentation/Pacemaker.md
index 06f2fa1..a9df08d 100644
--- a/docs/documentation/Pacemaker.md
+++ b/docs/documentation/Pacemaker.md
@@ -14,7 +14,7 @@ The corresponding Pacemaker client is a plugin for the `ClusterState` interface,
  - `pacemaker.host` : The host that the Pacemaker daemon is running on
  - `pacemaker.port` : The port that Pacemaker will listen on
  - `pacemaker.max.threads` : Maximum number of threads Pacemaker daemon will use to handle requests.
- - `pacemaker.childopts` : Any JVM parameters that need to go to the Pacemaker.
+ - `pacemaker.childopts` : Any JVM parameters that need to go to the Pacemaker. (used by storm-deploy project)
  - `pacemaker.auth.method` : The authentication method that is used (more info below)
 
 #### Example

http://git-wip-us.apache.org/repos/asf/storm/blob/8c1ad3fb/storm-core/src/jvm/backtype/storm/Config.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/backtype/storm/Config.java b/storm-core/src/jvm/backtype/storm/Config.java
index 7e441c5..ee7533b 100644
--- a/storm-core/src/jvm/backtype/storm/Config.java
+++ b/storm-core/src/jvm/backtype/storm/Config.java
@@ -816,7 +816,7 @@ public class Config extends HashMap<String, Object> {
 
     /**
      * This parameter is used by the storm-deploy project to configure the
-     * jvm options for the nimbus daemon.
+     * jvm options for the pacemaker daemon.
      */
     @isString
     public static final String PACEMAKER_CHILDOPTS = "pacemaker.childopts";
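Putting the pacemaker settings together with the validator from the earlier commit, a programmatically built config might look like this (values are illustrative; the constants all appear in this patch series):

    Map conf = Utils.readStormConfig();
    conf.put(Config.PACEMAKER_PORT, 6699);             // illustrative port
    conf.put(Config.PACEMAKER_MAX_THREADS, 50);        // <= 0 falls back to Netty's default worker count
    conf.put(Config.PACEMAKER_AUTH_METHOD, "NONE");    // must be NONE, DIGEST, or KERBEROS
    conf.put(Config.PACEMAKER_CHILDOPTS, "-Xmx1024m"); // consumed by the storm-deploy project
    ConfigValidation.validateFields(conf);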


[05/37] storm git commit: PACEMAKER OPEN SOURCE!

Posted by kn...@apache.org.
http://git-wip-us.apache.org/repos/asf/storm/blob/444ec05e/storm-core/src/jvm/backtype/storm/generated/ClusterWorkerHeartbeat.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/backtype/storm/generated/ClusterWorkerHeartbeat.java b/storm-core/src/jvm/backtype/storm/generated/ClusterWorkerHeartbeat.java
index 6c84d4c..52f2fe9 100644
--- a/storm-core/src/jvm/backtype/storm/generated/ClusterWorkerHeartbeat.java
+++ b/storm-core/src/jvm/backtype/storm/generated/ClusterWorkerHeartbeat.java
@@ -51,7 +51,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-9")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-30")
 public class ClusterWorkerHeartbeat implements org.apache.thrift.TBase<ClusterWorkerHeartbeat, ClusterWorkerHeartbeat._Fields>, java.io.Serializable, Cloneable, Comparable<ClusterWorkerHeartbeat> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("ClusterWorkerHeartbeat");
 

http://git-wip-us.apache.org/repos/asf/storm/blob/444ec05e/storm-core/src/jvm/backtype/storm/generated/CommonAggregateStats.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/backtype/storm/generated/CommonAggregateStats.java b/storm-core/src/jvm/backtype/storm/generated/CommonAggregateStats.java
index 108f74f..14eae4b 100644
--- a/storm-core/src/jvm/backtype/storm/generated/CommonAggregateStats.java
+++ b/storm-core/src/jvm/backtype/storm/generated/CommonAggregateStats.java
@@ -51,7 +51,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-9")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-30")
 public class CommonAggregateStats implements org.apache.thrift.TBase<CommonAggregateStats, CommonAggregateStats._Fields>, java.io.Serializable, Cloneable, Comparable<CommonAggregateStats> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("CommonAggregateStats");
 

http://git-wip-us.apache.org/repos/asf/storm/blob/444ec05e/storm-core/src/jvm/backtype/storm/generated/ComponentAggregateStats.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/backtype/storm/generated/ComponentAggregateStats.java b/storm-core/src/jvm/backtype/storm/generated/ComponentAggregateStats.java
index 8fe77b3..8b2d2e9 100644
--- a/storm-core/src/jvm/backtype/storm/generated/ComponentAggregateStats.java
+++ b/storm-core/src/jvm/backtype/storm/generated/ComponentAggregateStats.java
@@ -51,7 +51,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-9")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-30")
 public class ComponentAggregateStats implements org.apache.thrift.TBase<ComponentAggregateStats, ComponentAggregateStats._Fields>, java.io.Serializable, Cloneable, Comparable<ComponentAggregateStats> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("ComponentAggregateStats");
 

http://git-wip-us.apache.org/repos/asf/storm/blob/444ec05e/storm-core/src/jvm/backtype/storm/generated/ComponentCommon.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/backtype/storm/generated/ComponentCommon.java b/storm-core/src/jvm/backtype/storm/generated/ComponentCommon.java
index c69edf2..e9998ac 100644
--- a/storm-core/src/jvm/backtype/storm/generated/ComponentCommon.java
+++ b/storm-core/src/jvm/backtype/storm/generated/ComponentCommon.java
@@ -51,7 +51,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-9")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-30")
 public class ComponentCommon implements org.apache.thrift.TBase<ComponentCommon, ComponentCommon._Fields>, java.io.Serializable, Cloneable, Comparable<ComponentCommon> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("ComponentCommon");
 

http://git-wip-us.apache.org/repos/asf/storm/blob/444ec05e/storm-core/src/jvm/backtype/storm/generated/ComponentPageInfo.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/backtype/storm/generated/ComponentPageInfo.java b/storm-core/src/jvm/backtype/storm/generated/ComponentPageInfo.java
index c8fcf7b..97c205b 100644
--- a/storm-core/src/jvm/backtype/storm/generated/ComponentPageInfo.java
+++ b/storm-core/src/jvm/backtype/storm/generated/ComponentPageInfo.java
@@ -51,7 +51,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-9")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-30")
 public class ComponentPageInfo implements org.apache.thrift.TBase<ComponentPageInfo, ComponentPageInfo._Fields>, java.io.Serializable, Cloneable, Comparable<ComponentPageInfo> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("ComponentPageInfo");
 

http://git-wip-us.apache.org/repos/asf/storm/blob/444ec05e/storm-core/src/jvm/backtype/storm/generated/Credentials.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/backtype/storm/generated/Credentials.java b/storm-core/src/jvm/backtype/storm/generated/Credentials.java
index 71a764f..5b5ecfa 100644
--- a/storm-core/src/jvm/backtype/storm/generated/Credentials.java
+++ b/storm-core/src/jvm/backtype/storm/generated/Credentials.java
@@ -51,7 +51,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-9")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-30")
 public class Credentials implements org.apache.thrift.TBase<Credentials, Credentials._Fields>, java.io.Serializable, Cloneable, Comparable<Credentials> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("Credentials");
 

http://git-wip-us.apache.org/repos/asf/storm/blob/444ec05e/storm-core/src/jvm/backtype/storm/generated/DRPCExecutionException.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/backtype/storm/generated/DRPCExecutionException.java b/storm-core/src/jvm/backtype/storm/generated/DRPCExecutionException.java
index 1471537..aaae659 100644
--- a/storm-core/src/jvm/backtype/storm/generated/DRPCExecutionException.java
+++ b/storm-core/src/jvm/backtype/storm/generated/DRPCExecutionException.java
@@ -51,7 +51,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-9")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-30")
 public class DRPCExecutionException extends TException implements org.apache.thrift.TBase<DRPCExecutionException, DRPCExecutionException._Fields>, java.io.Serializable, Cloneable, Comparable<DRPCExecutionException> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("DRPCExecutionException");
 

http://git-wip-us.apache.org/repos/asf/storm/blob/444ec05e/storm-core/src/jvm/backtype/storm/generated/DRPCRequest.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/backtype/storm/generated/DRPCRequest.java b/storm-core/src/jvm/backtype/storm/generated/DRPCRequest.java
index 4648c36..db7e485 100644
--- a/storm-core/src/jvm/backtype/storm/generated/DRPCRequest.java
+++ b/storm-core/src/jvm/backtype/storm/generated/DRPCRequest.java
@@ -51,7 +51,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-9")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-30")
 public class DRPCRequest implements org.apache.thrift.TBase<DRPCRequest, DRPCRequest._Fields>, java.io.Serializable, Cloneable, Comparable<DRPCRequest> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("DRPCRequest");
 

http://git-wip-us.apache.org/repos/asf/storm/blob/444ec05e/storm-core/src/jvm/backtype/storm/generated/DebugOptions.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/backtype/storm/generated/DebugOptions.java b/storm-core/src/jvm/backtype/storm/generated/DebugOptions.java
index da7a45a..80104c6 100644
--- a/storm-core/src/jvm/backtype/storm/generated/DebugOptions.java
+++ b/storm-core/src/jvm/backtype/storm/generated/DebugOptions.java
@@ -51,7 +51,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-9")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-30")
 public class DebugOptions implements org.apache.thrift.TBase<DebugOptions, DebugOptions._Fields>, java.io.Serializable, Cloneable, Comparable<DebugOptions> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("DebugOptions");
 

http://git-wip-us.apache.org/repos/asf/storm/blob/444ec05e/storm-core/src/jvm/backtype/storm/generated/DistributedRPC.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/backtype/storm/generated/DistributedRPC.java b/storm-core/src/jvm/backtype/storm/generated/DistributedRPC.java
index 475746b..7a42f95 100644
--- a/storm-core/src/jvm/backtype/storm/generated/DistributedRPC.java
+++ b/storm-core/src/jvm/backtype/storm/generated/DistributedRPC.java
@@ -51,7 +51,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-9")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-30")
 public class DistributedRPC {
 
   public interface Iface {

http://git-wip-us.apache.org/repos/asf/storm/blob/444ec05e/storm-core/src/jvm/backtype/storm/generated/DistributedRPCInvocations.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/backtype/storm/generated/DistributedRPCInvocations.java b/storm-core/src/jvm/backtype/storm/generated/DistributedRPCInvocations.java
index d0c7f64..31d4ab0 100644
--- a/storm-core/src/jvm/backtype/storm/generated/DistributedRPCInvocations.java
+++ b/storm-core/src/jvm/backtype/storm/generated/DistributedRPCInvocations.java
@@ -51,7 +51,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-9")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-30")
 public class DistributedRPCInvocations {
 
   public interface Iface {

http://git-wip-us.apache.org/repos/asf/storm/blob/444ec05e/storm-core/src/jvm/backtype/storm/generated/ErrorInfo.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/backtype/storm/generated/ErrorInfo.java b/storm-core/src/jvm/backtype/storm/generated/ErrorInfo.java
index 515a436..f525c87 100644
--- a/storm-core/src/jvm/backtype/storm/generated/ErrorInfo.java
+++ b/storm-core/src/jvm/backtype/storm/generated/ErrorInfo.java
@@ -51,7 +51,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-9")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-30")
 public class ErrorInfo implements org.apache.thrift.TBase<ErrorInfo, ErrorInfo._Fields>, java.io.Serializable, Cloneable, Comparable<ErrorInfo> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("ErrorInfo");
 

http://git-wip-us.apache.org/repos/asf/storm/blob/444ec05e/storm-core/src/jvm/backtype/storm/generated/ExecutorAggregateStats.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/backtype/storm/generated/ExecutorAggregateStats.java b/storm-core/src/jvm/backtype/storm/generated/ExecutorAggregateStats.java
index 4993cc5..386de44 100644
--- a/storm-core/src/jvm/backtype/storm/generated/ExecutorAggregateStats.java
+++ b/storm-core/src/jvm/backtype/storm/generated/ExecutorAggregateStats.java
@@ -51,7 +51,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-9")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-30")
 public class ExecutorAggregateStats implements org.apache.thrift.TBase<ExecutorAggregateStats, ExecutorAggregateStats._Fields>, java.io.Serializable, Cloneable, Comparable<ExecutorAggregateStats> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("ExecutorAggregateStats");
 

http://git-wip-us.apache.org/repos/asf/storm/blob/444ec05e/storm-core/src/jvm/backtype/storm/generated/ExecutorInfo.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/backtype/storm/generated/ExecutorInfo.java b/storm-core/src/jvm/backtype/storm/generated/ExecutorInfo.java
index 3f27a51..b00502d 100644
--- a/storm-core/src/jvm/backtype/storm/generated/ExecutorInfo.java
+++ b/storm-core/src/jvm/backtype/storm/generated/ExecutorInfo.java
@@ -51,7 +51,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-9")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-30")
 public class ExecutorInfo implements org.apache.thrift.TBase<ExecutorInfo, ExecutorInfo._Fields>, java.io.Serializable, Cloneable, Comparable<ExecutorInfo> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("ExecutorInfo");
 

http://git-wip-us.apache.org/repos/asf/storm/blob/444ec05e/storm-core/src/jvm/backtype/storm/generated/ExecutorStats.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/backtype/storm/generated/ExecutorStats.java b/storm-core/src/jvm/backtype/storm/generated/ExecutorStats.java
index 4783c82..6d24371 100644
--- a/storm-core/src/jvm/backtype/storm/generated/ExecutorStats.java
+++ b/storm-core/src/jvm/backtype/storm/generated/ExecutorStats.java
@@ -51,7 +51,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-9")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-30")
 public class ExecutorStats implements org.apache.thrift.TBase<ExecutorStats, ExecutorStats._Fields>, java.io.Serializable, Cloneable, Comparable<ExecutorStats> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("ExecutorStats");
 

http://git-wip-us.apache.org/repos/asf/storm/blob/444ec05e/storm-core/src/jvm/backtype/storm/generated/ExecutorSummary.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/backtype/storm/generated/ExecutorSummary.java b/storm-core/src/jvm/backtype/storm/generated/ExecutorSummary.java
index 4f70a32..0a81e6e 100644
--- a/storm-core/src/jvm/backtype/storm/generated/ExecutorSummary.java
+++ b/storm-core/src/jvm/backtype/storm/generated/ExecutorSummary.java
@@ -51,7 +51,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-9")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-30")
 public class ExecutorSummary implements org.apache.thrift.TBase<ExecutorSummary, ExecutorSummary._Fields>, java.io.Serializable, Cloneable, Comparable<ExecutorSummary> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("ExecutorSummary");
 

http://git-wip-us.apache.org/repos/asf/storm/blob/444ec05e/storm-core/src/jvm/backtype/storm/generated/GetInfoOptions.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/backtype/storm/generated/GetInfoOptions.java b/storm-core/src/jvm/backtype/storm/generated/GetInfoOptions.java
index 90c811f..ef091d3 100644
--- a/storm-core/src/jvm/backtype/storm/generated/GetInfoOptions.java
+++ b/storm-core/src/jvm/backtype/storm/generated/GetInfoOptions.java
@@ -51,7 +51,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-9")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-30")
 public class GetInfoOptions implements org.apache.thrift.TBase<GetInfoOptions, GetInfoOptions._Fields>, java.io.Serializable, Cloneable, Comparable<GetInfoOptions> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("GetInfoOptions");
 

http://git-wip-us.apache.org/repos/asf/storm/blob/444ec05e/storm-core/src/jvm/backtype/storm/generated/GlobalStreamId.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/backtype/storm/generated/GlobalStreamId.java b/storm-core/src/jvm/backtype/storm/generated/GlobalStreamId.java
index 8a0e1c7..a1cac06 100644
--- a/storm-core/src/jvm/backtype/storm/generated/GlobalStreamId.java
+++ b/storm-core/src/jvm/backtype/storm/generated/GlobalStreamId.java
@@ -51,7 +51,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-9")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-30")
 public class GlobalStreamId implements org.apache.thrift.TBase<GlobalStreamId, GlobalStreamId._Fields>, java.io.Serializable, Cloneable, Comparable<GlobalStreamId> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("GlobalStreamId");
 

http://git-wip-us.apache.org/repos/asf/storm/blob/444ec05e/storm-core/src/jvm/backtype/storm/generated/HBAuthorizationException.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/backtype/storm/generated/HBAuthorizationException.java b/storm-core/src/jvm/backtype/storm/generated/HBAuthorizationException.java
new file mode 100644
index 0000000..c5f0d35
--- /dev/null
+++ b/storm-core/src/jvm/backtype/storm/generated/HBAuthorizationException.java
@@ -0,0 +1,406 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+/**
+ * Autogenerated by Thrift Compiler (0.9.2)
+ *
+ * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
+ *  @generated
+ */
+package backtype.storm.generated;
+
+import org.apache.thrift.scheme.IScheme;
+import org.apache.thrift.scheme.SchemeFactory;
+import org.apache.thrift.scheme.StandardScheme;
+
+import org.apache.thrift.scheme.TupleScheme;
+import org.apache.thrift.protocol.TTupleProtocol;
+import org.apache.thrift.protocol.TProtocolException;
+import org.apache.thrift.EncodingUtils;
+import org.apache.thrift.TException;
+import org.apache.thrift.async.AsyncMethodCallback;
+import org.apache.thrift.server.AbstractNonblockingServer.*;
+import java.util.List;
+import java.util.ArrayList;
+import java.util.Map;
+import java.util.HashMap;
+import java.util.EnumMap;
+import java.util.Set;
+import java.util.HashSet;
+import java.util.EnumSet;
+import java.util.Collections;
+import java.util.BitSet;
+import java.nio.ByteBuffer;
+import java.util.Arrays;
+import javax.annotation.Generated;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+@SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-30")
+public class HBAuthorizationException extends TException implements org.apache.thrift.TBase<HBAuthorizationException, HBAuthorizationException._Fields>, java.io.Serializable, Cloneable, Comparable<HBAuthorizationException> {
+  private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("HBAuthorizationException");
+
+  private static final org.apache.thrift.protocol.TField MSG_FIELD_DESC = new org.apache.thrift.protocol.TField("msg", org.apache.thrift.protocol.TType.STRING, (short)1);
+
+  private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>();
+  static {
+    schemes.put(StandardScheme.class, new HBAuthorizationExceptionStandardSchemeFactory());
+    schemes.put(TupleScheme.class, new HBAuthorizationExceptionTupleSchemeFactory());
+  }
+
+  private String msg; // required
+
+  /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
+  public enum _Fields implements org.apache.thrift.TFieldIdEnum {
+    MSG((short)1, "msg");
+
+    private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();
+
+    static {
+      for (_Fields field : EnumSet.allOf(_Fields.class)) {
+        byName.put(field.getFieldName(), field);
+      }
+    }
+
+    /**
+     * Find the _Fields constant that matches fieldId, or null if its not found.
+     */
+    public static _Fields findByThriftId(int fieldId) {
+      switch(fieldId) {
+        case 1: // MSG
+          return MSG;
+        default:
+          return null;
+      }
+    }
+
+    /**
+     * Find the _Fields constant that matches fieldId, throwing an exception
+     * if it is not found.
+     */
+    public static _Fields findByThriftIdOrThrow(int fieldId) {
+      _Fields fields = findByThriftId(fieldId);
+      if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
+      return fields;
+    }
+
+    /**
+     * Find the _Fields constant that matches name, or null if its not found.
+     */
+    public static _Fields findByName(String name) {
+      return byName.get(name);
+    }
+
+    private final short _thriftId;
+    private final String _fieldName;
+
+    _Fields(short thriftId, String fieldName) {
+      _thriftId = thriftId;
+      _fieldName = fieldName;
+    }
+
+    public short getThriftFieldId() {
+      return _thriftId;
+    }
+
+    public String getFieldName() {
+      return _fieldName;
+    }
+  }
+
+  // isset id assignments
+  public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
+  static {
+    Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
+    tmpMap.put(_Fields.MSG, new org.apache.thrift.meta_data.FieldMetaData("msg", org.apache.thrift.TFieldRequirementType.REQUIRED, 
+        new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)));
+    metaDataMap = Collections.unmodifiableMap(tmpMap);
+    org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(HBAuthorizationException.class, metaDataMap);
+  }
+
+  public HBAuthorizationException() {
+  }
+
+  public HBAuthorizationException(
+    String msg)
+  {
+    this();
+    this.msg = msg;
+  }
+
+  /**
+   * Performs a deep copy on <i>other</i>.
+   */
+  public HBAuthorizationException(HBAuthorizationException other) {
+    if (other.is_set_msg()) {
+      this.msg = other.msg;
+    }
+  }
+
+  public HBAuthorizationException deepCopy() {
+    return new HBAuthorizationException(this);
+  }
+
+  @Override
+  public void clear() {
+    this.msg = null;
+  }
+
+  public String get_msg() {
+    return this.msg;
+  }
+
+  public void set_msg(String msg) {
+    this.msg = msg;
+  }
+
+  public void unset_msg() {
+    this.msg = null;
+  }
+
+  /** Returns true if field msg is set (has been assigned a value) and false otherwise */
+  public boolean is_set_msg() {
+    return this.msg != null;
+  }
+
+  public void set_msg_isSet(boolean value) {
+    if (!value) {
+      this.msg = null;
+    }
+  }
+
+  public void setFieldValue(_Fields field, Object value) {
+    switch (field) {
+    case MSG:
+      if (value == null) {
+        unset_msg();
+      } else {
+        set_msg((String)value);
+      }
+      break;
+
+    }
+  }
+
+  public Object getFieldValue(_Fields field) {
+    switch (field) {
+    case MSG:
+      return get_msg();
+
+    }
+    throw new IllegalStateException();
+  }
+
+  /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
+  public boolean isSet(_Fields field) {
+    if (field == null) {
+      throw new IllegalArgumentException();
+    }
+
+    switch (field) {
+    case MSG:
+      return is_set_msg();
+    }
+    throw new IllegalStateException();
+  }
+
+  @Override
+  public boolean equals(Object that) {
+    if (that == null)
+      return false;
+    if (that instanceof HBAuthorizationException)
+      return this.equals((HBAuthorizationException)that);
+    return false;
+  }
+
+  public boolean equals(HBAuthorizationException that) {
+    if (that == null)
+      return false;
+
+    boolean this_present_msg = true && this.is_set_msg();
+    boolean that_present_msg = true && that.is_set_msg();
+    if (this_present_msg || that_present_msg) {
+      if (!(this_present_msg && that_present_msg))
+        return false;
+      if (!this.msg.equals(that.msg))
+        return false;
+    }
+
+    return true;
+  }
+
+  @Override
+  public int hashCode() {
+    List<Object> list = new ArrayList<Object>();
+
+    boolean present_msg = true && (is_set_msg());
+    list.add(present_msg);
+    if (present_msg)
+      list.add(msg);
+
+    return list.hashCode();
+  }
+
+  @Override
+  public int compareTo(HBAuthorizationException other) {
+    if (!getClass().equals(other.getClass())) {
+      return getClass().getName().compareTo(other.getClass().getName());
+    }
+
+    int lastComparison = 0;
+
+    lastComparison = Boolean.valueOf(is_set_msg()).compareTo(other.is_set_msg());
+    if (lastComparison != 0) {
+      return lastComparison;
+    }
+    if (is_set_msg()) {
+      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.msg, other.msg);
+      if (lastComparison != 0) {
+        return lastComparison;
+      }
+    }
+    return 0;
+  }
+
+  public _Fields fieldForId(int fieldId) {
+    return _Fields.findByThriftId(fieldId);
+  }
+
+  public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException {
+    schemes.get(iprot.getScheme()).getScheme().read(iprot, this);
+  }
+
+  public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {
+    schemes.get(oprot.getScheme()).getScheme().write(oprot, this);
+  }
+
+  @Override
+  public String toString() {
+    StringBuilder sb = new StringBuilder("HBAuthorizationException(");
+    boolean first = true;
+
+    sb.append("msg:");
+    if (this.msg == null) {
+      sb.append("null");
+    } else {
+      sb.append(this.msg);
+    }
+    first = false;
+    sb.append(")");
+    return sb.toString();
+  }
+
+  public void validate() throws org.apache.thrift.TException {
+    // check for required fields
+    if (!is_set_msg()) {
+      throw new org.apache.thrift.protocol.TProtocolException("Required field 'msg' is unset! Struct:" + toString());
+    }
+
+    // check for sub-struct validity
+  }
+
+  private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
+    try {
+      write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out)));
+    } catch (org.apache.thrift.TException te) {
+      throw new java.io.IOException(te);
+    }
+  }
+
+  private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
+    try {
+      read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));
+    } catch (org.apache.thrift.TException te) {
+      throw new java.io.IOException(te);
+    }
+  }
+
+  private static class HBAuthorizationExceptionStandardSchemeFactory implements SchemeFactory {
+    public HBAuthorizationExceptionStandardScheme getScheme() {
+      return new HBAuthorizationExceptionStandardScheme();
+    }
+  }
+
+  private static class HBAuthorizationExceptionStandardScheme extends StandardScheme<HBAuthorizationException> {
+
+    public void read(org.apache.thrift.protocol.TProtocol iprot, HBAuthorizationException struct) throws org.apache.thrift.TException {
+      org.apache.thrift.protocol.TField schemeField;
+      iprot.readStructBegin();
+      while (true)
+      {
+        schemeField = iprot.readFieldBegin();
+        if (schemeField.type == org.apache.thrift.protocol.TType.STOP) { 
+          break;
+        }
+        switch (schemeField.id) {
+          case 1: // MSG
+            if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {
+              struct.msg = iprot.readString();
+              struct.set_msg_isSet(true);
+            } else { 
+              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+            }
+            break;
+          default:
+            org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+        }
+        iprot.readFieldEnd();
+      }
+      iprot.readStructEnd();
+      struct.validate();
+    }
+
+    public void write(org.apache.thrift.protocol.TProtocol oprot, HBAuthorizationException struct) throws org.apache.thrift.TException {
+      struct.validate();
+
+      oprot.writeStructBegin(STRUCT_DESC);
+      if (struct.msg != null) {
+        oprot.writeFieldBegin(MSG_FIELD_DESC);
+        oprot.writeString(struct.msg);
+        oprot.writeFieldEnd();
+      }
+      oprot.writeFieldStop();
+      oprot.writeStructEnd();
+    }
+
+  }
+
+  private static class HBAuthorizationExceptionTupleSchemeFactory implements SchemeFactory {
+    public HBAuthorizationExceptionTupleScheme getScheme() {
+      return new HBAuthorizationExceptionTupleScheme();
+    }
+  }
+
+  private static class HBAuthorizationExceptionTupleScheme extends TupleScheme<HBAuthorizationException> {
+
+    @Override
+    public void write(org.apache.thrift.protocol.TProtocol prot, HBAuthorizationException struct) throws org.apache.thrift.TException {
+      TTupleProtocol oprot = (TTupleProtocol) prot;
+      oprot.writeString(struct.msg);
+    }
+
+    @Override
+    public void read(org.apache.thrift.protocol.TProtocol prot, HBAuthorizationException struct) throws org.apache.thrift.TException {
+      TTupleProtocol iprot = (TTupleProtocol) prot;
+      struct.msg = iprot.readString();
+      struct.set_msg_isSet(true);
+    }
+  }
+
+}
+

http://git-wip-us.apache.org/repos/asf/storm/blob/444ec05e/storm-core/src/jvm/backtype/storm/generated/HBExecutionException.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/backtype/storm/generated/HBExecutionException.java b/storm-core/src/jvm/backtype/storm/generated/HBExecutionException.java
new file mode 100644
index 0000000..2e21b43
--- /dev/null
+++ b/storm-core/src/jvm/backtype/storm/generated/HBExecutionException.java
@@ -0,0 +1,406 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+/**
+ * Autogenerated by Thrift Compiler (0.9.2)
+ *
+ * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
+ *  @generated
+ */
+package backtype.storm.generated;
+
+import org.apache.thrift.scheme.IScheme;
+import org.apache.thrift.scheme.SchemeFactory;
+import org.apache.thrift.scheme.StandardScheme;
+
+import org.apache.thrift.scheme.TupleScheme;
+import org.apache.thrift.protocol.TTupleProtocol;
+import org.apache.thrift.protocol.TProtocolException;
+import org.apache.thrift.EncodingUtils;
+import org.apache.thrift.TException;
+import org.apache.thrift.async.AsyncMethodCallback;
+import org.apache.thrift.server.AbstractNonblockingServer.*;
+import java.util.List;
+import java.util.ArrayList;
+import java.util.Map;
+import java.util.HashMap;
+import java.util.EnumMap;
+import java.util.Set;
+import java.util.HashSet;
+import java.util.EnumSet;
+import java.util.Collections;
+import java.util.BitSet;
+import java.nio.ByteBuffer;
+import java.util.Arrays;
+import javax.annotation.Generated;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+@SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-30")
+public class HBExecutionException extends TException implements org.apache.thrift.TBase<HBExecutionException, HBExecutionException._Fields>, java.io.Serializable, Cloneable, Comparable<HBExecutionException> {
+  private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("HBExecutionException");
+
+  private static final org.apache.thrift.protocol.TField MSG_FIELD_DESC = new org.apache.thrift.protocol.TField("msg", org.apache.thrift.protocol.TType.STRING, (short)1);
+
+  private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>();
+  static {
+    schemes.put(StandardScheme.class, new HBExecutionExceptionStandardSchemeFactory());
+    schemes.put(TupleScheme.class, new HBExecutionExceptionTupleSchemeFactory());
+  }
+
+  private String msg; // required
+
+  /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
+  public enum _Fields implements org.apache.thrift.TFieldIdEnum {
+    MSG((short)1, "msg");
+
+    private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();
+
+    static {
+      for (_Fields field : EnumSet.allOf(_Fields.class)) {
+        byName.put(field.getFieldName(), field);
+      }
+    }
+
+    /**
+     * Find the _Fields constant that matches fieldId, or null if its not found.
+     */
+    public static _Fields findByThriftId(int fieldId) {
+      switch(fieldId) {
+        case 1: // MSG
+          return MSG;
+        default:
+          return null;
+      }
+    }
+
+    /**
+     * Find the _Fields constant that matches fieldId, throwing an exception
+     * if it is not found.
+     */
+    public static _Fields findByThriftIdOrThrow(int fieldId) {
+      _Fields fields = findByThriftId(fieldId);
+      if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
+      return fields;
+    }
+
+    /**
+     * Find the _Fields constant that matches name, or null if its not found.
+     */
+    public static _Fields findByName(String name) {
+      return byName.get(name);
+    }
+
+    private final short _thriftId;
+    private final String _fieldName;
+
+    _Fields(short thriftId, String fieldName) {
+      _thriftId = thriftId;
+      _fieldName = fieldName;
+    }
+
+    public short getThriftFieldId() {
+      return _thriftId;
+    }
+
+    public String getFieldName() {
+      return _fieldName;
+    }
+  }
+
+  // isset id assignments
+  public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
+  static {
+    Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
+    tmpMap.put(_Fields.MSG, new org.apache.thrift.meta_data.FieldMetaData("msg", org.apache.thrift.TFieldRequirementType.REQUIRED, 
+        new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)));
+    metaDataMap = Collections.unmodifiableMap(tmpMap);
+    org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(HBExecutionException.class, metaDataMap);
+  }
+
+  public HBExecutionException() {
+  }
+
+  public HBExecutionException(
+    String msg)
+  {
+    this();
+    this.msg = msg;
+  }
+
+  /**
+   * Performs a deep copy on <i>other</i>.
+   */
+  public HBExecutionException(HBExecutionException other) {
+    if (other.is_set_msg()) {
+      this.msg = other.msg;
+    }
+  }
+
+  public HBExecutionException deepCopy() {
+    return new HBExecutionException(this);
+  }
+
+  @Override
+  public void clear() {
+    this.msg = null;
+  }
+
+  public String get_msg() {
+    return this.msg;
+  }
+
+  public void set_msg(String msg) {
+    this.msg = msg;
+  }
+
+  public void unset_msg() {
+    this.msg = null;
+  }
+
+  /** Returns true if field msg is set (has been assigned a value) and false otherwise */
+  public boolean is_set_msg() {
+    return this.msg != null;
+  }
+
+  public void set_msg_isSet(boolean value) {
+    if (!value) {
+      this.msg = null;
+    }
+  }
+
+  public void setFieldValue(_Fields field, Object value) {
+    switch (field) {
+    case MSG:
+      if (value == null) {
+        unset_msg();
+      } else {
+        set_msg((String)value);
+      }
+      break;
+
+    }
+  }
+
+  public Object getFieldValue(_Fields field) {
+    switch (field) {
+    case MSG:
+      return get_msg();
+
+    }
+    throw new IllegalStateException();
+  }
+
+  /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
+  public boolean isSet(_Fields field) {
+    if (field == null) {
+      throw new IllegalArgumentException();
+    }
+
+    switch (field) {
+    case MSG:
+      return is_set_msg();
+    }
+    throw new IllegalStateException();
+  }
+
+  @Override
+  public boolean equals(Object that) {
+    if (that == null)
+      return false;
+    if (that instanceof HBExecutionException)
+      return this.equals((HBExecutionException)that);
+    return false;
+  }
+
+  public boolean equals(HBExecutionException that) {
+    if (that == null)
+      return false;
+
+    boolean this_present_msg = true && this.is_set_msg();
+    boolean that_present_msg = true && that.is_set_msg();
+    if (this_present_msg || that_present_msg) {
+      if (!(this_present_msg && that_present_msg))
+        return false;
+      if (!this.msg.equals(that.msg))
+        return false;
+    }
+
+    return true;
+  }
+
+  @Override
+  public int hashCode() {
+    List<Object> list = new ArrayList<Object>();
+
+    boolean present_msg = true && (is_set_msg());
+    list.add(present_msg);
+    if (present_msg)
+      list.add(msg);
+
+    return list.hashCode();
+  }
+
+  @Override
+  public int compareTo(HBExecutionException other) {
+    if (!getClass().equals(other.getClass())) {
+      return getClass().getName().compareTo(other.getClass().getName());
+    }
+
+    int lastComparison = 0;
+
+    lastComparison = Boolean.valueOf(is_set_msg()).compareTo(other.is_set_msg());
+    if (lastComparison != 0) {
+      return lastComparison;
+    }
+    if (is_set_msg()) {
+      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.msg, other.msg);
+      if (lastComparison != 0) {
+        return lastComparison;
+      }
+    }
+    return 0;
+  }
+
+  public _Fields fieldForId(int fieldId) {
+    return _Fields.findByThriftId(fieldId);
+  }
+
+  public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException {
+    schemes.get(iprot.getScheme()).getScheme().read(iprot, this);
+  }
+
+  public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {
+    schemes.get(oprot.getScheme()).getScheme().write(oprot, this);
+  }
+
+  @Override
+  public String toString() {
+    StringBuilder sb = new StringBuilder("HBExecutionException(");
+    boolean first = true;
+
+    sb.append("msg:");
+    if (this.msg == null) {
+      sb.append("null");
+    } else {
+      sb.append(this.msg);
+    }
+    first = false;
+    sb.append(")");
+    return sb.toString();
+  }
+
+  public void validate() throws org.apache.thrift.TException {
+    // check for required fields
+    if (!is_set_msg()) {
+      throw new org.apache.thrift.protocol.TProtocolException("Required field 'msg' is unset! Struct:" + toString());
+    }
+
+    // check for sub-struct validity
+  }
+
+  private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
+    try {
+      write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out)));
+    } catch (org.apache.thrift.TException te) {
+      throw new java.io.IOException(te);
+    }
+  }
+
+  private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
+    try {
+      read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));
+    } catch (org.apache.thrift.TException te) {
+      throw new java.io.IOException(te);
+    }
+  }
+
+  private static class HBExecutionExceptionStandardSchemeFactory implements SchemeFactory {
+    public HBExecutionExceptionStandardScheme getScheme() {
+      return new HBExecutionExceptionStandardScheme();
+    }
+  }
+
+  private static class HBExecutionExceptionStandardScheme extends StandardScheme<HBExecutionException> {
+
+    public void read(org.apache.thrift.protocol.TProtocol iprot, HBExecutionException struct) throws org.apache.thrift.TException {
+      org.apache.thrift.protocol.TField schemeField;
+      iprot.readStructBegin();
+      while (true)
+      {
+        schemeField = iprot.readFieldBegin();
+        if (schemeField.type == org.apache.thrift.protocol.TType.STOP) { 
+          break;
+        }
+        switch (schemeField.id) {
+          case 1: // MSG
+            if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {
+              struct.msg = iprot.readString();
+              struct.set_msg_isSet(true);
+            } else { 
+              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+            }
+            break;
+          default:
+            org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+        }
+        iprot.readFieldEnd();
+      }
+      iprot.readStructEnd();
+      struct.validate();
+    }
+
+    public void write(org.apache.thrift.protocol.TProtocol oprot, HBExecutionException struct) throws org.apache.thrift.TException {
+      struct.validate();
+
+      oprot.writeStructBegin(STRUCT_DESC);
+      if (struct.msg != null) {
+        oprot.writeFieldBegin(MSG_FIELD_DESC);
+        oprot.writeString(struct.msg);
+        oprot.writeFieldEnd();
+      }
+      oprot.writeFieldStop();
+      oprot.writeStructEnd();
+    }
+
+  }
+
+  private static class HBExecutionExceptionTupleSchemeFactory implements SchemeFactory {
+    public HBExecutionExceptionTupleScheme getScheme() {
+      return new HBExecutionExceptionTupleScheme();
+    }
+  }
+
+  private static class HBExecutionExceptionTupleScheme extends TupleScheme<HBExecutionException> {
+
+    @Override
+    public void write(org.apache.thrift.protocol.TProtocol prot, HBExecutionException struct) throws org.apache.thrift.TException {
+      TTupleProtocol oprot = (TTupleProtocol) prot;
+      oprot.writeString(struct.msg);
+    }
+
+    @Override
+    public void read(org.apache.thrift.protocol.TProtocol prot, HBExecutionException struct) throws org.apache.thrift.TException {
+      TTupleProtocol iprot = (TTupleProtocol) prot;
+      struct.msg = iprot.readString();
+      struct.set_msg_isSet(true);
+    }
+  }
+
+}
+
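
Both exception classes above are identical single-field TException subclasses carrying a required msg. As a rough sketch of how a server-side handler might raise them (the PulseAuthorizer class and fetch helper are invented for illustration, not Storm code):

```
import backtype.storm.generated.HBAuthorizationException;
import backtype.storm.generated.HBExecutionException;

public class PulseAuthorizer {
  // Reject unauthenticated reads; wrap store failures in HBExecutionException.
  public byte[] readPulse(String user, String path)
      throws HBAuthorizationException, HBExecutionException {
    if (user == null) {
      throw new HBAuthorizationException("user is not authorized to read " + path);
    }
    try {
      return fetch(path); // placeholder for the actual store lookup
    } catch (Exception e) {
      throw new HBExecutionException("failed to read " + path + ": " + e.getMessage());
    }
  }

  private byte[] fetch(String path) {
    return new byte[0]; // stand-in; a real store would return the stored pulse bytes
  }
}
```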

http://git-wip-us.apache.org/repos/asf/storm/blob/444ec05e/storm-core/src/jvm/backtype/storm/generated/HBMessage.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/backtype/storm/generated/HBMessage.java b/storm-core/src/jvm/backtype/storm/generated/HBMessage.java
new file mode 100644
index 0000000..0a2290d
--- /dev/null
+++ b/storm-core/src/jvm/backtype/storm/generated/HBMessage.java
@@ -0,0 +1,636 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+/**
+ * Autogenerated by Thrift Compiler (0.9.2)
+ *
+ * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
+ *  @generated
+ */
+package backtype.storm.generated;
+
+import org.apache.thrift.scheme.IScheme;
+import org.apache.thrift.scheme.SchemeFactory;
+import org.apache.thrift.scheme.StandardScheme;
+
+import org.apache.thrift.scheme.TupleScheme;
+import org.apache.thrift.protocol.TTupleProtocol;
+import org.apache.thrift.protocol.TProtocolException;
+import org.apache.thrift.EncodingUtils;
+import org.apache.thrift.TException;
+import org.apache.thrift.async.AsyncMethodCallback;
+import org.apache.thrift.server.AbstractNonblockingServer.*;
+import java.util.List;
+import java.util.ArrayList;
+import java.util.Map;
+import java.util.HashMap;
+import java.util.EnumMap;
+import java.util.Set;
+import java.util.HashSet;
+import java.util.EnumSet;
+import java.util.Collections;
+import java.util.BitSet;
+import java.nio.ByteBuffer;
+import java.util.Arrays;
+import javax.annotation.Generated;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+@SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-30")
+public class HBMessage implements org.apache.thrift.TBase<HBMessage, HBMessage._Fields>, java.io.Serializable, Cloneable, Comparable<HBMessage> {
+  private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("HBMessage");
+
+  private static final org.apache.thrift.protocol.TField TYPE_FIELD_DESC = new org.apache.thrift.protocol.TField("type", org.apache.thrift.protocol.TType.I32, (short)1);
+  private static final org.apache.thrift.protocol.TField DATA_FIELD_DESC = new org.apache.thrift.protocol.TField("data", org.apache.thrift.protocol.TType.STRUCT, (short)2);
+  private static final org.apache.thrift.protocol.TField MESSAGE_ID_FIELD_DESC = new org.apache.thrift.protocol.TField("message_id", org.apache.thrift.protocol.TType.I32, (short)3);
+
+  private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>();
+  static {
+    schemes.put(StandardScheme.class, new HBMessageStandardSchemeFactory());
+    schemes.put(TupleScheme.class, new HBMessageTupleSchemeFactory());
+  }
+
+  private HBServerMessageType type; // required
+  private HBMessageData data; // required
+  private int message_id; // optional
+
+  /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
+  public enum _Fields implements org.apache.thrift.TFieldIdEnum {
+    /**
+     * 
+     * @see HBServerMessageType
+     */
+    TYPE((short)1, "type"),
+    DATA((short)2, "data"),
+    MESSAGE_ID((short)3, "message_id");
+
+    private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();
+
+    static {
+      for (_Fields field : EnumSet.allOf(_Fields.class)) {
+        byName.put(field.getFieldName(), field);
+      }
+    }
+
+    /**
+     * Find the _Fields constant that matches fieldId, or null if its not found.
+     */
+    public static _Fields findByThriftId(int fieldId) {
+      switch(fieldId) {
+        case 1: // TYPE
+          return TYPE;
+        case 2: // DATA
+          return DATA;
+        case 3: // MESSAGE_ID
+          return MESSAGE_ID;
+        default:
+          return null;
+      }
+    }
+
+    /**
+     * Find the _Fields constant that matches fieldId, throwing an exception
+     * if it is not found.
+     */
+    public static _Fields findByThriftIdOrThrow(int fieldId) {
+      _Fields fields = findByThriftId(fieldId);
+      if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
+      return fields;
+    }
+
+    /**
+     * Find the _Fields constant that matches name, or null if its not found.
+     */
+    public static _Fields findByName(String name) {
+      return byName.get(name);
+    }
+
+    private final short _thriftId;
+    private final String _fieldName;
+
+    _Fields(short thriftId, String fieldName) {
+      _thriftId = thriftId;
+      _fieldName = fieldName;
+    }
+
+    public short getThriftFieldId() {
+      return _thriftId;
+    }
+
+    public String getFieldName() {
+      return _fieldName;
+    }
+  }
+
+  // isset id assignments
+  private static final int __MESSAGE_ID_ISSET_ID = 0;
+  private byte __isset_bitfield = 0;
+  private static final _Fields optionals[] = {_Fields.MESSAGE_ID};
+  public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
+  static {
+    Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
+    tmpMap.put(_Fields.TYPE, new org.apache.thrift.meta_data.FieldMetaData("type", org.apache.thrift.TFieldRequirementType.DEFAULT, 
+        new org.apache.thrift.meta_data.EnumMetaData(org.apache.thrift.protocol.TType.ENUM, HBServerMessageType.class)));
+    tmpMap.put(_Fields.DATA, new org.apache.thrift.meta_data.FieldMetaData("data", org.apache.thrift.TFieldRequirementType.DEFAULT, 
+        new org.apache.thrift.meta_data.StructMetaData(org.apache.thrift.protocol.TType.STRUCT, HBMessageData.class)));
+    tmpMap.put(_Fields.MESSAGE_ID, new org.apache.thrift.meta_data.FieldMetaData("message_id", org.apache.thrift.TFieldRequirementType.OPTIONAL, 
+        new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.I32)));
+    metaDataMap = Collections.unmodifiableMap(tmpMap);
+    org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(HBMessage.class, metaDataMap);
+  }
+
+  public HBMessage() {
+    this.message_id = -1;
+
+  }
+
+  public HBMessage(
+    HBServerMessageType type,
+    HBMessageData data)
+  {
+    this();
+    this.type = type;
+    this.data = data;
+  }
+
+  /**
+   * Performs a deep copy on <i>other</i>.
+   */
+  public HBMessage(HBMessage other) {
+    __isset_bitfield = other.__isset_bitfield;
+    if (other.is_set_type()) {
+      this.type = other.type;
+    }
+    if (other.is_set_data()) {
+      this.data = new HBMessageData(other.data);
+    }
+    this.message_id = other.message_id;
+  }
+
+  public HBMessage deepCopy() {
+    return new HBMessage(this);
+  }
+
+  @Override
+  public void clear() {
+    this.type = null;
+    this.data = null;
+    this.message_id = -1;
+
+  }
+
+  /**
+   * 
+   * @see HBServerMessageType
+   */
+  public HBServerMessageType get_type() {
+    return this.type;
+  }
+
+  /**
+   * 
+   * @see HBServerMessageType
+   */
+  public void set_type(HBServerMessageType type) {
+    this.type = type;
+  }
+
+  public void unset_type() {
+    this.type = null;
+  }
+
+  /** Returns true if field type is set (has been assigned a value) and false otherwise */
+  public boolean is_set_type() {
+    return this.type != null;
+  }
+
+  public void set_type_isSet(boolean value) {
+    if (!value) {
+      this.type = null;
+    }
+  }
+
+  public HBMessageData get_data() {
+    return this.data;
+  }
+
+  public void set_data(HBMessageData data) {
+    this.data = data;
+  }
+
+  public void unset_data() {
+    this.data = null;
+  }
+
+  /** Returns true if field data is set (has been assigned a value) and false otherwise */
+  public boolean is_set_data() {
+    return this.data != null;
+  }
+
+  public void set_data_isSet(boolean value) {
+    if (!value) {
+      this.data = null;
+    }
+  }
+
+  public int get_message_id() {
+    return this.message_id;
+  }
+
+  public void set_message_id(int message_id) {
+    this.message_id = message_id;
+    set_message_id_isSet(true);
+  }
+
+  public void unset_message_id() {
+    __isset_bitfield = EncodingUtils.clearBit(__isset_bitfield, __MESSAGE_ID_ISSET_ID);
+  }
+
+  /** Returns true if field message_id is set (has been assigned a value) and false otherwise */
+  public boolean is_set_message_id() {
+    return EncodingUtils.testBit(__isset_bitfield, __MESSAGE_ID_ISSET_ID);
+  }
+
+  public void set_message_id_isSet(boolean value) {
+    __isset_bitfield = EncodingUtils.setBit(__isset_bitfield, __MESSAGE_ID_ISSET_ID, value);
+  }
+
+  public void setFieldValue(_Fields field, Object value) {
+    switch (field) {
+    case TYPE:
+      if (value == null) {
+        unset_type();
+      } else {
+        set_type((HBServerMessageType)value);
+      }
+      break;
+
+    case DATA:
+      if (value == null) {
+        unset_data();
+      } else {
+        set_data((HBMessageData)value);
+      }
+      break;
+
+    case MESSAGE_ID:
+      if (value == null) {
+        unset_message_id();
+      } else {
+        set_message_id((Integer)value);
+      }
+      break;
+
+    }
+  }
+
+  public Object getFieldValue(_Fields field) {
+    switch (field) {
+    case TYPE:
+      return get_type();
+
+    case DATA:
+      return get_data();
+
+    case MESSAGE_ID:
+      return Integer.valueOf(get_message_id());
+
+    }
+    throw new IllegalStateException();
+  }
+
+  /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
+  public boolean isSet(_Fields field) {
+    if (field == null) {
+      throw new IllegalArgumentException();
+    }
+
+    switch (field) {
+    case TYPE:
+      return is_set_type();
+    case DATA:
+      return is_set_data();
+    case MESSAGE_ID:
+      return is_set_message_id();
+    }
+    throw new IllegalStateException();
+  }
+
+  @Override
+  public boolean equals(Object that) {
+    if (that == null)
+      return false;
+    if (that instanceof HBMessage)
+      return this.equals((HBMessage)that);
+    return false;
+  }
+
+  public boolean equals(HBMessage that) {
+    if (that == null)
+      return false;
+
+    boolean this_present_type = true && this.is_set_type();
+    boolean that_present_type = true && that.is_set_type();
+    if (this_present_type || that_present_type) {
+      if (!(this_present_type && that_present_type))
+        return false;
+      if (!this.type.equals(that.type))
+        return false;
+    }
+
+    boolean this_present_data = true && this.is_set_data();
+    boolean that_present_data = true && that.is_set_data();
+    if (this_present_data || that_present_data) {
+      if (!(this_present_data && that_present_data))
+        return false;
+      if (!this.data.equals(that.data))
+        return false;
+    }
+
+    boolean this_present_message_id = true && this.is_set_message_id();
+    boolean that_present_message_id = true && that.is_set_message_id();
+    if (this_present_message_id || that_present_message_id) {
+      if (!(this_present_message_id && that_present_message_id))
+        return false;
+      if (this.message_id != that.message_id)
+        return false;
+    }
+
+    return true;
+  }
+
+  @Override
+  public int hashCode() {
+    List<Object> list = new ArrayList<Object>();
+
+    boolean present_type = true && (is_set_type());
+    list.add(present_type);
+    if (present_type)
+      list.add(type.getValue());
+
+    boolean present_data = true && (is_set_data());
+    list.add(present_data);
+    if (present_data)
+      list.add(data);
+
+    boolean present_message_id = true && (is_set_message_id());
+    list.add(present_message_id);
+    if (present_message_id)
+      list.add(message_id);
+
+    return list.hashCode();
+  }
+
+  @Override
+  public int compareTo(HBMessage other) {
+    if (!getClass().equals(other.getClass())) {
+      return getClass().getName().compareTo(other.getClass().getName());
+    }
+
+    int lastComparison = 0;
+
+    lastComparison = Boolean.valueOf(is_set_type()).compareTo(other.is_set_type());
+    if (lastComparison != 0) {
+      return lastComparison;
+    }
+    if (is_set_type()) {
+      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.type, other.type);
+      if (lastComparison != 0) {
+        return lastComparison;
+      }
+    }
+    lastComparison = Boolean.valueOf(is_set_data()).compareTo(other.is_set_data());
+    if (lastComparison != 0) {
+      return lastComparison;
+    }
+    if (is_set_data()) {
+      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.data, other.data);
+      if (lastComparison != 0) {
+        return lastComparison;
+      }
+    }
+    lastComparison = Boolean.valueOf(is_set_message_id()).compareTo(other.is_set_message_id());
+    if (lastComparison != 0) {
+      return lastComparison;
+    }
+    if (is_set_message_id()) {
+      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.message_id, other.message_id);
+      if (lastComparison != 0) {
+        return lastComparison;
+      }
+    }
+    return 0;
+  }
+
+  public _Fields fieldForId(int fieldId) {
+    return _Fields.findByThriftId(fieldId);
+  }
+
+  public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException {
+    schemes.get(iprot.getScheme()).getScheme().read(iprot, this);
+  }
+
+  public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {
+    schemes.get(oprot.getScheme()).getScheme().write(oprot, this);
+  }
+
+  @Override
+  public String toString() {
+    StringBuilder sb = new StringBuilder("HBMessage(");
+    boolean first = true;
+
+    sb.append("type:");
+    if (this.type == null) {
+      sb.append("null");
+    } else {
+      sb.append(this.type);
+    }
+    first = false;
+    if (!first) sb.append(", ");
+    sb.append("data:");
+    if (this.data == null) {
+      sb.append("null");
+    } else {
+      sb.append(this.data);
+    }
+    first = false;
+    if (is_set_message_id()) {
+      if (!first) sb.append(", ");
+      sb.append("message_id:");
+      sb.append(this.message_id);
+      first = false;
+    }
+    sb.append(")");
+    return sb.toString();
+  }
+
+  public void validate() throws org.apache.thrift.TException {
+    // check for required fields
+    // check for sub-struct validity
+  }
+
+  private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
+    try {
+      write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out)));
+    } catch (org.apache.thrift.TException te) {
+      throw new java.io.IOException(te);
+    }
+  }
+
+  private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
+    try {
+      // it doesn't seem like you should have to do this, but java serialization is wacky, and doesn't call the default constructor.
+      __isset_bitfield = 0;
+      read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));
+    } catch (org.apache.thrift.TException te) {
+      throw new java.io.IOException(te);
+    }
+  }
+
+  private static class HBMessageStandardSchemeFactory implements SchemeFactory {
+    public HBMessageStandardScheme getScheme() {
+      return new HBMessageStandardScheme();
+    }
+  }
+
+  private static class HBMessageStandardScheme extends StandardScheme<HBMessage> {
+
+    public void read(org.apache.thrift.protocol.TProtocol iprot, HBMessage struct) throws org.apache.thrift.TException {
+      org.apache.thrift.protocol.TField schemeField;
+      iprot.readStructBegin();
+      while (true)
+      {
+        schemeField = iprot.readFieldBegin();
+        if (schemeField.type == org.apache.thrift.protocol.TType.STOP) { 
+          break;
+        }
+        switch (schemeField.id) {
+          case 1: // TYPE
+            if (schemeField.type == org.apache.thrift.protocol.TType.I32) {
+              struct.type = backtype.storm.generated.HBServerMessageType.findByValue(iprot.readI32());
+              struct.set_type_isSet(true);
+            } else { 
+              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+            }
+            break;
+          case 2: // DATA
+            if (schemeField.type == org.apache.thrift.protocol.TType.STRUCT) {
+              struct.data = new HBMessageData();
+              struct.data.read(iprot);
+              struct.set_data_isSet(true);
+            } else { 
+              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+            }
+            break;
+          case 3: // MESSAGE_ID
+            if (schemeField.type == org.apache.thrift.protocol.TType.I32) {
+              struct.message_id = iprot.readI32();
+              struct.set_message_id_isSet(true);
+            } else { 
+              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+            }
+            break;
+          default:
+            org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+        }
+        iprot.readFieldEnd();
+      }
+      iprot.readStructEnd();
+      struct.validate();
+    }
+
+    public void write(org.apache.thrift.protocol.TProtocol oprot, HBMessage struct) throws org.apache.thrift.TException {
+      struct.validate();
+
+      oprot.writeStructBegin(STRUCT_DESC);
+      if (struct.type != null) {
+        oprot.writeFieldBegin(TYPE_FIELD_DESC);
+        oprot.writeI32(struct.type.getValue());
+        oprot.writeFieldEnd();
+      }
+      if (struct.data != null) {
+        oprot.writeFieldBegin(DATA_FIELD_DESC);
+        struct.data.write(oprot);
+        oprot.writeFieldEnd();
+      }
+      if (struct.is_set_message_id()) {
+        oprot.writeFieldBegin(MESSAGE_ID_FIELD_DESC);
+        oprot.writeI32(struct.message_id);
+        oprot.writeFieldEnd();
+      }
+      oprot.writeFieldStop();
+      oprot.writeStructEnd();
+    }
+
+  }
+
+  private static class HBMessageTupleSchemeFactory implements SchemeFactory {
+    public HBMessageTupleScheme getScheme() {
+      return new HBMessageTupleScheme();
+    }
+  }
+
+  private static class HBMessageTupleScheme extends TupleScheme<HBMessage> {
+
+    @Override
+    public void write(org.apache.thrift.protocol.TProtocol prot, HBMessage struct) throws org.apache.thrift.TException {
+      TTupleProtocol oprot = (TTupleProtocol) prot;
+      BitSet optionals = new BitSet();
+      if (struct.is_set_type()) {
+        optionals.set(0);
+      }
+      if (struct.is_set_data()) {
+        optionals.set(1);
+      }
+      if (struct.is_set_message_id()) {
+        optionals.set(2);
+      }
+      oprot.writeBitSet(optionals, 3);
+      if (struct.is_set_type()) {
+        oprot.writeI32(struct.type.getValue());
+      }
+      if (struct.is_set_data()) {
+        struct.data.write(oprot);
+      }
+      if (struct.is_set_message_id()) {
+        oprot.writeI32(struct.message_id);
+      }
+    }
+
+    @Override
+    public void read(org.apache.thrift.protocol.TProtocol prot, HBMessage struct) throws org.apache.thrift.TException {
+      TTupleProtocol iprot = (TTupleProtocol) prot;
+      BitSet incoming = iprot.readBitSet(3);
+      if (incoming.get(0)) {
+        struct.type = backtype.storm.generated.HBServerMessageType.findByValue(iprot.readI32());
+        struct.set_type_isSet(true);
+      }
+      if (incoming.get(1)) {
+        struct.data = new HBMessageData();
+        struct.data.read(iprot);
+        struct.set_data_isSet(true);
+      }
+      if (incoming.get(2)) {
+        struct.message_id = iprot.readI32();
+        struct.set_message_id_isSet(true);
+      }
+    }
+  }
+
+}
+
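
As a quick illustration of the new HBMessage struct, here is a hedged round-trip sketch using Thrift's TSerializer/TDeserializer (the set_path setter on HBMessageData is assumed from the generated naming convention used throughout these classes; the path and message id values are placeholders):

```
import org.apache.thrift.TException;
import org.apache.thrift.TSerializer;
import org.apache.thrift.TDeserializer;
import org.apache.thrift.protocol.TBinaryProtocol;

import backtype.storm.generated.HBMessage;
import backtype.storm.generated.HBMessageData;
import backtype.storm.generated.HBServerMessageType;

public class HBMessageRoundTrip {
  public static void main(String[] args) throws TException {
    // Build a GET_PULSE request for a heartbeat path.
    HBMessageData data = new HBMessageData();
    data.set_path("/pulses/some-topology-id");
    HBMessage request = new HBMessage(HBServerMessageType.GET_PULSE, data);
    request.set_message_id(42); // optional correlation id

    // Round-trip through the binary protocol.
    byte[] bytes = new TSerializer(new TBinaryProtocol.Factory()).serialize(request);
    HBMessage decoded = new HBMessage();
    new TDeserializer(new TBinaryProtocol.Factory()).deserialize(decoded, bytes);

    System.out.println(decoded); // e.g. HBMessage(type:GET_PULSE, data:..., message_id:42)
  }
}
```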


[19/37] storm git commit: Adding documentation for Pacemaker.

Posted by kn...@apache.org.
Adding documentation for Pacemaker.


Project: http://git-wip-us.apache.org/repos/asf/storm/repo
Commit: http://git-wip-us.apache.org/repos/asf/storm/commit/0d98bbef
Tree: http://git-wip-us.apache.org/repos/asf/storm/tree/0d98bbef
Diff: http://git-wip-us.apache.org/repos/asf/storm/diff/0d98bbef

Branch: refs/heads/master
Commit: 0d98bbef6a9c3a38a70b2aa15cebb1d743c4fc9b
Parents: 4645c19
Author: Kyle Nusbaum <Ky...@gmail.com>
Authored: Tue Nov 17 15:17:37 2015 -0600
Committer: Kyle Nusbaum <Ky...@gmail.com>
Committed: Tue Nov 17 15:17:37 2015 -0600

----------------------------------------------------------------------
 docs/documentation/Pacemaker.md | 90 ++++++++++++++++++++++++++++++++++++
 1 file changed, 90 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/storm/blob/0d98bbef/docs/documentation/Pacemaker.md
----------------------------------------------------------------------
diff --git a/docs/documentation/Pacemaker.md b/docs/documentation/Pacemaker.md
new file mode 100644
index 0000000..827c139
--- /dev/null
+++ b/docs/documentation/Pacemaker.md
@@ -0,0 +1,90 @@
+# Pacemaker
+------
+
+### Intro
+Pacemaker is a Storm daemon designed to process heartbeats from workers. As a Storm cluster scales up, ZooKeeper becomes a bottleneck because of the high volume of heartbeat writes from workers. Maintaining consistency under that load forces ZooKeeper to perform many disk writes and generate heavy network traffic.
+
+Because heartbeats are ephemeral, they do not need to be persisted to disk or synced across nodes; an in-memory store will do. This is the role of Pacemaker. Pacemaker functions as a simple in-memory key/value store with ZooKeeper-like, directory-style keys and byte-array values.
+
+The corresponding Pacemaker client is a plugin for the `ClusterState` interface, `org.apache.storm.pacemaker.pacemaker_state_factory`. Heartbeat calls are funneled by the `ClusterState` produced by `pacemaker_state_factory` into the Pacemaker daemon, while other set/get operations are forwarded to ZooKeeper.
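+
+As a minimal sketch of that idea (illustrative only; the class and method names below are hypothetical, not Pacemaker's actual code), the store amounts to a concurrent map from slash-delimited paths to heartbeat bytes:
+```
+import java.util.Map;
+import java.util.concurrent.ConcurrentHashMap;
+
+class InMemoryPulseStore {
+    private final Map<String, byte[]> pulses = new ConcurrentHashMap<String, byte[]>();
+
+    void sendPulse(String path, byte[] details) { pulses.put(path, details); }
+
+    byte[] getPulse(String path) { return pulses.get(path); }
+
+    // Directory-style removal: drop every pulse at or under a path prefix.
+    void deletePath(String prefix) {
+        for (String key : pulses.keySet()) {
+            if (key.startsWith(prefix)) pulses.remove(key);
+        }
+    }
+}
+```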
+
+
+### Configuration
+
+ - `pacemaker.host` : The host on which the Pacemaker daemon runs.
+ - `pacemaker.port` : The port on which Pacemaker listens.
+ - `pacemaker.max.threads` : The maximum number of threads the Pacemaker daemon will use to handle requests.
+ - `pacemaker.childopts` : Any JVM parameters to pass to the Pacemaker daemon.
+ - `pacemaker.auth.method` : The authentication method to use (see Security below).
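+
+Taken together, a cluster config that sets all of these might look like the following sketch (the host, thread count, and JVM options are placeholder values):
+```
+pacemaker.host: somehost.mycompany.com
+pacemaker.port: 6699
+pacemaker.max.threads: 50
+pacemaker.childopts: "-Xmx1024m"
+pacemaker.auth.method: NONE
+```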
+
+#### Example
+
+To get Pacemaker up and running, set the following option in the cluster config:
+```
+storm.cluster.state.store: "org.apache.storm.pacemaker.pacemaker_state_factory"
+```
+
+The Pacemaker host also needs to be set:
+```
+pacemaker.host: somehost.mycompany.com
+```
+
+Then start all of your daemons, including Pacemaker:
+```
+$ storm pacemaker
+```
+
+The Storm cluster should now be pushing all worker heartbeats through Pacemaker.
+
+### Security
+
+Digest (password-based) and Kerberos authentication are currently supported. Security currently covers only reads, not writes: anyone may write, but only authorized, authenticated users may read. This leaves the cluster open to DoS attacks and is an area for future development, but it keeps sensitive information away from unauthorized eyes, which was the main goal.
+
+#### Digest
+To configure digest authentication, set `pacemaker.auth.method: DIGEST` in the cluster config on the nodes hosting Nimbus and Pacemaker.
+The nodes must also have `java.security.auth.login.config` set to point to a jaas config file containing the following structure:
+```
+PacemakerDigest {
+    username="some username"
+    password="some password"
+};
+```
+
+Any node with these settings configured will be able to read from Pacemaker.
+Worker nodes need not have these configs set, and may keep `pacemaker.auth.method: NONE`, since they do not need to read from the Pacemaker daemon.
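+
+For example, assuming the JAAS file lives at `/etc/storm/jaas.conf` (a placeholder path), it can be wired in through the daemon childopts:
+```
+nimbus.childopts: "-Djava.security.auth.login.config=/etc/storm/jaas.conf"
+pacemaker.childopts: "-Djava.security.auth.login.config=/etc/storm/jaas.conf"
+```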
+
+#### Kerberos
+To configure Kerberos authentication, set `pacemaker.auth.method: KERBEROS` in the cluster config on the nodes hosting Nimbus and Pacemaker.
+The nodes must also have `java.security.auth.login.config` set to point to a jaas config.
+
+The jaas config on Nimbus must look something like this:
+```
+PacemakerClient {
+    com.sun.security.auth.module.Krb5LoginModule required
+    useKeyTab=true
+    keyTab="/etc/keytabs/nimbus.keytab"
+    storeKey=true
+    useTicketCache=false
+    serviceName="pacemaker"
+    principal="nimbus@MY.COMPANY.COM";
+};
+```
+
+The jaas config on Pacemaker must look something like this:
+```
+PacemakerServer {
+   com.sun.security.auth.module.Krb5LoginModule required
+   useKeyTab=true
+   keyTab="/etc/keytabs/pacemaker.keytab"
+   storeKey=true
+   useTicketCache=false
+   principal="pacemaker@MY.COMPANY.COM";
+};
+```
+
+The client's user principal in the `PacemakerClient` section on the Nimbus host must match the `nimbus.daemon.user` storm cluster config value.
+The client's `serviceName` value must match the server's user principal in the `PacemakerServer` section on the Pacemaker host.
+


[08/37] storm git commit: Reverting date change.

Posted by kn...@apache.org.
Reverting date change.


Project: http://git-wip-us.apache.org/repos/asf/storm/repo
Commit: http://git-wip-us.apache.org/repos/asf/storm/commit/a8ceb1ca
Tree: http://git-wip-us.apache.org/repos/asf/storm/tree/a8ceb1ca
Diff: http://git-wip-us.apache.org/repos/asf/storm/diff/a8ceb1ca

Branch: refs/heads/master
Commit: a8ceb1ca39dedd8b8b0ac9a599d0ebf771adca9a
Parents: 7f9d00d
Author: Kyle Nusbaum <Ky...@gmail.com>
Authored: Fri Oct 30 17:36:38 2015 -0500
Committer: Kyle Nusbaum <Ky...@gmail.com>
Committed: Fri Oct 30 17:36:38 2015 -0500

----------------------------------------------------------------------
 .../src/jvm/backtype/storm/generated/AlreadyAliveException.java    | 2 +-
 storm-core/src/jvm/backtype/storm/generated/Assignment.java        | 2 +-
 .../src/jvm/backtype/storm/generated/AuthorizationException.java   | 2 +-
 storm-core/src/jvm/backtype/storm/generated/Bolt.java              | 2 +-
 .../src/jvm/backtype/storm/generated/BoltAggregateStats.java       | 2 +-
 storm-core/src/jvm/backtype/storm/generated/BoltStats.java         | 2 +-
 storm-core/src/jvm/backtype/storm/generated/ClusterSummary.java    | 2 +-
 .../src/jvm/backtype/storm/generated/ClusterWorkerHeartbeat.java   | 2 +-
 .../src/jvm/backtype/storm/generated/CommonAggregateStats.java     | 2 +-
 .../src/jvm/backtype/storm/generated/ComponentAggregateStats.java  | 2 +-
 storm-core/src/jvm/backtype/storm/generated/ComponentCommon.java   | 2 +-
 storm-core/src/jvm/backtype/storm/generated/ComponentPageInfo.java | 2 +-
 storm-core/src/jvm/backtype/storm/generated/Credentials.java       | 2 +-
 .../src/jvm/backtype/storm/generated/DRPCExecutionException.java   | 2 +-
 storm-core/src/jvm/backtype/storm/generated/DRPCRequest.java       | 2 +-
 storm-core/src/jvm/backtype/storm/generated/DebugOptions.java      | 2 +-
 storm-core/src/jvm/backtype/storm/generated/DistributedRPC.java    | 2 +-
 .../jvm/backtype/storm/generated/DistributedRPCInvocations.java    | 2 +-
 storm-core/src/jvm/backtype/storm/generated/ErrorInfo.java         | 2 +-
 .../src/jvm/backtype/storm/generated/ExecutorAggregateStats.java   | 2 +-
 storm-core/src/jvm/backtype/storm/generated/ExecutorInfo.java      | 2 +-
 storm-core/src/jvm/backtype/storm/generated/ExecutorStats.java     | 2 +-
 storm-core/src/jvm/backtype/storm/generated/ExecutorSummary.java   | 2 +-
 storm-core/src/jvm/backtype/storm/generated/GetInfoOptions.java    | 2 +-
 storm-core/src/jvm/backtype/storm/generated/GlobalStreamId.java    | 2 +-
 .../src/jvm/backtype/storm/generated/HBAuthorizationException.java | 2 +-
 .../src/jvm/backtype/storm/generated/HBExecutionException.java     | 2 +-
 storm-core/src/jvm/backtype/storm/generated/HBMessage.java         | 2 +-
 storm-core/src/jvm/backtype/storm/generated/HBNodes.java           | 2 +-
 storm-core/src/jvm/backtype/storm/generated/HBPulse.java           | 2 +-
 storm-core/src/jvm/backtype/storm/generated/HBRecords.java         | 2 +-
 .../src/jvm/backtype/storm/generated/InvalidTopologyException.java | 2 +-
 storm-core/src/jvm/backtype/storm/generated/JavaObject.java        | 2 +-
 storm-core/src/jvm/backtype/storm/generated/KillOptions.java       | 2 +-
 storm-core/src/jvm/backtype/storm/generated/LSApprovedWorkers.java | 2 +-
 .../src/jvm/backtype/storm/generated/LSSupervisorAssignments.java  | 2 +-
 storm-core/src/jvm/backtype/storm/generated/LSSupervisorId.java    | 2 +-
 storm-core/src/jvm/backtype/storm/generated/LSWorkerHeartbeat.java | 2 +-
 storm-core/src/jvm/backtype/storm/generated/LocalAssignment.java   | 2 +-
 storm-core/src/jvm/backtype/storm/generated/LocalStateData.java    | 2 +-
 storm-core/src/jvm/backtype/storm/generated/LogConfig.java         | 2 +-
 storm-core/src/jvm/backtype/storm/generated/LogLevel.java          | 2 +-
 storm-core/src/jvm/backtype/storm/generated/Nimbus.java            | 2 +-
 storm-core/src/jvm/backtype/storm/generated/NimbusSummary.java     | 2 +-
 storm-core/src/jvm/backtype/storm/generated/NodeInfo.java          | 2 +-
 storm-core/src/jvm/backtype/storm/generated/NotAliveException.java | 2 +-
 storm-core/src/jvm/backtype/storm/generated/NullStruct.java        | 2 +-
 storm-core/src/jvm/backtype/storm/generated/RebalanceOptions.java  | 2 +-
 storm-core/src/jvm/backtype/storm/generated/ShellComponent.java    | 2 +-
 .../src/jvm/backtype/storm/generated/SpoutAggregateStats.java      | 2 +-
 storm-core/src/jvm/backtype/storm/generated/SpoutSpec.java         | 2 +-
 storm-core/src/jvm/backtype/storm/generated/SpoutStats.java        | 2 +-
 storm-core/src/jvm/backtype/storm/generated/StateSpoutSpec.java    | 2 +-
 storm-core/src/jvm/backtype/storm/generated/StormBase.java         | 2 +-
 storm-core/src/jvm/backtype/storm/generated/StormTopology.java     | 2 +-
 storm-core/src/jvm/backtype/storm/generated/StreamInfo.java        | 2 +-
 storm-core/src/jvm/backtype/storm/generated/SubmitOptions.java     | 2 +-
 storm-core/src/jvm/backtype/storm/generated/SupervisorInfo.java    | 2 +-
 storm-core/src/jvm/backtype/storm/generated/SupervisorSummary.java | 2 +-
 .../src/jvm/backtype/storm/generated/ThriftSerializedObject.java   | 2 +-
 storm-core/src/jvm/backtype/storm/generated/TopologyInfo.java      | 2 +-
 storm-core/src/jvm/backtype/storm/generated/TopologyPageInfo.java  | 2 +-
 storm-core/src/jvm/backtype/storm/generated/TopologyStats.java     | 2 +-
 storm-core/src/jvm/backtype/storm/generated/TopologySummary.java   | 2 +-
 storm-core/src/jvm/backtype/storm/generated/WorkerResources.java   | 2 +-
 65 files changed, 65 insertions(+), 65 deletions(-)
----------------------------------------------------------------------
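
Every hunk below has the same one-line shape: the Thrift compiler was re-run, and the only change in each of the 65 generated classes is the date attribute of its javax.annotation.Generated annotation. A minimal sketch of that pattern follows (the class name and body here are hypothetical placeholders; only the annotation form is taken from the generated sources below):

    import javax.annotation.Generated;

    // The Thrift 0.9.2 Java generator stamps each generated class with the
    // generation date, so regenerating the sources on a different day touches
    // every file even when no field, method, or wire format has changed.
    @Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-9")
    public class ExampleGeneratedStruct implements java.io.Serializable {
        // Hypothetical placeholder body; the real generated classes implement
        // org.apache.thrift.TBase with full read/write/serialization logic.
    }
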


http://git-wip-us.apache.org/repos/asf/storm/blob/a8ceb1ca/storm-core/src/jvm/backtype/storm/generated/AlreadyAliveException.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/backtype/storm/generated/AlreadyAliveException.java b/storm-core/src/jvm/backtype/storm/generated/AlreadyAliveException.java
index eb0d93f..fb2eee3 100644
--- a/storm-core/src/jvm/backtype/storm/generated/AlreadyAliveException.java
+++ b/storm-core/src/jvm/backtype/storm/generated/AlreadyAliveException.java
@@ -51,7 +51,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-30")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-9")
 public class AlreadyAliveException extends TException implements org.apache.thrift.TBase<AlreadyAliveException, AlreadyAliveException._Fields>, java.io.Serializable, Cloneable, Comparable<AlreadyAliveException> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("AlreadyAliveException");
 

http://git-wip-us.apache.org/repos/asf/storm/blob/a8ceb1ca/storm-core/src/jvm/backtype/storm/generated/Assignment.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/backtype/storm/generated/Assignment.java b/storm-core/src/jvm/backtype/storm/generated/Assignment.java
index 05198bf..dbc1cc9 100644
--- a/storm-core/src/jvm/backtype/storm/generated/Assignment.java
+++ b/storm-core/src/jvm/backtype/storm/generated/Assignment.java
@@ -51,7 +51,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-30")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-9")
 public class Assignment implements org.apache.thrift.TBase<Assignment, Assignment._Fields>, java.io.Serializable, Cloneable, Comparable<Assignment> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("Assignment");
 

http://git-wip-us.apache.org/repos/asf/storm/blob/a8ceb1ca/storm-core/src/jvm/backtype/storm/generated/AuthorizationException.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/backtype/storm/generated/AuthorizationException.java b/storm-core/src/jvm/backtype/storm/generated/AuthorizationException.java
index 2330391..69fff12 100644
--- a/storm-core/src/jvm/backtype/storm/generated/AuthorizationException.java
+++ b/storm-core/src/jvm/backtype/storm/generated/AuthorizationException.java
@@ -51,7 +51,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-30")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-9")
 public class AuthorizationException extends TException implements org.apache.thrift.TBase<AuthorizationException, AuthorizationException._Fields>, java.io.Serializable, Cloneable, Comparable<AuthorizationException> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("AuthorizationException");
 

http://git-wip-us.apache.org/repos/asf/storm/blob/a8ceb1ca/storm-core/src/jvm/backtype/storm/generated/Bolt.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/backtype/storm/generated/Bolt.java b/storm-core/src/jvm/backtype/storm/generated/Bolt.java
index 0c14b60..9ea4bef 100644
--- a/storm-core/src/jvm/backtype/storm/generated/Bolt.java
+++ b/storm-core/src/jvm/backtype/storm/generated/Bolt.java
@@ -51,7 +51,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-30")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-9")
 public class Bolt implements org.apache.thrift.TBase<Bolt, Bolt._Fields>, java.io.Serializable, Cloneable, Comparable<Bolt> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("Bolt");
 

http://git-wip-us.apache.org/repos/asf/storm/blob/a8ceb1ca/storm-core/src/jvm/backtype/storm/generated/BoltAggregateStats.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/backtype/storm/generated/BoltAggregateStats.java b/storm-core/src/jvm/backtype/storm/generated/BoltAggregateStats.java
index 3b8e38c..940e0b9 100644
--- a/storm-core/src/jvm/backtype/storm/generated/BoltAggregateStats.java
+++ b/storm-core/src/jvm/backtype/storm/generated/BoltAggregateStats.java
@@ -51,7 +51,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-30")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-9")
 public class BoltAggregateStats implements org.apache.thrift.TBase<BoltAggregateStats, BoltAggregateStats._Fields>, java.io.Serializable, Cloneable, Comparable<BoltAggregateStats> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("BoltAggregateStats");
 

http://git-wip-us.apache.org/repos/asf/storm/blob/a8ceb1ca/storm-core/src/jvm/backtype/storm/generated/BoltStats.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/backtype/storm/generated/BoltStats.java b/storm-core/src/jvm/backtype/storm/generated/BoltStats.java
index 6f64f14..c3ffc9f 100644
--- a/storm-core/src/jvm/backtype/storm/generated/BoltStats.java
+++ b/storm-core/src/jvm/backtype/storm/generated/BoltStats.java
@@ -51,7 +51,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-30")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-9")
 public class BoltStats implements org.apache.thrift.TBase<BoltStats, BoltStats._Fields>, java.io.Serializable, Cloneable, Comparable<BoltStats> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("BoltStats");
 

http://git-wip-us.apache.org/repos/asf/storm/blob/a8ceb1ca/storm-core/src/jvm/backtype/storm/generated/ClusterSummary.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/backtype/storm/generated/ClusterSummary.java b/storm-core/src/jvm/backtype/storm/generated/ClusterSummary.java
index 7935567..d23cdcb 100644
--- a/storm-core/src/jvm/backtype/storm/generated/ClusterSummary.java
+++ b/storm-core/src/jvm/backtype/storm/generated/ClusterSummary.java
@@ -51,7 +51,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-30")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-9")
 public class ClusterSummary implements org.apache.thrift.TBase<ClusterSummary, ClusterSummary._Fields>, java.io.Serializable, Cloneable, Comparable<ClusterSummary> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("ClusterSummary");
 

http://git-wip-us.apache.org/repos/asf/storm/blob/a8ceb1ca/storm-core/src/jvm/backtype/storm/generated/ClusterWorkerHeartbeat.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/backtype/storm/generated/ClusterWorkerHeartbeat.java b/storm-core/src/jvm/backtype/storm/generated/ClusterWorkerHeartbeat.java
index 52f2fe9..6c84d4c 100644
--- a/storm-core/src/jvm/backtype/storm/generated/ClusterWorkerHeartbeat.java
+++ b/storm-core/src/jvm/backtype/storm/generated/ClusterWorkerHeartbeat.java
@@ -51,7 +51,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-30")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-9")
 public class ClusterWorkerHeartbeat implements org.apache.thrift.TBase<ClusterWorkerHeartbeat, ClusterWorkerHeartbeat._Fields>, java.io.Serializable, Cloneable, Comparable<ClusterWorkerHeartbeat> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("ClusterWorkerHeartbeat");
 

http://git-wip-us.apache.org/repos/asf/storm/blob/a8ceb1ca/storm-core/src/jvm/backtype/storm/generated/CommonAggregateStats.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/backtype/storm/generated/CommonAggregateStats.java b/storm-core/src/jvm/backtype/storm/generated/CommonAggregateStats.java
index 14eae4b..108f74f 100644
--- a/storm-core/src/jvm/backtype/storm/generated/CommonAggregateStats.java
+++ b/storm-core/src/jvm/backtype/storm/generated/CommonAggregateStats.java
@@ -51,7 +51,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-30")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-9")
 public class CommonAggregateStats implements org.apache.thrift.TBase<CommonAggregateStats, CommonAggregateStats._Fields>, java.io.Serializable, Cloneable, Comparable<CommonAggregateStats> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("CommonAggregateStats");
 

http://git-wip-us.apache.org/repos/asf/storm/blob/a8ceb1ca/storm-core/src/jvm/backtype/storm/generated/ComponentAggregateStats.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/backtype/storm/generated/ComponentAggregateStats.java b/storm-core/src/jvm/backtype/storm/generated/ComponentAggregateStats.java
index 8b2d2e9..8fe77b3 100644
--- a/storm-core/src/jvm/backtype/storm/generated/ComponentAggregateStats.java
+++ b/storm-core/src/jvm/backtype/storm/generated/ComponentAggregateStats.java
@@ -51,7 +51,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-30")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-9")
 public class ComponentAggregateStats implements org.apache.thrift.TBase<ComponentAggregateStats, ComponentAggregateStats._Fields>, java.io.Serializable, Cloneable, Comparable<ComponentAggregateStats> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("ComponentAggregateStats");
 

http://git-wip-us.apache.org/repos/asf/storm/blob/a8ceb1ca/storm-core/src/jvm/backtype/storm/generated/ComponentCommon.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/backtype/storm/generated/ComponentCommon.java b/storm-core/src/jvm/backtype/storm/generated/ComponentCommon.java
index e9998ac..c69edf2 100644
--- a/storm-core/src/jvm/backtype/storm/generated/ComponentCommon.java
+++ b/storm-core/src/jvm/backtype/storm/generated/ComponentCommon.java
@@ -51,7 +51,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-30")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-9")
 public class ComponentCommon implements org.apache.thrift.TBase<ComponentCommon, ComponentCommon._Fields>, java.io.Serializable, Cloneable, Comparable<ComponentCommon> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("ComponentCommon");
 

http://git-wip-us.apache.org/repos/asf/storm/blob/a8ceb1ca/storm-core/src/jvm/backtype/storm/generated/ComponentPageInfo.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/backtype/storm/generated/ComponentPageInfo.java b/storm-core/src/jvm/backtype/storm/generated/ComponentPageInfo.java
index 97c205b..c8fcf7b 100644
--- a/storm-core/src/jvm/backtype/storm/generated/ComponentPageInfo.java
+++ b/storm-core/src/jvm/backtype/storm/generated/ComponentPageInfo.java
@@ -51,7 +51,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-30")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-9")
 public class ComponentPageInfo implements org.apache.thrift.TBase<ComponentPageInfo, ComponentPageInfo._Fields>, java.io.Serializable, Cloneable, Comparable<ComponentPageInfo> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("ComponentPageInfo");
 

http://git-wip-us.apache.org/repos/asf/storm/blob/a8ceb1ca/storm-core/src/jvm/backtype/storm/generated/Credentials.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/backtype/storm/generated/Credentials.java b/storm-core/src/jvm/backtype/storm/generated/Credentials.java
index 5b5ecfa..71a764f 100644
--- a/storm-core/src/jvm/backtype/storm/generated/Credentials.java
+++ b/storm-core/src/jvm/backtype/storm/generated/Credentials.java
@@ -51,7 +51,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-30")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-9")
 public class Credentials implements org.apache.thrift.TBase<Credentials, Credentials._Fields>, java.io.Serializable, Cloneable, Comparable<Credentials> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("Credentials");
 

http://git-wip-us.apache.org/repos/asf/storm/blob/a8ceb1ca/storm-core/src/jvm/backtype/storm/generated/DRPCExecutionException.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/backtype/storm/generated/DRPCExecutionException.java b/storm-core/src/jvm/backtype/storm/generated/DRPCExecutionException.java
index aaae659..1471537 100644
--- a/storm-core/src/jvm/backtype/storm/generated/DRPCExecutionException.java
+++ b/storm-core/src/jvm/backtype/storm/generated/DRPCExecutionException.java
@@ -51,7 +51,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-30")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-9")
 public class DRPCExecutionException extends TException implements org.apache.thrift.TBase<DRPCExecutionException, DRPCExecutionException._Fields>, java.io.Serializable, Cloneable, Comparable<DRPCExecutionException> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("DRPCExecutionException");
 

http://git-wip-us.apache.org/repos/asf/storm/blob/a8ceb1ca/storm-core/src/jvm/backtype/storm/generated/DRPCRequest.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/backtype/storm/generated/DRPCRequest.java b/storm-core/src/jvm/backtype/storm/generated/DRPCRequest.java
index db7e485..4648c36 100644
--- a/storm-core/src/jvm/backtype/storm/generated/DRPCRequest.java
+++ b/storm-core/src/jvm/backtype/storm/generated/DRPCRequest.java
@@ -51,7 +51,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-30")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-9")
 public class DRPCRequest implements org.apache.thrift.TBase<DRPCRequest, DRPCRequest._Fields>, java.io.Serializable, Cloneable, Comparable<DRPCRequest> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("DRPCRequest");
 

http://git-wip-us.apache.org/repos/asf/storm/blob/a8ceb1ca/storm-core/src/jvm/backtype/storm/generated/DebugOptions.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/backtype/storm/generated/DebugOptions.java b/storm-core/src/jvm/backtype/storm/generated/DebugOptions.java
index 80104c6..da7a45a 100644
--- a/storm-core/src/jvm/backtype/storm/generated/DebugOptions.java
+++ b/storm-core/src/jvm/backtype/storm/generated/DebugOptions.java
@@ -51,7 +51,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-30")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-9")
 public class DebugOptions implements org.apache.thrift.TBase<DebugOptions, DebugOptions._Fields>, java.io.Serializable, Cloneable, Comparable<DebugOptions> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("DebugOptions");
 

http://git-wip-us.apache.org/repos/asf/storm/blob/a8ceb1ca/storm-core/src/jvm/backtype/storm/generated/DistributedRPC.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/backtype/storm/generated/DistributedRPC.java b/storm-core/src/jvm/backtype/storm/generated/DistributedRPC.java
index 7a42f95..475746b 100644
--- a/storm-core/src/jvm/backtype/storm/generated/DistributedRPC.java
+++ b/storm-core/src/jvm/backtype/storm/generated/DistributedRPC.java
@@ -51,7 +51,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-30")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-9")
 public class DistributedRPC {
 
   public interface Iface {

http://git-wip-us.apache.org/repos/asf/storm/blob/a8ceb1ca/storm-core/src/jvm/backtype/storm/generated/DistributedRPCInvocations.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/backtype/storm/generated/DistributedRPCInvocations.java b/storm-core/src/jvm/backtype/storm/generated/DistributedRPCInvocations.java
index 31d4ab0..d0c7f64 100644
--- a/storm-core/src/jvm/backtype/storm/generated/DistributedRPCInvocations.java
+++ b/storm-core/src/jvm/backtype/storm/generated/DistributedRPCInvocations.java
@@ -51,7 +51,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-30")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-9")
 public class DistributedRPCInvocations {
 
   public interface Iface {

http://git-wip-us.apache.org/repos/asf/storm/blob/a8ceb1ca/storm-core/src/jvm/backtype/storm/generated/ErrorInfo.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/backtype/storm/generated/ErrorInfo.java b/storm-core/src/jvm/backtype/storm/generated/ErrorInfo.java
index f525c87..515a436 100644
--- a/storm-core/src/jvm/backtype/storm/generated/ErrorInfo.java
+++ b/storm-core/src/jvm/backtype/storm/generated/ErrorInfo.java
@@ -51,7 +51,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-30")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-9")
 public class ErrorInfo implements org.apache.thrift.TBase<ErrorInfo, ErrorInfo._Fields>, java.io.Serializable, Cloneable, Comparable<ErrorInfo> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("ErrorInfo");
 

http://git-wip-us.apache.org/repos/asf/storm/blob/a8ceb1ca/storm-core/src/jvm/backtype/storm/generated/ExecutorAggregateStats.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/backtype/storm/generated/ExecutorAggregateStats.java b/storm-core/src/jvm/backtype/storm/generated/ExecutorAggregateStats.java
index 386de44..4993cc5 100644
--- a/storm-core/src/jvm/backtype/storm/generated/ExecutorAggregateStats.java
+++ b/storm-core/src/jvm/backtype/storm/generated/ExecutorAggregateStats.java
@@ -51,7 +51,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-30")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-9")
 public class ExecutorAggregateStats implements org.apache.thrift.TBase<ExecutorAggregateStats, ExecutorAggregateStats._Fields>, java.io.Serializable, Cloneable, Comparable<ExecutorAggregateStats> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("ExecutorAggregateStats");
 

http://git-wip-us.apache.org/repos/asf/storm/blob/a8ceb1ca/storm-core/src/jvm/backtype/storm/generated/ExecutorInfo.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/backtype/storm/generated/ExecutorInfo.java b/storm-core/src/jvm/backtype/storm/generated/ExecutorInfo.java
index b00502d..3f27a51 100644
--- a/storm-core/src/jvm/backtype/storm/generated/ExecutorInfo.java
+++ b/storm-core/src/jvm/backtype/storm/generated/ExecutorInfo.java
@@ -51,7 +51,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-30")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-9")
 public class ExecutorInfo implements org.apache.thrift.TBase<ExecutorInfo, ExecutorInfo._Fields>, java.io.Serializable, Cloneable, Comparable<ExecutorInfo> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("ExecutorInfo");
 

http://git-wip-us.apache.org/repos/asf/storm/blob/a8ceb1ca/storm-core/src/jvm/backtype/storm/generated/ExecutorStats.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/backtype/storm/generated/ExecutorStats.java b/storm-core/src/jvm/backtype/storm/generated/ExecutorStats.java
index 6d24371..4783c82 100644
--- a/storm-core/src/jvm/backtype/storm/generated/ExecutorStats.java
+++ b/storm-core/src/jvm/backtype/storm/generated/ExecutorStats.java
@@ -51,7 +51,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-30")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-9")
 public class ExecutorStats implements org.apache.thrift.TBase<ExecutorStats, ExecutorStats._Fields>, java.io.Serializable, Cloneable, Comparable<ExecutorStats> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("ExecutorStats");
 

http://git-wip-us.apache.org/repos/asf/storm/blob/a8ceb1ca/storm-core/src/jvm/backtype/storm/generated/ExecutorSummary.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/backtype/storm/generated/ExecutorSummary.java b/storm-core/src/jvm/backtype/storm/generated/ExecutorSummary.java
index 0a81e6e..4f70a32 100644
--- a/storm-core/src/jvm/backtype/storm/generated/ExecutorSummary.java
+++ b/storm-core/src/jvm/backtype/storm/generated/ExecutorSummary.java
@@ -51,7 +51,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-30")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-9")
 public class ExecutorSummary implements org.apache.thrift.TBase<ExecutorSummary, ExecutorSummary._Fields>, java.io.Serializable, Cloneable, Comparable<ExecutorSummary> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("ExecutorSummary");
 

http://git-wip-us.apache.org/repos/asf/storm/blob/a8ceb1ca/storm-core/src/jvm/backtype/storm/generated/GetInfoOptions.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/backtype/storm/generated/GetInfoOptions.java b/storm-core/src/jvm/backtype/storm/generated/GetInfoOptions.java
index ef091d3..90c811f 100644
--- a/storm-core/src/jvm/backtype/storm/generated/GetInfoOptions.java
+++ b/storm-core/src/jvm/backtype/storm/generated/GetInfoOptions.java
@@ -51,7 +51,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-30")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-9")
 public class GetInfoOptions implements org.apache.thrift.TBase<GetInfoOptions, GetInfoOptions._Fields>, java.io.Serializable, Cloneable, Comparable<GetInfoOptions> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("GetInfoOptions");
 

http://git-wip-us.apache.org/repos/asf/storm/blob/a8ceb1ca/storm-core/src/jvm/backtype/storm/generated/GlobalStreamId.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/backtype/storm/generated/GlobalStreamId.java b/storm-core/src/jvm/backtype/storm/generated/GlobalStreamId.java
index a1cac06..8a0e1c7 100644
--- a/storm-core/src/jvm/backtype/storm/generated/GlobalStreamId.java
+++ b/storm-core/src/jvm/backtype/storm/generated/GlobalStreamId.java
@@ -51,7 +51,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-30")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-9")
 public class GlobalStreamId implements org.apache.thrift.TBase<GlobalStreamId, GlobalStreamId._Fields>, java.io.Serializable, Cloneable, Comparable<GlobalStreamId> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("GlobalStreamId");
 

http://git-wip-us.apache.org/repos/asf/storm/blob/a8ceb1ca/storm-core/src/jvm/backtype/storm/generated/HBAuthorizationException.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/backtype/storm/generated/HBAuthorizationException.java b/storm-core/src/jvm/backtype/storm/generated/HBAuthorizationException.java
index c5f0d35..cd5fed7 100644
--- a/storm-core/src/jvm/backtype/storm/generated/HBAuthorizationException.java
+++ b/storm-core/src/jvm/backtype/storm/generated/HBAuthorizationException.java
@@ -51,7 +51,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-30")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-9")
 public class HBAuthorizationException extends TException implements org.apache.thrift.TBase<HBAuthorizationException, HBAuthorizationException._Fields>, java.io.Serializable, Cloneable, Comparable<HBAuthorizationException> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("HBAuthorizationException");
 

http://git-wip-us.apache.org/repos/asf/storm/blob/a8ceb1ca/storm-core/src/jvm/backtype/storm/generated/HBExecutionException.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/backtype/storm/generated/HBExecutionException.java b/storm-core/src/jvm/backtype/storm/generated/HBExecutionException.java
index 2e21b43..bbc0ef4 100644
--- a/storm-core/src/jvm/backtype/storm/generated/HBExecutionException.java
+++ b/storm-core/src/jvm/backtype/storm/generated/HBExecutionException.java
@@ -51,7 +51,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-30")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-9")
 public class HBExecutionException extends TException implements org.apache.thrift.TBase<HBExecutionException, HBExecutionException._Fields>, java.io.Serializable, Cloneable, Comparable<HBExecutionException> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("HBExecutionException");
 

http://git-wip-us.apache.org/repos/asf/storm/blob/a8ceb1ca/storm-core/src/jvm/backtype/storm/generated/HBMessage.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/backtype/storm/generated/HBMessage.java b/storm-core/src/jvm/backtype/storm/generated/HBMessage.java
index 0a2290d..25bbd16 100644
--- a/storm-core/src/jvm/backtype/storm/generated/HBMessage.java
+++ b/storm-core/src/jvm/backtype/storm/generated/HBMessage.java
@@ -51,7 +51,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-30")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-9")
 public class HBMessage implements org.apache.thrift.TBase<HBMessage, HBMessage._Fields>, java.io.Serializable, Cloneable, Comparable<HBMessage> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("HBMessage");
 

http://git-wip-us.apache.org/repos/asf/storm/blob/a8ceb1ca/storm-core/src/jvm/backtype/storm/generated/HBNodes.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/backtype/storm/generated/HBNodes.java b/storm-core/src/jvm/backtype/storm/generated/HBNodes.java
index f23a49e..a6dc65d 100644
--- a/storm-core/src/jvm/backtype/storm/generated/HBNodes.java
+++ b/storm-core/src/jvm/backtype/storm/generated/HBNodes.java
@@ -51,7 +51,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-30")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-9")
 public class HBNodes implements org.apache.thrift.TBase<HBNodes, HBNodes._Fields>, java.io.Serializable, Cloneable, Comparable<HBNodes> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("HBNodes");
 

http://git-wip-us.apache.org/repos/asf/storm/blob/a8ceb1ca/storm-core/src/jvm/backtype/storm/generated/HBPulse.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/backtype/storm/generated/HBPulse.java b/storm-core/src/jvm/backtype/storm/generated/HBPulse.java
index 942a0ee..e9afbfc 100644
--- a/storm-core/src/jvm/backtype/storm/generated/HBPulse.java
+++ b/storm-core/src/jvm/backtype/storm/generated/HBPulse.java
@@ -51,7 +51,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-30")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-9")
 public class HBPulse implements org.apache.thrift.TBase<HBPulse, HBPulse._Fields>, java.io.Serializable, Cloneable, Comparable<HBPulse> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("HBPulse");
 

http://git-wip-us.apache.org/repos/asf/storm/blob/a8ceb1ca/storm-core/src/jvm/backtype/storm/generated/HBRecords.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/backtype/storm/generated/HBRecords.java b/storm-core/src/jvm/backtype/storm/generated/HBRecords.java
index 2df69bd..90ee8bd 100644
--- a/storm-core/src/jvm/backtype/storm/generated/HBRecords.java
+++ b/storm-core/src/jvm/backtype/storm/generated/HBRecords.java
@@ -51,7 +51,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-30")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-9")
 public class HBRecords implements org.apache.thrift.TBase<HBRecords, HBRecords._Fields>, java.io.Serializable, Cloneable, Comparable<HBRecords> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("HBRecords");
 

http://git-wip-us.apache.org/repos/asf/storm/blob/a8ceb1ca/storm-core/src/jvm/backtype/storm/generated/InvalidTopologyException.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/backtype/storm/generated/InvalidTopologyException.java b/storm-core/src/jvm/backtype/storm/generated/InvalidTopologyException.java
index 0a1a45d..003dae7 100644
--- a/storm-core/src/jvm/backtype/storm/generated/InvalidTopologyException.java
+++ b/storm-core/src/jvm/backtype/storm/generated/InvalidTopologyException.java
@@ -51,7 +51,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-30")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-9")
 public class InvalidTopologyException extends TException implements org.apache.thrift.TBase<InvalidTopologyException, InvalidTopologyException._Fields>, java.io.Serializable, Cloneable, Comparable<InvalidTopologyException> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("InvalidTopologyException");
 

http://git-wip-us.apache.org/repos/asf/storm/blob/a8ceb1ca/storm-core/src/jvm/backtype/storm/generated/JavaObject.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/backtype/storm/generated/JavaObject.java b/storm-core/src/jvm/backtype/storm/generated/JavaObject.java
index bf6f800..15d8fd7 100644
--- a/storm-core/src/jvm/backtype/storm/generated/JavaObject.java
+++ b/storm-core/src/jvm/backtype/storm/generated/JavaObject.java
@@ -51,7 +51,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-30")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-9")
 public class JavaObject implements org.apache.thrift.TBase<JavaObject, JavaObject._Fields>, java.io.Serializable, Cloneable, Comparable<JavaObject> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("JavaObject");
 

http://git-wip-us.apache.org/repos/asf/storm/blob/a8ceb1ca/storm-core/src/jvm/backtype/storm/generated/KillOptions.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/backtype/storm/generated/KillOptions.java b/storm-core/src/jvm/backtype/storm/generated/KillOptions.java
index a3bb76a..bebfbb1 100644
--- a/storm-core/src/jvm/backtype/storm/generated/KillOptions.java
+++ b/storm-core/src/jvm/backtype/storm/generated/KillOptions.java
@@ -51,7 +51,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-30")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-9")
 public class KillOptions implements org.apache.thrift.TBase<KillOptions, KillOptions._Fields>, java.io.Serializable, Cloneable, Comparable<KillOptions> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("KillOptions");
 

http://git-wip-us.apache.org/repos/asf/storm/blob/a8ceb1ca/storm-core/src/jvm/backtype/storm/generated/LSApprovedWorkers.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/backtype/storm/generated/LSApprovedWorkers.java b/storm-core/src/jvm/backtype/storm/generated/LSApprovedWorkers.java
index 505b362..75da228 100644
--- a/storm-core/src/jvm/backtype/storm/generated/LSApprovedWorkers.java
+++ b/storm-core/src/jvm/backtype/storm/generated/LSApprovedWorkers.java
@@ -51,7 +51,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-30")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-9")
 public class LSApprovedWorkers implements org.apache.thrift.TBase<LSApprovedWorkers, LSApprovedWorkers._Fields>, java.io.Serializable, Cloneable, Comparable<LSApprovedWorkers> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("LSApprovedWorkers");
 

http://git-wip-us.apache.org/repos/asf/storm/blob/a8ceb1ca/storm-core/src/jvm/backtype/storm/generated/LSSupervisorAssignments.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/backtype/storm/generated/LSSupervisorAssignments.java b/storm-core/src/jvm/backtype/storm/generated/LSSupervisorAssignments.java
index cc738c2..7eb3628 100644
--- a/storm-core/src/jvm/backtype/storm/generated/LSSupervisorAssignments.java
+++ b/storm-core/src/jvm/backtype/storm/generated/LSSupervisorAssignments.java
@@ -51,7 +51,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-30")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-9")
 public class LSSupervisorAssignments implements org.apache.thrift.TBase<LSSupervisorAssignments, LSSupervisorAssignments._Fields>, java.io.Serializable, Cloneable, Comparable<LSSupervisorAssignments> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("LSSupervisorAssignments");
 

http://git-wip-us.apache.org/repos/asf/storm/blob/a8ceb1ca/storm-core/src/jvm/backtype/storm/generated/LSSupervisorId.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/backtype/storm/generated/LSSupervisorId.java b/storm-core/src/jvm/backtype/storm/generated/LSSupervisorId.java
index d7f1b69..8b0241a 100644
--- a/storm-core/src/jvm/backtype/storm/generated/LSSupervisorId.java
+++ b/storm-core/src/jvm/backtype/storm/generated/LSSupervisorId.java
@@ -51,7 +51,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-30")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-9")
 public class LSSupervisorId implements org.apache.thrift.TBase<LSSupervisorId, LSSupervisorId._Fields>, java.io.Serializable, Cloneable, Comparable<LSSupervisorId> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("LSSupervisorId");
 

http://git-wip-us.apache.org/repos/asf/storm/blob/a8ceb1ca/storm-core/src/jvm/backtype/storm/generated/LSWorkerHeartbeat.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/backtype/storm/generated/LSWorkerHeartbeat.java b/storm-core/src/jvm/backtype/storm/generated/LSWorkerHeartbeat.java
index ecf4776..fc37eb1 100644
--- a/storm-core/src/jvm/backtype/storm/generated/LSWorkerHeartbeat.java
+++ b/storm-core/src/jvm/backtype/storm/generated/LSWorkerHeartbeat.java
@@ -51,7 +51,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-30")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-9")
 public class LSWorkerHeartbeat implements org.apache.thrift.TBase<LSWorkerHeartbeat, LSWorkerHeartbeat._Fields>, java.io.Serializable, Cloneable, Comparable<LSWorkerHeartbeat> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("LSWorkerHeartbeat");
 

http://git-wip-us.apache.org/repos/asf/storm/blob/a8ceb1ca/storm-core/src/jvm/backtype/storm/generated/LocalAssignment.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/backtype/storm/generated/LocalAssignment.java b/storm-core/src/jvm/backtype/storm/generated/LocalAssignment.java
index 8a9d91f..1e9f05c 100644
--- a/storm-core/src/jvm/backtype/storm/generated/LocalAssignment.java
+++ b/storm-core/src/jvm/backtype/storm/generated/LocalAssignment.java
@@ -51,7 +51,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-30")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-9")
 public class LocalAssignment implements org.apache.thrift.TBase<LocalAssignment, LocalAssignment._Fields>, java.io.Serializable, Cloneable, Comparable<LocalAssignment> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("LocalAssignment");
 

http://git-wip-us.apache.org/repos/asf/storm/blob/a8ceb1ca/storm-core/src/jvm/backtype/storm/generated/LocalStateData.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/backtype/storm/generated/LocalStateData.java b/storm-core/src/jvm/backtype/storm/generated/LocalStateData.java
index 3bca07b..e821149 100644
--- a/storm-core/src/jvm/backtype/storm/generated/LocalStateData.java
+++ b/storm-core/src/jvm/backtype/storm/generated/LocalStateData.java
@@ -51,7 +51,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-30")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-9")
 public class LocalStateData implements org.apache.thrift.TBase<LocalStateData, LocalStateData._Fields>, java.io.Serializable, Cloneable, Comparable<LocalStateData> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("LocalStateData");
 

http://git-wip-us.apache.org/repos/asf/storm/blob/a8ceb1ca/storm-core/src/jvm/backtype/storm/generated/LogConfig.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/backtype/storm/generated/LogConfig.java b/storm-core/src/jvm/backtype/storm/generated/LogConfig.java
index 192697c..6d537f8 100644
--- a/storm-core/src/jvm/backtype/storm/generated/LogConfig.java
+++ b/storm-core/src/jvm/backtype/storm/generated/LogConfig.java
@@ -51,7 +51,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-30")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-9")
 public class LogConfig implements org.apache.thrift.TBase<LogConfig, LogConfig._Fields>, java.io.Serializable, Cloneable, Comparable<LogConfig> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("LogConfig");
 

http://git-wip-us.apache.org/repos/asf/storm/blob/a8ceb1ca/storm-core/src/jvm/backtype/storm/generated/LogLevel.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/backtype/storm/generated/LogLevel.java b/storm-core/src/jvm/backtype/storm/generated/LogLevel.java
index 43aa7c9..1526bf3 100644
--- a/storm-core/src/jvm/backtype/storm/generated/LogLevel.java
+++ b/storm-core/src/jvm/backtype/storm/generated/LogLevel.java
@@ -51,7 +51,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-30")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-9")
 public class LogLevel implements org.apache.thrift.TBase<LogLevel, LogLevel._Fields>, java.io.Serializable, Cloneable, Comparable<LogLevel> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("LogLevel");
 

http://git-wip-us.apache.org/repos/asf/storm/blob/a8ceb1ca/storm-core/src/jvm/backtype/storm/generated/Nimbus.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/backtype/storm/generated/Nimbus.java b/storm-core/src/jvm/backtype/storm/generated/Nimbus.java
index 592d761..59f74fb 100644
--- a/storm-core/src/jvm/backtype/storm/generated/Nimbus.java
+++ b/storm-core/src/jvm/backtype/storm/generated/Nimbus.java
@@ -51,7 +51,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-30")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-9")
 public class Nimbus {
 
   public interface Iface {

http://git-wip-us.apache.org/repos/asf/storm/blob/a8ceb1ca/storm-core/src/jvm/backtype/storm/generated/NimbusSummary.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/backtype/storm/generated/NimbusSummary.java b/storm-core/src/jvm/backtype/storm/generated/NimbusSummary.java
index 27d2f62..7d1e2fd 100644
--- a/storm-core/src/jvm/backtype/storm/generated/NimbusSummary.java
+++ b/storm-core/src/jvm/backtype/storm/generated/NimbusSummary.java
@@ -51,7 +51,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-30")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-9")
 public class NimbusSummary implements org.apache.thrift.TBase<NimbusSummary, NimbusSummary._Fields>, java.io.Serializable, Cloneable, Comparable<NimbusSummary> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("NimbusSummary");
 

http://git-wip-us.apache.org/repos/asf/storm/blob/a8ceb1ca/storm-core/src/jvm/backtype/storm/generated/NodeInfo.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/backtype/storm/generated/NodeInfo.java b/storm-core/src/jvm/backtype/storm/generated/NodeInfo.java
index 9a4d9f9..364afa7 100644
--- a/storm-core/src/jvm/backtype/storm/generated/NodeInfo.java
+++ b/storm-core/src/jvm/backtype/storm/generated/NodeInfo.java
@@ -51,7 +51,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-30")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-9")
 public class NodeInfo implements org.apache.thrift.TBase<NodeInfo, NodeInfo._Fields>, java.io.Serializable, Cloneable, Comparable<NodeInfo> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("NodeInfo");
 

http://git-wip-us.apache.org/repos/asf/storm/blob/a8ceb1ca/storm-core/src/jvm/backtype/storm/generated/NotAliveException.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/backtype/storm/generated/NotAliveException.java b/storm-core/src/jvm/backtype/storm/generated/NotAliveException.java
index 63c3e4c..cbabcf9 100644
--- a/storm-core/src/jvm/backtype/storm/generated/NotAliveException.java
+++ b/storm-core/src/jvm/backtype/storm/generated/NotAliveException.java
@@ -51,7 +51,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-30")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-9")
 public class NotAliveException extends TException implements org.apache.thrift.TBase<NotAliveException, NotAliveException._Fields>, java.io.Serializable, Cloneable, Comparable<NotAliveException> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("NotAliveException");
 

http://git-wip-us.apache.org/repos/asf/storm/blob/a8ceb1ca/storm-core/src/jvm/backtype/storm/generated/NullStruct.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/backtype/storm/generated/NullStruct.java b/storm-core/src/jvm/backtype/storm/generated/NullStruct.java
index 98dd8b9..1b8208c 100644
--- a/storm-core/src/jvm/backtype/storm/generated/NullStruct.java
+++ b/storm-core/src/jvm/backtype/storm/generated/NullStruct.java
@@ -51,7 +51,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-30")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-9")
 public class NullStruct implements org.apache.thrift.TBase<NullStruct, NullStruct._Fields>, java.io.Serializable, Cloneable, Comparable<NullStruct> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("NullStruct");
 

http://git-wip-us.apache.org/repos/asf/storm/blob/a8ceb1ca/storm-core/src/jvm/backtype/storm/generated/RebalanceOptions.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/backtype/storm/generated/RebalanceOptions.java b/storm-core/src/jvm/backtype/storm/generated/RebalanceOptions.java
index b3f916b..d859f5a 100644
--- a/storm-core/src/jvm/backtype/storm/generated/RebalanceOptions.java
+++ b/storm-core/src/jvm/backtype/storm/generated/RebalanceOptions.java
@@ -51,7 +51,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-30")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-9")
 public class RebalanceOptions implements org.apache.thrift.TBase<RebalanceOptions, RebalanceOptions._Fields>, java.io.Serializable, Cloneable, Comparable<RebalanceOptions> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("RebalanceOptions");
 

http://git-wip-us.apache.org/repos/asf/storm/blob/a8ceb1ca/storm-core/src/jvm/backtype/storm/generated/ShellComponent.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/backtype/storm/generated/ShellComponent.java b/storm-core/src/jvm/backtype/storm/generated/ShellComponent.java
index 8647419..ab86c6a 100644
--- a/storm-core/src/jvm/backtype/storm/generated/ShellComponent.java
+++ b/storm-core/src/jvm/backtype/storm/generated/ShellComponent.java
@@ -51,7 +51,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-30")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-9")
 public class ShellComponent implements org.apache.thrift.TBase<ShellComponent, ShellComponent._Fields>, java.io.Serializable, Cloneable, Comparable<ShellComponent> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("ShellComponent");
 

http://git-wip-us.apache.org/repos/asf/storm/blob/a8ceb1ca/storm-core/src/jvm/backtype/storm/generated/SpoutAggregateStats.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/backtype/storm/generated/SpoutAggregateStats.java b/storm-core/src/jvm/backtype/storm/generated/SpoutAggregateStats.java
index bc128aa..a8d6ec7 100644
--- a/storm-core/src/jvm/backtype/storm/generated/SpoutAggregateStats.java
+++ b/storm-core/src/jvm/backtype/storm/generated/SpoutAggregateStats.java
@@ -51,7 +51,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-30")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-9")
 public class SpoutAggregateStats implements org.apache.thrift.TBase<SpoutAggregateStats, SpoutAggregateStats._Fields>, java.io.Serializable, Cloneable, Comparable<SpoutAggregateStats> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("SpoutAggregateStats");
 

http://git-wip-us.apache.org/repos/asf/storm/blob/a8ceb1ca/storm-core/src/jvm/backtype/storm/generated/SpoutSpec.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/backtype/storm/generated/SpoutSpec.java b/storm-core/src/jvm/backtype/storm/generated/SpoutSpec.java
index bb67050..3fc45cf 100644
--- a/storm-core/src/jvm/backtype/storm/generated/SpoutSpec.java
+++ b/storm-core/src/jvm/backtype/storm/generated/SpoutSpec.java
@@ -51,7 +51,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-30")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-9")
 public class SpoutSpec implements org.apache.thrift.TBase<SpoutSpec, SpoutSpec._Fields>, java.io.Serializable, Cloneable, Comparable<SpoutSpec> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("SpoutSpec");
 

http://git-wip-us.apache.org/repos/asf/storm/blob/a8ceb1ca/storm-core/src/jvm/backtype/storm/generated/SpoutStats.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/backtype/storm/generated/SpoutStats.java b/storm-core/src/jvm/backtype/storm/generated/SpoutStats.java
index d744184..478143f 100644
--- a/storm-core/src/jvm/backtype/storm/generated/SpoutStats.java
+++ b/storm-core/src/jvm/backtype/storm/generated/SpoutStats.java
@@ -51,7 +51,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-30")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-9")
 public class SpoutStats implements org.apache.thrift.TBase<SpoutStats, SpoutStats._Fields>, java.io.Serializable, Cloneable, Comparable<SpoutStats> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("SpoutStats");
 

http://git-wip-us.apache.org/repos/asf/storm/blob/a8ceb1ca/storm-core/src/jvm/backtype/storm/generated/StateSpoutSpec.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/backtype/storm/generated/StateSpoutSpec.java b/storm-core/src/jvm/backtype/storm/generated/StateSpoutSpec.java
index 1e5ffde..530b7ca 100644
--- a/storm-core/src/jvm/backtype/storm/generated/StateSpoutSpec.java
+++ b/storm-core/src/jvm/backtype/storm/generated/StateSpoutSpec.java
@@ -51,7 +51,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-30")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-9")
 public class StateSpoutSpec implements org.apache.thrift.TBase<StateSpoutSpec, StateSpoutSpec._Fields>, java.io.Serializable, Cloneable, Comparable<StateSpoutSpec> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("StateSpoutSpec");
 

http://git-wip-us.apache.org/repos/asf/storm/blob/a8ceb1ca/storm-core/src/jvm/backtype/storm/generated/StormBase.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/backtype/storm/generated/StormBase.java b/storm-core/src/jvm/backtype/storm/generated/StormBase.java
index 6eed480..f4af67a 100644
--- a/storm-core/src/jvm/backtype/storm/generated/StormBase.java
+++ b/storm-core/src/jvm/backtype/storm/generated/StormBase.java
@@ -51,7 +51,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-30")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-9")
 public class StormBase implements org.apache.thrift.TBase<StormBase, StormBase._Fields>, java.io.Serializable, Cloneable, Comparable<StormBase> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("StormBase");
 

http://git-wip-us.apache.org/repos/asf/storm/blob/a8ceb1ca/storm-core/src/jvm/backtype/storm/generated/StormTopology.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/backtype/storm/generated/StormTopology.java b/storm-core/src/jvm/backtype/storm/generated/StormTopology.java
index eb74a18..9b96fa3 100644
--- a/storm-core/src/jvm/backtype/storm/generated/StormTopology.java
+++ b/storm-core/src/jvm/backtype/storm/generated/StormTopology.java
@@ -51,7 +51,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-30")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-9")
 public class StormTopology implements org.apache.thrift.TBase<StormTopology, StormTopology._Fields>, java.io.Serializable, Cloneable, Comparable<StormTopology> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("StormTopology");
 

http://git-wip-us.apache.org/repos/asf/storm/blob/a8ceb1ca/storm-core/src/jvm/backtype/storm/generated/StreamInfo.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/backtype/storm/generated/StreamInfo.java b/storm-core/src/jvm/backtype/storm/generated/StreamInfo.java
index 55b265a..e3b0fdb 100644
--- a/storm-core/src/jvm/backtype/storm/generated/StreamInfo.java
+++ b/storm-core/src/jvm/backtype/storm/generated/StreamInfo.java
@@ -51,7 +51,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-30")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-9")
 public class StreamInfo implements org.apache.thrift.TBase<StreamInfo, StreamInfo._Fields>, java.io.Serializable, Cloneable, Comparable<StreamInfo> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("StreamInfo");
 

http://git-wip-us.apache.org/repos/asf/storm/blob/a8ceb1ca/storm-core/src/jvm/backtype/storm/generated/SubmitOptions.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/backtype/storm/generated/SubmitOptions.java b/storm-core/src/jvm/backtype/storm/generated/SubmitOptions.java
index 1633361..358468a 100644
--- a/storm-core/src/jvm/backtype/storm/generated/SubmitOptions.java
+++ b/storm-core/src/jvm/backtype/storm/generated/SubmitOptions.java
@@ -51,7 +51,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-30")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-9")
 public class SubmitOptions implements org.apache.thrift.TBase<SubmitOptions, SubmitOptions._Fields>, java.io.Serializable, Cloneable, Comparable<SubmitOptions> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("SubmitOptions");
 

http://git-wip-us.apache.org/repos/asf/storm/blob/a8ceb1ca/storm-core/src/jvm/backtype/storm/generated/SupervisorInfo.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/backtype/storm/generated/SupervisorInfo.java b/storm-core/src/jvm/backtype/storm/generated/SupervisorInfo.java
index 9bcb567..6d68927 100644
--- a/storm-core/src/jvm/backtype/storm/generated/SupervisorInfo.java
+++ b/storm-core/src/jvm/backtype/storm/generated/SupervisorInfo.java
@@ -51,7 +51,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-30")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-9")
 public class SupervisorInfo implements org.apache.thrift.TBase<SupervisorInfo, SupervisorInfo._Fields>, java.io.Serializable, Cloneable, Comparable<SupervisorInfo> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("SupervisorInfo");
 

http://git-wip-us.apache.org/repos/asf/storm/blob/a8ceb1ca/storm-core/src/jvm/backtype/storm/generated/SupervisorSummary.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/backtype/storm/generated/SupervisorSummary.java b/storm-core/src/jvm/backtype/storm/generated/SupervisorSummary.java
index 022ecb4..7e36d0f 100644
--- a/storm-core/src/jvm/backtype/storm/generated/SupervisorSummary.java
+++ b/storm-core/src/jvm/backtype/storm/generated/SupervisorSummary.java
@@ -51,7 +51,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-30")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-9")
 public class SupervisorSummary implements org.apache.thrift.TBase<SupervisorSummary, SupervisorSummary._Fields>, java.io.Serializable, Cloneable, Comparable<SupervisorSummary> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("SupervisorSummary");
 

http://git-wip-us.apache.org/repos/asf/storm/blob/a8ceb1ca/storm-core/src/jvm/backtype/storm/generated/ThriftSerializedObject.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/backtype/storm/generated/ThriftSerializedObject.java b/storm-core/src/jvm/backtype/storm/generated/ThriftSerializedObject.java
index e233458..4b2bc63 100644
--- a/storm-core/src/jvm/backtype/storm/generated/ThriftSerializedObject.java
+++ b/storm-core/src/jvm/backtype/storm/generated/ThriftSerializedObject.java
@@ -51,7 +51,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-30")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-9")
 public class ThriftSerializedObject implements org.apache.thrift.TBase<ThriftSerializedObject, ThriftSerializedObject._Fields>, java.io.Serializable, Cloneable, Comparable<ThriftSerializedObject> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("ThriftSerializedObject");
 

http://git-wip-us.apache.org/repos/asf/storm/blob/a8ceb1ca/storm-core/src/jvm/backtype/storm/generated/TopologyInfo.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/backtype/storm/generated/TopologyInfo.java b/storm-core/src/jvm/backtype/storm/generated/TopologyInfo.java
index b49c39b..4901e01 100644
--- a/storm-core/src/jvm/backtype/storm/generated/TopologyInfo.java
+++ b/storm-core/src/jvm/backtype/storm/generated/TopologyInfo.java
@@ -51,7 +51,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-30")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-9")
 public class TopologyInfo implements org.apache.thrift.TBase<TopologyInfo, TopologyInfo._Fields>, java.io.Serializable, Cloneable, Comparable<TopologyInfo> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TopologyInfo");
 

http://git-wip-us.apache.org/repos/asf/storm/blob/a8ceb1ca/storm-core/src/jvm/backtype/storm/generated/TopologyPageInfo.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/backtype/storm/generated/TopologyPageInfo.java b/storm-core/src/jvm/backtype/storm/generated/TopologyPageInfo.java
index 5a06a65..99eddc3 100644
--- a/storm-core/src/jvm/backtype/storm/generated/TopologyPageInfo.java
+++ b/storm-core/src/jvm/backtype/storm/generated/TopologyPageInfo.java
@@ -51,7 +51,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-30")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-9")
 public class TopologyPageInfo implements org.apache.thrift.TBase<TopologyPageInfo, TopologyPageInfo._Fields>, java.io.Serializable, Cloneable, Comparable<TopologyPageInfo> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TopologyPageInfo");
 

http://git-wip-us.apache.org/repos/asf/storm/blob/a8ceb1ca/storm-core/src/jvm/backtype/storm/generated/TopologyStats.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/backtype/storm/generated/TopologyStats.java b/storm-core/src/jvm/backtype/storm/generated/TopologyStats.java
index ded0010..0ff01de 100644
--- a/storm-core/src/jvm/backtype/storm/generated/TopologyStats.java
+++ b/storm-core/src/jvm/backtype/storm/generated/TopologyStats.java
@@ -51,7 +51,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-30")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-9")
 public class TopologyStats implements org.apache.thrift.TBase<TopologyStats, TopologyStats._Fields>, java.io.Serializable, Cloneable, Comparable<TopologyStats> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TopologyStats");
 

http://git-wip-us.apache.org/repos/asf/storm/blob/a8ceb1ca/storm-core/src/jvm/backtype/storm/generated/TopologySummary.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/backtype/storm/generated/TopologySummary.java b/storm-core/src/jvm/backtype/storm/generated/TopologySummary.java
index f702140..3770f4d 100644
--- a/storm-core/src/jvm/backtype/storm/generated/TopologySummary.java
+++ b/storm-core/src/jvm/backtype/storm/generated/TopologySummary.java
@@ -51,7 +51,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-30")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-9")
 public class TopologySummary implements org.apache.thrift.TBase<TopologySummary, TopologySummary._Fields>, java.io.Serializable, Cloneable, Comparable<TopologySummary> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TopologySummary");
 

http://git-wip-us.apache.org/repos/asf/storm/blob/a8ceb1ca/storm-core/src/jvm/backtype/storm/generated/WorkerResources.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/backtype/storm/generated/WorkerResources.java b/storm-core/src/jvm/backtype/storm/generated/WorkerResources.java
index 2ab462f..7cfadd7 100644
--- a/storm-core/src/jvm/backtype/storm/generated/WorkerResources.java
+++ b/storm-core/src/jvm/backtype/storm/generated/WorkerResources.java
@@ -51,7 +51,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-30")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-9")
 public class WorkerResources implements org.apache.thrift.TBase<WorkerResources, WorkerResources._Fields>, java.io.Serializable, Cloneable, Comparable<WorkerResources> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("WorkerResources");
 


[04/37] storm git commit: PACEMAKER OPEN SOURCE!

Posted by kn...@apache.org.
http://git-wip-us.apache.org/repos/asf/storm/blob/444ec05e/storm-core/src/jvm/backtype/storm/generated/HBMessageData.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/backtype/storm/generated/HBMessageData.java b/storm-core/src/jvm/backtype/storm/generated/HBMessageData.java
new file mode 100644
index 0000000..6724f3d
--- /dev/null
+++ b/storm-core/src/jvm/backtype/storm/generated/HBMessageData.java
@@ -0,0 +1,640 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+/**
+ * Autogenerated by Thrift Compiler (0.9.2)
+ *
+ * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
+ *  @generated
+ */
+package backtype.storm.generated;
+
+import org.apache.thrift.scheme.IScheme;
+import org.apache.thrift.scheme.SchemeFactory;
+import org.apache.thrift.scheme.StandardScheme;
+
+import org.apache.thrift.scheme.TupleScheme;
+import org.apache.thrift.protocol.TTupleProtocol;
+import org.apache.thrift.protocol.TProtocolException;
+import org.apache.thrift.EncodingUtils;
+import org.apache.thrift.TException;
+import org.apache.thrift.async.AsyncMethodCallback;
+import org.apache.thrift.server.AbstractNonblockingServer.*;
+import java.util.List;
+import java.util.ArrayList;
+import java.util.Map;
+import java.util.HashMap;
+import java.util.EnumMap;
+import java.util.Set;
+import java.util.HashSet;
+import java.util.EnumSet;
+import java.util.Collections;
+import java.util.BitSet;
+import java.nio.ByteBuffer;
+import java.util.Arrays;
+import javax.annotation.Generated;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+@SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
+public class HBMessageData extends org.apache.thrift.TUnion<HBMessageData, HBMessageData._Fields> {
+  private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("HBMessageData");
+  private static final org.apache.thrift.protocol.TField PATH_FIELD_DESC = new org.apache.thrift.protocol.TField("path", org.apache.thrift.protocol.TType.STRING, (short)1);
+  private static final org.apache.thrift.protocol.TField PULSE_FIELD_DESC = new org.apache.thrift.protocol.TField("pulse", org.apache.thrift.protocol.TType.STRUCT, (short)2);
+  private static final org.apache.thrift.protocol.TField BOOLVAL_FIELD_DESC = new org.apache.thrift.protocol.TField("boolval", org.apache.thrift.protocol.TType.BOOL, (short)3);
+  private static final org.apache.thrift.protocol.TField RECORDS_FIELD_DESC = new org.apache.thrift.protocol.TField("records", org.apache.thrift.protocol.TType.STRUCT, (short)4);
+  private static final org.apache.thrift.protocol.TField NODES_FIELD_DESC = new org.apache.thrift.protocol.TField("nodes", org.apache.thrift.protocol.TType.STRUCT, (short)5);
+  private static final org.apache.thrift.protocol.TField MESSAGE_BLOB_FIELD_DESC = new org.apache.thrift.protocol.TField("message_blob", org.apache.thrift.protocol.TType.STRING, (short)7);
+
+  /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
+  public enum _Fields implements org.apache.thrift.TFieldIdEnum {
+    PATH((short)1, "path"),
+    PULSE((short)2, "pulse"),
+    BOOLVAL((short)3, "boolval"),
+    RECORDS((short)4, "records"),
+    NODES((short)5, "nodes"),
+    MESSAGE_BLOB((short)7, "message_blob");
+
+    private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();
+
+    static {
+      for (_Fields field : EnumSet.allOf(_Fields.class)) {
+        byName.put(field.getFieldName(), field);
+      }
+    }
+
+    /**
+     * Find the _Fields constant that matches fieldId, or null if its not found.
+     */
+    public static _Fields findByThriftId(int fieldId) {
+      switch(fieldId) {
+        case 1: // PATH
+          return PATH;
+        case 2: // PULSE
+          return PULSE;
+        case 3: // BOOLVAL
+          return BOOLVAL;
+        case 4: // RECORDS
+          return RECORDS;
+        case 5: // NODES
+          return NODES;
+        case 7: // MESSAGE_BLOB
+          return MESSAGE_BLOB;
+        default:
+          return null;
+      }
+    }
+
+    /**
+     * Find the _Fields constant that matches fieldId, throwing an exception
+     * if it is not found.
+     */
+    public static _Fields findByThriftIdOrThrow(int fieldId) {
+      _Fields fields = findByThriftId(fieldId);
+      if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
+      return fields;
+    }
+
+    /**
+     * Find the _Fields constant that matches name, or null if its not found.
+     */
+    public static _Fields findByName(String name) {
+      return byName.get(name);
+    }
+
+    private final short _thriftId;
+    private final String _fieldName;
+
+    _Fields(short thriftId, String fieldName) {
+      _thriftId = thriftId;
+      _fieldName = fieldName;
+    }
+
+    public short getThriftFieldId() {
+      return _thriftId;
+    }
+
+    public String getFieldName() {
+      return _fieldName;
+    }
+  }
+
+  public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
+  static {
+    Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
+    tmpMap.put(_Fields.PATH, new org.apache.thrift.meta_data.FieldMetaData("path", org.apache.thrift.TFieldRequirementType.DEFAULT, 
+        new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)));
+    tmpMap.put(_Fields.PULSE, new org.apache.thrift.meta_data.FieldMetaData("pulse", org.apache.thrift.TFieldRequirementType.DEFAULT, 
+        new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRUCT        , "HBPulse")));
+    tmpMap.put(_Fields.BOOLVAL, new org.apache.thrift.meta_data.FieldMetaData("boolval", org.apache.thrift.TFieldRequirementType.DEFAULT, 
+        new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.BOOL)));
+    tmpMap.put(_Fields.RECORDS, new org.apache.thrift.meta_data.FieldMetaData("records", org.apache.thrift.TFieldRequirementType.DEFAULT, 
+        new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRUCT        , "HBRecords")));
+    tmpMap.put(_Fields.NODES, new org.apache.thrift.meta_data.FieldMetaData("nodes", org.apache.thrift.TFieldRequirementType.DEFAULT, 
+        new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRUCT        , "HBNodes")));
+    tmpMap.put(_Fields.MESSAGE_BLOB, new org.apache.thrift.meta_data.FieldMetaData("message_blob", org.apache.thrift.TFieldRequirementType.OPTIONAL, 
+        new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING        , true)));
+    metaDataMap = Collections.unmodifiableMap(tmpMap);
+    org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(HBMessageData.class, metaDataMap);
+  }
+
+  public HBMessageData() {
+    super();
+  }
+
+  public HBMessageData(_Fields setField, Object value) {
+    super(setField, value);
+  }
+
+  public HBMessageData(HBMessageData other) {
+    super(other);
+  }
+  public HBMessageData deepCopy() {
+    return new HBMessageData(this);
+  }
+
+  public static HBMessageData path(String value) {
+    HBMessageData x = new HBMessageData();
+    x.set_path(value);
+    return x;
+  }
+
+  public static HBMessageData pulse(HBPulse value) {
+    HBMessageData x = new HBMessageData();
+    x.set_pulse(value);
+    return x;
+  }
+
+  public static HBMessageData boolval(boolean value) {
+    HBMessageData x = new HBMessageData();
+    x.set_boolval(value);
+    return x;
+  }
+
+  public static HBMessageData records(HBRecords value) {
+    HBMessageData x = new HBMessageData();
+    x.set_records(value);
+    return x;
+  }
+
+  public static HBMessageData nodes(HBNodes value) {
+    HBMessageData x = new HBMessageData();
+    x.set_nodes(value);
+    return x;
+  }
+
+  public static HBMessageData message_blob(ByteBuffer value) {
+    HBMessageData x = new HBMessageData();
+    x.set_message_blob(value);
+    return x;
+  }
+
+  public static HBMessageData message_blob(byte[] value) {
+    HBMessageData x = new HBMessageData();
+    x.set_message_blob(ByteBuffer.wrap(Arrays.copyOf(value, value.length)));
+    return x;
+  }
+
+
+  @Override
+  protected void checkType(_Fields setField, Object value) throws ClassCastException {
+    switch (setField) {
+      case PATH:
+        if (value instanceof String) {
+          break;
+        }
+        throw new ClassCastException("Was expecting value of type String for field 'path', but got " + value.getClass().getSimpleName());
+      case PULSE:
+        if (value instanceof HBPulse) {
+          break;
+        }
+        throw new ClassCastException("Was expecting value of type HBPulse for field 'pulse', but got " + value.getClass().getSimpleName());
+      case BOOLVAL:
+        if (value instanceof Boolean) {
+          break;
+        }
+        throw new ClassCastException("Was expecting value of type Boolean for field 'boolval', but got " + value.getClass().getSimpleName());
+      case RECORDS:
+        if (value instanceof HBRecords) {
+          break;
+        }
+        throw new ClassCastException("Was expecting value of type HBRecords for field 'records', but got " + value.getClass().getSimpleName());
+      case NODES:
+        if (value instanceof HBNodes) {
+          break;
+        }
+        throw new ClassCastException("Was expecting value of type HBNodes for field 'nodes', but got " + value.getClass().getSimpleName());
+      case MESSAGE_BLOB:
+        if (value instanceof ByteBuffer) {
+          break;
+        }
+        throw new ClassCastException("Was expecting value of type ByteBuffer for field 'message_blob', but got " + value.getClass().getSimpleName());
+      default:
+        throw new IllegalArgumentException("Unknown field id " + setField);
+    }
+  }
+
+  @Override
+  protected Object standardSchemeReadValue(org.apache.thrift.protocol.TProtocol iprot, org.apache.thrift.protocol.TField field) throws org.apache.thrift.TException {
+    _Fields setField = _Fields.findByThriftId(field.id);
+    if (setField != null) {
+      switch (setField) {
+        case PATH:
+          if (field.type == PATH_FIELD_DESC.type) {
+            String path;
+            path = iprot.readString();
+            return path;
+          } else {
+            org.apache.thrift.protocol.TProtocolUtil.skip(iprot, field.type);
+            return null;
+          }
+        case PULSE:
+          if (field.type == PULSE_FIELD_DESC.type) {
+            HBPulse pulse;
+            pulse = new HBPulse();
+            pulse.read(iprot);
+            return pulse;
+          } else {
+            org.apache.thrift.protocol.TProtocolUtil.skip(iprot, field.type);
+            return null;
+          }
+        case BOOLVAL:
+          if (field.type == BOOLVAL_FIELD_DESC.type) {
+            Boolean boolval;
+            boolval = iprot.readBool();
+            return boolval;
+          } else {
+            org.apache.thrift.protocol.TProtocolUtil.skip(iprot, field.type);
+            return null;
+          }
+        case RECORDS:
+          if (field.type == RECORDS_FIELD_DESC.type) {
+            HBRecords records;
+            records = new HBRecords();
+            records.read(iprot);
+            return records;
+          } else {
+            org.apache.thrift.protocol.TProtocolUtil.skip(iprot, field.type);
+            return null;
+          }
+        case NODES:
+          if (field.type == NODES_FIELD_DESC.type) {
+            HBNodes nodes;
+            nodes = new HBNodes();
+            nodes.read(iprot);
+            return nodes;
+          } else {
+            org.apache.thrift.protocol.TProtocolUtil.skip(iprot, field.type);
+            return null;
+          }
+        case MESSAGE_BLOB:
+          if (field.type == MESSAGE_BLOB_FIELD_DESC.type) {
+            ByteBuffer message_blob;
+            message_blob = iprot.readBinary();
+            return message_blob;
+          } else {
+            org.apache.thrift.protocol.TProtocolUtil.skip(iprot, field.type);
+            return null;
+          }
+        default:
+          throw new IllegalStateException("setField wasn't null, but didn't match any of the case statements!");
+      }
+    } else {
+      org.apache.thrift.protocol.TProtocolUtil.skip(iprot, field.type);
+      return null;
+    }
+  }
+
+  @Override
+  protected void standardSchemeWriteValue(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {
+    switch (setField_) {
+      case PATH:
+        String path = (String)value_;
+        oprot.writeString(path);
+        return;
+      case PULSE:
+        HBPulse pulse = (HBPulse)value_;
+        pulse.write(oprot);
+        return;
+      case BOOLVAL:
+        Boolean boolval = (Boolean)value_;
+        oprot.writeBool(boolval);
+        return;
+      case RECORDS:
+        HBRecords records = (HBRecords)value_;
+        records.write(oprot);
+        return;
+      case NODES:
+        HBNodes nodes = (HBNodes)value_;
+        nodes.write(oprot);
+        return;
+      case MESSAGE_BLOB:
+        ByteBuffer message_blob = (ByteBuffer)value_;
+        oprot.writeBinary(message_blob);
+        return;
+      default:
+        throw new IllegalStateException("Cannot write union with unknown field " + setField_);
+    }
+  }
+
+  @Override
+  protected Object tupleSchemeReadValue(org.apache.thrift.protocol.TProtocol iprot, short fieldID) throws org.apache.thrift.TException {
+    _Fields setField = _Fields.findByThriftId(fieldID);
+    if (setField != null) {
+      switch (setField) {
+        case PATH:
+          String path;
+          path = iprot.readString();
+          return path;
+        case PULSE:
+          HBPulse pulse;
+          pulse = new HBPulse();
+          pulse.read(iprot);
+          return pulse;
+        case BOOLVAL:
+          Boolean boolval;
+          boolval = iprot.readBool();
+          return boolval;
+        case RECORDS:
+          HBRecords records;
+          records = new HBRecords();
+          records.read(iprot);
+          return records;
+        case NODES:
+          HBNodes nodes;
+          nodes = new HBNodes();
+          nodes.read(iprot);
+          return nodes;
+        case MESSAGE_BLOB:
+          ByteBuffer message_blob;
+          message_blob = iprot.readBinary();
+          return message_blob;
+        default:
+          throw new IllegalStateException("setField wasn't null, but didn't match any of the case statements!");
+      }
+    } else {
+      throw new TProtocolException("Couldn't find a field with field id " + fieldID);
+    }
+  }
+
+  @Override
+  protected void tupleSchemeWriteValue(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {
+    switch (setField_) {
+      case PATH:
+        String path = (String)value_;
+        oprot.writeString(path);
+        return;
+      case PULSE:
+        HBPulse pulse = (HBPulse)value_;
+        pulse.write(oprot);
+        return;
+      case BOOLVAL:
+        Boolean boolval = (Boolean)value_;
+        oprot.writeBool(boolval);
+        return;
+      case RECORDS:
+        HBRecords records = (HBRecords)value_;
+        records.write(oprot);
+        return;
+      case NODES:
+        HBNodes nodes = (HBNodes)value_;
+        nodes.write(oprot);
+        return;
+      case MESSAGE_BLOB:
+        ByteBuffer message_blob = (ByteBuffer)value_;
+        oprot.writeBinary(message_blob);
+        return;
+      default:
+        throw new IllegalStateException("Cannot write union with unknown field " + setField_);
+    }
+  }
+
+  @Override
+  protected org.apache.thrift.protocol.TField getFieldDesc(_Fields setField) {
+    switch (setField) {
+      case PATH:
+        return PATH_FIELD_DESC;
+      case PULSE:
+        return PULSE_FIELD_DESC;
+      case BOOLVAL:
+        return BOOLVAL_FIELD_DESC;
+      case RECORDS:
+        return RECORDS_FIELD_DESC;
+      case NODES:
+        return NODES_FIELD_DESC;
+      case MESSAGE_BLOB:
+        return MESSAGE_BLOB_FIELD_DESC;
+      default:
+        throw new IllegalArgumentException("Unknown field id " + setField);
+    }
+  }
+
+  @Override
+  protected org.apache.thrift.protocol.TStruct getStructDesc() {
+    return STRUCT_DESC;
+  }
+
+  @Override
+  protected _Fields enumForId(short id) {
+    return _Fields.findByThriftIdOrThrow(id);
+  }
+
+  public _Fields fieldForId(int fieldId) {
+    return _Fields.findByThriftId(fieldId);
+  }
+
+
+  public String get_path() {
+    if (getSetField() == _Fields.PATH) {
+      return (String)getFieldValue();
+    } else {
+      throw new RuntimeException("Cannot get field 'path' because union is currently set to " + getFieldDesc(getSetField()).name);
+    }
+  }
+
+  public void set_path(String value) {
+    if (value == null) throw new NullPointerException();
+    setField_ = _Fields.PATH;
+    value_ = value;
+  }
+
+  public HBPulse get_pulse() {
+    if (getSetField() == _Fields.PULSE) {
+      return (HBPulse)getFieldValue();
+    } else {
+      throw new RuntimeException("Cannot get field 'pulse' because union is currently set to " + getFieldDesc(getSetField()).name);
+    }
+  }
+
+  public void set_pulse(HBPulse value) {
+    if (value == null) throw new NullPointerException();
+    setField_ = _Fields.PULSE;
+    value_ = value;
+  }
+
+  public boolean get_boolval() {
+    if (getSetField() == _Fields.BOOLVAL) {
+      return (Boolean)getFieldValue();
+    } else {
+      throw new RuntimeException("Cannot get field 'boolval' because union is currently set to " + getFieldDesc(getSetField()).name);
+    }
+  }
+
+  public void set_boolval(boolean value) {
+    setField_ = _Fields.BOOLVAL;
+    value_ = value;
+  }
+
+  public HBRecords get_records() {
+    if (getSetField() == _Fields.RECORDS) {
+      return (HBRecords)getFieldValue();
+    } else {
+      throw new RuntimeException("Cannot get field 'records' because union is currently set to " + getFieldDesc(getSetField()).name);
+    }
+  }
+
+  public void set_records(HBRecords value) {
+    if (value == null) throw new NullPointerException();
+    setField_ = _Fields.RECORDS;
+    value_ = value;
+  }
+
+  public HBNodes get_nodes() {
+    if (getSetField() == _Fields.NODES) {
+      return (HBNodes)getFieldValue();
+    } else {
+      throw new RuntimeException("Cannot get field 'nodes' because union is currently set to " + getFieldDesc(getSetField()).name);
+    }
+  }
+
+  public void set_nodes(HBNodes value) {
+    if (value == null) throw new NullPointerException();
+    setField_ = _Fields.NODES;
+    value_ = value;
+  }
+
+  public byte[] get_message_blob() {
+    set_message_blob(org.apache.thrift.TBaseHelper.rightSize(buffer_for_message_blob()));
+    ByteBuffer b = buffer_for_message_blob();
+    return b == null ? null : b.array();
+  }
+
+  public ByteBuffer buffer_for_message_blob() {
+    if (getSetField() == _Fields.MESSAGE_BLOB) {
+      return org.apache.thrift.TBaseHelper.copyBinary((ByteBuffer)getFieldValue());
+    } else {
+      throw new RuntimeException("Cannot get field 'message_blob' because union is currently set to " + getFieldDesc(getSetField()).name);
+    }
+  }
+
+  public void set_message_blob(byte[] value) {
+    set_message_blob(ByteBuffer.wrap(Arrays.copyOf(value, value.length)));
+  }
+
+  public void set_message_blob(ByteBuffer value) {
+    if (value == null) throw new NullPointerException();
+    setField_ = _Fields.MESSAGE_BLOB;
+    value_ = value;
+  }
+
+  public boolean is_set_path() {
+    return setField_ == _Fields.PATH;
+  }
+
+
+  public boolean is_set_pulse() {
+    return setField_ == _Fields.PULSE;
+  }
+
+
+  public boolean is_set_boolval() {
+    return setField_ == _Fields.BOOLVAL;
+  }
+
+
+  public boolean is_set_records() {
+    return setField_ == _Fields.RECORDS;
+  }
+
+
+  public boolean is_set_nodes() {
+    return setField_ == _Fields.NODES;
+  }
+
+
+  public boolean is_set_message_blob() {
+    return setField_ == _Fields.MESSAGE_BLOB;
+  }
+
+
+  public boolean equals(Object other) {
+    if (other instanceof HBMessageData) {
+      return equals((HBMessageData)other);
+    } else {
+      return false;
+    }
+  }
+
+  public boolean equals(HBMessageData other) {
+    return other != null && getSetField() == other.getSetField() && getFieldValue().equals(other.getFieldValue());
+  }
+
+  @Override
+  public int compareTo(HBMessageData other) {
+    int lastComparison = org.apache.thrift.TBaseHelper.compareTo(getSetField(), other.getSetField());
+    if (lastComparison == 0) {
+      return org.apache.thrift.TBaseHelper.compareTo(getFieldValue(), other.getFieldValue());
+    }
+    return lastComparison;
+  }
+
+
+  @Override
+  public int hashCode() {
+    List<Object> list = new ArrayList<Object>();
+    list.add(this.getClass().getName());
+    org.apache.thrift.TFieldIdEnum setField = getSetField();
+    if (setField != null) {
+      list.add(setField.getThriftFieldId());
+      Object value = getFieldValue();
+      if (value instanceof org.apache.thrift.TEnum) {
+        list.add(((org.apache.thrift.TEnum)getFieldValue()).getValue());
+      } else {
+        list.add(value);
+      }
+    }
+    return list.hashCode();
+  }
+  private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
+    try {
+      write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out)));
+    } catch (org.apache.thrift.TException te) {
+      throw new java.io.IOException(te);
+    }
+  }
+
+
+  private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
+    try {
+      read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));
+    } catch (org.apache.thrift.TException te) {
+      throw new java.io.IOException(te);
+    }
+  }
+
+
+}
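
For anyone reading the generated class above: HBMessageData is a Thrift union, so exactly one arm is set at a time, and the generated static factory methods are the intended way to construct instances. A minimal usage sketch against that API (the driver class name, path string, and payload bytes below are illustrative only, not part of the patch):

    import backtype.storm.generated.HBMessageData;

    public class HBMessageDataSketch { // hypothetical driver class
        public static void main(String[] args) {
            // Set the 'path' arm via the generated static factory.
            HBMessageData data = HBMessageData.path("/pulses/topology-1");
            System.out.println(data.is_set_path()); // true
            System.out.println(data.get_path());    // /pulses/topology-1

            // Reading a different arm throws: get_pulse() raises a
            // RuntimeException while the union is currently set to PATH.

            // The byte[] factory copies its input (Arrays.copyOf) before
            // wrapping it, so later mutation of 'blob' cannot leak in.
            byte[] blob = {1, 2, 3};
            HBMessageData msg = HBMessageData.message_blob(blob);
            System.out.println(msg.is_set_message_blob()); // true
        }
    }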

http://git-wip-us.apache.org/repos/asf/storm/blob/444ec05e/storm-core/src/jvm/backtype/storm/generated/HBNodes.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/backtype/storm/generated/HBNodes.java b/storm-core/src/jvm/backtype/storm/generated/HBNodes.java
new file mode 100644
index 0000000..f23a49e
--- /dev/null
+++ b/storm-core/src/jvm/backtype/storm/generated/HBNodes.java
@@ -0,0 +1,461 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+/**
+ * Autogenerated by Thrift Compiler (0.9.2)
+ *
+ * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
+ *  @generated
+ */
+package backtype.storm.generated;
+
+import org.apache.thrift.scheme.IScheme;
+import org.apache.thrift.scheme.SchemeFactory;
+import org.apache.thrift.scheme.StandardScheme;
+
+import org.apache.thrift.scheme.TupleScheme;
+import org.apache.thrift.protocol.TTupleProtocol;
+import org.apache.thrift.protocol.TProtocolException;
+import org.apache.thrift.EncodingUtils;
+import org.apache.thrift.TException;
+import org.apache.thrift.async.AsyncMethodCallback;
+import org.apache.thrift.server.AbstractNonblockingServer.*;
+import java.util.List;
+import java.util.ArrayList;
+import java.util.Map;
+import java.util.HashMap;
+import java.util.EnumMap;
+import java.util.Set;
+import java.util.HashSet;
+import java.util.EnumSet;
+import java.util.Collections;
+import java.util.BitSet;
+import java.nio.ByteBuffer;
+import java.util.Arrays;
+import javax.annotation.Generated;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+@SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-30")
+public class HBNodes implements org.apache.thrift.TBase<HBNodes, HBNodes._Fields>, java.io.Serializable, Cloneable, Comparable<HBNodes> {
+  private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("HBNodes");
+
+  private static final org.apache.thrift.protocol.TField PULSE_IDS_FIELD_DESC = new org.apache.thrift.protocol.TField("pulseIds", org.apache.thrift.protocol.TType.LIST, (short)1);
+
+  private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>();
+  static {
+    schemes.put(StandardScheme.class, new HBNodesStandardSchemeFactory());
+    schemes.put(TupleScheme.class, new HBNodesTupleSchemeFactory());
+  }
+
+  private List<String> pulseIds; // required
+
+  /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
+  public enum _Fields implements org.apache.thrift.TFieldIdEnum {
+    PULSE_IDS((short)1, "pulseIds");
+
+    private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();
+
+    static {
+      for (_Fields field : EnumSet.allOf(_Fields.class)) {
+        byName.put(field.getFieldName(), field);
+      }
+    }
+
+    /**
+     * Find the _Fields constant that matches fieldId, or null if its not found.
+     */
+    public static _Fields findByThriftId(int fieldId) {
+      switch(fieldId) {
+        case 1: // PULSE_IDS
+          return PULSE_IDS;
+        default:
+          return null;
+      }
+    }
+
+    /**
+     * Find the _Fields constant that matches fieldId, throwing an exception
+     * if it is not found.
+     */
+    public static _Fields findByThriftIdOrThrow(int fieldId) {
+      _Fields fields = findByThriftId(fieldId);
+      if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
+      return fields;
+    }
+
+    /**
+     * Find the _Fields constant that matches name, or null if its not found.
+     */
+    public static _Fields findByName(String name) {
+      return byName.get(name);
+    }
+
+    private final short _thriftId;
+    private final String _fieldName;
+
+    _Fields(short thriftId, String fieldName) {
+      _thriftId = thriftId;
+      _fieldName = fieldName;
+    }
+
+    public short getThriftFieldId() {
+      return _thriftId;
+    }
+
+    public String getFieldName() {
+      return _fieldName;
+    }
+  }
+
+  // isset id assignments
+  public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
+  static {
+    Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
+    tmpMap.put(_Fields.PULSE_IDS, new org.apache.thrift.meta_data.FieldMetaData("pulseIds", org.apache.thrift.TFieldRequirementType.DEFAULT, 
+        new org.apache.thrift.meta_data.ListMetaData(org.apache.thrift.protocol.TType.LIST, 
+            new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING))));
+    metaDataMap = Collections.unmodifiableMap(tmpMap);
+    org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(HBNodes.class, metaDataMap);
+  }
+
+  public HBNodes() {
+  }
+
+  public HBNodes(
+    List<String> pulseIds)
+  {
+    this();
+    this.pulseIds = pulseIds;
+  }
+
+  /**
+   * Performs a deep copy on <i>other</i>.
+   */
+  public HBNodes(HBNodes other) {
+    if (other.is_set_pulseIds()) {
+      List<String> __this__pulseIds = new ArrayList<String>(other.pulseIds);
+      this.pulseIds = __this__pulseIds;
+    }
+  }
+
+  public HBNodes deepCopy() {
+    return new HBNodes(this);
+  }
+
+  @Override
+  public void clear() {
+    this.pulseIds = null;
+  }
+
+  public int get_pulseIds_size() {
+    return (this.pulseIds == null) ? 0 : this.pulseIds.size();
+  }
+
+  public java.util.Iterator<String> get_pulseIds_iterator() {
+    return (this.pulseIds == null) ? null : this.pulseIds.iterator();
+  }
+
+  public void add_to_pulseIds(String elem) {
+    if (this.pulseIds == null) {
+      this.pulseIds = new ArrayList<String>();
+    }
+    this.pulseIds.add(elem);
+  }
+
+  public List<String> get_pulseIds() {
+    return this.pulseIds;
+  }
+
+  public void set_pulseIds(List<String> pulseIds) {
+    this.pulseIds = pulseIds;
+  }
+
+  public void unset_pulseIds() {
+    this.pulseIds = null;
+  }
+
+  /** Returns true if field pulseIds is set (has been assigned a value) and false otherwise */
+  public boolean is_set_pulseIds() {
+    return this.pulseIds != null;
+  }
+
+  public void set_pulseIds_isSet(boolean value) {
+    if (!value) {
+      this.pulseIds = null;
+    }
+  }
+
+  public void setFieldValue(_Fields field, Object value) {
+    switch (field) {
+    case PULSE_IDS:
+      if (value == null) {
+        unset_pulseIds();
+      } else {
+        set_pulseIds((List<String>)value);
+      }
+      break;
+
+    }
+  }
+
+  public Object getFieldValue(_Fields field) {
+    switch (field) {
+    case PULSE_IDS:
+      return get_pulseIds();
+
+    }
+    throw new IllegalStateException();
+  }
+
+  /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
+  public boolean isSet(_Fields field) {
+    if (field == null) {
+      throw new IllegalArgumentException();
+    }
+
+    switch (field) {
+    case PULSE_IDS:
+      return is_set_pulseIds();
+    }
+    throw new IllegalStateException();
+  }
+
+  @Override
+  public boolean equals(Object that) {
+    if (that == null)
+      return false;
+    if (that instanceof HBNodes)
+      return this.equals((HBNodes)that);
+    return false;
+  }
+
+  public boolean equals(HBNodes that) {
+    if (that == null)
+      return false;
+
+    boolean this_present_pulseIds = true && this.is_set_pulseIds();
+    boolean that_present_pulseIds = true && that.is_set_pulseIds();
+    if (this_present_pulseIds || that_present_pulseIds) {
+      if (!(this_present_pulseIds && that_present_pulseIds))
+        return false;
+      if (!this.pulseIds.equals(that.pulseIds))
+        return false;
+    }
+
+    return true;
+  }
+
+  @Override
+  public int hashCode() {
+    List<Object> list = new ArrayList<Object>();
+
+    boolean present_pulseIds = true && (is_set_pulseIds());
+    list.add(present_pulseIds);
+    if (present_pulseIds)
+      list.add(pulseIds);
+
+    return list.hashCode();
+  }
+
+  @Override
+  public int compareTo(HBNodes other) {
+    if (!getClass().equals(other.getClass())) {
+      return getClass().getName().compareTo(other.getClass().getName());
+    }
+
+    int lastComparison = 0;
+
+    lastComparison = Boolean.valueOf(is_set_pulseIds()).compareTo(other.is_set_pulseIds());
+    if (lastComparison != 0) {
+      return lastComparison;
+    }
+    if (is_set_pulseIds()) {
+      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.pulseIds, other.pulseIds);
+      if (lastComparison != 0) {
+        return lastComparison;
+      }
+    }
+    return 0;
+  }
+
+  public _Fields fieldForId(int fieldId) {
+    return _Fields.findByThriftId(fieldId);
+  }
+
+  public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException {
+    schemes.get(iprot.getScheme()).getScheme().read(iprot, this);
+  }
+
+  public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {
+    schemes.get(oprot.getScheme()).getScheme().write(oprot, this);
+  }
+
+  @Override
+  public String toString() {
+    StringBuilder sb = new StringBuilder("HBNodes(");
+    boolean first = true;
+
+    sb.append("pulseIds:");
+    if (this.pulseIds == null) {
+      sb.append("null");
+    } else {
+      sb.append(this.pulseIds);
+    }
+    first = false;
+    sb.append(")");
+    return sb.toString();
+  }
+
+  public void validate() throws org.apache.thrift.TException {
+    // check for required fields
+    // check for sub-struct validity
+  }
+
+  private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
+    try {
+      write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out)));
+    } catch (org.apache.thrift.TException te) {
+      throw new java.io.IOException(te);
+    }
+  }
+
+  private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
+    try {
+      read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));
+    } catch (org.apache.thrift.TException te) {
+      throw new java.io.IOException(te);
+    }
+  }
+
+  private static class HBNodesStandardSchemeFactory implements SchemeFactory {
+    public HBNodesStandardScheme getScheme() {
+      return new HBNodesStandardScheme();
+    }
+  }
+
+  private static class HBNodesStandardScheme extends StandardScheme<HBNodes> {
+
+    public void read(org.apache.thrift.protocol.TProtocol iprot, HBNodes struct) throws org.apache.thrift.TException {
+      org.apache.thrift.protocol.TField schemeField;
+      iprot.readStructBegin();
+      while (true)
+      {
+        schemeField = iprot.readFieldBegin();
+        if (schemeField.type == org.apache.thrift.protocol.TType.STOP) { 
+          break;
+        }
+        switch (schemeField.id) {
+          case 1: // PULSE_IDS
+            if (schemeField.type == org.apache.thrift.protocol.TType.LIST) {
+              {
+                org.apache.thrift.protocol.TList _list674 = iprot.readListBegin();
+                struct.pulseIds = new ArrayList<String>(_list674.size);
+                String _elem675;
+                for (int _i676 = 0; _i676 < _list674.size; ++_i676)
+                {
+                  _elem675 = iprot.readString();
+                  struct.pulseIds.add(_elem675);
+                }
+                iprot.readListEnd();
+              }
+              struct.set_pulseIds_isSet(true);
+            } else { 
+              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+            }
+            break;
+          default:
+            org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+        }
+        iprot.readFieldEnd();
+      }
+      iprot.readStructEnd();
+      struct.validate();
+    }
+
+    public void write(org.apache.thrift.protocol.TProtocol oprot, HBNodes struct) throws org.apache.thrift.TException {
+      struct.validate();
+
+      oprot.writeStructBegin(STRUCT_DESC);
+      if (struct.pulseIds != null) {
+        oprot.writeFieldBegin(PULSE_IDS_FIELD_DESC);
+        {
+          oprot.writeListBegin(new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRING, struct.pulseIds.size()));
+          for (String _iter677 : struct.pulseIds)
+          {
+            oprot.writeString(_iter677);
+          }
+          oprot.writeListEnd();
+        }
+        oprot.writeFieldEnd();
+      }
+      oprot.writeFieldStop();
+      oprot.writeStructEnd();
+    }
+
+  }
+
+  private static class HBNodesTupleSchemeFactory implements SchemeFactory {
+    public HBNodesTupleScheme getScheme() {
+      return new HBNodesTupleScheme();
+    }
+  }
+
+  private static class HBNodesTupleScheme extends TupleScheme<HBNodes> {
+
+    @Override
+    public void write(org.apache.thrift.protocol.TProtocol prot, HBNodes struct) throws org.apache.thrift.TException {
+      TTupleProtocol oprot = (TTupleProtocol) prot;
+      BitSet optionals = new BitSet();
+      if (struct.is_set_pulseIds()) {
+        optionals.set(0);
+      }
+      oprot.writeBitSet(optionals, 1);
+      if (struct.is_set_pulseIds()) {
+        {
+          oprot.writeI32(struct.pulseIds.size());
+          for (String _iter678 : struct.pulseIds)
+          {
+            oprot.writeString(_iter678);
+          }
+        }
+      }
+    }
+
+    @Override
+    public void read(org.apache.thrift.protocol.TProtocol prot, HBNodes struct) throws org.apache.thrift.TException {
+      TTupleProtocol iprot = (TTupleProtocol) prot;
+      BitSet incoming = iprot.readBitSet(1);
+      if (incoming.get(0)) {
+        {
+          org.apache.thrift.protocol.TList _list679 = new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRING, iprot.readI32());
+          struct.pulseIds = new ArrayList<String>(_list679.size);
+          String _elem680;
+          for (int _i681 = 0; _i681 < _list679.size; ++_i681)
+          {
+            _elem680 = iprot.readString();
+            struct.pulseIds.add(_elem680);
+          }
+        }
+        struct.set_pulseIds_isSet(true);
+      }
+    }
+  }
+
+}
+
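
HBNodes, by contrast, is a plain generated struct wrapping a single pulseIds list. A short round-trip sketch through the standard libthrift in-memory transport (TMemoryBuffer and TBinaryProtocol are stock Thrift classes, not part of this patch; the driver class is illustrative):

    import org.apache.thrift.protocol.TBinaryProtocol;
    import org.apache.thrift.transport.TMemoryBuffer;
    import backtype.storm.generated.HBNodes;

    public class HBNodesSketch { // hypothetical driver class
        public static void main(String[] args) throws Exception {
            HBNodes nodes = new HBNodes();
            nodes.add_to_pulseIds("node-1"); // lazily creates the list
            nodes.add_to_pulseIds("node-2");

            // write()/read() dispatch to the StandardScheme for TBinaryProtocol.
            TMemoryBuffer buffer = new TMemoryBuffer(64); // grows as needed
            nodes.write(new TBinaryProtocol(buffer));

            HBNodes copy = new HBNodes();
            copy.read(new TBinaryProtocol(buffer));
            System.out.println(copy.get_pulseIds()); // [node-1, node-2]
        }
    }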

http://git-wip-us.apache.org/repos/asf/storm/blob/444ec05e/storm-core/src/jvm/backtype/storm/generated/HBPulse.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/backtype/storm/generated/HBPulse.java b/storm-core/src/jvm/backtype/storm/generated/HBPulse.java
new file mode 100644
index 0000000..942a0ee
--- /dev/null
+++ b/storm-core/src/jvm/backtype/storm/generated/HBPulse.java
@@ -0,0 +1,522 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+/**
+ * Autogenerated by Thrift Compiler (0.9.2)
+ *
+ * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
+ *  @generated
+ */
+package backtype.storm.generated;
+
+import org.apache.thrift.scheme.IScheme;
+import org.apache.thrift.scheme.SchemeFactory;
+import org.apache.thrift.scheme.StandardScheme;
+
+import org.apache.thrift.scheme.TupleScheme;
+import org.apache.thrift.protocol.TTupleProtocol;
+import org.apache.thrift.protocol.TProtocolException;
+import org.apache.thrift.EncodingUtils;
+import org.apache.thrift.TException;
+import org.apache.thrift.async.AsyncMethodCallback;
+import org.apache.thrift.server.AbstractNonblockingServer.*;
+import java.util.List;
+import java.util.ArrayList;
+import java.util.Map;
+import java.util.HashMap;
+import java.util.EnumMap;
+import java.util.Set;
+import java.util.HashSet;
+import java.util.EnumSet;
+import java.util.Collections;
+import java.util.BitSet;
+import java.nio.ByteBuffer;
+import java.util.Arrays;
+import javax.annotation.Generated;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+@SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-30")
+public class HBPulse implements org.apache.thrift.TBase<HBPulse, HBPulse._Fields>, java.io.Serializable, Cloneable, Comparable<HBPulse> {
+  private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("HBPulse");
+
+  private static final org.apache.thrift.protocol.TField ID_FIELD_DESC = new org.apache.thrift.protocol.TField("id", org.apache.thrift.protocol.TType.STRING, (short)1);
+  private static final org.apache.thrift.protocol.TField DETAILS_FIELD_DESC = new org.apache.thrift.protocol.TField("details", org.apache.thrift.protocol.TType.STRING, (short)2);
+
+  private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>();
+  static {
+    schemes.put(StandardScheme.class, new HBPulseStandardSchemeFactory());
+    schemes.put(TupleScheme.class, new HBPulseTupleSchemeFactory());
+  }
+
+  private String id; // required
+  private ByteBuffer details; // required
+
+  /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
+  public enum _Fields implements org.apache.thrift.TFieldIdEnum {
+    ID((short)1, "id"),
+    DETAILS((short)2, "details");
+
+    private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();
+
+    static {
+      for (_Fields field : EnumSet.allOf(_Fields.class)) {
+        byName.put(field.getFieldName(), field);
+      }
+    }
+
+    /**
+     * Find the _Fields constant that matches fieldId, or null if it's not found.
+     */
+    public static _Fields findByThriftId(int fieldId) {
+      switch(fieldId) {
+        case 1: // ID
+          return ID;
+        case 2: // DETAILS
+          return DETAILS;
+        default:
+          return null;
+      }
+    }
+
+    /**
+     * Find the _Fields constant that matches fieldId, throwing an exception
+     * if it is not found.
+     */
+    public static _Fields findByThriftIdOrThrow(int fieldId) {
+      _Fields fields = findByThriftId(fieldId);
+      if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
+      return fields;
+    }
+
+    /**
+     * Find the _Fields constant that matches name, or null if it's not found.
+     */
+    public static _Fields findByName(String name) {
+      return byName.get(name);
+    }
+
+    private final short _thriftId;
+    private final String _fieldName;
+
+    _Fields(short thriftId, String fieldName) {
+      _thriftId = thriftId;
+      _fieldName = fieldName;
+    }
+
+    public short getThriftFieldId() {
+      return _thriftId;
+    }
+
+    public String getFieldName() {
+      return _fieldName;
+    }
+  }
+
+  // isset id assignments
+  public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
+  static {
+    Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
+    tmpMap.put(_Fields.ID, new org.apache.thrift.meta_data.FieldMetaData("id", org.apache.thrift.TFieldRequirementType.REQUIRED, 
+        new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)));
+    tmpMap.put(_Fields.DETAILS, new org.apache.thrift.meta_data.FieldMetaData("details", org.apache.thrift.TFieldRequirementType.DEFAULT, 
+        new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING        , true)));
+    metaDataMap = Collections.unmodifiableMap(tmpMap);
+    org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(HBPulse.class, metaDataMap);
+  }
+
+  public HBPulse() {
+  }
+
+  public HBPulse(
+    String id,
+    ByteBuffer details)
+  {
+    this();
+    this.id = id;
+    this.details = org.apache.thrift.TBaseHelper.copyBinary(details);
+  }
+
+  /**
+   * Performs a deep copy on <i>other</i>.
+   */
+  public HBPulse(HBPulse other) {
+    if (other.is_set_id()) {
+      this.id = other.id;
+    }
+    if (other.is_set_details()) {
+      this.details = org.apache.thrift.TBaseHelper.copyBinary(other.details);
+    }
+  }
+
+  public HBPulse deepCopy() {
+    return new HBPulse(this);
+  }
+
+  @Override
+  public void clear() {
+    this.id = null;
+    this.details = null;
+  }
+
+  public String get_id() {
+    return this.id;
+  }
+
+  public void set_id(String id) {
+    this.id = id;
+  }
+
+  public void unset_id() {
+    this.id = null;
+  }
+
+  /** Returns true if field id is set (has been assigned a value) and false otherwise */
+  public boolean is_set_id() {
+    return this.id != null;
+  }
+
+  public void set_id_isSet(boolean value) {
+    if (!value) {
+      this.id = null;
+    }
+  }
+
+  public byte[] get_details() {
+    set_details(org.apache.thrift.TBaseHelper.rightSize(details));
+    return details == null ? null : details.array();
+  }
+
+  public ByteBuffer buffer_for_details() {
+    return org.apache.thrift.TBaseHelper.copyBinary(details);
+  }
+
+  public void set_details(byte[] details) {
+    this.details = details == null ? (ByteBuffer)null : ByteBuffer.wrap(Arrays.copyOf(details, details.length));
+  }
+
+  public void set_details(ByteBuffer details) {
+    this.details = org.apache.thrift.TBaseHelper.copyBinary(details);
+  }
+
+  public void unset_details() {
+    this.details = null;
+  }
+
+  /** Returns true if field details is set (has been assigned a value) and false otherwise */
+  public boolean is_set_details() {
+    return this.details != null;
+  }
+
+  public void set_details_isSet(boolean value) {
+    if (!value) {
+      this.details = null;
+    }
+  }
+
+  public void setFieldValue(_Fields field, Object value) {
+    switch (field) {
+    case ID:
+      if (value == null) {
+        unset_id();
+      } else {
+        set_id((String)value);
+      }
+      break;
+
+    case DETAILS:
+      if (value == null) {
+        unset_details();
+      } else {
+        set_details((ByteBuffer)value);
+      }
+      break;
+
+    }
+  }
+
+  public Object getFieldValue(_Fields field) {
+    switch (field) {
+    case ID:
+      return get_id();
+
+    case DETAILS:
+      return get_details();
+
+    }
+    throw new IllegalStateException();
+  }
+
+  /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
+  public boolean isSet(_Fields field) {
+    if (field == null) {
+      throw new IllegalArgumentException();
+    }
+
+    switch (field) {
+    case ID:
+      return is_set_id();
+    case DETAILS:
+      return is_set_details();
+    }
+    throw new IllegalStateException();
+  }
+
+  @Override
+  public boolean equals(Object that) {
+    if (that == null)
+      return false;
+    if (that instanceof HBPulse)
+      return this.equals((HBPulse)that);
+    return false;
+  }
+
+  public boolean equals(HBPulse that) {
+    if (that == null)
+      return false;
+
+    boolean this_present_id = true && this.is_set_id();
+    boolean that_present_id = true && that.is_set_id();
+    if (this_present_id || that_present_id) {
+      if (!(this_present_id && that_present_id))
+        return false;
+      if (!this.id.equals(that.id))
+        return false;
+    }
+
+    boolean this_present_details = true && this.is_set_details();
+    boolean that_present_details = true && that.is_set_details();
+    if (this_present_details || that_present_details) {
+      if (!(this_present_details && that_present_details))
+        return false;
+      if (!this.details.equals(that.details))
+        return false;
+    }
+
+    return true;
+  }
+
+  @Override
+  public int hashCode() {
+    List<Object> list = new ArrayList<Object>();
+
+    boolean present_id = true && (is_set_id());
+    list.add(present_id);
+    if (present_id)
+      list.add(id);
+
+    boolean present_details = true && (is_set_details());
+    list.add(present_details);
+    if (present_details)
+      list.add(details);
+
+    return list.hashCode();
+  }
+
+  @Override
+  public int compareTo(HBPulse other) {
+    if (!getClass().equals(other.getClass())) {
+      return getClass().getName().compareTo(other.getClass().getName());
+    }
+
+    int lastComparison = 0;
+
+    lastComparison = Boolean.valueOf(is_set_id()).compareTo(other.is_set_id());
+    if (lastComparison != 0) {
+      return lastComparison;
+    }
+    if (is_set_id()) {
+      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.id, other.id);
+      if (lastComparison != 0) {
+        return lastComparison;
+      }
+    }
+    lastComparison = Boolean.valueOf(is_set_details()).compareTo(other.is_set_details());
+    if (lastComparison != 0) {
+      return lastComparison;
+    }
+    if (is_set_details()) {
+      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.details, other.details);
+      if (lastComparison != 0) {
+        return lastComparison;
+      }
+    }
+    return 0;
+  }
+
+  public _Fields fieldForId(int fieldId) {
+    return _Fields.findByThriftId(fieldId);
+  }
+
+  public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException {
+    schemes.get(iprot.getScheme()).getScheme().read(iprot, this);
+  }
+
+  public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {
+    schemes.get(oprot.getScheme()).getScheme().write(oprot, this);
+  }
+
+  @Override
+  public String toString() {
+    StringBuilder sb = new StringBuilder("HBPulse(");
+    boolean first = true;
+
+    sb.append("id:");
+    if (this.id == null) {
+      sb.append("null");
+    } else {
+      sb.append(this.id);
+    }
+    first = false;
+    if (!first) sb.append(", ");
+    sb.append("details:");
+    if (this.details == null) {
+      sb.append("null");
+    } else {
+      org.apache.thrift.TBaseHelper.toString(this.details, sb);
+    }
+    first = false;
+    sb.append(")");
+    return sb.toString();
+  }
+
+  public void validate() throws org.apache.thrift.TException {
+    // check for required fields
+    if (!is_set_id()) {
+      throw new org.apache.thrift.protocol.TProtocolException("Required field 'id' is unset! Struct:" + toString());
+    }
+
+    // check for sub-struct validity
+  }
+
+  private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
+    try {
+      write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out)));
+    } catch (org.apache.thrift.TException te) {
+      throw new java.io.IOException(te);
+    }
+  }
+
+  private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
+    try {
+      read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));
+    } catch (org.apache.thrift.TException te) {
+      throw new java.io.IOException(te);
+    }
+  }
+
+  private static class HBPulseStandardSchemeFactory implements SchemeFactory {
+    public HBPulseStandardScheme getScheme() {
+      return new HBPulseStandardScheme();
+    }
+  }
+
+  private static class HBPulseStandardScheme extends StandardScheme<HBPulse> {
+
+    public void read(org.apache.thrift.protocol.TProtocol iprot, HBPulse struct) throws org.apache.thrift.TException {
+      org.apache.thrift.protocol.TField schemeField;
+      iprot.readStructBegin();
+      while (true)
+      {
+        schemeField = iprot.readFieldBegin();
+        if (schemeField.type == org.apache.thrift.protocol.TType.STOP) { 
+          break;
+        }
+        switch (schemeField.id) {
+          case 1: // ID
+            if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {
+              struct.id = iprot.readString();
+              struct.set_id_isSet(true);
+            } else { 
+              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+            }
+            break;
+          case 2: // DETAILS
+            if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {
+              struct.details = iprot.readBinary();
+              struct.set_details_isSet(true);
+            } else { 
+              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+            }
+            break;
+          default:
+            org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+        }
+        iprot.readFieldEnd();
+      }
+      iprot.readStructEnd();
+      struct.validate();
+    }
+
+    public void write(org.apache.thrift.protocol.TProtocol oprot, HBPulse struct) throws org.apache.thrift.TException {
+      struct.validate();
+
+      oprot.writeStructBegin(STRUCT_DESC);
+      if (struct.id != null) {
+        oprot.writeFieldBegin(ID_FIELD_DESC);
+        oprot.writeString(struct.id);
+        oprot.writeFieldEnd();
+      }
+      if (struct.details != null) {
+        oprot.writeFieldBegin(DETAILS_FIELD_DESC);
+        oprot.writeBinary(struct.details);
+        oprot.writeFieldEnd();
+      }
+      oprot.writeFieldStop();
+      oprot.writeStructEnd();
+    }
+
+  }
+
+  private static class HBPulseTupleSchemeFactory implements SchemeFactory {
+    public HBPulseTupleScheme getScheme() {
+      return new HBPulseTupleScheme();
+    }
+  }
+
+  private static class HBPulseTupleScheme extends TupleScheme<HBPulse> {
+
+    @Override
+    public void write(org.apache.thrift.protocol.TProtocol prot, HBPulse struct) throws org.apache.thrift.TException {
+      TTupleProtocol oprot = (TTupleProtocol) prot;
+      oprot.writeString(struct.id);
+      BitSet optionals = new BitSet();
+      if (struct.is_set_details()) {
+        optionals.set(0);
+      }
+      oprot.writeBitSet(optionals, 1);
+      if (struct.is_set_details()) {
+        oprot.writeBinary(struct.details);
+      }
+    }
+
+    @Override
+    public void read(org.apache.thrift.protocol.TProtocol prot, HBPulse struct) throws org.apache.thrift.TException {
+      TTupleProtocol iprot = (TTupleProtocol) prot;
+      struct.id = iprot.readString();
+      struct.set_id_isSet(true);
+      BitSet incoming = iprot.readBitSet(1);
+      if (incoming.get(0)) {
+        struct.details = iprot.readBinary();
+        struct.set_details_isSet(true);
+      }
+    }
+  }
+
+}
+
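[Editorial sketch] Note the asymmetry in the schemes above: 'id' is REQUIRED, so validate() rejects a pulse without it and the tuple scheme writes it unconditionally, while 'details' rides in the optional bit set. A minimal sketch of the required-field check, not part of the commit; it assumes storm-core and libthrift 0.9.2 on the classpath, and the pulse id path is made up for illustration.

import backtype.storm.generated.HBPulse;
import org.apache.thrift.TException;
import org.apache.thrift.TSerializer;

public class HBPulseValidateExample {
  public static void main(String[] args) throws Exception {
    HBPulse pulse = new HBPulse();
    pulse.set_details("heartbeat-payload".getBytes("UTF-8"));
    try {
      // The standard scheme's write() calls validate() before serializing.
      new TSerializer().serialize(pulse);
    } catch (TException e) {
      System.out.println(e.getMessage()); // Required field 'id' is unset! ...
    }
    pulse.set_id("/pulses/worker-1");
    byte[] ok = new TSerializer().serialize(pulse); // succeeds once id is set
    System.out.println(ok.length + " bytes");
  }
}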

http://git-wip-us.apache.org/repos/asf/storm/blob/444ec05e/storm-core/src/jvm/backtype/storm/generated/HBRecords.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/backtype/storm/generated/HBRecords.java b/storm-core/src/jvm/backtype/storm/generated/HBRecords.java
new file mode 100644
index 0000000..2df69bd
--- /dev/null
+++ b/storm-core/src/jvm/backtype/storm/generated/HBRecords.java
@@ -0,0 +1,466 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+/**
+ * Autogenerated by Thrift Compiler (0.9.2)
+ *
+ * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
+ *  @generated
+ */
+package backtype.storm.generated;
+
+import org.apache.thrift.scheme.IScheme;
+import org.apache.thrift.scheme.SchemeFactory;
+import org.apache.thrift.scheme.StandardScheme;
+
+import org.apache.thrift.scheme.TupleScheme;
+import org.apache.thrift.protocol.TTupleProtocol;
+import org.apache.thrift.protocol.TProtocolException;
+import org.apache.thrift.EncodingUtils;
+import org.apache.thrift.TException;
+import org.apache.thrift.async.AsyncMethodCallback;
+import org.apache.thrift.server.AbstractNonblockingServer.*;
+import java.util.List;
+import java.util.ArrayList;
+import java.util.Map;
+import java.util.HashMap;
+import java.util.EnumMap;
+import java.util.Set;
+import java.util.HashSet;
+import java.util.EnumSet;
+import java.util.Collections;
+import java.util.BitSet;
+import java.nio.ByteBuffer;
+import java.util.Arrays;
+import javax.annotation.Generated;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+@SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-30")
+public class HBRecords implements org.apache.thrift.TBase<HBRecords, HBRecords._Fields>, java.io.Serializable, Cloneable, Comparable<HBRecords> {
+  private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("HBRecords");
+
+  private static final org.apache.thrift.protocol.TField PULSES_FIELD_DESC = new org.apache.thrift.protocol.TField("pulses", org.apache.thrift.protocol.TType.LIST, (short)1);
+
+  private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>();
+  static {
+    schemes.put(StandardScheme.class, new HBRecordsStandardSchemeFactory());
+    schemes.put(TupleScheme.class, new HBRecordsTupleSchemeFactory());
+  }
+
+  private List<HBPulse> pulses; // required
+
+  /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
+  public enum _Fields implements org.apache.thrift.TFieldIdEnum {
+    PULSES((short)1, "pulses");
+
+    private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();
+
+    static {
+      for (_Fields field : EnumSet.allOf(_Fields.class)) {
+        byName.put(field.getFieldName(), field);
+      }
+    }
+
+    /**
+     * Find the _Fields constant that matches fieldId, or null if it's not found.
+     */
+    public static _Fields findByThriftId(int fieldId) {
+      switch(fieldId) {
+        case 1: // PULSES
+          return PULSES;
+        default:
+          return null;
+      }
+    }
+
+    /**
+     * Find the _Fields constant that matches fieldId, throwing an exception
+     * if it is not found.
+     */
+    public static _Fields findByThriftIdOrThrow(int fieldId) {
+      _Fields fields = findByThriftId(fieldId);
+      if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
+      return fields;
+    }
+
+    /**
+     * Find the _Fields constant that matches name, or null if it's not found.
+     */
+    public static _Fields findByName(String name) {
+      return byName.get(name);
+    }
+
+    private final short _thriftId;
+    private final String _fieldName;
+
+    _Fields(short thriftId, String fieldName) {
+      _thriftId = thriftId;
+      _fieldName = fieldName;
+    }
+
+    public short getThriftFieldId() {
+      return _thriftId;
+    }
+
+    public String getFieldName() {
+      return _fieldName;
+    }
+  }
+
+  // isset id assignments
+  public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
+  static {
+    Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
+    tmpMap.put(_Fields.PULSES, new org.apache.thrift.meta_data.FieldMetaData("pulses", org.apache.thrift.TFieldRequirementType.DEFAULT, 
+        new org.apache.thrift.meta_data.ListMetaData(org.apache.thrift.protocol.TType.LIST, 
+            new org.apache.thrift.meta_data.StructMetaData(org.apache.thrift.protocol.TType.STRUCT, HBPulse.class))));
+    metaDataMap = Collections.unmodifiableMap(tmpMap);
+    org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(HBRecords.class, metaDataMap);
+  }
+
+  public HBRecords() {
+  }
+
+  public HBRecords(
+    List<HBPulse> pulses)
+  {
+    this();
+    this.pulses = pulses;
+  }
+
+  /**
+   * Performs a deep copy on <i>other</i>.
+   */
+  public HBRecords(HBRecords other) {
+    if (other.is_set_pulses()) {
+      List<HBPulse> __this__pulses = new ArrayList<HBPulse>(other.pulses.size());
+      for (HBPulse other_element : other.pulses) {
+        __this__pulses.add(new HBPulse(other_element));
+      }
+      this.pulses = __this__pulses;
+    }
+  }
+
+  public HBRecords deepCopy() {
+    return new HBRecords(this);
+  }
+
+  @Override
+  public void clear() {
+    this.pulses = null;
+  }
+
+  public int get_pulses_size() {
+    return (this.pulses == null) ? 0 : this.pulses.size();
+  }
+
+  public java.util.Iterator<HBPulse> get_pulses_iterator() {
+    return (this.pulses == null) ? null : this.pulses.iterator();
+  }
+
+  public void add_to_pulses(HBPulse elem) {
+    if (this.pulses == null) {
+      this.pulses = new ArrayList<HBPulse>();
+    }
+    this.pulses.add(elem);
+  }
+
+  public List<HBPulse> get_pulses() {
+    return this.pulses;
+  }
+
+  public void set_pulses(List<HBPulse> pulses) {
+    this.pulses = pulses;
+  }
+
+  public void unset_pulses() {
+    this.pulses = null;
+  }
+
+  /** Returns true if field pulses is set (has been assigned a value) and false otherwise */
+  public boolean is_set_pulses() {
+    return this.pulses != null;
+  }
+
+  public void set_pulses_isSet(boolean value) {
+    if (!value) {
+      this.pulses = null;
+    }
+  }
+
+  public void setFieldValue(_Fields field, Object value) {
+    switch (field) {
+    case PULSES:
+      if (value == null) {
+        unset_pulses();
+      } else {
+        set_pulses((List<HBPulse>)value);
+      }
+      break;
+
+    }
+  }
+
+  public Object getFieldValue(_Fields field) {
+    switch (field) {
+    case PULSES:
+      return get_pulses();
+
+    }
+    throw new IllegalStateException();
+  }
+
+  /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
+  public boolean isSet(_Fields field) {
+    if (field == null) {
+      throw new IllegalArgumentException();
+    }
+
+    switch (field) {
+    case PULSES:
+      return is_set_pulses();
+    }
+    throw new IllegalStateException();
+  }
+
+  @Override
+  public boolean equals(Object that) {
+    if (that == null)
+      return false;
+    if (that instanceof HBRecords)
+      return this.equals((HBRecords)that);
+    return false;
+  }
+
+  public boolean equals(HBRecords that) {
+    if (that == null)
+      return false;
+
+    boolean this_present_pulses = true && this.is_set_pulses();
+    boolean that_present_pulses = true && that.is_set_pulses();
+    if (this_present_pulses || that_present_pulses) {
+      if (!(this_present_pulses && that_present_pulses))
+        return false;
+      if (!this.pulses.equals(that.pulses))
+        return false;
+    }
+
+    return true;
+  }
+
+  @Override
+  public int hashCode() {
+    List<Object> list = new ArrayList<Object>();
+
+    boolean present_pulses = true && (is_set_pulses());
+    list.add(present_pulses);
+    if (present_pulses)
+      list.add(pulses);
+
+    return list.hashCode();
+  }
+
+  @Override
+  public int compareTo(HBRecords other) {
+    if (!getClass().equals(other.getClass())) {
+      return getClass().getName().compareTo(other.getClass().getName());
+    }
+
+    int lastComparison = 0;
+
+    lastComparison = Boolean.valueOf(is_set_pulses()).compareTo(other.is_set_pulses());
+    if (lastComparison != 0) {
+      return lastComparison;
+    }
+    if (is_set_pulses()) {
+      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.pulses, other.pulses);
+      if (lastComparison != 0) {
+        return lastComparison;
+      }
+    }
+    return 0;
+  }
+
+  public _Fields fieldForId(int fieldId) {
+    return _Fields.findByThriftId(fieldId);
+  }
+
+  public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException {
+    schemes.get(iprot.getScheme()).getScheme().read(iprot, this);
+  }
+
+  public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {
+    schemes.get(oprot.getScheme()).getScheme().write(oprot, this);
+  }
+
+  @Override
+  public String toString() {
+    StringBuilder sb = new StringBuilder("HBRecords(");
+    boolean first = true;
+
+    sb.append("pulses:");
+    if (this.pulses == null) {
+      sb.append("null");
+    } else {
+      sb.append(this.pulses);
+    }
+    first = false;
+    sb.append(")");
+    return sb.toString();
+  }
+
+  public void validate() throws org.apache.thrift.TException {
+    // check for required fields
+    // check for sub-struct validity
+  }
+
+  private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
+    try {
+      write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out)));
+    } catch (org.apache.thrift.TException te) {
+      throw new java.io.IOException(te);
+    }
+  }
+
+  private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
+    try {
+      read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));
+    } catch (org.apache.thrift.TException te) {
+      throw new java.io.IOException(te);
+    }
+  }
+
+  private static class HBRecordsStandardSchemeFactory implements SchemeFactory {
+    public HBRecordsStandardScheme getScheme() {
+      return new HBRecordsStandardScheme();
+    }
+  }
+
+  private static class HBRecordsStandardScheme extends StandardScheme<HBRecords> {
+
+    public void read(org.apache.thrift.protocol.TProtocol iprot, HBRecords struct) throws org.apache.thrift.TException {
+      org.apache.thrift.protocol.TField schemeField;
+      iprot.readStructBegin();
+      while (true)
+      {
+        schemeField = iprot.readFieldBegin();
+        if (schemeField.type == org.apache.thrift.protocol.TType.STOP) { 
+          break;
+        }
+        switch (schemeField.id) {
+          case 1: // PULSES
+            if (schemeField.type == org.apache.thrift.protocol.TType.LIST) {
+              {
+                org.apache.thrift.protocol.TList _list666 = iprot.readListBegin();
+                struct.pulses = new ArrayList<HBPulse>(_list666.size);
+                HBPulse _elem667;
+                for (int _i668 = 0; _i668 < _list666.size; ++_i668)
+                {
+                  _elem667 = new HBPulse();
+                  _elem667.read(iprot);
+                  struct.pulses.add(_elem667);
+                }
+                iprot.readListEnd();
+              }
+              struct.set_pulses_isSet(true);
+            } else { 
+              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+            }
+            break;
+          default:
+            org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+        }
+        iprot.readFieldEnd();
+      }
+      iprot.readStructEnd();
+      struct.validate();
+    }
+
+    public void write(org.apache.thrift.protocol.TProtocol oprot, HBRecords struct) throws org.apache.thrift.TException {
+      struct.validate();
+
+      oprot.writeStructBegin(STRUCT_DESC);
+      if (struct.pulses != null) {
+        oprot.writeFieldBegin(PULSES_FIELD_DESC);
+        {
+          oprot.writeListBegin(new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRUCT, struct.pulses.size()));
+          for (HBPulse _iter669 : struct.pulses)
+          {
+            _iter669.write(oprot);
+          }
+          oprot.writeListEnd();
+        }
+        oprot.writeFieldEnd();
+      }
+      oprot.writeFieldStop();
+      oprot.writeStructEnd();
+    }
+
+  }
+
+  private static class HBRecordsTupleSchemeFactory implements SchemeFactory {
+    public HBRecordsTupleScheme getScheme() {
+      return new HBRecordsTupleScheme();
+    }
+  }
+
+  private static class HBRecordsTupleScheme extends TupleScheme<HBRecords> {
+
+    @Override
+    public void write(org.apache.thrift.protocol.TProtocol prot, HBRecords struct) throws org.apache.thrift.TException {
+      TTupleProtocol oprot = (TTupleProtocol) prot;
+      BitSet optionals = new BitSet();
+      if (struct.is_set_pulses()) {
+        optionals.set(0);
+      }
+      oprot.writeBitSet(optionals, 1);
+      if (struct.is_set_pulses()) {
+        {
+          oprot.writeI32(struct.pulses.size());
+          for (HBPulse _iter670 : struct.pulses)
+          {
+            _iter670.write(oprot);
+          }
+        }
+      }
+    }
+
+    @Override
+    public void read(org.apache.thrift.protocol.TProtocol prot, HBRecords struct) throws org.apache.thrift.TException {
+      TTupleProtocol iprot = (TTupleProtocol) prot;
+      BitSet incoming = iprot.readBitSet(1);
+      if (incoming.get(0)) {
+        {
+          org.apache.thrift.protocol.TList _list671 = new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRUCT, iprot.readI32());
+          struct.pulses = new ArrayList<HBPulse>(_list671.size);
+          HBPulse _elem672;
+          for (int _i673 = 0; _i673 < _list671.size; ++_i673)
+          {
+            _elem672 = new HBPulse();
+            _elem672.read(iprot);
+            struct.pulses.add(_elem672);
+          }
+        }
+        struct.set_pulses_isSet(true);
+      }
+    }
+  }
+
+}
+
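[Editorial sketch] HBRecords is a thin wrapper around a list of HBPulse structs; add_to_pulses allocates the backing ArrayList lazily on first use. A minimal sketch, not part of the commit; the pulse id paths are made up, and empty ByteBuffers stand in for real heartbeat payloads.

import backtype.storm.generated.HBPulse;
import backtype.storm.generated.HBRecords;
import java.nio.ByteBuffer;

public class HBRecordsExample {
  public static void main(String[] args) {
    HBRecords records = new HBRecords();
    // add_to_pulses creates the list if it is null, then appends.
    records.add_to_pulses(new HBPulse("/pulses/worker-1", ByteBuffer.wrap(new byte[0])));
    records.add_to_pulses(new HBPulse("/pulses/worker-2", ByteBuffer.wrap(new byte[0])));
    System.out.println(records.get_pulses_size()); // 2
  }
}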

http://git-wip-us.apache.org/repos/asf/storm/blob/444ec05e/storm-core/src/jvm/backtype/storm/generated/HBServerMessageType.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/backtype/storm/generated/HBServerMessageType.java b/storm-core/src/jvm/backtype/storm/generated/HBServerMessageType.java
new file mode 100644
index 0000000..6f940c2
--- /dev/null
+++ b/storm-core/src/jvm/backtype/storm/generated/HBServerMessageType.java
@@ -0,0 +1,113 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+/**
+ * Autogenerated by Thrift Compiler (0.9.2)
+ *
+ * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
+ *  @generated
+ */
+package backtype.storm.generated;
+
+
+import java.util.Map;
+import java.util.HashMap;
+import org.apache.thrift.TEnum;
+
+public enum HBServerMessageType implements org.apache.thrift.TEnum {
+  CREATE_PATH(0),
+  CREATE_PATH_RESPONSE(1),
+  EXISTS(2),
+  EXISTS_RESPONSE(3),
+  SEND_PULSE(4),
+  SEND_PULSE_RESPONSE(5),
+  GET_ALL_PULSE_FOR_PATH(6),
+  GET_ALL_PULSE_FOR_PATH_RESPONSE(7),
+  GET_ALL_NODES_FOR_PATH(8),
+  GET_ALL_NODES_FOR_PATH_RESPONSE(9),
+  GET_PULSE(10),
+  GET_PULSE_RESPONSE(11),
+  DELETE_PATH(12),
+  DELETE_PATH_RESPONSE(13),
+  DELETE_PULSE_ID(14),
+  DELETE_PULSE_ID_RESPONSE(15),
+  CONTROL_MESSAGE(16),
+  SASL_MESSAGE_TOKEN(17),
+  NOT_AUTHORIZED(18);
+
+  private final int value;
+
+  private HBServerMessageType(int value) {
+    this.value = value;
+  }
+
+  /**
+   * Get the integer value of this enum value, as defined in the Thrift IDL.
+   */
+  public int getValue() {
+    return value;
+  }
+
+  /**
+   * Find the enum type by its integer value, as defined in the Thrift IDL.
+   * @return null if the value is not found.
+   */
+  public static HBServerMessageType findByValue(int value) { 
+    switch (value) {
+      case 0:
+        return CREATE_PATH;
+      case 1:
+        return CREATE_PATH_RESPONSE;
+      case 2:
+        return EXISTS;
+      case 3:
+        return EXISTS_RESPONSE;
+      case 4:
+        return SEND_PULSE;
+      case 5:
+        return SEND_PULSE_RESPONSE;
+      case 6:
+        return GET_ALL_PULSE_FOR_PATH;
+      case 7:
+        return GET_ALL_PULSE_FOR_PATH_RESPONSE;
+      case 8:
+        return GET_ALL_NODES_FOR_PATH;
+      case 9:
+        return GET_ALL_NODES_FOR_PATH_RESPONSE;
+      case 10:
+        return GET_PULSE;
+      case 11:
+        return GET_PULSE_RESPONSE;
+      case 12:
+        return DELETE_PATH;
+      case 13:
+        return DELETE_PATH_RESPONSE;
+      case 14:
+        return DELETE_PULSE_ID;
+      case 15:
+        return DELETE_PULSE_ID_RESPONSE;
+      case 16:
+        return CONTROL_MESSAGE;
+      case 17:
+        return SASL_MESSAGE_TOKEN;
+      case 18:
+        return NOT_AUTHORIZED;
+      default:
+        return null;
+    }
+  }
+}
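+
+[Editorial sketch] findByValue returns null for wire values the enum does not
+know about, so callers decoding a message type must null-check rather than
+expect an exception. A minimal sketch, not part of the commit:
+
+import backtype.storm.generated.HBServerMessageType;
+
+public class MessageTypeLookup {
+  public static void main(String[] args) {
+    HBServerMessageType t = HBServerMessageType.findByValue(4);
+    System.out.println(t + " = " + t.getValue());            // SEND_PULSE = 4
+    System.out.println(HBServerMessageType.findByValue(99)); // null, not an exception
+  }
+}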

http://git-wip-us.apache.org/repos/asf/storm/blob/444ec05e/storm-core/src/jvm/backtype/storm/generated/InvalidTopologyException.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/backtype/storm/generated/InvalidTopologyException.java b/storm-core/src/jvm/backtype/storm/generated/InvalidTopologyException.java
index 003dae7..0a1a45d 100644
--- a/storm-core/src/jvm/backtype/storm/generated/InvalidTopologyException.java
+++ b/storm-core/src/jvm/backtype/storm/generated/InvalidTopologyException.java
@@ -51,7 +51,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-9")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-30")
 public class InvalidTopologyException extends TException implements org.apache.thrift.TBase<InvalidTopologyException, InvalidTopologyException._Fields>, java.io.Serializable, Cloneable, Comparable<InvalidTopologyException> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("InvalidTopologyException");
 

http://git-wip-us.apache.org/repos/asf/storm/blob/444ec05e/storm-core/src/jvm/backtype/storm/generated/JavaObject.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/backtype/storm/generated/JavaObject.java b/storm-core/src/jvm/backtype/storm/generated/JavaObject.java
index 15d8fd7..bf6f800 100644
--- a/storm-core/src/jvm/backtype/storm/generated/JavaObject.java
+++ b/storm-core/src/jvm/backtype/storm/generated/JavaObject.java
@@ -51,7 +51,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-9")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-30")
 public class JavaObject implements org.apache.thrift.TBase<JavaObject, JavaObject._Fields>, java.io.Serializable, Cloneable, Comparable<JavaObject> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("JavaObject");
 

http://git-wip-us.apache.org/repos/asf/storm/blob/444ec05e/storm-core/src/jvm/backtype/storm/generated/KillOptions.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/backtype/storm/generated/KillOptions.java b/storm-core/src/jvm/backtype/storm/generated/KillOptions.java
index bebfbb1..a3bb76a 100644
--- a/storm-core/src/jvm/backtype/storm/generated/KillOptions.java
+++ b/storm-core/src/jvm/backtype/storm/generated/KillOptions.java
@@ -51,7 +51,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-9")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-30")
 public class KillOptions implements org.apache.thrift.TBase<KillOptions, KillOptions._Fields>, java.io.Serializable, Cloneable, Comparable<KillOptions> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("KillOptions");
 

http://git-wip-us.apache.org/repos/asf/storm/blob/444ec05e/storm-core/src/jvm/backtype/storm/generated/LSApprovedWorkers.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/backtype/storm/generated/LSApprovedWorkers.java b/storm-core/src/jvm/backtype/storm/generated/LSApprovedWorkers.java
index 75da228..505b362 100644
--- a/storm-core/src/jvm/backtype/storm/generated/LSApprovedWorkers.java
+++ b/storm-core/src/jvm/backtype/storm/generated/LSApprovedWorkers.java
@@ -51,7 +51,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-9")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-30")
 public class LSApprovedWorkers implements org.apache.thrift.TBase<LSApprovedWorkers, LSApprovedWorkers._Fields>, java.io.Serializable, Cloneable, Comparable<LSApprovedWorkers> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("LSApprovedWorkers");
 

http://git-wip-us.apache.org/repos/asf/storm/blob/444ec05e/storm-core/src/jvm/backtype/storm/generated/LSSupervisorAssignments.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/backtype/storm/generated/LSSupervisorAssignments.java b/storm-core/src/jvm/backtype/storm/generated/LSSupervisorAssignments.java
index 7eb3628..cc738c2 100644
--- a/storm-core/src/jvm/backtype/storm/generated/LSSupervisorAssignments.java
+++ b/storm-core/src/jvm/backtype/storm/generated/LSSupervisorAssignments.java
@@ -51,7 +51,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-9")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-30")
 public class LSSupervisorAssignments implements org.apache.thrift.TBase<LSSupervisorAssignments, LSSupervisorAssignments._Fields>, java.io.Serializable, Cloneable, Comparable<LSSupervisorAssignments> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("LSSupervisorAssignments");
 

http://git-wip-us.apache.org/repos/asf/storm/blob/444ec05e/storm-core/src/jvm/backtype/storm/generated/LSSupervisorId.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/backtype/storm/generated/LSSupervisorId.java b/storm-core/src/jvm/backtype/storm/generated/LSSupervisorId.java
index 8b0241a..d7f1b69 100644
--- a/storm-core/src/jvm/backtype/storm/generated/LSSupervisorId.java
+++ b/storm-core/src/jvm/backtype/storm/generated/LSSupervisorId.java
@@ -51,7 +51,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-9")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-30")
 public class LSSupervisorId implements org.apache.thrift.TBase<LSSupervisorId, LSSupervisorId._Fields>, java.io.Serializable, Cloneable, Comparable<LSSupervisorId> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("LSSupervisorId");
 

http://git-wip-us.apache.org/repos/asf/storm/blob/444ec05e/storm-core/src/jvm/backtype/storm/generated/LSWorkerHeartbeat.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/backtype/storm/generated/LSWorkerHeartbeat.java b/storm-core/src/jvm/backtype/storm/generated/LSWorkerHeartbeat.java
index fc37eb1..ecf4776 100644
--- a/storm-core/src/jvm/backtype/storm/generated/LSWorkerHeartbeat.java
+++ b/storm-core/src/jvm/backtype/storm/generated/LSWorkerHeartbeat.java
@@ -51,7 +51,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-9")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-30")
 public class LSWorkerHeartbeat implements org.apache.thrift.TBase<LSWorkerHeartbeat, LSWorkerHeartbeat._Fields>, java.io.Serializable, Cloneable, Comparable<LSWorkerHeartbeat> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("LSWorkerHeartbeat");
 

http://git-wip-us.apache.org/repos/asf/storm/blob/444ec05e/storm-core/src/jvm/backtype/storm/generated/LocalAssignment.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/backtype/storm/generated/LocalAssignment.java b/storm-core/src/jvm/backtype/storm/generated/LocalAssignment.java
index 1e9f05c..8a9d91f 100644
--- a/storm-core/src/jvm/backtype/storm/generated/LocalAssignment.java
+++ b/storm-core/src/jvm/backtype/storm/generated/LocalAssignment.java
@@ -51,7 +51,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-9")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-30")
 public class LocalAssignment implements org.apache.thrift.TBase<LocalAssignment, LocalAssignment._Fields>, java.io.Serializable, Cloneable, Comparable<LocalAssignment> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("LocalAssignment");
 

http://git-wip-us.apache.org/repos/asf/storm/blob/444ec05e/storm-core/src/jvm/backtype/storm/generated/LocalStateData.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/backtype/storm/generated/LocalStateData.java b/storm-core/src/jvm/backtype/storm/generated/LocalStateData.java
index e821149..3bca07b 100644
--- a/storm-core/src/jvm/backtype/storm/generated/LocalStateData.java
+++ b/storm-core/src/jvm/backtype/storm/generated/LocalStateData.java
@@ -51,7 +51,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-9")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-30")
 public class LocalStateData implements org.apache.thrift.TBase<LocalStateData, LocalStateData._Fields>, java.io.Serializable, Cloneable, Comparable<LocalStateData> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("LocalStateData");
 

http://git-wip-us.apache.org/repos/asf/storm/blob/444ec05e/storm-core/src/jvm/backtype/storm/generated/LogConfig.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/backtype/storm/generated/LogConfig.java b/storm-core/src/jvm/backtype/storm/generated/LogConfig.java
index 6d537f8..192697c 100644
--- a/storm-core/src/jvm/backtype/storm/generated/LogConfig.java
+++ b/storm-core/src/jvm/backtype/storm/generated/LogConfig.java
@@ -51,7 +51,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-9")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-30")
 public class LogConfig implements org.apache.thrift.TBase<LogConfig, LogConfig._Fields>, java.io.Serializable, Cloneable, Comparable<LogConfig> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("LogConfig");
 

http://git-wip-us.apache.org/repos/asf/storm/blob/444ec05e/storm-core/src/jvm/backtype/storm/generated/LogLevel.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/backtype/storm/generated/LogLevel.java b/storm-core/src/jvm/backtype/storm/generated/LogLevel.java
index 1526bf3..43aa7c9 100644
--- a/storm-core/src/jvm/backtype/storm/generated/LogLevel.java
+++ b/storm-core/src/jvm/backtype/storm/generated/LogLevel.java
@@ -51,7 +51,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-9")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-30")
 public class LogLevel implements org.apache.thrift.TBase<LogLevel, LogLevel._Fields>, java.io.Serializable, Cloneable, Comparable<LogLevel> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("LogLevel");
 

http://git-wip-us.apache.org/repos/asf/storm/blob/444ec05e/storm-core/src/jvm/backtype/storm/generated/Nimbus.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/backtype/storm/generated/Nimbus.java b/storm-core/src/jvm/backtype/storm/generated/Nimbus.java
index 59f74fb..592d761 100644
--- a/storm-core/src/jvm/backtype/storm/generated/Nimbus.java
+++ b/storm-core/src/jvm/backtype/storm/generated/Nimbus.java
@@ -51,7 +51,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-9")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-30")
 public class Nimbus {
 
   public interface Iface {

http://git-wip-us.apache.org/repos/asf/storm/blob/444ec05e/storm-core/src/jvm/backtype/storm/generated/NimbusSummary.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/backtype/storm/generated/NimbusSummary.java b/storm-core/src/jvm/backtype/storm/generated/NimbusSummary.java
index 7d1e2fd..27d2f62 100644
--- a/storm-core/src/jvm/backtype/storm/generated/NimbusSummary.java
+++ b/storm-core/src/jvm/backtype/storm/generated/NimbusSummary.java
@@ -51,7 +51,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-9")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-30")
 public class NimbusSummary implements org.apache.thrift.TBase<NimbusSummary, NimbusSummary._Fields>, java.io.Serializable, Cloneable, Comparable<NimbusSummary> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("NimbusSummary");
 

http://git-wip-us.apache.org/repos/asf/storm/blob/444ec05e/storm-core/src/jvm/backtype/storm/generated/NodeInfo.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/backtype/storm/generated/NodeInfo.java b/storm-core/src/jvm/backtype/storm/generated/NodeInfo.java
index 364afa7..9a4d9f9 100644
--- a/storm-core/src/jvm/backtype/storm/generated/NodeInfo.java
+++ b/storm-core/src/jvm/backtype/storm/generated/NodeInfo.java
@@ -51,7 +51,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-9")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-30")
 public class NodeInfo implements org.apache.thrift.TBase<NodeInfo, NodeInfo._Fields>, java.io.Serializable, Cloneable, Comparable<NodeInfo> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("NodeInfo");
 


[36/37] storm git commit: Merge branch 'STORM-855' of https://github.com/knusbaum/incubator-storm

Posted by kn...@apache.org.
Merge branch 'STORM-855' of https://github.com/knusbaum/incubator-storm


Project: http://git-wip-us.apache.org/repos/asf/storm/repo
Commit: http://git-wip-us.apache.org/repos/asf/storm/commit/8d4d72ca
Tree: http://git-wip-us.apache.org/repos/asf/storm/tree/8d4d72ca
Diff: http://git-wip-us.apache.org/repos/asf/storm/diff/8d4d72ca

Branch: refs/heads/master
Commit: 8d4d72cac5e0e5080d6907a378e54381e0ed5db4
Parents: a8d253a e7a8c94
Author: Kyle Nusbaum <Ky...@gmail.com>
Authored: Mon Nov 23 15:06:16 2015 -0600
Committer: Kyle Nusbaum <Ky...@gmail.com>
Committed: Mon Nov 23 15:06:16 2015 -0600

----------------------------------------------------------------------
 bin/storm.py                                    |  38 +-
 conf/defaults.yaml                              |  10 +
 docs/documentation/Pacemaker.md                 | 108 +++
 pom.xml                                         |   1 +
 storm-core/pom.xml                              |   7 +-
 storm-core/src/clj/backtype/storm/cluster.clj   | 279 +++-----
 .../cluster_state/zookeeper_state_factory.clj   | 157 +++++
 .../clj/backtype/storm/command/heartbeats.clj   |  52 ++
 storm-core/src/clj/backtype/storm/config.clj    |   6 +
 .../src/clj/backtype/storm/daemon/worker.clj    |   5 -
 storm-core/src/clj/backtype/storm/util.clj      |  16 +
 .../org/apache/storm/pacemaker/pacemaker.clj    | 237 +++++++
 .../storm/pacemaker/pacemaker_state_factory.clj | 124 ++++
 storm-core/src/jvm/backtype/storm/Config.java   |  55 ++
 .../backtype/storm/cluster/ClusterState.java    | 208 ++++++
 .../storm/cluster/ClusterStateContext.java      |  41 ++
 .../storm/cluster/ClusterStateFactory.java      |  28 +
 .../storm/cluster/ClusterStateListener.java     |  22 +
 .../backtype/storm/cluster/ConnectionState.java |  24 +
 .../jvm/backtype/storm/cluster/DaemonType.java  |  27 +
 .../generated/HBAuthorizationException.java     | 406 ++++++++++++
 .../storm/generated/HBExecutionException.java   | 406 ++++++++++++
 .../jvm/backtype/storm/generated/HBMessage.java | 636 ++++++++++++++++++
 .../backtype/storm/generated/HBMessageData.java | 640 ++++++++++++++++++
 .../jvm/backtype/storm/generated/HBNodes.java   | 461 +++++++++++++
 .../jvm/backtype/storm/generated/HBPulse.java   | 522 +++++++++++++++
 .../jvm/backtype/storm/generated/HBRecords.java | 466 +++++++++++++
 .../storm/generated/HBServerMessageType.java    | 113 ++++
 .../jvm/backtype/storm/generated/Nimbus.java    |  36 +-
 .../storm/messaging/netty/ControlMessage.java   |  17 +-
 .../messaging/netty/INettySerializable.java     |  26 +
 .../netty/KerberosSaslClientHandler.java        | 152 +++++
 .../netty/KerberosSaslNettyClient.java          | 203 ++++++
 .../netty/KerberosSaslNettyClientState.java     |  31 +
 .../netty/KerberosSaslNettyServer.java          | 210 ++++++
 .../netty/KerberosSaslNettyServerState.java     |  30 +
 .../netty/KerberosSaslServerHandler.java        | 133 ++++
 .../storm/messaging/netty/MessageDecoder.java   |   4 +-
 .../netty/NettyRenameThreadFactory.java         |  10 +-
 .../netty/NettyUncaughtExceptionHandler.java    |  35 +
 .../storm/messaging/netty/SaslMessageToken.java |  37 +-
 .../storm/messaging/netty/SaslNettyClient.java  |  22 +-
 .../storm/messaging/netty/SaslNettyServer.java  | 244 ++++---
 .../messaging/netty/SaslNettyServerState.java   |  13 +-
 .../messaging/netty/SaslStormServerHandler.java |  21 +-
 .../storm/messaging/netty/SaslUtils.java        |   1 +
 .../backtype/storm/messaging/netty/Server.java  |  50 +-
 .../messaging/netty/StormServerHandler.java     |  24 +-
 .../backtype/storm/security/auth/AuthUtils.java |  69 ++
 .../src/jvm/backtype/storm/utils/Utils.java     |  51 ++
 .../storm/validation/ConfigValidation.java      |  20 +-
 .../storm/pacemaker/IServerMessageHandler.java  |  25 +
 .../apache/storm/pacemaker/PacemakerClient.java | 255 +++++++
 .../storm/pacemaker/PacemakerClientHandler.java |  75 +++
 .../apache/storm/pacemaker/PacemakerServer.java | 163 +++++
 .../storm/pacemaker/codec/ThriftDecoder.java    |  76 +++
 .../storm/pacemaker/codec/ThriftEncoder.java    | 110 ++++
 .../pacemaker/codec/ThriftNettyClientCodec.java |  94 +++
 .../pacemaker/codec/ThriftNettyServerCodec.java |  99 +++
 .../jvm/storm/trident/util/TridentUtils.java    |  33 +-
 storm-core/src/py/storm/Nimbus.py               |  14 +-
 storm-core/src/py/storm/ttypes.py               | 658 +++++++++++++++++++
 storm-core/src/storm.thrift                     |  59 ++
 .../test/clj/backtype/storm/cluster_test.clj    |   7 +-
 .../storm/pacemaker_state_factory_test.clj      | 150 +++++
 .../clj/org/apache/storm/pacemaker_test.clj     | 242 +++++++
 .../jvm/backtype/storm/TestConfigValidate.java  |  18 +
 67 files changed, 8135 insertions(+), 477 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/storm/blob/8d4d72ca/bin/storm.py
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/storm/blob/8d4d72ca/conf/defaults.yaml
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/storm/blob/8d4d72ca/storm-core/src/jvm/backtype/storm/Config.java
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/storm/blob/8d4d72ca/storm-core/src/jvm/backtype/storm/utils/Utils.java
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/storm/blob/8d4d72ca/storm-core/src/py/storm/ttypes.py
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/storm/blob/8d4d72ca/storm-core/src/storm.thrift
----------------------------------------------------------------------


[33/37] storm git commit: Removing newline from EOF.

Posted by kn...@apache.org.
Removing newline from EOF.


Project: http://git-wip-us.apache.org/repos/asf/storm/repo
Commit: http://git-wip-us.apache.org/repos/asf/storm/commit/c2f1da0e
Tree: http://git-wip-us.apache.org/repos/asf/storm/tree/c2f1da0e
Diff: http://git-wip-us.apache.org/repos/asf/storm/diff/c2f1da0e

Branch: refs/heads/master
Commit: c2f1da0eee0910b568a7df257d322d2c45179a52
Parents: 61921bb
Author: Kyle Nusbaum <Ky...@gmail.com>
Authored: Mon Nov 23 12:48:34 2015 -0600
Committer: Kyle Nusbaum <Ky...@gmail.com>
Committed: Mon Nov 23 12:48:34 2015 -0600

----------------------------------------------------------------------
 .../storm/messaging/netty/SaslStormServerAuthorizeHandler.java     | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/storm/blob/c2f1da0e/storm-core/src/jvm/backtype/storm/messaging/netty/SaslStormServerAuthorizeHandler.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/backtype/storm/messaging/netty/SaslStormServerAuthorizeHandler.java b/storm-core/src/jvm/backtype/storm/messaging/netty/SaslStormServerAuthorizeHandler.java
index 8b3d1c0..04cd66e 100644
--- a/storm-core/src/jvm/backtype/storm/messaging/netty/SaslStormServerAuthorizeHandler.java
+++ b/storm-core/src/jvm/backtype/storm/messaging/netty/SaslStormServerAuthorizeHandler.java
@@ -80,4 +80,4 @@ public class SaslStormServerAuthorizeHandler extends SimpleChannelUpstreamHandle
 		// pipeline component.
 		Channels.fireMessageReceived(ctx, msg);
 	}
-}
+}
\ No newline at end of file


[28/37] storm git commit: Less verbose logs (no logs)

Posted by kn...@apache.org.
Less verbose logs (no logs)


Project: http://git-wip-us.apache.org/repos/asf/storm/repo
Commit: http://git-wip-us.apache.org/repos/asf/storm/commit/ee5265d8
Tree: http://git-wip-us.apache.org/repos/asf/storm/tree/ee5265d8
Diff: http://git-wip-us.apache.org/repos/asf/storm/diff/ee5265d8

Branch: refs/heads/master
Commit: ee5265d807f3cebeb1e1f9e44de077f1b2b3c66f
Parents: 9815986
Author: Kyle Nusbaum <Ky...@gmail.com>
Authored: Tue Nov 17 17:09:02 2015 -0600
Committer: Kyle Nusbaum <Ky...@gmail.com>
Committed: Tue Nov 17 17:09:02 2015 -0600

----------------------------------------------------------------------
 .../src/clj/org/apache/storm/pacemaker/pacemaker.clj  | 14 +++++++-------
 1 file changed, 7 insertions(+), 7 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/storm/blob/ee5265d8/storm-core/src/clj/org/apache/storm/pacemaker/pacemaker.clj
----------------------------------------------------------------------
diff --git a/storm-core/src/clj/org/apache/storm/pacemaker/pacemaker.clj b/storm-core/src/clj/org/apache/storm/pacemaker/pacemaker.clj
index 3770229..cd48eb4 100644
--- a/storm-core/src/clj/org/apache/storm/pacemaker/pacemaker.clj
+++ b/storm-core/src/clj/org/apache/storm/pacemaker/pacemaker.clj
@@ -30,7 +30,7 @@
 
 ;; Stats Functions
 
-(def sleep-seconds 5)
+(def sleep-seconds 60)
 
 
 (defn- check-and-set-loop [stats key new & {:keys [compare new-fn]
@@ -71,12 +71,12 @@
             largest (.getAndSet (:largest-heartbeat-size stats) 0)
             average (.getAndSet (:average-heartbeat-size stats) 0)
             total-keys (.size heartbeats)]
-        (log-message "\nReceived " send-count " heartbeats totaling " received-size " bytes,\n"
-                     "Sent " get-count " heartbeats totaling " sent-size " bytes,\n"
-                     "The largest heartbeat was " largest " bytes,\n"
-                     "The average heartbeat was " average " bytes,\n"
-                     "Pacemaker contained " total-keys " total keys\n"
-                     "in the last " sleep-seconds " second(s)")
+        (log-debug "\nReceived " send-count " heartbeats totaling " received-size " bytes,\n"
+                   "Sent " get-count " heartbeats totaling " sent-size " bytes,\n"
+                   "The largest heartbeat was " largest " bytes,\n"
+                   "The average heartbeat was " average " bytes,\n"
+                   "Pacemaker contained " total-keys " total keys\n"
+                   "in the last " sleep-seconds " second(s)")
         (dosync (ref-set last-five-s
                          {:send-pulse-count send-count
                           :total-received-size received-size

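The loop being quieted here snapshots each counter with getAndSet and then sleeps for the reporting interval. For orientation, a minimal plain-Java sketch of the same report-and-reset pattern; the class and counter names are illustrative, not Storm's:

    import java.util.concurrent.atomic.AtomicLong;

    public class ReportLoop {
        static final long SLEEP_SECONDS = 60; // the new, less chatty interval
        static final AtomicLong sendCount = new AtomicLong();
        static final AtomicLong receivedBytes = new AtomicLong();

        public static void main(String[] args) throws InterruptedException {
            while (true) {
                // getAndSet(0) snapshots the window and resets it in one atomic step,
                // so increments arriving concurrently are never lost or double-counted.
                long sends = sendCount.getAndSet(0);
                long bytes = receivedBytes.getAndSet(0);
                System.out.println("Received " + sends + " heartbeats totaling " + bytes
                    + " bytes in the last " + SLEEP_SECONDS + " second(s)");
                Thread.sleep(SLEEP_SECONDS * 1000);
            }
        }
    }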

[06/37] storm git commit: PACEMAKER OPEN SOURCE!

Posted by kn...@apache.org.
PACEMAKER OPEN SOURCE!


Project: http://git-wip-us.apache.org/repos/asf/storm/repo
Commit: http://git-wip-us.apache.org/repos/asf/storm/commit/444ec05e
Tree: http://git-wip-us.apache.org/repos/asf/storm/tree/444ec05e
Diff: http://git-wip-us.apache.org/repos/asf/storm/diff/444ec05e

Branch: refs/heads/master
Commit: 444ec05e5a9f38f9a9472c54b39f1371c839683b
Parents: 0bba2ba
Author: Kyle Nusbaum <Ky...@gmail.com>
Authored: Fri Oct 30 17:21:27 2015 -0500
Committer: Kyle Nusbaum <Ky...@gmail.com>
Committed: Fri Oct 30 17:21:27 2015 -0500

----------------------------------------------------------------------
 bin/storm.py                                    |  36 +-
 conf/defaults.yaml                              |  11 +
 pom.xml                                         |   1 +
 storm-core/pom.xml                              |   7 +-
 storm-core/src/clj/backtype/storm/cluster.clj   | 271 +++-----
 .../cluster_state/zookeeper_state_factory.clj   | 152 +++++
 .../clj/backtype/storm/command/heartbeats.clj   |  52 ++
 storm-core/src/clj/backtype/storm/config.clj    |   5 +
 .../src/clj/backtype/storm/daemon/worker.clj    |   5 -
 storm-core/src/clj/backtype/storm/util.clj      |  16 +
 .../org/apache/storm/pacemaker/pacemaker.clj    | 248 +++++++
 .../storm/pacemaker/pacemaker_state_factory.clj | 124 ++++
 storm-core/src/genthrift.sh                     |   0
 storm-core/src/jvm/backtype/storm/Config.java   |  55 ++
 .../backtype/storm/cluster/ClusterState.java    |  45 ++
 .../storm/cluster/ClusterStateContext.java      |  41 ++
 .../storm/cluster/ClusterStateFactory.java      |  28 +
 .../storm/cluster/ClusterStateListener.java     |  22 +
 .../backtype/storm/cluster/ConnectionState.java |  24 +
 .../jvm/backtype/storm/cluster/DaemonType.java  |  27 +
 .../storm/generated/AlreadyAliveException.java  |   2 +-
 .../backtype/storm/generated/Assignment.java    |   2 +-
 .../storm/generated/AuthorizationException.java |   2 +-
 .../src/jvm/backtype/storm/generated/Bolt.java  |   2 +-
 .../storm/generated/BoltAggregateStats.java     |   2 +-
 .../jvm/backtype/storm/generated/BoltStats.java |   2 +-
 .../storm/generated/ClusterSummary.java         |   2 +-
 .../storm/generated/ClusterWorkerHeartbeat.java |   2 +-
 .../storm/generated/CommonAggregateStats.java   |   2 +-
 .../generated/ComponentAggregateStats.java      |   2 +-
 .../storm/generated/ComponentCommon.java        |   2 +-
 .../storm/generated/ComponentPageInfo.java      |   2 +-
 .../backtype/storm/generated/Credentials.java   |   2 +-
 .../storm/generated/DRPCExecutionException.java |   2 +-
 .../backtype/storm/generated/DRPCRequest.java   |   2 +-
 .../backtype/storm/generated/DebugOptions.java  |   2 +-
 .../storm/generated/DistributedRPC.java         |   2 +-
 .../generated/DistributedRPCInvocations.java    |   2 +-
 .../jvm/backtype/storm/generated/ErrorInfo.java |   2 +-
 .../storm/generated/ExecutorAggregateStats.java |   2 +-
 .../backtype/storm/generated/ExecutorInfo.java  |   2 +-
 .../backtype/storm/generated/ExecutorStats.java |   2 +-
 .../storm/generated/ExecutorSummary.java        |   2 +-
 .../storm/generated/GetInfoOptions.java         |   2 +-
 .../storm/generated/GlobalStreamId.java         |   2 +-
 .../generated/HBAuthorizationException.java     | 406 ++++++++++++
 .../storm/generated/HBExecutionException.java   | 406 ++++++++++++
 .../jvm/backtype/storm/generated/HBMessage.java | 636 ++++++++++++++++++
 .../backtype/storm/generated/HBMessageData.java | 640 ++++++++++++++++++
 .../jvm/backtype/storm/generated/HBNodes.java   | 461 +++++++++++++
 .../jvm/backtype/storm/generated/HBPulse.java   | 522 +++++++++++++++
 .../jvm/backtype/storm/generated/HBRecords.java | 466 +++++++++++++
 .../storm/generated/HBServerMessageType.java    | 113 ++++
 .../generated/InvalidTopologyException.java     |   2 +-
 .../backtype/storm/generated/JavaObject.java    |   2 +-
 .../backtype/storm/generated/KillOptions.java   |   2 +-
 .../storm/generated/LSApprovedWorkers.java      |   2 +-
 .../generated/LSSupervisorAssignments.java      |   2 +-
 .../storm/generated/LSSupervisorId.java         |   2 +-
 .../storm/generated/LSWorkerHeartbeat.java      |   2 +-
 .../storm/generated/LocalAssignment.java        |   2 +-
 .../storm/generated/LocalStateData.java         |   2 +-
 .../jvm/backtype/storm/generated/LogConfig.java |   2 +-
 .../jvm/backtype/storm/generated/LogLevel.java  |   2 +-
 .../jvm/backtype/storm/generated/Nimbus.java    |   2 +-
 .../backtype/storm/generated/NimbusSummary.java |   2 +-
 .../jvm/backtype/storm/generated/NodeInfo.java  |   2 +-
 .../storm/generated/NotAliveException.java      |   2 +-
 .../backtype/storm/generated/NullStruct.java    |   2 +-
 .../storm/generated/RebalanceOptions.java       |   2 +-
 .../storm/generated/ShellComponent.java         |   2 +-
 .../storm/generated/SpoutAggregateStats.java    |   2 +-
 .../jvm/backtype/storm/generated/SpoutSpec.java |   2 +-
 .../backtype/storm/generated/SpoutStats.java    |   2 +-
 .../storm/generated/StateSpoutSpec.java         |   2 +-
 .../jvm/backtype/storm/generated/StormBase.java |   2 +-
 .../backtype/storm/generated/StormTopology.java |   2 +-
 .../backtype/storm/generated/StreamInfo.java    |   2 +-
 .../backtype/storm/generated/SubmitOptions.java |   2 +-
 .../storm/generated/SupervisorInfo.java         |   2 +-
 .../storm/generated/SupervisorSummary.java      |   2 +-
 .../storm/generated/ThriftSerializedObject.java |   2 +-
 .../backtype/storm/generated/TopologyInfo.java  |   2 +-
 .../storm/generated/TopologyPageInfo.java       |   2 +-
 .../backtype/storm/generated/TopologyStats.java |   2 +-
 .../storm/generated/TopologySummary.java        |   2 +-
 .../storm/generated/WorkerResources.java        |   2 +-
 .../backtype/storm/messaging/netty/Client.java  |  91 ++-
 .../backtype/storm/messaging/netty/Context.java |   6 +-
 .../storm/messaging/netty/ControlMessage.java   |  15 +-
 .../messaging/netty/INettySerializable.java     |  26 +
 .../storm/messaging/netty/ISaslClient.java      |  28 +
 .../storm/messaging/netty/ISaslServer.java      |  26 +
 .../backtype/storm/messaging/netty/IServer.java |  26 +
 .../netty/KerberosSaslClientHandler.java        | 154 +++++
 .../netty/KerberosSaslNettyClient.java          | 212 ++++++
 .../netty/KerberosSaslNettyClientState.java     |  31 +
 .../netty/KerberosSaslNettyServer.java          | 223 +++++++
 .../netty/KerberosSaslNettyServerState.java     |  30 +
 .../netty/KerberosSaslServerHandler.java        | 132 ++++
 .../storm/messaging/netty/MessageDecoder.java   |   4 +-
 .../netty/NettyRenameThreadFactory.java         |  10 +-
 .../netty/NettyUncaughtExceptionHandler.java    |  35 +
 .../storm/messaging/netty/SaslMessageToken.java |  27 +-
 .../storm/messaging/netty/SaslNettyClient.java  |  22 +-
 .../storm/messaging/netty/SaslNettyServer.java  | 249 ++++---
 .../messaging/netty/SaslNettyServerState.java   |  13 +-
 .../messaging/netty/SaslStormClientHandler.java |  37 +-
 .../netty/SaslStormServerAuthorizeHandler.java  |   2 +-
 .../messaging/netty/SaslStormServerHandler.java |  19 +-
 .../storm/messaging/netty/SaslUtils.java        |   1 +
 .../backtype/storm/messaging/netty/Server.java  | 120 ++--
 .../messaging/netty/StormClientHandler.java     |  35 +-
 .../netty/StormClientPipelineFactory.java       |  11 +-
 .../messaging/netty/StormServerHandler.java     |  24 +-
 .../backtype/storm/security/auth/AuthUtils.java |  59 ++
 .../src/jvm/backtype/storm/utils/Utils.java     |  51 ++
 .../storm/pacemaker/IServerMessageHandler.java  |  25 +
 .../apache/storm/pacemaker/PacemakerClient.java | 252 +++++++
 .../storm/pacemaker/PacemakerClientHandler.java |  75 +++
 .../apache/storm/pacemaker/PacemakerServer.java | 158 +++++
 .../storm/pacemaker/codec/ThriftDecoder.java    |  77 +++
 .../storm/pacemaker/codec/ThriftEncoder.java    | 108 +++
 .../pacemaker/codec/ThriftNettyClientCodec.java |  94 +++
 .../pacemaker/codec/ThriftNettyServerCodec.java |  97 +++
 .../jvm/storm/trident/util/TridentUtils.java    |  33 +-
 storm-core/src/py/storm/ttypes.py               | 658 +++++++++++++++++++
 storm-core/src/storm.thrift                     |  59 ++
 .../test/clj/backtype/storm/cluster_test.clj    |   7 +-
 .../storm/messaging/netty_unit_test.clj         |  15 +-
 .../storm/pacemaker_state_factory_test.clj      | 135 ++++
 .../clj/org/apache/storm/pacemaker_test.clj     | 227 +++++++
 132 files changed, 8093 insertions(+), 584 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/storm/blob/444ec05e/bin/storm.py
----------------------------------------------------------------------
diff --git a/bin/storm.py b/bin/storm.py
index 1b9617e..0f32998 100755
--- a/bin/storm.py
+++ b/bin/storm.py
@@ -263,6 +263,17 @@ def upload_credentials(*args):
         jvmtype="-client",
         extrajars=[USER_CONF_DIR, STORM_BIN_DIR])
 
+def heartbeats(*args):
+    """Syntax: [storm heartbeats [cmd]]
+
+    list [KEY...] - lists heartbeat nodes under KEY currently stored in Pacemaker.
+    """
+    exec_storm_class(
+        "backtype.storm.command.heartbeats",
+        args=args,
+        jvmtype="-client",
+        extrajars=[USER_CONF_DIR, STORM_BIN_DIR])
+    
 def activate(*args):
     """Syntax: [storm activate topology-name]
 
@@ -446,6 +457,27 @@ def nimbus(klass="backtype.storm.daemon.nimbus"):
         extrajars=cppaths,
         jvmopts=jvmopts)
 
+def pacemaker(klass="org.apache.storm.pacemaker.pacemaker"):
+    """Syntax: [storm pacemaker]
+
+    Launches the Pacemaker daemon. This command should be run under
+    supervision with a tool like daemontools or monit.
+
+    See Setting up a Storm cluster for more information.
+    (http://storm.apache.org/documentation/Setting-up-a-Storm-cluster)
+    """
+    cppaths = [CLUSTER_CONF_DIR]
+    jvmopts = parse_args(confvalue("pacemaker.childopts", cppaths)) + [
+        "-Dlogfile.name=pacemaker.log",
+        "-Dlog4j.configurationFile=" + os.path.join(get_log4j2_conf_dir(), "cluster.xml"),
+    ]
+    exec_storm_class(
+        klass,
+        jvmtype="-server",
+        daemonName="pacemaker",
+        extrajars=cppaths,
+        jvmopts=jvmopts)
+
 def supervisor(klass="backtype.storm.daemon.supervisor"):
     """Syntax: [storm supervisor]
 
@@ -609,8 +641,8 @@ COMMANDS = {"jar": jar, "kill": kill, "shell": shell, "nimbus": nimbus, "ui": ui
             "remoteconfvalue": print_remoteconfvalue, "repl": repl, "classpath": print_classpath,
             "activate": activate, "deactivate": deactivate, "rebalance": rebalance, "help": print_usage,
             "list": listtopos, "dev-zookeeper": dev_zookeeper, "version": version, "monitor": monitor,
-            "upload-credentials": upload_credentials, "get-errors": get_errors, "set_log_level": set_log_level,
-            "kill_workers": kill_workers }
+            "upload-credentials": upload_credentials, "pacemaker": pacemaker, "heartbeats": heartbeats,
+            "get-errors": get_errors, "set_log_level": set_log_level, "kill_workers": kill_workers }
 
 def parse_config(config_list):
     global CONFIG_OPTS

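Taken together, the two new COMMANDS entries make the following invocations available; the key given to "heartbeats list" is a hypothetical example:

    # launch the Pacemaker daemon (run it under supervision, e.g. daemontools or monit)
    storm pacemaker

    # list heartbeat nodes currently stored in Pacemaker under a key
    storm heartbeats list /pacemaker
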
http://git-wip-us.apache.org/repos/asf/storm/blob/444ec05e/conf/defaults.yaml
----------------------------------------------------------------------
diff --git a/conf/defaults.yaml b/conf/defaults.yaml
index 9150ca4..5b97f49 100644
--- a/conf/defaults.yaml
+++ b/conf/defaults.yaml
@@ -49,6 +49,8 @@ storm.auth.simple-white-list.users: []
 storm.auth.simple-acl.users: []
 storm.auth.simple-acl.users.commands: []
 storm.auth.simple-acl.admins: []
+#storm.cluster.state.store: "org.apache.storm.pacemaker.pacemaker_state_factory"
+storm.cluster.state.store: "backtype.storm.cluster_state.zookeeper_state_factory"
 storm.meta.serialization.delegate: "backtype.storm.serialization.GzipThriftSerializationDelegate"
 storm.codedistributor.class: "backtype.storm.codedistributor.LocalFileSystemCodeDistributor"
 
@@ -232,3 +234,12 @@ topology.component.cpu.pcore.percent: 10.0
 topology.worker.max.heap.size.mb: 768.0
 
 dev.zookeeper.path: "/tmp/dev-storm-zookeeper"
+
+pacemaker.host: "localhost"
+pacemaker.port: 6699
+pacemaker.base.threads: 10
+pacemaker.max.threads: 50
+pacemaker.thread.timeout: 10
+pacemaker.childopts: "-Xmx1024m"
+pacemaker.auth.method: "NONE"
+pacemaker.kerberos.users: []

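As the commented-out alternative above suggests, opting a cluster into Pacemaker amounts to overriding the state-store default in storm.yaml, roughly as follows (the hostname is a placeholder; the port mirrors the default added here):

    storm.cluster.state.store: "org.apache.storm.pacemaker.pacemaker_state_factory"
    pacemaker.host: "pacemaker.example.com"
    pacemaker.port: 6699
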
http://git-wip-us.apache.org/repos/asf/storm/blob/444ec05e/pom.xml
----------------------------------------------------------------------
diff --git a/pom.xml b/pom.xml
index 008a988..c137f0f 100644
--- a/pom.xml
+++ b/pom.xml
@@ -172,6 +172,7 @@
 
         <!-- dependency versions -->
         <clojure.version>1.7.0</clojure.version>
+        <java_jmx.version>0.3.1</java_jmx.version>
         <compojure.version>1.1.3</compojure.version>
         <hiccup.version>0.3.6</hiccup.version>
         <commons-io.version>2.4</commons-io.version>

http://git-wip-us.apache.org/repos/asf/storm/blob/444ec05e/storm-core/pom.xml
----------------------------------------------------------------------
diff --git a/storm-core/pom.xml b/storm-core/pom.xml
index 87a5804..4876c45 100644
--- a/storm-core/pom.xml
+++ b/storm-core/pom.xml
@@ -126,7 +126,12 @@
             <artifactId>data.codec</artifactId>
             <scope>test</scope>
         </dependency>
-
+        <dependency>
+          <groupId>org.clojure</groupId>
+          <artifactId>java.jmx</artifactId>
+          <version>${java_jmx.version}</version>
+        </dependency>
+        
         <!--java-->
         <dependency>
             <groupId>commons-io</groupId>

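The org.clojure/java.jmx dependency added here is used by pacemaker.clj (new in this commit) to publish the last reporting window's stats over JMX. For orientation, a rough plain-Java equivalent of registering such a stats bean; the interface, class, and sample value are illustrative, not Storm's:

    import java.lang.management.ManagementFactory;
    import javax.management.MBeanServer;
    import javax.management.ObjectName;

    public class JmxExample {
        // Standard MBean convention: the management interface is named <Class>MBean.
        public interface StatsMBean {
            long getSendPulseCount();
        }

        public static class Stats implements StatsMBean {
            public long getSendPulseCount() { return 42; } // illustrative value
        }

        public static void main(String[] args) throws Exception {
            MBeanServer server = ManagementFactory.getPlatformMBeanServer();
            server.registerMBean(new Stats(),
                new ObjectName("org.apache.storm.pacemaker.pacemaker:stats=Stats_Last_5_Seconds"));
            Thread.sleep(Long.MAX_VALUE); // keep the JVM alive so jconsole can attach
        }
    }
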
http://git-wip-us.apache.org/repos/asf/storm/blob/444ec05e/storm-core/src/clj/backtype/storm/cluster.clj
----------------------------------------------------------------------
diff --git a/storm-core/src/clj/backtype/storm/cluster.clj b/storm-core/src/clj/backtype/storm/cluster.clj
index 5c10a1c..b1eb0d0 100644
--- a/storm-core/src/clj/backtype/storm/cluster.clj
+++ b/storm-core/src/clj/backtype/storm/cluster.clj
@@ -20,9 +20,9 @@
             LogConfig]
            [java.io Serializable])
   (:import [org.apache.zookeeper KeeperException KeeperException$NoNodeException ZooDefs ZooDefs$Ids ZooDefs$Perms])
-  (:import [org.apache.curator.framework.state ConnectionStateListener ConnectionState])
   (:import [org.apache.curator.framework CuratorFramework])
   (:import [backtype.storm.utils Utils])
+  (:import [backtype.storm.cluster ClusterState ClusterStateContext ClusterStateListener ConnectionState])
   (:import [java.security MessageDigest])
   (:import [org.apache.zookeeper.server.auth DigestAuthenticationProvider])
   (:import [backtype.storm.nimbus NimbusInfo])
@@ -30,24 +30,6 @@
   (:require [backtype.storm [zookeeper :as zk]])
   (:require [backtype.storm.daemon [common :as common]]))
 
-(defprotocol ClusterState
-  (set-ephemeral-node [this path data acls])
-  (delete-node [this path])
-  (create-sequential [this path data acls])
-  ;; if node does not exist, create persistent with this data
-  (set-data [this path data acls])
-  (get-data [this path watch?])
-  (get-version [this path watch?])
-  (get-data-with-version [this path watch?])
-  (get-children [this path watch?])
-  (mkdirs [this path acls])
-  (exists-node? [this path watch?])
-  (close [this])
-  (register [this callback])
-  (unregister [this id])
-  (add-listener [this listener])
-  (sync-path [this path]))
-
 (defn mk-topo-only-acls
   [topo-conf]
   (let [payload (.get topo-conf STORM-ZOOKEEPER-TOPOLOGY-AUTH-PAYLOAD)]
@@ -56,117 +38,14 @@
        (ACL. ZooDefs$Perms/READ (Id. "digest" (DigestAuthenticationProvider/generateDigest payload)))])))
  
 (defnk mk-distributed-cluster-state
-  [conf :auth-conf nil :acls nil :separate-zk-writer? false]
-  (let [zk (zk/mk-client conf (conf STORM-ZOOKEEPER-SERVERS) (conf STORM-ZOOKEEPER-PORT) :auth-conf auth-conf)]
-    (zk/mkdirs zk (conf STORM-ZOOKEEPER-ROOT) acls)
-    (.close zk))
-  (let [callbacks (atom {})
-        active (atom true)
-        zk-writer (zk/mk-client conf
-                         (conf STORM-ZOOKEEPER-SERVERS)
-                         (conf STORM-ZOOKEEPER-PORT)
-                         :auth-conf auth-conf
-                         :root (conf STORM-ZOOKEEPER-ROOT)
-                         :watcher (fn [state type path]
-                                    (when @active
-                                      (when-not (= :connected state)
-                                        (log-warn "Received event " state ":" type ":" path " with disconnected Writer Zookeeper."))
-                                      (when-not (= :none type)
-                                        (doseq [callback (vals @callbacks)]
-                                          (callback type path))))))
-        zk-reader (if separate-zk-writer?
-                    (zk/mk-client conf
-                         (conf STORM-ZOOKEEPER-SERVERS)
-                         (conf STORM-ZOOKEEPER-PORT)
-                         :auth-conf auth-conf
-                         :root (conf STORM-ZOOKEEPER-ROOT)
-                         :watcher (fn [state type path]
-                                    (when @active
-                                      (when-not (= :connected state)
-                                        (log-warn "Received event " state ":" type ":" path " with disconnected Reader Zookeeper."))
-                                      (when-not (= :none type)
-                                        (doseq [callback (vals @callbacks)]
-                                          (callback type path))))))
-                    zk-writer)]
-    (reify
-     ClusterState
-
-     (register
-       [this callback]
-       (let [id (uuid)]
-         (swap! callbacks assoc id callback)
-         id))
-
-     (unregister
-       [this id]
-       (swap! callbacks dissoc id))
-
-     (set-ephemeral-node
-       [this path data acls]
-       (zk/mkdirs zk-writer (parent-path path) acls)
-       (if (zk/exists zk-writer path false)
-         (try-cause
-           (zk/set-data zk-writer path data) ; should verify that it's ephemeral
-           (catch KeeperException$NoNodeException e
-             (log-warn-error e "Ephemeral node disappeared between checking for existing and setting data")
-             (zk/create-node zk-writer path data :ephemeral acls)))
-         (zk/create-node zk-writer path data :ephemeral acls)))
-
-     (create-sequential
-       [this path data acls]
-       (zk/create-node zk-writer path data :sequential acls))
-
-     (set-data
-       [this path data acls]
-       ;; note: this does not turn off any existing watches
-       (if (zk/exists zk-writer path false)
-         (zk/set-data zk-writer path data)
-         (do
-           (zk/mkdirs zk-writer (parent-path path) acls)
-           (zk/create-node zk-writer path data :persistent acls))))
-
-     (delete-node
-       [this path]
-       (zk/delete-node zk-writer path))
-
-     (get-data
-       [this path watch?]
-       (zk/get-data zk-reader path watch?))
-
-     (get-data-with-version
-       [this path watch?]
-       (zk/get-data-with-version zk-reader path watch?))
-
-     (get-version 
-       [this path watch?]
-       (zk/get-version zk-reader path watch?))
-
-     (get-children
-       [this path watch?]
-       (zk/get-children zk-reader path watch?))
-
-     (mkdirs
-       [this path acls]
-       (zk/mkdirs zk-writer path acls))
-
-     (exists-node?
-       [this path watch?]
-       (zk/exists-node? zk-reader path watch?))
-
-     (close
-       [this]
-       (reset! active false)
-       (.close zk-writer)
-       (if separate-zk-writer? (.close zk-reader)))
-
-      (add-listener
-        [this listener]
-        (zk/add-listener zk-reader listener))
-
-      (sync-path
-        [this path]
-        (zk/sync-path zk-writer path))
-      )))
+  [conf :auth-conf nil :acls nil :context (ClusterStateContext.)]
+  (let [clazz (Class/forName (or (conf STORM-CLUSTER-STATE-STORE)
+                                 "backtype.storm.cluster_state.zookeeper_state_factory"))
+        state-instance (.newInstance clazz)]
+    (log-debug "Creating cluster state: " (.toString clazz))
+    (or (.mkState state-instance conf auth-conf acls context)
+        nil)))
+  
 
 (defprotocol StormClusterState
   (assignments [this callback])
@@ -342,10 +221,10 @@
 
 ;; Watches should be used for optimization. When ZK is reconnecting, they're not guaranteed to be called.
 (defnk mk-storm-cluster-state
-  [cluster-state-spec :acls nil :separate-zk-writer? false]
-  (let [[solo? cluster-state] (if (satisfies? ClusterState cluster-state-spec)
+  [cluster-state-spec :acls nil :context (ClusterStateContext.)]
+  (let [[solo? cluster-state] (if (instance? ClusterState cluster-state-spec)
                                 [false cluster-state-spec]
-                                [true (mk-distributed-cluster-state cluster-state-spec :auth-conf cluster-state-spec :acls acls :separate-zk-writer? separate-zk-writer?)])
+                                [true (mk-distributed-cluster-state cluster-state-spec :auth-conf cluster-state-spec :acls acls :context context)])
         assignment-info-callback (atom {})
         assignment-info-with-version-callback (atom {})
         assignment-version-callback (atom {})
@@ -356,7 +235,7 @@
         code-distributor-callback (atom nil)
         credentials-callback (atom {})
         log-config-callback (atom {})
-        state-id (register
+        state-id (.register
                   cluster-state
                   (fn [type path]
                     (let [[subtree & args] (tokenize-path path)]
@@ -377,7 +256,7 @@
                          (exit-process! 30 "Unknown callback for subtree " subtree args)))))]
     (doseq [p [ASSIGNMENTS-SUBTREE STORMS-SUBTREE SUPERVISORS-SUBTREE WORKERBEATS-SUBTREE ERRORS-SUBTREE CODE-DISTRIBUTOR-SUBTREE NIMBUSES-SUBTREE
                LOGCONFIG-SUBTREE]]
-      (mkdirs cluster-state p acls))
+      (.mkdirs cluster-state p acls))
     (reify
       StormClusterState
 
@@ -385,20 +264,20 @@
         [this callback]
         (when callback
           (reset! assignments-callback callback))
-        (get-children cluster-state ASSIGNMENTS-SUBTREE (not-nil? callback)))
+        (.get_children cluster-state ASSIGNMENTS-SUBTREE (not-nil? callback)))
 
       (assignment-info
         [this storm-id callback]
         (when callback
           (swap! assignment-info-callback assoc storm-id callback))
-        (clojurify-assignment (maybe-deserialize (get-data cluster-state (assignment-path storm-id) (not-nil? callback)) Assignment)))
+        (clojurify-assignment (maybe-deserialize (.get_data cluster-state (assignment-path storm-id) (not-nil? callback)) Assignment)))
 
       (assignment-info-with-version 
         [this storm-id callback]
         (when callback
           (swap! assignment-info-with-version-callback assoc storm-id callback))
         (let [{data :data version :version} 
-              (get-data-with-version cluster-state (assignment-path storm-id) (not-nil? callback))]
+              (.get_data_with_version cluster-state (assignment-path storm-id) (not-nil? callback))]
         {:data (clojurify-assignment (maybe-deserialize data Assignment))
          :version version}))
 
@@ -406,59 +285,59 @@
         [this storm-id callback]
         (when callback
           (swap! assignment-version-callback assoc storm-id callback))
-        (get-version cluster-state (assignment-path storm-id) (not-nil? callback)))
+        (.get_version cluster-state (assignment-path storm-id) (not-nil? callback)))
 
       (code-distributor
         [this callback]
         (when callback
           (reset! code-distributor-callback callback))
         (do
-          (sync-path cluster-state CODE-DISTRIBUTOR-SUBTREE)
-          (get-children cluster-state CODE-DISTRIBUTOR-SUBTREE (not-nil? callback))))
+          (.sync_path cluster-state CODE-DISTRIBUTOR-SUBTREE)
+          (.get_children cluster-state CODE-DISTRIBUTOR-SUBTREE (not-nil? callback))))
 
       (nimbuses
         [this]
-        (map #(maybe-deserialize (get-data cluster-state (nimbus-path %1) false) NimbusSummary)
-          (get-children cluster-state NIMBUSES-SUBTREE false)))
+        (map #(maybe-deserialize (.get_data cluster-state (nimbus-path %1) false) NimbusSummary)
+          (.get_children cluster-state NIMBUSES-SUBTREE false)))
 
       (add-nimbus-host!
         [this nimbus-id nimbus-summary]
        ;explicit delete for ephemeral node to ensure this session creates the entry.
-        (delete-node cluster-state (nimbus-path nimbus-id))
+        (.delete_node cluster-state (nimbus-path nimbus-id))
 
-        (add-listener cluster-state (reify ConnectionStateListener
-                        (^void stateChanged[this ^CuratorFramework client ^ConnectionState newState]
+        (.add_listener cluster-state (reify ClusterStateListener
+                        (^void stateChanged[this ^ConnectionState newState]
                           (log-message "Connection state listener invoked, zookeeper connection state has changed to " newState)
                           (if (.equals newState ConnectionState/RECONNECTED)
                             (do
                               (log-message "Connection state has changed to reconnected so setting nimbuses entry one more time")
-                              (set-ephemeral-node cluster-state (nimbus-path nimbus-id) (Utils/serialize nimbus-summary) acls))))))
-
-        (set-ephemeral-node cluster-state (nimbus-path nimbus-id) (Utils/serialize nimbus-summary) acls))
+                              (.set_ephemeral_node cluster-state (nimbus-path nimbus-id) (Utils/serialize nimbus-summary) acls))))))
+        
+        (.set_ephemeral_node cluster-state (nimbus-path nimbus-id) (Utils/serialize nimbus-summary) acls))
 
       (code-distributor-info
         [this storm-id]
         (map (fn [nimbus-info] (NimbusInfo/parse nimbus-info))
           (let [path (code-distributor-path storm-id)]
             (do
-              (sync-path cluster-state path)
-              (get-children cluster-state path false)))))
+              (.sync_path cluster-state path)
+              (.get_children cluster-state path false)))))
 
       (active-storms
         [this]
-        (get-children cluster-state STORMS-SUBTREE false))
+        (.get_children cluster-state STORMS-SUBTREE false))
 
       (heartbeat-storms
         [this]
-        (get-children cluster-state WORKERBEATS-SUBTREE false))
+        (.get_worker_hb_children cluster-state WORKERBEATS-SUBTREE false))
 
       (error-topologies
         [this]
-        (get-children cluster-state ERRORS-SUBTREE false))
+        (.get_children cluster-state ERRORS-SUBTREE false))
 
       (get-worker-heartbeat
         [this storm-id node port]
-        (let [worker-hb (get-data cluster-state (workerbeat-path storm-id node port) false)]
+        (let [worker-hb (.get_worker_hb cluster-state (workerbeat-path storm-id node port) false)]
           (if worker-hb
             (-> worker-hb
               (maybe-deserialize ClusterWorkerHeartbeat)
@@ -481,11 +360,11 @@
         [this callback]
         (when callback
           (reset! supervisors-callback callback))
-        (get-children cluster-state SUPERVISORS-SUBTREE (not-nil? callback)))
+        (.get_children cluster-state SUPERVISORS-SUBTREE (not-nil? callback)))
 
       (supervisor-info
         [this supervisor-id]
-        (clojurify-supervisor-info (maybe-deserialize (get-data cluster-state (supervisor-path supervisor-id) false) SupervisorInfo)))
+        (clojurify-supervisor-info (maybe-deserialize (.get_data cluster-state (supervisor-path supervisor-id) false) SupervisorInfo)))
 
       (topology-log-config
         [this storm-id cb]
@@ -501,20 +380,20 @@
         [this storm-id node port info]
         (let [thrift-worker-hb (thriftify-zk-worker-hb info)]
           (if thrift-worker-hb
-            (set-data cluster-state (workerbeat-path storm-id node port) (Utils/serialize thrift-worker-hb) acls))))
+            (.set_worker_hb cluster-state (workerbeat-path storm-id node port) (Utils/serialize thrift-worker-hb) acls))))
 
       (remove-worker-heartbeat!
         [this storm-id node port]
-        (delete-node cluster-state (workerbeat-path storm-id node port)))
+        (.delete_worker_hb cluster-state (workerbeat-path storm-id node port)))
 
       (setup-heartbeats!
         [this storm-id]
-        (mkdirs cluster-state (workerbeat-storm-root storm-id) acls))
+        (.mkdirs cluster-state (workerbeat-storm-root storm-id) acls))
 
       (teardown-heartbeats!
         [this storm-id]
         (try-cause
-          (delete-node cluster-state (workerbeat-storm-root storm-id))
+          (.delete_worker_hb cluster-state (workerbeat-storm-root storm-id))
           (catch KeeperException e
             (log-warn-error e "Could not teardown heartbeats for " storm-id))))
 
@@ -523,12 +402,12 @@
         "if znode exists and to be not on?, delete; if exists and on?, do nothing;
         if not exists and to be on?, create; if not exists and not on?, do nothing"
         (let [path (backpressure-path storm-id node port)
-              existed (exists-node? cluster-state path false)]
+              existed (.node_exists cluster-state path false)]
           (if existed
             (if (not on?)
-              (delete-node cluster-state path))   ;; delete the znode since the worker is not congested
+              (.delete_node cluster-state path))   ;; delete the znode since the worker is not congested
             (if on?
-              (set-ephemeral-node cluster-state path nil acls))))) ;; create the znode since worker is congested
+              (.set_ephemeral_node cluster-state path nil acls))))) ;; create the znode since worker is congested
     
       (topology-backpressure
         [this storm-id callback]
@@ -536,33 +415,33 @@
         (when callback
           (swap! backpressure-callback assoc storm-id callback))
         (let [path (backpressure-storm-root storm-id)
-              children (get-children cluster-state path (not-nil? callback))]
+              children (.get_children cluster-state path (not-nil? callback))]
               (> (count children) 0)))
       
       (setup-backpressure!
         [this storm-id]
-        (mkdirs cluster-state (backpressure-storm-root storm-id) acls))
+        (.mkdirs cluster-state (backpressure-storm-root storm-id) acls))
 
       (remove-worker-backpressure!
         [this storm-id node port]
-        (delete-node cluster-state (backpressure-path storm-id node port)))
+        (.delete_node cluster-state (backpressure-path storm-id node port)))
 
       (teardown-topology-errors!
         [this storm-id]
         (try-cause
-          (delete-node cluster-state (error-storm-root storm-id))
+          (.delete_node cluster-state (error-storm-root storm-id))
           (catch KeeperException e
             (log-warn-error e "Could not teardown errors for " storm-id))))
 
       (supervisor-heartbeat!
         [this supervisor-id info]
         (let [thrift-supervisor-info (thriftify-supervisor-info info)]
-          (set-ephemeral-node cluster-state (supervisor-path supervisor-id) (Utils/serialize thrift-supervisor-info) acls)))
+          (.set_ephemeral_node cluster-state (supervisor-path supervisor-id) (Utils/serialize thrift-supervisor-info) acls)))
 
       (activate-storm!
         [this storm-id storm-base]
         (let [thrift-storm-base (thriftify-storm-base storm-base)]
-          (set-data cluster-state (storm-path storm-id) (Utils/serialize thrift-storm-base) acls)))
+          (.set_data cluster-state (storm-path storm-id) (Utils/serialize thrift-storm-base) acls)))
 
       (update-storm!
         [this storm-id new-elems]
@@ -571,7 +450,7 @@
               component->debug (:component->debug base)
               new-elems (update new-elems :component->executors (partial merge executors))
               new-elems (update new-elems :component->debug (partial merge-with merge component->debug))]
-          (set-data cluster-state (storm-path storm-id)
+          (.set_data cluster-state (storm-path storm-id)
                     (-> base
                         (merge new-elems)
                         thriftify-storm-base
@@ -582,31 +461,31 @@
         [this storm-id callback]
         (when callback
           (swap! storm-base-callback assoc storm-id callback))
-        (clojurify-storm-base (maybe-deserialize (get-data cluster-state (storm-path storm-id) (not-nil? callback)) StormBase)))
+        (clojurify-storm-base (maybe-deserialize (.get_data cluster-state (storm-path storm-id) (not-nil? callback)) StormBase)))
 
       (remove-storm-base!
         [this storm-id]
-        (delete-node cluster-state (storm-path storm-id)))
+        (.delete_node cluster-state (storm-path storm-id)))
 
       (set-assignment!
         [this storm-id info]
         (let [thrift-assignment (thriftify-assignment info)]
-          (set-data cluster-state (assignment-path storm-id) (Utils/serialize thrift-assignment) acls)))
+          (.set_data cluster-state (assignment-path storm-id) (Utils/serialize thrift-assignment) acls)))
 
       (setup-code-distributor!
         [this storm-id nimbusInfo]
         (let [path (str (code-distributor-path storm-id) "/" (.toHostPortString nimbusInfo))]
-        (mkdirs cluster-state (code-distributor-path storm-id) acls)
+        (.mkdirs cluster-state (code-distributor-path storm-id) acls)
         ;we delete the node first to ensure the node gets created as part of this session only.
-        (delete-node cluster-state path)
-        (set-ephemeral-node cluster-state path nil acls)))
+        (.delete_node cluster-state path)
+        (.set_ephemeral_node cluster-state path nil acls)))
 
       (remove-storm!
         [this storm-id]
-        (delete-node cluster-state (assignment-path storm-id))
-        (delete-node cluster-state (code-distributor-path storm-id))
-        (delete-node cluster-state (credentials-path storm-id))
-        (delete-node cluster-state (log-config-path storm-id))
+        (.delete_node cluster-state (assignment-path storm-id))
+        (.delete_node cluster-state (code-distributor-path storm-id))
+        (.delete_node cluster-state (credentials-path storm-id))
+        (.delete_node cluster-state (log-config-path storm-id))
         (remove-storm-base! this storm-id))
 
       (set-credentials!
@@ -614,37 +493,37 @@
          (let [topo-acls (mk-topo-only-acls topo-conf)
                path (credentials-path storm-id)
                thriftified-creds (thriftify-credentials creds)]
-           (set-data cluster-state path (Utils/serialize thriftified-creds) topo-acls)))
+           (.set_data cluster-state path (Utils/serialize thriftified-creds) topo-acls)))
 
       (credentials
         [this storm-id callback]
         (when callback
           (swap! credentials-callback assoc storm-id callback))
-        (clojurify-crdentials (maybe-deserialize (get-data cluster-state (credentials-path storm-id) (not-nil? callback)) Credentials)))
+        (clojurify-crdentials (maybe-deserialize (.get_data cluster-state (credentials-path storm-id) (not-nil? callback)) Credentials)))
 
       (report-error
          [this storm-id component-id node port error]
          (let [path (error-path storm-id component-id)
                last-error-path (last-error-path storm-id component-id)
                data (thriftify-error {:time-secs (current-time-secs) :error (stringify-error error) :host node :port port})
-               _ (mkdirs cluster-state path acls)
+               _ (.mkdirs cluster-state path acls)
                ser-data (Utils/serialize data)
-               _ (mkdirs cluster-state path acls)
-               _ (create-sequential cluster-state (str path "/e") ser-data acls)
-               _ (set-data cluster-state last-error-path ser-data acls)
-               to-kill (->> (get-children cluster-state path false)
+               _ (.mkdirs cluster-state path acls)
+               _ (.create_sequential cluster-state (str path "/e") ser-data acls)
+               _ (.set_data cluster-state last-error-path ser-data acls)
+               to-kill (->> (.get_children cluster-state path false)
                             (sort-by parse-error-path)
                             reverse
                             (drop 10))]
            (doseq [k to-kill]
-             (delete-node cluster-state (str path "/" k)))))
+             (.delete_node cluster-state (str path "/" k)))))
 
       (errors
          [this storm-id component-id]
          (let [path (error-path storm-id component-id)
-               errors (if (exists-node? cluster-state path false)
-                        (dofor [c (get-children cluster-state path false)]
-                          (if-let [data (-> (get-data cluster-state
+               errors (if (.node_exists cluster-state path false)
+                        (dofor [c (.get_children cluster-state path false)]
+                          (if-let [data (-> (.get_data cluster-state
                                                       (str path "/" c)
                                                       false)
                                           (maybe-deserialize ErrorInfo)
@@ -657,17 +536,17 @@
       (last-error
         [this storm-id component-id]
         (let [path (last-error-path storm-id component-id)]
-          (if (exists-node? cluster-state path false)
-            (if-let [data (-> (get-data cluster-state path false)
+          (if (.node_exists cluster-state path false)
+            (if-let [data (-> (.get_data cluster-state path false)
                               (maybe-deserialize ErrorInfo)
                               clojurify-error)]
               (map->TaskError data)))))
       
       (disconnect
          [this]
-        (unregister cluster-state state-id)
+        (.unregister cluster-state state-id)
         (when solo?
-          (close cluster-state))))))
+          (.close cluster-state))))))
 
 ;; daemons have a single thread that will respond to events
 ;; start with initialize event

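The net effect of the cluster.clj changes is that this namespace no longer talks to ZooKeeper directly: mk-distributed-cluster-state loads whatever class storm.cluster.state.store names and asks it for a ClusterState. A minimal Java sketch of that reflective factory pattern, with deliberately simplified interfaces rather than the exact Storm API:

    import java.util.HashMap;
    import java.util.Map;

    // Simplified stand-ins for ClusterState.java / ClusterStateFactory.java.
    interface ClusterState { void close(); }
    interface ClusterStateFactory { ClusterState mkState(Map<String, Object> conf); }

    class ZkFactory implements ClusterStateFactory {
        public ClusterState mkState(Map<String, Object> conf) {
            return new ClusterState() { public void close() {} };
        }
    }

    public class FactoryDemo {
        public static void main(String[] args) throws Exception {
            // Fall back to the ZooKeeper-backed factory when nothing is configured,
            // mirroring the (or (conf STORM-CLUSTER-STATE-STORE) "...") default above.
            String className = System.getProperty("state.store", "ZkFactory");
            ClusterStateFactory factory =
                (ClusterStateFactory) Class.forName(className).newInstance();
            ClusterState state = factory.mkState(new HashMap<String, Object>());
            state.close();
        }
    }
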
http://git-wip-us.apache.org/repos/asf/storm/blob/444ec05e/storm-core/src/clj/backtype/storm/cluster_state/zookeeper_state_factory.clj
----------------------------------------------------------------------
diff --git a/storm-core/src/clj/backtype/storm/cluster_state/zookeeper_state_factory.clj b/storm-core/src/clj/backtype/storm/cluster_state/zookeeper_state_factory.clj
new file mode 100644
index 0000000..3e656b2
--- /dev/null
+++ b/storm-core/src/clj/backtype/storm/cluster_state/zookeeper_state_factory.clj
@@ -0,0 +1,152 @@
+;; Licensed to the Apache Software Foundation (ASF) under one
+;; or more contributor license agreements.  See the NOTICE file
+;; distributed with this work for additional information
+;; regarding copyright ownership.  The ASF licenses this file
+;; to you under the Apache License, Version 2.0 (the
+;; "License"); you may not use this file except in compliance
+;; with the License.  You may obtain a copy of the License at
+;;
+;; http://www.apache.org/licenses/LICENSE-2.0
+;;
+;; Unless required by applicable law or agreed to in writing, software
+;; distributed under the License is distributed on an "AS IS" BASIS,
+;; WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+;; See the License for the specific language governing permissions and
+;; limitations under the License.
+
+(ns backtype.storm.cluster-state.zookeeper-state-factory
+  (:import [org.apache.zookeeper KeeperException KeeperException$NoNodeException ZooDefs ZooDefs$Ids ZooDefs$Perms]
+           [backtype.storm.cluster ClusterState ClusterStateContext DaemonType])
+  (:use [backtype.storm cluster config log util])
+  (:require [backtype.storm [zookeeper :as zk]])
+  (:gen-class
+   :implements [backtype.storm.cluster.ClusterStateFactory]))
+
+(defn -mkState [this conf auth-conf acls context]
+  (let [zk (zk/mk-client conf (conf STORM-ZOOKEEPER-SERVERS) (conf STORM-ZOOKEEPER-PORT) :auth-conf auth-conf)]
+    (zk/mkdirs zk (conf STORM-ZOOKEEPER-ROOT) acls)
+    (.close zk))
+  (let [callbacks (atom {})
+        active (atom true)
+        zk-writer (zk/mk-client conf
+                         (conf STORM-ZOOKEEPER-SERVERS)
+                         (conf STORM-ZOOKEEPER-PORT)
+                         :auth-conf auth-conf
+                         :root (conf STORM-ZOOKEEPER-ROOT)
+                         :watcher (fn [state type path]
+                                    (when @active
+                                      (when-not (= :connected state)
+                                        (log-warn "Received event " state ":" type ":" path " with disconnected Writer Zookeeper."))
+                                      (when-not (= :none type)
+                                        (doseq [callback (vals @callbacks)]
+                                          (callback type path))))))
+        is-nimbus? (= (.getDaemonType context) DaemonType/NIMBUS)
+        zk-reader (if is-nimbus?
+                    (zk/mk-client conf
+                         (conf STORM-ZOOKEEPER-SERVERS)
+                         (conf STORM-ZOOKEEPER-PORT)
+                         :auth-conf auth-conf
+                         :root (conf STORM-ZOOKEEPER-ROOT)
+                         :watcher (fn [state type path]
+                                    (when @active
+                                      (when-not (= :connected state)
+                                        (log-warn "Received event " state ":" type ":" path " with disconnected Reader Zookeeper."))
+                                      (when-not (= :none type)
+                                        (doseq [callback (vals @callbacks)]
+                                          (callback type path))))))
+                    zk-writer)]
+    (reify
+     ClusterState
+
+     (register
+       [this callback]
+       (let [id (uuid)]
+         (swap! callbacks assoc id callback)
+         id))
+
+     (unregister
+       [this id]
+       (swap! callbacks dissoc id))
+
+     (set-ephemeral-node
+       [this path data acls]
+       (zk/mkdirs zk-writer (parent-path path) acls)
+       (if (zk/exists zk-writer path false)
+         (try-cause
+           (zk/set-data zk-writer path data) ; should verify that it's ephemeral
+           (catch KeeperException$NoNodeException e
+             (log-warn-error e "Ephemeral node disappeared between checking for existing and setting data")
+             (zk/create-node zk-writer path data :ephemeral acls)))
+         (zk/create-node zk-writer path data :ephemeral acls)))
+
+     (create-sequential
+       [this path data acls]
+       (zk/create-node zk-writer path data :sequential acls))
+
+     (set-data
+       [this path data acls]
+       ;; note: this does not turn off any existing watches
+       (if (zk/exists zk-writer path false)
+         (zk/set-data zk-writer path data)
+         (do
+           (zk/mkdirs zk-writer (parent-path path) acls)
+           (zk/create-node zk-writer path data :persistent acls))))
+
+     (set-worker-hb
+       [this path data acls]
+       (.set_data this path data acls))
+     
+     (delete-node
+       [this path]
+       (zk/delete-node zk-writer path))
+
+     (delete-worker-hb
+       [this path]
+       (.delete_node this path))
+     
+     (get-data
+       [this path watch?]
+       (zk/get-data zk-reader path watch?))
+
+     (get-data-with-version
+       [this path watch?]
+       (zk/get-data-with-version zk-reader path watch?))
+
+     (get-version 
+       [this path watch?]
+       (zk/get-version zk-reader path watch?))
+
+     (get-worker-hb
+       [this path watch?]
+       (.get_data this path watch?))
+     
+     (get-children
+       [this path watch?]
+       (zk/get-children zk-reader path watch?))
+
+     (get-worker-hb-children
+       [this path watch?]
+       (.get_children this path watch?))
+     
+     (mkdirs
+       [this path acls]
+       (zk/mkdirs zk-writer path acls))
+
+     (node-exists
+       [this path watch?]
+       (zk/exists-node? zk-reader path watch?))
+
+     (add-listener
+        [this listener]
+        (zk/add-listener zk-reader listener))
+
+      (sync-path
+        [this path]
+        (zk/sync-path zk-writer path))
+     
+     (close
+       [this]
+       (reset! active false)
+       (.close zk-writer)
+       (if is-nimbus?
+         (.close zk-reader))))))

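Note how the worker-heartbeat methods above (set-worker-hb, get-worker-hb, and friends) simply delegate to the generic data methods: that delegation point is what lets a Pacemaker-backed factory reroute heartbeats while leaving all other cluster state on ZooKeeper. A condensed Java illustration of the idea; default methods are used for brevity here, the actual ClusterState.java interface is plain:

    public interface ClusterState {
        void setData(String path, byte[] data);
        byte[] getData(String path);

        // The ZooKeeper factory leaves these defaults in place; a Pacemaker-backed
        // implementation can override just these two to route heartbeats elsewhere.
        default void setWorkerHb(String path, byte[] data) { setData(path, data); }
        default byte[] getWorkerHb(String path) { return getData(path); }
    }
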
http://git-wip-us.apache.org/repos/asf/storm/blob/444ec05e/storm-core/src/clj/backtype/storm/command/heartbeats.clj
----------------------------------------------------------------------
diff --git a/storm-core/src/clj/backtype/storm/command/heartbeats.clj b/storm-core/src/clj/backtype/storm/command/heartbeats.clj
new file mode 100644
index 0000000..99790aa
--- /dev/null
+++ b/storm-core/src/clj/backtype/storm/command/heartbeats.clj
@@ -0,0 +1,52 @@
+;; Licensed to the Apache Software Foundation (ASF) under one
+;; or more contributor license agreements.  See the NOTICE file
+;; distributed with this work for additional information
+;; regarding copyright ownership.  The ASF licenses this file
+;; to you under the Apache License, Version 2.0 (the
+;; "License"); you may not use this file except in compliance
+;; with the License.  You may obtain a copy of the License at
+;;
+;; http://www.apache.org/licenses/LICENSE-2.0
+;;
+;; Unless required by applicable law or agreed to in writing, software
+;; distributed under the License is distributed on an "AS IS" BASIS,
+;; WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+;; See the License for the specific language governing permissions and
+;; limitations under the License.
+(ns backtype.storm.command.heartbeats
+  (:require [backtype.storm
+             [config :refer :all]
+             [log :refer :all]
+             [cluster :refer :all]
+             [converter :refer :all]]
+        [clojure.string :refer :all])
+  (:import [backtype.storm.generated ClusterWorkerHeartbeat]
+           [backtype.storm.utils Utils])
+  (:gen-class))
+
+(defn -main [command path & args]
+  (let [conf (read-storm-config)
+        cluster (mk-distributed-cluster-state conf :auth-conf conf)]
+    (println "Command: [" command "]")
+    (condp = command
+      "list"
+      (let [message (join " \n" (.get_worker_hb_children cluster path false))]
+        (log-message "list " path ":\n"
+                     message "\n"))
+      "get"
+      (log-message 
+       (if-let [hb (.get_worker_hb cluster path false)]
+         (clojurify-zk-worker-hb
+          (Utils/deserialize
+           hb
+           ClusterWorkerHeartbeat))
+         "Nothing"))
+      
+      (log-message "Usage: heartbeats [list|get] path"))
+    
+    (try
+      (.close cluster)
+      (catch Exception e
+        (log-message "Caught exception: " e " on close."))))
+  (System/exit 0))
+         

http://git-wip-us.apache.org/repos/asf/storm/blob/444ec05e/storm-core/src/clj/backtype/storm/config.clj
----------------------------------------------------------------------
diff --git a/storm-core/src/clj/backtype/storm/config.clj b/storm-core/src/clj/backtype/storm/config.clj
index f06f6e9..89b4a30 100644
--- a/storm-core/src/clj/backtype/storm/config.clj
+++ b/storm-core/src/clj/backtype/storm/config.clj
@@ -264,3 +264,8 @@
 (defn ^LocalState worker-state
   [conf id]
   (LocalState. (worker-heartbeats-root conf id)))
+
+(defn override-login-config-with-system-property [conf]
+  (if-let [login_conf_file (System/getProperty "java.security.auth.login.config")]
+    (assoc conf "java.security.auth.login.config" login_conf_file)
+    conf))

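The relocated helper simply prefers the JVM's java.security.auth.login.config system property over whatever the stored config contains. The same logic in Java, for reference (the map handling is illustrative):

    import java.util.HashMap;
    import java.util.Map;

    public class LoginOverride {
        static Map<String, Object> override(Map<String, Object> conf) {
            String loginConf = System.getProperty("java.security.auth.login.config");
            if (loginConf != null) {
                conf = new HashMap<>(conf); // copy, like Clojure's assoc
                conf.put("java.security.auth.login.config", loginConf);
            }
            return conf;
        }
    }
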
http://git-wip-us.apache.org/repos/asf/storm/blob/444ec05e/storm-core/src/clj/backtype/storm/daemon/worker.clj
----------------------------------------------------------------------
diff --git a/storm-core/src/clj/backtype/storm/daemon/worker.clj b/storm-core/src/clj/backtype/storm/daemon/worker.clj
index 355c2f6..ec6622a 100644
--- a/storm-core/src/clj/backtype/storm/daemon/worker.clj
+++ b/storm-core/src/clj/backtype/storm/daemon/worker.clj
@@ -438,11 +438,6 @@
     (.shutdownNow (get dr WorkerTopologyContext/SHARED_EXECUTOR))
     (log-message "Shut down default resources")))
 
-(defn- override-login-config-with-system-property [conf]
-  (if-let [login_conf_file (System/getProperty "java.security.auth.login.config")]
-    (assoc conf "java.security.auth.login.config" login_conf_file)
-    conf))
-
 (defn- get-logger-levels []
   (into {}
     (let [logger-config (.getConfiguration (LogManager/getContext false))]

http://git-wip-us.apache.org/repos/asf/storm/blob/444ec05e/storm-core/src/clj/backtype/storm/util.clj
----------------------------------------------------------------------
diff --git a/storm-core/src/clj/backtype/storm/util.clj b/storm-core/src/clj/backtype/storm/util.clj
index 9ec8cd3..0f2293e 100644
--- a/storm-core/src/clj/backtype/storm/util.clj
+++ b/storm-core/src/clj/backtype/storm/util.clj
@@ -1054,6 +1054,22 @@
 (defn hashmap-to-persistent [^HashMap m]
   (zipmap (.keySet m) (.values m)))
 
+(defn retry-on-exception
+  "Retries specific function on exception based on retries count"
+  [tries task-description f & args]
+  (let [res (try {:value (apply f args)}
+              (catch Exception e
+                (if (= 0 tries)
+                  (throw e)
+                  {:exception e})))]
+    (if (:exception res)
+      (do 
+        (log-error (:exception res) (str "Failed to " task-description ". Will make [" tries "] more attempts."))
+        (recur (dec tries) task-description f args))
+      (do 
+        (log-debug (str "Successful " task-description "."))
+        (:value res)))))
+
 (defn setup-default-uncaught-exception-handler
   "Set a default uncaught exception handler to handle exceptions not caught in other threads."
   []

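retry-on-exception loops via recur, logging and decrementing the budget until it is exhausted, at which point the last exception is rethrown. A compact Java sketch of the same retry-with-budget pattern (names are illustrative):

    import java.util.concurrent.Callable;

    public class Retry {
        // Makes the first attempt plus up to `tries` additional attempts,
        // rethrowing the last exception once the budget is exhausted.
        static <T> T retryOnException(int tries, String description, Callable<T> task)
                throws Exception {
            while (true) {
                try {
                    return task.call();
                } catch (Exception e) {
                    if (tries == 0) throw e;
                    System.err.println("Failed to " + description
                        + ". Will make [" + tries + "] more attempts.");
                    tries--;
                }
            }
        }

        public static void main(String[] args) throws Exception {
            System.out.println(retryOnException(3, "compute the answer", () -> 42));
        }
    }
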
http://git-wip-us.apache.org/repos/asf/storm/blob/444ec05e/storm-core/src/clj/org/apache/storm/pacemaker/pacemaker.clj
----------------------------------------------------------------------
diff --git a/storm-core/src/clj/org/apache/storm/pacemaker/pacemaker.clj b/storm-core/src/clj/org/apache/storm/pacemaker/pacemaker.clj
new file mode 100644
index 0000000..1b2ad1b
--- /dev/null
+++ b/storm-core/src/clj/org/apache/storm/pacemaker/pacemaker.clj
@@ -0,0 +1,248 @@
+;; Licensed to the Apache Software Foundation (ASF) under one
+;; or more contributor license agreements.  See the NOTICE file
+;; distributed with this work for additional information
+;; regarding copyright ownership.  The ASF licenses this file
+;; to you under the Apache License, Version 2.0 (the
+;; "License"); you may not use this file except in compliance
+;; with the License.  You may obtain a copy of the License at
+;;
+;; http://www.apache.org/licenses/LICENSE-2.0
+;;
+;; Unless required by applicable law or agreed to in writing, software
+;; distributed under the License is distributed on an "AS IS" BASIS,
+;; WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+;; See the License for the specific language governing permissions and
+;; limitations under the License.
+
+(ns org.apache.storm.pacemaker.pacemaker
+  (:import [org.apache.storm.pacemaker PacemakerServer IServerMessageHandler]
+           [java.util.concurrent ConcurrentHashMap ThreadPoolExecutor TimeUnit LinkedBlockingDeque]
+           [java.util.concurrent.atomic AtomicInteger]
+           [java.util Date]
+           [backtype.storm.generated
+            HBAuthorizationException HBExecutionException HBNodes HBRecords
+            HBServerMessageType HBMessage HBMessageData HBPulse])
+  (:use [clojure.string :only [replace-first split]]
+        [backtype.storm log config util])
+  (:require [clojure.java.jmx :as jmx])
+  (:gen-class))
+
+;; This is the old Thrift service that this server is emulating.
+;  void createPath(1: string path) throws (1: HBExecutionException e, 2: HBAuthorizationException aze);
+;  bool exists(1: string path) throws (1: HBExecutionException e, 2: HBAuthorizationException aze);
+;  void sendPulse(1: Pulse pulse) throws (1: HBExecutionException e, 2: HBAuthorizationException aze);
+;  HBRecords getAllPulseForPath(1: string idPrefix) throws (1: HBExecutionException e, 2: HBAuthorizationException aze);
+;  HBNodes getAllNodesForPath(1: string idPrefix) throws (1: HBExecutionException e, 2: HBAuthorizationException aze);
+;  Pulse getPulse(1: string id) throws (1: HBExecutionException e, 2: HBAuthorizationException aze);
+;  void deletePath(1: string idPrefix) throws (1: HBExecutionException e, 2: HBAuthorizationException aze);
+;  void deletePulseId(1: string id) throws (1: HBExecutionException e, 2: HBAuthorizationException aze);
+
+
+;; Stats Functions
+
+(def sleep-seconds 5)
+
+
+(defn- check-and-set-loop [stats key new & {:keys [compare new-fn]
+                                            :or {compare (fn [new old] true)
+                                                 new-fn (fn [new old] new)}}]
+  (loop []
+    (let [old (.get (key stats))
+          new (new-fn new old)]
+      (if (compare new old)
+        (if (.compareAndSet (key stats) old new)
+          nil
+          (recur))
+        nil))))
+
+(defn- set-average [stats size]
+  (check-and-set-loop
+   stats
+   :average-heartbeat-size
+   size
+   :new-fn (fn [new old]
+            (let [count (.get (:send-pulse-count stats))]
+                                        ; Weighted average
+              (/ (+ new (* count old)) (+ count 1))))))
+
+(defn- set-largest [stats size]
+  (check-and-set-loop
+   stats
+   :largest-heartbeat-size
+   size
+   :compare #'>))
+
+(defn- report-stats [heartbeats stats last-five-s]
+  (loop []
+      (let [send-count (.getAndSet (:send-pulse-count stats) 0)
+            received-size (.getAndSet (:total-received-size stats) 0)
+            get-count (.getAndSet (:get-pulse-count stats) 0)
+            sent-size (.getAndSet (:total-sent-size stats) 0)
+            largest (.getAndSet (:largest-heartbeat-size stats) 0)
+            average (.getAndSet (:average-heartbeat-size stats) 0)
+            total-keys (.size heartbeats)]
+        (log-message "\nReceived " send-count " heartbeats totaling " received-size " bytes,\n"
+                     "Sent " get-count " heartbeats totaling " sent-size " bytes,\n"
+                     "The largest heartbeat was " largest " bytes,\n"
+                     "The average heartbeat was " average " bytes,\n"
+                     "Pacemaker contained " total-keys " total keys\n"
+                     "in the last " sleep-seconds " second(s)")
+        (dosync (ref-set last-five-s
+                         {:send-pulse-count send-count
+                          :total-received-size received-size
+                          :get-pulse-count get-count
+                          :total-sent-size sent-size
+                          :largest-heartbeat-size largest
+                          :average-heartbeat-size average
+                          :total-keys total-keys})))
+      (Thread/sleep (* 1000 sleep-seconds))
+      (recur)))
+
+;; JMX stuff
+(defn register [last-five-s]
+  (jmx/register-mbean
+   (jmx/create-bean
+    last-five-s)
+   "org.apache.storm.pacemaker.pacemaker:stats=Stats_Last_5_Seconds"))
+
+;; Pacemaker Functions
+
+(defn hb-data [conf]
+  (ConcurrentHashMap.))
+
+(defn create-path [^String path heartbeats]
+  (HBMessage. HBServerMessageType/CREATE_PATH_RESPONSE nil))
+
+(defn exists [^String path heartbeats]
+  (let [it-does (.containsKey heartbeats path)]
+    (log-debug (str "Checking if path [" path "] exists..." it-does "."))
+    (HBMessage. HBServerMessageType/EXISTS_RESPONSE
+                (HBMessageData/boolval it-does))))
+
+(defn send-pulse [^HBPulse pulse heartbeats pacemaker-stats]
+  (let [id (.get_id pulse)
+        details (.get_details pulse)]
+    (log-debug (str "Saving Pulse for id [" id "] data [" + (str details) "]."))
+
+    (.incrementAndGet (:send-pulse-count pacemaker-stats))
+    (.addAndGet (:total-received-size pacemaker-stats) (alength details))
+    (set-largest pacemaker-stats (alength details))
+    (set-average pacemaker-stats (alength details))
+
+    (.put heartbeats id details)
+    (HBMessage. HBServerMessageType/SEND_PULSE_RESPONSE nil)))
+
+(defn get-all-pulse-for-path [^String path heartbeats]
+  (HBMessage. HBServerMessageType/GET_ALL_PULSE_FOR_PATH_RESPONSE nil))
+
+(defn get-all-nodes-for-path [^String path ^ConcurrentHashMap heartbeats]
+  (log-debug "List all nodes for path " path)
+  (HBMessage. HBServerMessageType/GET_ALL_NODES_FOR_PATH_RESPONSE
+              (HBMessageData/nodes
+               (HBNodes. (distinct (for [k (.keySet heartbeats)
+                                         :let [trimmed-k (first
+                                                          (filter #(not (= "" %))
+                                                                  (split (replace-first k path "") #"/")))]
+                                         :when (and
+                                                (not (nil? trimmed-k))
+                                                (= (.indexOf k path) 0))]
+                                     trimmed-k))))))
+
+(defn get-pulse [^String path heartbeats pacemaker-stats]
+  (let [details (.get heartbeats path)]
+    (log-debug (str "Getting Pulse for path [" path "]...data " (str details) "]."))
+
+
+    (.incrementAndGet (:get-pulse-count pacemaker-stats))
+    (if details
+      (.addAndGet (:total-sent-size pacemaker-stats) (alength details)))
+
+    (HBMessage. HBServerMessageType/GET_PULSE_RESPONSE
+                (HBMessageData/pulse
+                 (doto (HBPulse. ) (.set_id path) (.set_details details))))))
+
+(defn delete-pulse-id [^String path heartbeats]
+  (log-debug (str "Deleting Pulse for id [" path "]."))
+  (.remove heartbeats path)
+  (HBMessage. HBServerMessageType/DELETE_PULSE_ID_RESPONSE nil))
+
+(defn delete-path [^String path heartbeats]
+  (let [prefix (if (= \/ (last path)) path (str path "/"))]
+    (doseq [k (.keySet heartbeats)
+            :when (= (.indexOf k prefix) 0)]
+      (delete-pulse-id k heartbeats)))
+  (HBMessage. HBServerMessageType/DELETE_PATH_RESPONSE nil))
+
+(defn not-authorized []
+  (HBMessage. HBServerMessageType/NOT_AUTHORIZED nil))
+
+(defn mk-handler [conf]
+  (let [heartbeats ^ConcurrentHashMap (hb-data conf)
+        pacemaker-stats {:send-pulse-count (AtomicInteger.)
+                         :total-received-size (AtomicInteger.)
+                         :get-pulse-count (AtomicInteger.)
+                         :total-sent-size (AtomicInteger.)
+                         :largest-heartbeat-size (AtomicInteger.)
+                         :average-heartbeat-size (AtomicInteger.)}
+        last-five (ref {:send-pulse-count 0
+                        :total-received-size 0
+                        :get-pulse-count 0
+                        :total-sent-size 0
+                        :largest-heartbeat-size 0
+                        :average-heartbeat-size 0
+                        :total-keys 0})
+        stats-thread (Thread. (fn [] (report-stats heartbeats pacemaker-stats last-five)))]
+    (.setDaemon stats-thread true)
+    (.start stats-thread)
+    (register last-five)
+    (reify
+      IServerMessageHandler
+      (^HBMessage handleMessage [this ^HBMessage request ^boolean authenticated]
+        (let [response
+              (condp = (.get_type request)
+                HBServerMessageType/CREATE_PATH
+                (create-path (.get_path (.get_data request)) heartbeats)
+
+                HBServerMessageType/EXISTS
+                (if authenticated
+                  (exists (.get_path (.get_data request)) heartbeats)
+                  (not-authorized))
+
+                HBServerMessageType/SEND_PULSE
+                (send-pulse (.get_pulse (.get_data request)) heartbeats pacemaker-stats)
+
+                HBServerMessageType/GET_ALL_PULSE_FOR_PATH
+                (if authenticated
+                  (get-all-pulse-for-path (.get_path (.get_data request)) heartbeats)
+                  (not-authorized))
+
+                HBServerMessageType/GET_ALL_NODES_FOR_PATH
+                (if authenticated
+                  (get-all-nodes-for-path (.get_path (.get_data request)) heartbeats)
+                  (not-authorized))
+
+                HBServerMessageType/GET_PULSE
+                (if authenticated
+                  (get-pulse (.get_path (.get_data request)) heartbeats pacemaker-stats)
+                  (not-authorized))
+
+                HBServerMessageType/DELETE_PATH
+                (delete-path (.get_path (.get_data request)) heartbeats)
+
+                HBServerMessageType/DELETE_PULSE_ID
+                (delete-pulse-id (.get_path (.get_data request)) heartbeats)
+
+                ; Otherwise
+                (log-message "Got Unexpected Type: " (.get_type request)))]
+
+          (when response
+            (.set_message_id response (.get_message_id request)))
+          response)))))
+
+(defn launch-server! []
+  (log-message "Starting Server.")
+  (let [conf (override-login-config-with-system-property (read-storm-config))]
+    (PacemakerServer. (mk-handler conf) conf)))
+
+(defn -main []
+  (redirect-stdio-to-slf4j!)
+  (launch-server!))
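
The stats helpers above (check-and-set-loop, set-largest, set-average) avoid
locks by spinning on AtomicInteger compare-and-set. A minimal standalone sketch
of the same pattern (a hypothetical REPL example, not part of this commit):

    (import 'java.util.concurrent.atomic.AtomicInteger)

    (def largest (AtomicInteger. 0))

    (defn record-size! [^AtomicInteger cell size]
      ;; Retry until either the stored value is already >= size,
      ;; or our compare-and-set wins the race.
      (loop []
        (let [old (.get cell)]
          (when (and (> size old)
                     (not (.compareAndSet cell old size)))
            (recur)))))

    (record-size! largest 42)
    ;; (.get largest) => 42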

http://git-wip-us.apache.org/repos/asf/storm/blob/444ec05e/storm-core/src/clj/org/apache/storm/pacemaker/pacemaker_state_factory.clj
----------------------------------------------------------------------
diff --git a/storm-core/src/clj/org/apache/storm/pacemaker/pacemaker_state_factory.clj b/storm-core/src/clj/org/apache/storm/pacemaker/pacemaker_state_factory.clj
new file mode 100644
index 0000000..9dc7809
--- /dev/null
+++ b/storm-core/src/clj/org/apache/storm/pacemaker/pacemaker_state_factory.clj
@@ -0,0 +1,124 @@
+;; Licensed to the Apache Software Foundation (ASF) under one
+;; or more contributor license agreements.  See the NOTICE file
+;; distributed with this work for additional information
+;; regarding copyright ownership.  The ASF licenses this file
+;; to you under the Apache License, Version 2.0 (the
+;; "License"); you may not use this file except in compliance
+;; with the License.  You may obtain a copy of the License at
+;;
+;; http://www.apache.org/licenses/LICENSE-2.0
+;;
+;; Unless required by applicable law or agreed to in writing, software
+;; distributed under the License is distributed on an "AS IS" BASIS,
+;; WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+;; See the License for the specific language governing permissions and
+;; limitations under the License.
+
+(ns org.apache.storm.pacemaker.pacemaker-state-factory
+  (:require [org.apache.storm.pacemaker pacemaker]
+            [backtype.storm.cluster-state [zookeeper-state-factory :as zk-factory]]
+            [backtype.storm
+             [config :refer :all]
+             [cluster :refer :all]
+             [log :refer :all]
+             [util :as util]])
+  (:import [backtype.storm.generated
+            HBExecutionException HBNodes HBRecords
+            HBServerMessageType HBMessage HBMessageData HBPulse]
+           [backtype.storm.cluster_state zookeeper_state_factory]
+           [backtype.storm.cluster ClusterState]
+           [org.apache.storm.pacemaker PacemakerClient])
+  (:gen-class
+   :implements [backtype.storm.cluster.ClusterStateFactory]))
+
+;; So we can mock the client for testing
+(defn makeClient [conf]
+  (PacemakerClient. conf))
+
+(defn makeZKState [conf auth-conf acls context]
+  (.mkState (zookeeper_state_factory.) conf auth-conf acls context))
+
+(def max-retries 10)
+
+(defn -mkState [this conf auth-conf acls context]
+  (let [zk-state (makeZKState conf auth-conf acls context)
+        pacemaker-client (makeClient conf)]
+
+    (reify
+      ClusterState
+      ;; Let these pass through to the zk-state. We only want to handle heartbeats.
+      (register [this callback] (.register zk-state callback))
+      (unregister [this callback] (.unregister zk-state callback))
+      (set_ephemeral_node [this path data acls] (.set_ephemeral_node zk-state path data acls))
+      (create_sequential [this path data acls] (.create_sequential zk-state path data acls))
+      (set_data [this path data acls] (.set_data zk-state path data acls))
+      (delete_node [this path] (.delete_node zk-state path))
+      (get_data [this path watch?] (.get_data zk-state path watch?))
+      (get_data_with_version [this path watch?] (.get_data_with_version zk-state path watch?))
+      (get_version [this path watch?] (.get_version zk-state path watch?))
+      (get_children [this path watch?] (.get_children zk-state path watch?))
+      (mkdirs [this path acls] (.mkdirs zk-state path acls))
+      (node_exists [this path watch?] (.node_exists zk-state path watch?))
+      (add_listener [this listener] (.add_listener zk-state listener))
+      (sync_path [this path] (.sync_path zk-state path))
+      
+      (set_worker_hb [this path data acls]
+        (util/retry-on-exception
+         max-retries
+         "set_worker_hb"
+         #(let [response
+                (.send pacemaker-client
+                       (HBMessage. HBServerMessageType/SEND_PULSE
+                                   (HBMessageData/pulse
+                                    (doto (HBPulse.)
+                                      (.set_id path)
+                                      (.set_details data)))))]
+            (if (= (.get_type response) HBServerMessageType/SEND_PULSE_RESPONSE)
+              :ok
+              (throw (HBExecutionException. "Invalid Response Type"))))))
+
+      (delete_worker_hb [this path]
+        (util/retry-on-exception
+         max-retries
+         "delete_worker_hb"
+         #(let [response
+                (.send pacemaker-client
+                       (HBMessage. HBServerMessageType/DELETE_PATH
+                                   (HBMessageData/path path)))]
+            (if (= (.get_type response) HBServerMessageType/DELETE_PATH_RESPONSE)
+              :ok
+              (throw (HBExecutionException. "Invalid Response Type"))))))
+      
+      (get_worker_hb [this path watch?]
+        (util/retry-on-exception
+         max-retries
+         "get_worker_hb"
+         #(let [response
+                (.send pacemaker-client
+                       (HBMessage. HBServerMessageType/GET_PULSE
+                                   (HBMessageData/path path)))]
+            (if (= (.get_type response) HBServerMessageType/GET_PULSE_RESPONSE)
+              (try 
+                (.get_details (.get_pulse (.get_data response)))
+                (catch Exception e
+                  (throw (HBExecutionException. (.toString e)))))
+              (throw (HBExecutionException. "Invalid Response Type"))))))
+      
+      (get_worker_hb_children [this path watch?]
+        (util/retry-on-exception
+         max-retries
+         "get_worker_hb_children"
+         #(let [response
+                (.send pacemaker-client
+                       (HBMessage. HBServerMessageType/GET_ALL_NODES_FOR_PATH
+                                   (HBMessageData/path path)))]
+            (if (= (.get_type response) HBServerMessageType/GET_ALL_NODES_FOR_PATH_RESPONSE)
+              (try
+                (into [] (.get_pulseIds (.get_nodes (.get_data response))))
+                (catch Exception e
+                  (throw (HBExecutionException. (.toString e)))))
+              (throw (HBExecutionException. "Invalid Response Type"))))))
+      
+      (close [this]
+        (.close zk-state)
+        (.close pacemaker-client)))))
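
Each heartbeat operation above is wrapped in util/retry-on-exception. A
simplified standalone sketch of what such a retry helper does (the real
backtype.storm.util implementation may differ, e.g. in backoff and logging):

    (defn retry-on-exception [retries op-name afn]
      (loop [remaining retries]
        (let [result (try
                       {:value (afn)}
                       (catch Exception e
                         (when (zero? remaining)
                           (throw e))
                         (println op-name "failed, retrying...")
                         ::retry))]
          (if (= result ::retry)
            (recur (dec remaining))
            (:value result)))))

    ;; usage sketch: (retry-on-exception 10 "get_worker_hb" #(... network call ...))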

http://git-wip-us.apache.org/repos/asf/storm/blob/444ec05e/storm-core/src/genthrift.sh
----------------------------------------------------------------------
diff --git a/storm-core/src/genthrift.sh b/storm-core/src/genthrift.sh
old mode 100644
new mode 100755

http://git-wip-us.apache.org/repos/asf/storm/blob/444ec05e/storm-core/src/jvm/backtype/storm/Config.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/backtype/storm/Config.java b/storm-core/src/jvm/backtype/storm/Config.java
index 764497d..25fcf9e 100644
--- a/storm-core/src/jvm/backtype/storm/Config.java
+++ b/storm-core/src/jvm/backtype/storm/Config.java
@@ -347,6 +347,13 @@ public class Config extends HashMap<String, Object> {
     public static final String STORM_NIMBUS_RETRY_INTERVAL_CEILING="storm.nimbus.retry.intervalceiling.millis";
 
     /**
+     * The ClusterState factory that the worker will use to create a ClusterState
+     * to store state in. Defaults to ZooKeeper.
+     */
+    @isString
+    public static final String STORM_CLUSTER_STATE_STORE = "storm.cluster.state.store";
+
+    /**
      * The Nimbus transport plug-in for Thrift client/server communication
      */
     @isString
@@ -752,6 +759,54 @@ public class Config extends HashMap<String, Object> {
     public static final String UI_HTTPS_NEED_CLIENT_AUTH = "ui.https.need.client.auth";
 
     /**
+     * The host that Pacemaker is running on.
+     */
+    @isString
+    public static final String PACEMAKER_HOST = "pacemaker.host";
+
+    /**
+     * The port Pacemaker should run on. Clients should
+     * connect to this port to submit or read heartbeats.
+     */
+    @isNumber
+    @isPositiveNumber
+    public static final String PACEMAKER_PORT = "pacemaker.port";
+
+    /**
+     * The maximum number of threads that should be used by the Pacemaker.
+     * As it comes under load, Pacemaker will spawn new threads, up to
+     * this many in total, to handle it.
+     */
+    @isNumber
+    @isPositiveNumber
+    public static final String PACEMAKER_MAX_THREADS = "pacemaker.max.threads";
+
+    /**
+     * This parameter is used by the storm-deploy project to configure the
+     * jvm options for the pacemaker daemon.
+     */
+    @isString
+    public static final String PACEMAKER_CHILDOPTS = "pacemaker.childopts";
+
+    /**
+     * This should be one of "DIGEST", "KERBEROS", or "NONE". It determines
+     * the mode of authentication that the Pacemaker server and client use.
+     * The client must either match the server, or be NONE. In the case of NONE,
+     * no authentication is performed for the client, and if the server is running with
+     * DIGEST or KERBEROS, the client can only write to the server (no reads).
+     * This is intended to provide a primitive form of access-control.
+     */
+    @isString
+    public static final String PACEMAKER_AUTH_METHOD = "pacemaker.auth.method";
+
+    /**
+     * These are the Kerberos users who are authorized to read heartbeats from
+     * Pacemaker.
+     */
+    @isStringList
+    public static final String PACEMAKER_KERBEROS_USERS = "pacemaker.kerberos.users";
+    
+    /**
      * List of DRPC servers so that the DRPCSpout knows who to talk to.
      */
     @isStringList
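
Taken together, the new Pacemaker settings above look like this in practice.
A hypothetical configuration, shown as the Clojure map a daemon would see
after reading storm.yaml (all values here are made-up examples):

    {"pacemaker.host"            "pacemaker.example.com"
     "pacemaker.port"            6699
     "pacemaker.max.threads"     50
     "pacemaker.childopts"       "-Xmx1024m"
     "pacemaker.auth.method"     "KERBEROS"
     "pacemaker.kerberos.users"  ["nimbus_user" "ops_user"]
     ;; workers store heartbeats via the Pacemaker ClusterState plugin:
     "storm.cluster.state.store" "org.apache.storm.pacemaker.pacemaker_state_factory"}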

http://git-wip-us.apache.org/repos/asf/storm/blob/444ec05e/storm-core/src/jvm/backtype/storm/cluster/ClusterState.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/backtype/storm/cluster/ClusterState.java b/storm-core/src/jvm/backtype/storm/cluster/ClusterState.java
new file mode 100644
index 0000000..638d905
--- /dev/null
+++ b/storm-core/src/jvm/backtype/storm/cluster/ClusterState.java
@@ -0,0 +1,45 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package backtype.storm.cluster;
+
+import clojure.lang.APersistentMap;
+import clojure.lang.IFn;
+import java.util.List;
+import org.apache.zookeeper.data.ACL;
+
+public interface ClusterState {
+    void unregister(String id);
+    void create_sequential(String path, byte[] data, List<ACL> acls);
+    void mkdirs(String path, List<ACL> acls);
+    void delete_node(String path);
+    void set_ephemeral_node(String path, byte[] data, List<ACL> acls);
+    Integer get_version(String path, boolean watch);
+    boolean node_exists(String path, boolean watch);
+    List<String> get_children(String path, boolean watch);
+    void close();
+    void set_data(String path, byte[] data, List<ACL> acls);
+    String register(IFn callback);
+    byte[] get_data(String path, boolean watch);
+    APersistentMap get_data_with_version(String path, boolean watch);
+    void set_worker_hb(String path, byte[] data, List<ACL> acls);
+    byte[] get_worker_hb(String path, boolean watch);
+    List<String> get_worker_hb_children(String path, boolean watch);
+    void delete_worker_hb(String path);
+    void add_listener(ClusterStateListener listener);
+    void sync_path(String path);
+}
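
For illustration, a toy in-memory implementation of just the heartbeat-related
methods of this interface (a sketch for tests only; the omitted methods are
left unimplemented and would throw if called):

    (defn mem-hb-state []
      (let [m (java.util.concurrent.ConcurrentHashMap.)]
        (reify backtype.storm.cluster.ClusterState
          (set_worker_hb [this path data acls] (.put m path data))
          (get_worker_hb [this path watch?] (.get m path))
          (get_worker_hb_children [this path watch?]
            (vec (filter #(.startsWith ^String % path) (.keySet m))))
          (delete_worker_hb [this path] (.remove m path))
          (close [this] nil))))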

http://git-wip-us.apache.org/repos/asf/storm/blob/444ec05e/storm-core/src/jvm/backtype/storm/cluster/ClusterStateContext.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/backtype/storm/cluster/ClusterStateContext.java b/storm-core/src/jvm/backtype/storm/cluster/ClusterStateContext.java
new file mode 100644
index 0000000..5ccde23
--- /dev/null
+++ b/storm-core/src/jvm/backtype/storm/cluster/ClusterStateContext.java
@@ -0,0 +1,41 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package backtype.storm.cluster;
+
+/**
+ * This class is intended to provide runtime context to ClusterStateFactory
+ * implementors, giving information such as which daemon is creating it.
+ */
+public class ClusterStateContext {
+    
+    private DaemonType daemonType;
+
+    public ClusterStateContext() {
+        daemonType = DaemonType.UNKNOWN;
+    }
+    
+    public ClusterStateContext(DaemonType daemonType) {
+        this.daemonType = daemonType;
+    }
+    
+    public DaemonType getDaemonType() {
+        return daemonType;
+    }
+    
+}

http://git-wip-us.apache.org/repos/asf/storm/blob/444ec05e/storm-core/src/jvm/backtype/storm/cluster/ClusterStateFactory.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/backtype/storm/cluster/ClusterStateFactory.java b/storm-core/src/jvm/backtype/storm/cluster/ClusterStateFactory.java
new file mode 100644
index 0000000..d33646b
--- /dev/null
+++ b/storm-core/src/jvm/backtype/storm/cluster/ClusterStateFactory.java
@@ -0,0 +1,28 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package backtype.storm.cluster;
+
+import clojure.lang.APersistentMap;
+import java.util.List;
+import org.apache.zookeeper.data.ACL;
+
+public interface ClusterStateFactory {
+    
+    public ClusterState mkState(APersistentMap config, APersistentMap auth_conf, List<ACL> acls, ClusterStateContext context);
+
+}
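
How the factory gets selected is not part of this hunk, but presumably a daemon
reflectively instantiates the class named by storm.cluster.state.store (see the
Config.java change above), falling back to the ZooKeeper factory. A sketch of
that assumed wiring:

    (defn mk-state-factory [conf]
      (let [klass (or (conf "storm.cluster.state.store")
                      "backtype.storm.cluster_state.zookeeper_state_factory")]
        (.newInstance (Class/forName klass))))

    ;; the resulting factory is then asked to .mkState with conf, auth-conf,
    ;; acls, and a ClusterStateContext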

http://git-wip-us.apache.org/repos/asf/storm/blob/444ec05e/storm-core/src/jvm/backtype/storm/cluster/ClusterStateListener.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/backtype/storm/cluster/ClusterStateListener.java b/storm-core/src/jvm/backtype/storm/cluster/ClusterStateListener.java
new file mode 100644
index 0000000..22693f8
--- /dev/null
+++ b/storm-core/src/jvm/backtype/storm/cluster/ClusterStateListener.java
@@ -0,0 +1,22 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package backtype.storm.cluster;
+
+public interface ClusterStateListener {
+    void stateChanged(ConnectionState newState);
+}

http://git-wip-us.apache.org/repos/asf/storm/blob/444ec05e/storm-core/src/jvm/backtype/storm/cluster/ConnectionState.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/backtype/storm/cluster/ConnectionState.java b/storm-core/src/jvm/backtype/storm/cluster/ConnectionState.java
new file mode 100644
index 0000000..d6887da
--- /dev/null
+++ b/storm-core/src/jvm/backtype/storm/cluster/ConnectionState.java
@@ -0,0 +1,24 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package backtype.storm.cluster;
+
+public enum ConnectionState {
+    CONNECTED,
+    RECONNECTED,
+    LOST
+}

http://git-wip-us.apache.org/repos/asf/storm/blob/444ec05e/storm-core/src/jvm/backtype/storm/cluster/DaemonType.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/backtype/storm/cluster/DaemonType.java b/storm-core/src/jvm/backtype/storm/cluster/DaemonType.java
new file mode 100644
index 0000000..684d0ef
--- /dev/null
+++ b/storm-core/src/jvm/backtype/storm/cluster/DaemonType.java
@@ -0,0 +1,27 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package backtype.storm.cluster;
+
+public enum DaemonType {
+    SUPERVISOR,
+    NIMBUS,
+    WORKER,
+    PACEMAKER,
+    UNKNOWN
+}

http://git-wip-us.apache.org/repos/asf/storm/blob/444ec05e/storm-core/src/jvm/backtype/storm/generated/AlreadyAliveException.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/backtype/storm/generated/AlreadyAliveException.java b/storm-core/src/jvm/backtype/storm/generated/AlreadyAliveException.java
index fb2eee3..eb0d93f 100644
--- a/storm-core/src/jvm/backtype/storm/generated/AlreadyAliveException.java
+++ b/storm-core/src/jvm/backtype/storm/generated/AlreadyAliveException.java
@@ -51,7 +51,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-9")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-30")
 public class AlreadyAliveException extends TException implements org.apache.thrift.TBase<AlreadyAliveException, AlreadyAliveException._Fields>, java.io.Serializable, Cloneable, Comparable<AlreadyAliveException> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("AlreadyAliveException");
 

http://git-wip-us.apache.org/repos/asf/storm/blob/444ec05e/storm-core/src/jvm/backtype/storm/generated/Assignment.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/backtype/storm/generated/Assignment.java b/storm-core/src/jvm/backtype/storm/generated/Assignment.java
index dbc1cc9..05198bf 100644
--- a/storm-core/src/jvm/backtype/storm/generated/Assignment.java
+++ b/storm-core/src/jvm/backtype/storm/generated/Assignment.java
@@ -51,7 +51,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-9")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-30")
 public class Assignment implements org.apache.thrift.TBase<Assignment, Assignment._Fields>, java.io.Serializable, Cloneable, Comparable<Assignment> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("Assignment");
 

http://git-wip-us.apache.org/repos/asf/storm/blob/444ec05e/storm-core/src/jvm/backtype/storm/generated/AuthorizationException.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/backtype/storm/generated/AuthorizationException.java b/storm-core/src/jvm/backtype/storm/generated/AuthorizationException.java
index 69fff12..2330391 100644
--- a/storm-core/src/jvm/backtype/storm/generated/AuthorizationException.java
+++ b/storm-core/src/jvm/backtype/storm/generated/AuthorizationException.java
@@ -51,7 +51,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-9")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-30")
 public class AuthorizationException extends TException implements org.apache.thrift.TBase<AuthorizationException, AuthorizationException._Fields>, java.io.Serializable, Cloneable, Comparable<AuthorizationException> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("AuthorizationException");
 

http://git-wip-us.apache.org/repos/asf/storm/blob/444ec05e/storm-core/src/jvm/backtype/storm/generated/Bolt.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/backtype/storm/generated/Bolt.java b/storm-core/src/jvm/backtype/storm/generated/Bolt.java
index 9ea4bef..0c14b60 100644
--- a/storm-core/src/jvm/backtype/storm/generated/Bolt.java
+++ b/storm-core/src/jvm/backtype/storm/generated/Bolt.java
@@ -51,7 +51,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-9")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-30")
 public class Bolt implements org.apache.thrift.TBase<Bolt, Bolt._Fields>, java.io.Serializable, Cloneable, Comparable<Bolt> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("Bolt");
 

http://git-wip-us.apache.org/repos/asf/storm/blob/444ec05e/storm-core/src/jvm/backtype/storm/generated/BoltAggregateStats.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/backtype/storm/generated/BoltAggregateStats.java b/storm-core/src/jvm/backtype/storm/generated/BoltAggregateStats.java
index 940e0b9..3b8e38c 100644
--- a/storm-core/src/jvm/backtype/storm/generated/BoltAggregateStats.java
+++ b/storm-core/src/jvm/backtype/storm/generated/BoltAggregateStats.java
@@ -51,7 +51,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-9")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-30")
 public class BoltAggregateStats implements org.apache.thrift.TBase<BoltAggregateStats, BoltAggregateStats._Fields>, java.io.Serializable, Cloneable, Comparable<BoltAggregateStats> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("BoltAggregateStats");
 

http://git-wip-us.apache.org/repos/asf/storm/blob/444ec05e/storm-core/src/jvm/backtype/storm/generated/BoltStats.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/backtype/storm/generated/BoltStats.java b/storm-core/src/jvm/backtype/storm/generated/BoltStats.java
index c3ffc9f..6f64f14 100644
--- a/storm-core/src/jvm/backtype/storm/generated/BoltStats.java
+++ b/storm-core/src/jvm/backtype/storm/generated/BoltStats.java
@@ -51,7 +51,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-9")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-30")
 public class BoltStats implements org.apache.thrift.TBase<BoltStats, BoltStats._Fields>, java.io.Serializable, Cloneable, Comparable<BoltStats> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("BoltStats");
 

http://git-wip-us.apache.org/repos/asf/storm/blob/444ec05e/storm-core/src/jvm/backtype/storm/generated/ClusterSummary.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/backtype/storm/generated/ClusterSummary.java b/storm-core/src/jvm/backtype/storm/generated/ClusterSummary.java
index d23cdcb..7935567 100644
--- a/storm-core/src/jvm/backtype/storm/generated/ClusterSummary.java
+++ b/storm-core/src/jvm/backtype/storm/generated/ClusterSummary.java
@@ -51,7 +51,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-9")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-30")
 public class ClusterSummary implements org.apache.thrift.TBase<ClusterSummary, ClusterSummary._Fields>, java.io.Serializable, Cloneable, Comparable<ClusterSummary> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("ClusterSummary");
 


[23/37] storm git commit: Minor tweaks to documentation.

Posted by kn...@apache.org.
Minor tweaks to documentation.


Project: http://git-wip-us.apache.org/repos/asf/storm/repo
Commit: http://git-wip-us.apache.org/repos/asf/storm/commit/099dc72e
Tree: http://git-wip-us.apache.org/repos/asf/storm/tree/099dc72e
Diff: http://git-wip-us.apache.org/repos/asf/storm/diff/099dc72e

Branch: refs/heads/master
Commit: 099dc72e6fb20369379fd4f5d715f862fa267f71
Parents: 921db43
Author: Kyle Nusbaum <Ky...@gmail.com>
Authored: Tue Nov 17 15:24:36 2015 -0600
Committer: Kyle Nusbaum <Ky...@gmail.com>
Committed: Tue Nov 17 15:24:36 2015 -0600

----------------------------------------------------------------------
 docs/documentation/Pacemaker.md | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/storm/blob/099dc72e/docs/documentation/Pacemaker.md
----------------------------------------------------------------------
diff --git a/docs/documentation/Pacemaker.md b/docs/documentation/Pacemaker.md
index 5084737..8acbb36 100644
--- a/docs/documentation/Pacemaker.md
+++ b/docs/documentation/Pacemaker.md
@@ -1,6 +1,6 @@
 # Pacemaker
 
-## Intro
+### Intro
 Pacemaker is a Storm daemon designed to process heartbeats from workers. As Storm is scaled up, ZooKeeper becomes a bottleneck due to the high volume of heartbeat writes from workers. ZooKeeper generates heavy disk and network traffic as it tries to maintain consistency.
 
 Because heartbeats are of an ephemeral nature, they do not need to be persisted to disk or synced across nodes; an in-memory store will do. This is the role of Pacemaker. Pacemaker functions as a simple in-memory key/value store with ZooKeeper-like, directory-style keys and byte array values.
@@ -9,7 +9,7 @@ The corresponding Pacemaker client is a plugin for the `ClusterState` interface,
 
 ------
 
-## Configuration
+### Configuration
 
  - `pacemaker.host` : The host that the Pacemaker daemon is running on
  - `pacemaker.port` : The port that Pacemaker will listen on
@@ -38,7 +38,7 @@ $ storm pacemaker
 
 The Storm cluster should now be pushing all worker heartbeats through Pacemaker.
 
-## Security
+### Security
 
 Digest (password-based) and Kerberos security are currently supported. Security currently applies only to reads, not writes: writes may be performed by anyone, whereas reads may only be performed by authorized and authenticated users. This is an area for future development, as it leaves the cluster open to DoS attacks, but it prevents sensitive information from reaching unauthorized eyes, which was the main goal.
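
Concretely, a worker heartbeat write is a single SEND_PULSE message, matching
the client code in pacemaker_state_factory.clj above (the path and payload
below are made-up; assumes the backtype.storm.generated classes are imported):

    ;; pacemaker-client: a connected PacemakerClient; hb-bytes: the serialized
    ;; heartbeat (both hypothetical bindings for this sketch)
    (.send pacemaker-client
           (HBMessage. HBServerMessageType/SEND_PULSE
                       (HBMessageData/pulse
                        (doto (HBPulse.)
                          (.set_id "/workerbeats/topo-1/6700")
                          (.set_details hb-bytes)))))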
 


[03/37] storm git commit: PACEMAKER OPEN SOURCE!

Posted by kn...@apache.org.
http://git-wip-us.apache.org/repos/asf/storm/blob/444ec05e/storm-core/src/jvm/backtype/storm/generated/NotAliveException.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/backtype/storm/generated/NotAliveException.java b/storm-core/src/jvm/backtype/storm/generated/NotAliveException.java
index cbabcf9..63c3e4c 100644
--- a/storm-core/src/jvm/backtype/storm/generated/NotAliveException.java
+++ b/storm-core/src/jvm/backtype/storm/generated/NotAliveException.java
@@ -51,7 +51,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-9")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-30")
 public class NotAliveException extends TException implements org.apache.thrift.TBase<NotAliveException, NotAliveException._Fields>, java.io.Serializable, Cloneable, Comparable<NotAliveException> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("NotAliveException");
 

http://git-wip-us.apache.org/repos/asf/storm/blob/444ec05e/storm-core/src/jvm/backtype/storm/generated/NullStruct.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/backtype/storm/generated/NullStruct.java b/storm-core/src/jvm/backtype/storm/generated/NullStruct.java
index 1b8208c..98dd8b9 100644
--- a/storm-core/src/jvm/backtype/storm/generated/NullStruct.java
+++ b/storm-core/src/jvm/backtype/storm/generated/NullStruct.java
@@ -51,7 +51,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-9")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-30")
 public class NullStruct implements org.apache.thrift.TBase<NullStruct, NullStruct._Fields>, java.io.Serializable, Cloneable, Comparable<NullStruct> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("NullStruct");
 

http://git-wip-us.apache.org/repos/asf/storm/blob/444ec05e/storm-core/src/jvm/backtype/storm/generated/RebalanceOptions.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/backtype/storm/generated/RebalanceOptions.java b/storm-core/src/jvm/backtype/storm/generated/RebalanceOptions.java
index d859f5a..b3f916b 100644
--- a/storm-core/src/jvm/backtype/storm/generated/RebalanceOptions.java
+++ b/storm-core/src/jvm/backtype/storm/generated/RebalanceOptions.java
@@ -51,7 +51,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-9")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-30")
 public class RebalanceOptions implements org.apache.thrift.TBase<RebalanceOptions, RebalanceOptions._Fields>, java.io.Serializable, Cloneable, Comparable<RebalanceOptions> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("RebalanceOptions");
 

http://git-wip-us.apache.org/repos/asf/storm/blob/444ec05e/storm-core/src/jvm/backtype/storm/generated/ShellComponent.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/backtype/storm/generated/ShellComponent.java b/storm-core/src/jvm/backtype/storm/generated/ShellComponent.java
index ab86c6a..8647419 100644
--- a/storm-core/src/jvm/backtype/storm/generated/ShellComponent.java
+++ b/storm-core/src/jvm/backtype/storm/generated/ShellComponent.java
@@ -51,7 +51,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-9")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-30")
 public class ShellComponent implements org.apache.thrift.TBase<ShellComponent, ShellComponent._Fields>, java.io.Serializable, Cloneable, Comparable<ShellComponent> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("ShellComponent");
 

http://git-wip-us.apache.org/repos/asf/storm/blob/444ec05e/storm-core/src/jvm/backtype/storm/generated/SpoutAggregateStats.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/backtype/storm/generated/SpoutAggregateStats.java b/storm-core/src/jvm/backtype/storm/generated/SpoutAggregateStats.java
index a8d6ec7..bc128aa 100644
--- a/storm-core/src/jvm/backtype/storm/generated/SpoutAggregateStats.java
+++ b/storm-core/src/jvm/backtype/storm/generated/SpoutAggregateStats.java
@@ -51,7 +51,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-9")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-30")
 public class SpoutAggregateStats implements org.apache.thrift.TBase<SpoutAggregateStats, SpoutAggregateStats._Fields>, java.io.Serializable, Cloneable, Comparable<SpoutAggregateStats> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("SpoutAggregateStats");
 

http://git-wip-us.apache.org/repos/asf/storm/blob/444ec05e/storm-core/src/jvm/backtype/storm/generated/SpoutSpec.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/backtype/storm/generated/SpoutSpec.java b/storm-core/src/jvm/backtype/storm/generated/SpoutSpec.java
index 3fc45cf..bb67050 100644
--- a/storm-core/src/jvm/backtype/storm/generated/SpoutSpec.java
+++ b/storm-core/src/jvm/backtype/storm/generated/SpoutSpec.java
@@ -51,7 +51,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-9")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-30")
 public class SpoutSpec implements org.apache.thrift.TBase<SpoutSpec, SpoutSpec._Fields>, java.io.Serializable, Cloneable, Comparable<SpoutSpec> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("SpoutSpec");
 

http://git-wip-us.apache.org/repos/asf/storm/blob/444ec05e/storm-core/src/jvm/backtype/storm/generated/SpoutStats.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/backtype/storm/generated/SpoutStats.java b/storm-core/src/jvm/backtype/storm/generated/SpoutStats.java
index 478143f..d744184 100644
--- a/storm-core/src/jvm/backtype/storm/generated/SpoutStats.java
+++ b/storm-core/src/jvm/backtype/storm/generated/SpoutStats.java
@@ -51,7 +51,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-9")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-30")
 public class SpoutStats implements org.apache.thrift.TBase<SpoutStats, SpoutStats._Fields>, java.io.Serializable, Cloneable, Comparable<SpoutStats> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("SpoutStats");
 

http://git-wip-us.apache.org/repos/asf/storm/blob/444ec05e/storm-core/src/jvm/backtype/storm/generated/StateSpoutSpec.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/backtype/storm/generated/StateSpoutSpec.java b/storm-core/src/jvm/backtype/storm/generated/StateSpoutSpec.java
index 530b7ca..1e5ffde 100644
--- a/storm-core/src/jvm/backtype/storm/generated/StateSpoutSpec.java
+++ b/storm-core/src/jvm/backtype/storm/generated/StateSpoutSpec.java
@@ -51,7 +51,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-9")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-30")
 public class StateSpoutSpec implements org.apache.thrift.TBase<StateSpoutSpec, StateSpoutSpec._Fields>, java.io.Serializable, Cloneable, Comparable<StateSpoutSpec> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("StateSpoutSpec");
 

http://git-wip-us.apache.org/repos/asf/storm/blob/444ec05e/storm-core/src/jvm/backtype/storm/generated/StormBase.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/backtype/storm/generated/StormBase.java b/storm-core/src/jvm/backtype/storm/generated/StormBase.java
index f4af67a..6eed480 100644
--- a/storm-core/src/jvm/backtype/storm/generated/StormBase.java
+++ b/storm-core/src/jvm/backtype/storm/generated/StormBase.java
@@ -51,7 +51,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-9")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-30")
 public class StormBase implements org.apache.thrift.TBase<StormBase, StormBase._Fields>, java.io.Serializable, Cloneable, Comparable<StormBase> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("StormBase");
 

http://git-wip-us.apache.org/repos/asf/storm/blob/444ec05e/storm-core/src/jvm/backtype/storm/generated/StormTopology.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/backtype/storm/generated/StormTopology.java b/storm-core/src/jvm/backtype/storm/generated/StormTopology.java
index 9b96fa3..eb74a18 100644
--- a/storm-core/src/jvm/backtype/storm/generated/StormTopology.java
+++ b/storm-core/src/jvm/backtype/storm/generated/StormTopology.java
@@ -51,7 +51,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-9")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-30")
 public class StormTopology implements org.apache.thrift.TBase<StormTopology, StormTopology._Fields>, java.io.Serializable, Cloneable, Comparable<StormTopology> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("StormTopology");
 

http://git-wip-us.apache.org/repos/asf/storm/blob/444ec05e/storm-core/src/jvm/backtype/storm/generated/StreamInfo.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/backtype/storm/generated/StreamInfo.java b/storm-core/src/jvm/backtype/storm/generated/StreamInfo.java
index e3b0fdb..55b265a 100644
--- a/storm-core/src/jvm/backtype/storm/generated/StreamInfo.java
+++ b/storm-core/src/jvm/backtype/storm/generated/StreamInfo.java
@@ -51,7 +51,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-9")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-30")
 public class StreamInfo implements org.apache.thrift.TBase<StreamInfo, StreamInfo._Fields>, java.io.Serializable, Cloneable, Comparable<StreamInfo> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("StreamInfo");
 

http://git-wip-us.apache.org/repos/asf/storm/blob/444ec05e/storm-core/src/jvm/backtype/storm/generated/SubmitOptions.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/backtype/storm/generated/SubmitOptions.java b/storm-core/src/jvm/backtype/storm/generated/SubmitOptions.java
index 358468a..1633361 100644
--- a/storm-core/src/jvm/backtype/storm/generated/SubmitOptions.java
+++ b/storm-core/src/jvm/backtype/storm/generated/SubmitOptions.java
@@ -51,7 +51,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-9")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-30")
 public class SubmitOptions implements org.apache.thrift.TBase<SubmitOptions, SubmitOptions._Fields>, java.io.Serializable, Cloneable, Comparable<SubmitOptions> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("SubmitOptions");
 

http://git-wip-us.apache.org/repos/asf/storm/blob/444ec05e/storm-core/src/jvm/backtype/storm/generated/SupervisorInfo.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/backtype/storm/generated/SupervisorInfo.java b/storm-core/src/jvm/backtype/storm/generated/SupervisorInfo.java
index 6d68927..9bcb567 100644
--- a/storm-core/src/jvm/backtype/storm/generated/SupervisorInfo.java
+++ b/storm-core/src/jvm/backtype/storm/generated/SupervisorInfo.java
@@ -51,7 +51,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-9")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-30")
 public class SupervisorInfo implements org.apache.thrift.TBase<SupervisorInfo, SupervisorInfo._Fields>, java.io.Serializable, Cloneable, Comparable<SupervisorInfo> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("SupervisorInfo");
 

http://git-wip-us.apache.org/repos/asf/storm/blob/444ec05e/storm-core/src/jvm/backtype/storm/generated/SupervisorSummary.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/backtype/storm/generated/SupervisorSummary.java b/storm-core/src/jvm/backtype/storm/generated/SupervisorSummary.java
index 7e36d0f..022ecb4 100644
--- a/storm-core/src/jvm/backtype/storm/generated/SupervisorSummary.java
+++ b/storm-core/src/jvm/backtype/storm/generated/SupervisorSummary.java
@@ -51,7 +51,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-9")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-30")
 public class SupervisorSummary implements org.apache.thrift.TBase<SupervisorSummary, SupervisorSummary._Fields>, java.io.Serializable, Cloneable, Comparable<SupervisorSummary> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("SupervisorSummary");
 

http://git-wip-us.apache.org/repos/asf/storm/blob/444ec05e/storm-core/src/jvm/backtype/storm/generated/ThriftSerializedObject.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/backtype/storm/generated/ThriftSerializedObject.java b/storm-core/src/jvm/backtype/storm/generated/ThriftSerializedObject.java
index 4b2bc63..e233458 100644
--- a/storm-core/src/jvm/backtype/storm/generated/ThriftSerializedObject.java
+++ b/storm-core/src/jvm/backtype/storm/generated/ThriftSerializedObject.java
@@ -51,7 +51,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-9")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-30")
 public class ThriftSerializedObject implements org.apache.thrift.TBase<ThriftSerializedObject, ThriftSerializedObject._Fields>, java.io.Serializable, Cloneable, Comparable<ThriftSerializedObject> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("ThriftSerializedObject");
 

http://git-wip-us.apache.org/repos/asf/storm/blob/444ec05e/storm-core/src/jvm/backtype/storm/generated/TopologyInfo.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/backtype/storm/generated/TopologyInfo.java b/storm-core/src/jvm/backtype/storm/generated/TopologyInfo.java
index 4f78417..c81eac5 100644
--- a/storm-core/src/jvm/backtype/storm/generated/TopologyInfo.java
+++ b/storm-core/src/jvm/backtype/storm/generated/TopologyInfo.java
@@ -51,7 +51,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-9")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-30")
 public class TopologyInfo implements org.apache.thrift.TBase<TopologyInfo, TopologyInfo._Fields>, java.io.Serializable, Cloneable, Comparable<TopologyInfo> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TopologyInfo");
 

http://git-wip-us.apache.org/repos/asf/storm/blob/444ec05e/storm-core/src/jvm/backtype/storm/generated/TopologyPageInfo.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/backtype/storm/generated/TopologyPageInfo.java b/storm-core/src/jvm/backtype/storm/generated/TopologyPageInfo.java
index 180b608..ad29c7d 100644
--- a/storm-core/src/jvm/backtype/storm/generated/TopologyPageInfo.java
+++ b/storm-core/src/jvm/backtype/storm/generated/TopologyPageInfo.java
@@ -51,7 +51,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-9")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-30")
 public class TopologyPageInfo implements org.apache.thrift.TBase<TopologyPageInfo, TopologyPageInfo._Fields>, java.io.Serializable, Cloneable, Comparable<TopologyPageInfo> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TopologyPageInfo");
 

http://git-wip-us.apache.org/repos/asf/storm/blob/444ec05e/storm-core/src/jvm/backtype/storm/generated/TopologyStats.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/backtype/storm/generated/TopologyStats.java b/storm-core/src/jvm/backtype/storm/generated/TopologyStats.java
index 0ff01de..ded0010 100644
--- a/storm-core/src/jvm/backtype/storm/generated/TopologyStats.java
+++ b/storm-core/src/jvm/backtype/storm/generated/TopologyStats.java
@@ -51,7 +51,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-9")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-30")
 public class TopologyStats implements org.apache.thrift.TBase<TopologyStats, TopologyStats._Fields>, java.io.Serializable, Cloneable, Comparable<TopologyStats> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TopologyStats");
 

http://git-wip-us.apache.org/repos/asf/storm/blob/444ec05e/storm-core/src/jvm/backtype/storm/generated/TopologySummary.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/backtype/storm/generated/TopologySummary.java b/storm-core/src/jvm/backtype/storm/generated/TopologySummary.java
index 055a01a..cfa5e24 100644
--- a/storm-core/src/jvm/backtype/storm/generated/TopologySummary.java
+++ b/storm-core/src/jvm/backtype/storm/generated/TopologySummary.java
@@ -51,7 +51,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-9")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-30")
 public class TopologySummary implements org.apache.thrift.TBase<TopologySummary, TopologySummary._Fields>, java.io.Serializable, Cloneable, Comparable<TopologySummary> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TopologySummary");
 

http://git-wip-us.apache.org/repos/asf/storm/blob/444ec05e/storm-core/src/jvm/backtype/storm/generated/WorkerResources.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/backtype/storm/generated/WorkerResources.java b/storm-core/src/jvm/backtype/storm/generated/WorkerResources.java
index 7cfadd7..2ab462f 100644
--- a/storm-core/src/jvm/backtype/storm/generated/WorkerResources.java
+++ b/storm-core/src/jvm/backtype/storm/generated/WorkerResources.java
@@ -51,7 +51,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-9")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-30")
 public class WorkerResources implements org.apache.thrift.TBase<WorkerResources, WorkerResources._Fields>, java.io.Serializable, Cloneable, Comparable<WorkerResources> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("WorkerResources");
 

http://git-wip-us.apache.org/repos/asf/storm/blob/444ec05e/storm-core/src/jvm/backtype/storm/messaging/netty/Client.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/backtype/storm/messaging/netty/Client.java b/storm-core/src/jvm/backtype/storm/messaging/netty/Client.java
index 2149c0d..7ecd770 100644
--- a/storm-core/src/jvm/backtype/storm/messaging/netty/Client.java
+++ b/storm-core/src/jvm/backtype/storm/messaging/netty/Client.java
@@ -17,6 +17,17 @@
  */
 package backtype.storm.messaging.netty;
 
+import java.net.InetSocketAddress;
+import java.net.SocketAddress;
+import java.util.Iterator;
+import java.util.Collection;
+import java.util.Map;
+import java.util.HashMap;
+import java.util.concurrent.atomic.AtomicBoolean;
+import java.util.concurrent.atomic.AtomicInteger;
+import java.util.concurrent.atomic.AtomicReference;
+import java.lang.InterruptedException;
+
 import backtype.storm.Config;
 import backtype.storm.messaging.ConnectionWithStatus;
 import backtype.storm.messaging.TaskMessage;
@@ -34,17 +45,11 @@ import org.jboss.netty.util.TimerTask;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import java.net.InetSocketAddress;
-import java.net.SocketAddress;
+
 import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.Iterator;
 import java.util.List;
-import java.util.Map;
 import java.util.concurrent.TimeUnit;
-import java.util.concurrent.atomic.AtomicInteger;
 import java.util.concurrent.atomic.AtomicLong;
-import java.util.concurrent.atomic.AtomicReference;
 
 import static com.google.common.base.Preconditions.checkState;
 
@@ -60,7 +65,7 @@ import static com.google.common.base.Preconditions.checkState;
  *     - Note: The current implementation drops any messages that are being enqueued for sending if the connection to
  *       the remote destination is currently unavailable.
  */
-public class Client extends ConnectionWithStatus implements IStatefulObject {
+public class Client extends ConnectionWithStatus implements IStatefulObject, ISaslClient {
     private static final long PENDING_MESSAGES_FLUSH_TIMEOUT_MS = 600000L;
     private static final long PENDING_MESSAGES_FLUSH_INTERVAL_MS = 1000L;
 
@@ -73,7 +78,7 @@ public class Client extends ConnectionWithStatus implements IStatefulObject {
     private final ClientBootstrap bootstrap;
     private final InetSocketAddress dstAddress;
     protected final String dstAddressPrefixedName;
 
     /**
      * The channel used for all write operations from this client to the remote destination.
      */
@@ -104,6 +109,10 @@ public class Client extends ConnectionWithStatus implements IStatefulObject {
      */
     private final AtomicLong pendingMessages = new AtomicLong(0);
 
+    /**
+     * Whether the SASL channel is ready.
+     */
+    private final AtomicBoolean saslChannelReady = new AtomicBoolean(false);
 
     /**
      * This flag is set to true if and only if a client instance is being closed.
@@ -125,6 +134,8 @@ public class Client extends ConnectionWithStatus implements IStatefulObject {
         this.scheduler = scheduler;
         this.context = context;
         int bufferSize = Utils.getInt(stormConf.get(Config.STORM_MESSAGING_NETTY_BUFFER_SIZE));
+        // if SASL authentication is disabled, saslChannelReady is initialized as true; otherwise false
+        saslChannelReady.set(!Utils.getBoolean(stormConf.get(Config.STORM_MESSAGING_NETTY_AUTHENTICATION), false));
         LOG.info("creating Netty Client, connecting to {}:{}, bufferSize: {}", host, port, bufferSize);
         int messageBatchSize = Utils.getInt(stormConf.get(Config.STORM_NETTY_MESSAGE_BATCH_SIZE), 262144);
 
@@ -134,19 +145,19 @@ public class Client extends ConnectionWithStatus implements IStatefulObject {
         retryPolicy = new StormBoundedExponentialBackoffRetry(minWaitMs, maxWaitMs, maxReconnectionAttempts);
 
         // Initiate connection to remote destination
-        bootstrap = createClientBootstrap(factory, bufferSize);
+        bootstrap = createClientBootstrap(factory, bufferSize, stormConf);
         dstAddress = new InetSocketAddress(host, port);
         dstAddressPrefixedName = prefixedName(dstAddress);
         scheduleConnect(NO_DELAY_MS);
         batcher = new MessageBuffer(messageBatchSize);
     }
 
-    private ClientBootstrap createClientBootstrap(ChannelFactory factory, int bufferSize) {
+    private ClientBootstrap createClientBootstrap(ChannelFactory factory, int bufferSize, Map stormConf) {
         ClientBootstrap bootstrap = new ClientBootstrap(factory);
         bootstrap.setOption("tcpNoDelay", true);
         bootstrap.setOption("sendBufferSize", bufferSize);
         bootstrap.setOption("keepAlive", true);
-        bootstrap.setPipelineFactory(new StormClientPipelineFactory(this));
+        bootstrap.setPipelineFactory(new StormClientPipelineFactory(this, stormConf));
         return bootstrap;
     }
 
@@ -158,7 +169,7 @@ public class Client extends ConnectionWithStatus implements IStatefulObject {
     }
 
     /**
-     * We will retry connection with exponential back-off policy
+     * Schedule a connection attempt; retries follow an exponential back-off policy.
      */
     private void scheduleConnect(long delayMs) {
         scheduler.newTimeout(new Connect(dstAddress), delayMs, TimeUnit.MILLISECONDS);
@@ -190,7 +201,11 @@ public class Client extends ConnectionWithStatus implements IStatefulObject {
         } else if (!connectionEstablished(channelRef.get())) {
             return Status.Connecting;
         } else {
-            return Status.Ready;
+            if (saslChannelReady.get()) {
+                return Status.Ready;
+            } else {
+                return Status.Connecting; // need to wait until sasl channel is also ready
+            }
         }
     }
 
@@ -202,7 +217,7 @@ public class Client extends ConnectionWithStatus implements IStatefulObject {
     @Override
     public Iterator<TaskMessage> recv(int flags, int clientId) {
         throw new UnsupportedOperationException("Client connection should not receive any messages");
     }
 
     @Override
     public void send(int taskId, byte[] payload) {
@@ -225,7 +240,7 @@ public class Client extends ConnectionWithStatus implements IStatefulObject {
         }
 
         if (!hasMessages(msgs)) {
             return;
         }
 
         Channel channel = getConnectedChannel();
@@ -266,7 +281,7 @@ public class Client extends ConnectionWithStatus implements IStatefulObject {
             // We can rely on `notifyInterestChanged` to push these messages as soon as there is space in Netty's buffer
             // because we know `Channel.isWritable` was false after the messages were already in the buffer.
         }
     }
 
     private Channel getConnectedChannel() {
         Channel channel = channelRef.get();
@@ -281,6 +296,10 @@ public class Client extends ConnectionWithStatus implements IStatefulObject {
             }
             return null;
         }
+    }
+
+    public InetSocketAddress getDstAddress() {
+        return dstAddress;
     }
 
     private boolean hasMessages(Iterator<TaskMessage> msgs) {
@@ -292,7 +311,7 @@ public class Client extends ConnectionWithStatus implements IStatefulObject {
         // We consume the iterator by traversing and thus "emptying" it.
         int msgCount = iteratorSize(msgs);
         messagesLost.getAndAdd(msgCount);
     }
 
     private int iteratorSize(Iterator<TaskMessage> msgs) {
         int size = 0;
@@ -300,8 +319,8 @@ public class Client extends ConnectionWithStatus implements IStatefulObject {
             while (msgs.hasNext()) {
                 size++;
                 msgs.next();
             }
         }
         return size;
     }
 
@@ -335,7 +354,7 @@ public class Client extends ConnectionWithStatus implements IStatefulObject {
             }
 
         });
     }
 
     /**
      * Schedule a reconnect if we closed a non-null channel, and acquired the right to
@@ -375,7 +394,7 @@ public class Client extends ConnectionWithStatus implements IStatefulObject {
         long totalPendingMsgs = pendingMessages.get();
         long startMs = System.currentTimeMillis();
         while (pendingMessages.get() != 0) {
             try {
                 long deltaMs = System.currentTimeMillis() - startMs;
                 if (deltaMs > PENDING_MESSAGES_FLUSH_TIMEOUT_MS) {
                     LOG.error("failed to send all pending messages to {} within timeout, {} of {} messages were not " +
@@ -386,11 +405,10 @@ public class Client extends ConnectionWithStatus implements IStatefulObject {
             }
             catch (InterruptedException e) {
                 break;
             }
         }
 
     }
 
     private void closeChannel() {
         Channel channel = channelRef.get();
@@ -402,7 +420,7 @@ public class Client extends ConnectionWithStatus implements IStatefulObject {
 
     @Override
     public Object getState() {
-        LOG.info("Getting metrics for client connection to {}", dstAddressPrefixedName);
+        LOG.debug("Getting metrics for client connection to {}", dstAddressPrefixedName);
         HashMap<String, Object> ret = new HashMap<String, Object>();
         ret.put("reconnects", totalConnectionAttempts.getAndSet(0));
         ret.put("sent", messagesSent.getAndSet(0));
@@ -416,10 +434,28 @@ public class Client extends ConnectionWithStatus implements IStatefulObject {
         return ret;
     }
 
-    public Map getStormConf() {
+    public Map getConfig() {
         return stormConf;
     }
 
+    /** ISaslClient interface **/
+    public void channelConnected(Channel channel) {
+        // Intentionally a no-op: the client tracks its channel via channelRef,
+        // and channelReady() flips the SASL-ready gate once negotiation completes.
+    }
+
+    public void channelReady() {
+        saslChannelReady.set(true);
+    }
+
+    public String name() {
+        return (String)stormConf.get(Config.TOPOLOGY_NAME);
+    }
+
+    public String secretKey() {
+        return SaslUtils.getSecretKey(stormConf);
+    }
+    /** end **/
+
     private String srcAddressName() {
         String name = null;
         Channel channel = channelRef.get();
@@ -495,7 +531,7 @@ public class Client extends ConnectionWithStatus implements IStatefulObject {
                                     connectionAttempt);
                             if (messagesLost.get() > 0) {
                                 LOG.warn("Re-connection to {} was successful but {} messages has been lost so far", address.toString(), messagesLost.get());
                             }
                         } else {
                             Throwable cause = future.getCause();
                             reschedule(cause);
@@ -510,8 +546,7 @@ public class Client extends ConnectionWithStatus implements IStatefulObject {
                 throw new RuntimeException("Giving up to scheduleConnect to " + dstAddressPrefixedName + " after " +
                         connectionAttempts + " failed attempts. " + messagesLost.get() + " messages were lost");
 
             }
         }
     }
-
 }

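A note for reviewers of the Client.java changes above: status() now gates
Status.Ready on SASL completion as well as on the TCP channel. Below is a
minimal, self-contained sketch of that gating idea (class and field names
are invented for illustration; this is not code from the patch):

    import java.util.concurrent.atomic.AtomicBoolean;
    import java.util.concurrent.atomic.AtomicReference;

    // Hypothetical illustration of the Ready/Connecting gate in status().
    class SaslGatedConnection {
        enum Status { Connecting, Ready, Closed }

        private final AtomicBoolean closing = new AtomicBoolean(false);
        // true from the start when SASL is disabled, set by channelReady() otherwise
        private final AtomicBoolean saslChannelReady = new AtomicBoolean(false);
        private final AtomicReference<Object> channelRef = new AtomicReference<Object>();

        Status status() {
            if (closing.get()) {
                return Status.Closed;
            }
            if (channelRef.get() == null) {
                return Status.Connecting;  // TCP connection not established yet
            }
            if (!saslChannelReady.get()) {
                return Status.Connecting;  // TCP is up, SASL handshake still pending
            }
            return Status.Ready;
        }
    }
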
http://git-wip-us.apache.org/repos/asf/storm/blob/444ec05e/storm-core/src/jvm/backtype/storm/messaging/netty/Context.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/backtype/storm/messaging/netty/Context.java b/storm-core/src/jvm/backtype/storm/messaging/netty/Context.java
index 5d27a16..10c5059 100644
--- a/storm-core/src/jvm/backtype/storm/messaging/netty/Context.java
+++ b/storm-core/src/jvm/backtype/storm/messaging/netty/Context.java
@@ -21,10 +21,9 @@ import org.jboss.netty.channel.socket.nio.NioClientSocketChannelFactory;
 import org.jboss.netty.util.HashedWheelTimer;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
+
 import java.util.concurrent.Executors;
-import java.util.concurrent.ScheduledExecutorService;
 import java.util.concurrent.ThreadFactory;
-import java.util.concurrent.TimeUnit;
 import java.util.HashMap;
 import java.util.Map;
 
@@ -55,6 +54,7 @@ public class Context implements IContext {
         int maxWorkers = Utils.getInt(storm_conf.get(Config.STORM_MESSAGING_NETTY_CLIENT_WORKER_THREADS));
 		ThreadFactory bossFactory = new NettyRenameThreadFactory("client" + "-boss");
         ThreadFactory workerFactory = new NettyRenameThreadFactory("client" + "-worker");
+        // TODO investigate impact of having one worker
         if (maxWorkers > 0) {
             clientChannelFactory = new NioClientSocketChannelFactory(Executors.newCachedThreadPool(bossFactory),
                     Executors.newCachedThreadPool(workerFactory), maxWorkers);
@@ -103,12 +103,10 @@ public class Context implements IContext {
         for (IConnection conn : connections.values()) {
             conn.close();
         }
-
         connections = null;
 
         //we need to release resources associated with client channel factory
         clientChannelFactory.releaseExternalResources();
-
     }
 
     private String key(String host, int port) {

http://git-wip-us.apache.org/repos/asf/storm/blob/444ec05e/storm-core/src/jvm/backtype/storm/messaging/netty/ControlMessage.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/backtype/storm/messaging/netty/ControlMessage.java b/storm-core/src/jvm/backtype/storm/messaging/netty/ControlMessage.java
index fb3efe6..bffd953 100644
--- a/storm-core/src/jvm/backtype/storm/messaging/netty/ControlMessage.java
+++ b/storm-core/src/jvm/backtype/storm/messaging/netty/ControlMessage.java
@@ -23,7 +23,7 @@ import org.jboss.netty.buffer.ChannelBuffer;
 import org.jboss.netty.buffer.ChannelBufferOutputStream;
 import org.jboss.netty.buffer.ChannelBuffers;
 
-enum ControlMessage {
+public enum ControlMessage implements INettySerializable {
     CLOSE_MESSAGE((short)-100),
     EOB_MESSAGE((short)-201),
     OK_RESPONSE((short)-200),
@@ -43,14 +43,14 @@ enum ControlMessage {
      * @param encoded
      * @return
      */
-    static ControlMessage mkMessage(short encoded) {
+    public static ControlMessage mkMessage(short encoded) {
         for(ControlMessage cm: ControlMessage.values()) {
           if(encoded == cm.code) return cm;
         }
         return null;
     }
 
-    int encodeLength() {
+    public int encodeLength() {
         return 2; //short
     }
     
@@ -58,14 +58,19 @@ enum ControlMessage {
      * encode the current Control Message into a channel buffer
      * @throws Exception
      */
-    ChannelBuffer buffer() throws IOException {
+    public ChannelBuffer buffer() throws IOException {
         ChannelBufferOutputStream bout = new ChannelBufferOutputStream(ChannelBuffers.directBuffer(encodeLength()));      
         write(bout);
         bout.close();
         return bout.buffer();
     }
 
-    void write(ChannelBufferOutputStream bout) throws IOException {
+    public static ControlMessage read(byte[] serial) {
+        ChannelBuffer cm_buffer = ChannelBuffers.copiedBuffer(serial);
+        return mkMessage(cm_buffer.getShort(0));
+    }
+    
+    public void write(ChannelBufferOutputStream bout) throws IOException {
         bout.writeShort(code);        
     } 
 }

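With ControlMessage now public and implementing INettySerializable, a control
code can round-trip through raw bytes via buffer() and the new static read().
A small usage sketch (assumes the Netty 3.x classes used throughout this
patch are on the classpath):

    import org.jboss.netty.buffer.ChannelBuffer;
    import backtype.storm.messaging.netty.ControlMessage;

    public class ControlMessageRoundTrip {
        public static void main(String[] args) throws Exception {
            // Encode the two-byte control code into a Netty buffer...
            ChannelBuffer buf = ControlMessage.EOB_MESSAGE.buffer();
            byte[] serial = new byte[buf.readableBytes()];
            buf.getBytes(0, serial);
            // ...and decode it back through the static read() helper.
            System.out.println("round trip ok: "
                    + (ControlMessage.read(serial) == ControlMessage.EOB_MESSAGE));
        }
    }
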
http://git-wip-us.apache.org/repos/asf/storm/blob/444ec05e/storm-core/src/jvm/backtype/storm/messaging/netty/INettySerializable.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/backtype/storm/messaging/netty/INettySerializable.java b/storm-core/src/jvm/backtype/storm/messaging/netty/INettySerializable.java
new file mode 100644
index 0000000..945e6e9
--- /dev/null
+++ b/storm-core/src/jvm/backtype/storm/messaging/netty/INettySerializable.java
@@ -0,0 +1,26 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package backtype.storm.messaging.netty;
+
+import java.io.IOException;
+import org.jboss.netty.buffer.ChannelBuffer;
+
+public interface INettySerializable {
+    public ChannelBuffer buffer() throws IOException;
+    public int encodeLength();
+}

http://git-wip-us.apache.org/repos/asf/storm/blob/444ec05e/storm-core/src/jvm/backtype/storm/messaging/netty/ISaslClient.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/backtype/storm/messaging/netty/ISaslClient.java b/storm-core/src/jvm/backtype/storm/messaging/netty/ISaslClient.java
new file mode 100644
index 0000000..57dcfe8
--- /dev/null
+++ b/storm-core/src/jvm/backtype/storm/messaging/netty/ISaslClient.java
@@ -0,0 +1,28 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package backtype.storm.messaging.netty;
+
+import org.jboss.netty.channel.Channel;
+
+public interface ISaslClient {
+    void channelConnected(Channel channel);
+    void channelReady();
+    String name();
+    String secretKey();
+}

http://git-wip-us.apache.org/repos/asf/storm/blob/444ec05e/storm-core/src/jvm/backtype/storm/messaging/netty/ISaslServer.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/backtype/storm/messaging/netty/ISaslServer.java b/storm-core/src/jvm/backtype/storm/messaging/netty/ISaslServer.java
new file mode 100644
index 0000000..4203dcc
--- /dev/null
+++ b/storm-core/src/jvm/backtype/storm/messaging/netty/ISaslServer.java
@@ -0,0 +1,26 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package backtype.storm.messaging.netty;
+
+import org.jboss.netty.channel.Channel;
+
+public interface ISaslServer extends IServer {
+    String name();
+    String secretKey();
+    void authenticated(Channel c);
+}

http://git-wip-us.apache.org/repos/asf/storm/blob/444ec05e/storm-core/src/jvm/backtype/storm/messaging/netty/IServer.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/backtype/storm/messaging/netty/IServer.java b/storm-core/src/jvm/backtype/storm/messaging/netty/IServer.java
new file mode 100644
index 0000000..d046492
--- /dev/null
+++ b/storm-core/src/jvm/backtype/storm/messaging/netty/IServer.java
@@ -0,0 +1,26 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package backtype.storm.messaging.netty;
+
+import org.jboss.netty.channel.Channel;
+
+public interface IServer {
+    void channelConnected(Channel c);
+    void received(Object message, String remote, Channel channel) throws InterruptedException;
+    void closeChannel(Channel c);
+}

http://git-wip-us.apache.org/repos/asf/storm/blob/444ec05e/storm-core/src/jvm/backtype/storm/messaging/netty/KerberosSaslClientHandler.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/backtype/storm/messaging/netty/KerberosSaslClientHandler.java b/storm-core/src/jvm/backtype/storm/messaging/netty/KerberosSaslClientHandler.java
new file mode 100644
index 0000000..9ae34fe
--- /dev/null
+++ b/storm-core/src/jvm/backtype/storm/messaging/netty/KerberosSaslClientHandler.java
@@ -0,0 +1,154 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package backtype.storm.messaging.netty;
+
+import java.io.IOException;
+import java.util.Map;
+import org.jboss.netty.channel.Channel;
+import org.jboss.netty.channel.ChannelHandlerContext;
+import org.jboss.netty.channel.ChannelStateEvent;
+import org.jboss.netty.channel.Channels;
+import org.jboss.netty.channel.MessageEvent;
+import org.jboss.netty.channel.SimpleChannelUpstreamHandler;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+public class KerberosSaslClientHandler extends SimpleChannelUpstreamHandler {
+
+    private static final Logger LOG = LoggerFactory
+            .getLogger(KerberosSaslClientHandler.class);
+    private ISaslClient client;
+    long start_time;
+    /** Storm configuration used to set up the SASL client. */
+    private Map storm_conf;
+    private String jaas_section;
+
+    public KerberosSaslClientHandler(ISaslClient client, Map storm_conf, String jaas_section) throws IOException {
+        this.client = client;
+        this.storm_conf = storm_conf;
+        this.jaas_section = jaas_section;
+        start_time = System.currentTimeMillis();
+    }
+
+    @Override
+    public void channelConnected(ChannelHandlerContext ctx,
+            ChannelStateEvent event) {
+        // register the newly established channel
+        Channel channel = ctx.getChannel();
+        client.channelConnected(channel);
+
+        LOG.info("Connection established from {} to {}",
+                 channel.getLocalAddress(), channel.getRemoteAddress());
+
+        try {
+            KerberosSaslNettyClient saslNettyClient = KerberosSaslNettyClientState.getKerberosSaslNettyClient
+                    .get(channel);
+
+            if (saslNettyClient == null) {
+                LOG.debug("Creating saslNettyClient now for channel: {}",
+                          channel);
+                saslNettyClient = new KerberosSaslNettyClient(storm_conf, jaas_section);
+                KerberosSaslNettyClientState.getKerberosSaslNettyClient.set(channel,
+                        saslNettyClient);
+            }
+            LOG.debug("Going to initiate Kerberos negotiations.");
+            byte[] initialChallenge = saslNettyClient.saslResponse(new SaslMessageToken(new byte[0]));
+            LOG.debug("Sending initial challenge: {}", initialChallenge);
+            channel.write(new SaslMessageToken(initialChallenge));
+        } catch (Exception e) {
+            LOG.error("Failed to authenticate with server due to error: ",
+                      e);
+        }
+        return;
+
+    }
+
+    @Override
+    public void messageReceived(ChannelHandlerContext ctx, MessageEvent event)
+            throws Exception {
+        LOG.debug("send/recv time (ms): {}",
+                (System.currentTimeMillis() - start_time));
+
+        Channel channel = ctx.getChannel();
+
+        // Generate SASL response to server using Channel-local SASL client.
+        KerberosSaslNettyClient saslNettyClient = KerberosSaslNettyClientState.getKerberosSaslNettyClient
+                .get(channel);
+        if (saslNettyClient == null) {
+            throw new Exception("saslNettyClient was unexpectedly null for channel:" + channel);
+        }
+
+        // examine the response message from server
+        if (event.getMessage() instanceof ControlMessage) {
+            ControlMessage msg = (ControlMessage) event.getMessage();
+            if (msg == ControlMessage.SASL_COMPLETE_REQUEST) {
+                LOG.debug("Server has sent us the SaslComplete message. Allowing normal work to proceed.");
+
+                if (!saslNettyClient.isComplete()) {
+                    String message = "Server returned a Sasl-complete message, but as far as we can tell, we are not authenticated yet.";
+                    LOG.error(message);
+                    throw new Exception(message);
+                }
+                ctx.getPipeline().remove(this);
+                this.client.channelReady();
+
+                // We call fireMessageReceived since the client is allowed to
+                // perform this request. The client's request will now proceed
+                // to the next pipeline component namely StormClientHandler.
+                Channels.fireMessageReceived(ctx, msg);
+            }
+            else {
+                LOG.warn("Unexpected control message: {}", msg);
+            }
+            return;
+        }
+        else if (event.getMessage() instanceof SaslMessageToken) {
+            SaslMessageToken saslTokenMessage = (SaslMessageToken) event
+                .getMessage();
+            LOG.debug("Responding to server's token of length: {}",
+                      saslTokenMessage.getSaslToken().length);
+
+            // Generate the SASL response; we only send it to the server if it
+            // is non-null.
+            byte[] responseToServer = saslNettyClient
+                .saslResponse(saslTokenMessage);
+            if (responseToServer == null) {
+                // If we generate a null response, then authentication has completed
+                // (if not, warn), and return without sending a response back to the
+                // server.
+                LOG.debug("Response to server is null: authentication should now be complete.");
+                if (!saslNettyClient.isComplete()) {
+                    LOG.warn("Generated a null response, but authentication is not complete.");
+                    throw new Exception("Our reponse to the server is null, but as far as we can tell, we are not authenticated yet.");
+                }
+                this.client.channelReady();
+                return;
+            } else {
+                LOG.debug("Response to server token has length: {}",
+                          responseToServer.length);
+            }
+            // Construct a message containing the SASL response and send it to the
+            // server.
+            SaslMessageToken saslResponse = new SaslMessageToken(responseToServer);
+            channel.write(saslResponse);
+        }
+        else {
+            LOG.error("Unexpected message from server: {}", event.getMessage());
+        }
+    }
+}

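For orientation, a hedged sketch of where KerberosSaslClientHandler would sit
in a client pipeline: it goes in front of the normal client handler and
removes itself (ctx.getPipeline().remove(this)) once the server sends
SASL_COMPLETE_REQUEST. The "StormClient" section name and the businessHandler
parameter are placeholders, not names defined by this patch:

    import java.io.IOException;
    import java.util.Map;
    import org.jboss.netty.channel.ChannelPipeline;
    import org.jboss.netty.channel.Channels;
    import org.jboss.netty.channel.SimpleChannelUpstreamHandler;
    import backtype.storm.messaging.netty.ISaslClient;
    import backtype.storm.messaging.netty.KerberosSaslClientHandler;

    public class PipelineSketch {
        static ChannelPipeline build(ISaslClient client, Map stormConf,
                                     SimpleChannelUpstreamHandler businessHandler)
                throws IOException {
            ChannelPipeline pipeline = Channels.pipeline();
            // The SASL handler consumes handshake traffic and removes itself
            // from the pipeline when authentication completes.
            pipeline.addLast("saslClientHandler",
                    new KerberosSaslClientHandler(client, stormConf, "StormClient"));
            // Normal message handling runs only after the SASL handler steps aside.
            pipeline.addLast("clientHandler", businessHandler);
            return pipeline;
        }
    }
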
http://git-wip-us.apache.org/repos/asf/storm/blob/444ec05e/storm-core/src/jvm/backtype/storm/messaging/netty/KerberosSaslNettyClient.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/backtype/storm/messaging/netty/KerberosSaslNettyClient.java b/storm-core/src/jvm/backtype/storm/messaging/netty/KerberosSaslNettyClient.java
new file mode 100644
index 0000000..32afab0
--- /dev/null
+++ b/storm-core/src/jvm/backtype/storm/messaging/netty/KerberosSaslNettyClient.java
@@ -0,0 +1,212 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package backtype.storm.messaging.netty;
+
+import backtype.storm.Config;
+import backtype.storm.security.auth.AuthUtils;
+import java.io.IOException;
+import java.security.Principal;
+import java.security.PrivilegedActionException;
+import java.security.PrivilegedExceptionAction;
+import java.util.Map;
+import java.util.TreeMap;
+import javax.security.auth.Subject;
+import javax.security.auth.callback.Callback;
+import javax.security.auth.callback.CallbackHandler;
+import javax.security.auth.callback.UnsupportedCallbackException;
+import javax.security.auth.kerberos.KerberosTicket;
+import javax.security.auth.login.Configuration;
+import javax.security.auth.login.LoginException;
+import javax.security.sasl.Sasl;
+import javax.security.sasl.SaslClient;
+import javax.security.sasl.SaslException;
+import org.apache.zookeeper.Login;
+import org.apache.zookeeper.server.auth.KerberosName;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * Implements SASL logic for storm worker client processes.
+ */
+public class KerberosSaslNettyClient {
+
+    private static final Logger LOG = LoggerFactory
+            .getLogger(KerberosSaslNettyClient.class);
+
+    /**
+     * Used to respond to server's counterpart, SaslServer with SASL tokens
+     * represented as byte arrays.
+     */
+    private SaslClient saslClient;
+    private Subject subject;
+
+    /**
+     * Create a KerberosSaslNettyClient for authentication with servers.
+     */
+    public KerberosSaslNettyClient(Map storm_conf, String jaas_section) {
+        LOG.debug("KerberosSaslNettyClient: Creating SASL {} client to authenticate to server ",
+                  SaslUtils.KERBEROS);
+        
+        LOG.info("Creating Kerberos Client.");
+        
+        Configuration login_conf;
+        try {
+            login_conf = AuthUtils.GetConfiguration(storm_conf);
+        }
+        catch (Throwable t) {
+            LOG.error("Failed to get login_conf: ", t);
+            throw t;
+        }
+        LOG.debug("KerberosSaslNettyClient: authmethod {}", SaslUtils.KERBEROS);
+        
+        SaslClientCallbackHandler ch = new SaslClientCallbackHandler();
+        
+        subject = null;
+        try {
+            LOG.debug("Setting Configuration to login_config: {}", login_conf);
+            //specify a configuration object to be used
+            Configuration.setConfiguration(login_conf); 
+            //now login
+            LOG.debug("Trying to login.");
+            Login login = new Login(jaas_section, ch);
+            subject = login.getSubject();
+            LOG.debug("Got Subject: {}", subject.toString());
+        } catch (LoginException ex) {
+            LOG.error("Client failed to login in principal:" + ex, ex);
+            throw new RuntimeException(ex);
+        }
+        
+        //check the credential of our principal
+        if (subject.getPrivateCredentials(KerberosTicket.class).isEmpty()) { 
+            LOG.error("Failed to verify user principal.");
+            throw new RuntimeException("Fail to verify user principal with section \"" +
+                                       jaas_section +
+                                       "\" in login configuration file " +
+                                       login_conf);
+        }
+
+        String serviceName = null;
+        try {
+            serviceName = AuthUtils.get(login_conf, jaas_section, "serviceName");
+        }
+        catch (IOException e) {
+            LOG.error("Failed to get service name.", e);
+            throw new RuntimeException(e);
+        }
+
+        try {
+            Principal principal = (Principal)subject.getPrincipals().toArray()[0];
+            final String fPrincipalName = principal.getName();
+            KerberosName kerbName = new KerberosName(principal.getName());
+            final String fHost = (String)storm_conf.get(Config.PACEMAKER_HOST);
+            final String fServiceName = serviceName;
+            final CallbackHandler fch = ch;
+            LOG.debug("Kerberos Client with principal: {}, host: {}", fPrincipalName, fHost);
+            saslClient = Subject.doAs(subject, new PrivilegedExceptionAction<SaslClient>() {
+                    public SaslClient run() {
+                        try {
+                            Map<String, String> props = new TreeMap<String,String>();
+                            props.put(Sasl.QOP, "auth");
+                            props.put(Sasl.SERVER_AUTH, "false");
+                            return Sasl.createSaslClient(
+                                new String[] { SaslUtils.KERBEROS },
+                                fPrincipalName,
+                                fServiceName,
+                                fHost,
+                                props, fch);
+                        }
+                        catch (Exception e) {
+                            LOG.error("Subject failed to create sasl client.", e);
+                            return null;
+                        }
+                    }
+                });
+            LOG.info("Got Client: {}", saslClient);
+            
+        } catch (PrivilegedActionException e) {
+            LOG.error("KerberosSaslNettyClient: Could not create Sasl Netty Client.");
+            throw new RuntimeException(e);
+        }
+    }
+
+    public boolean isComplete() {
+        return saslClient.isComplete();
+    }
+
+    /**
+     * Respond to server's SASL token.
+     * 
+     * @param saslTokenMessage
+     *            contains server's SASL token
+     * @return client's response SASL token
+     */
+    public byte[] saslResponse(SaslMessageToken saslTokenMessage) {
+        try {
+            final SaslMessageToken fSaslTokenMessage = saslTokenMessage;
+            byte [] retval = Subject.doAs(subject, new PrivilegedExceptionAction<byte[]>() {
+                    public byte[] run() {
+                        try {
+                            byte[] retval = saslClient.evaluateChallenge(fSaslTokenMessage
+                                                                         .getSaslToken());
+                            return retval;
+                        } catch (SaslException e) {
+                            LOG.error("saslResponse: Failed to respond to SASL server's token:",
+                                      e);
+                            throw new RuntimeException(e);
+                        }
+                    }
+                });
+            return retval;
+        }
+        catch (PrivilegedActionException e) {
+            LOG.error("Failed to generate response for token: ", e);
+            throw new RuntimeException(e);
+        }
+    }
+
+    /**
+     * Implementation of javax.security.auth.callback.CallbackHandler used
+     * during the Kerberos SASL exchange.
+     */
+    private static class SaslClientCallbackHandler implements CallbackHandler {
+
+        /**
+         * No-arg constructor: the Kerberos credentials are supplied by the
+         * JAAS login, so no token state is kept here.
+         */
+        public SaslClientCallbackHandler() {
+        }
+
+        /**
+         * Implementation used to respond to SASL tokens from server.
+         * 
+         * @param callbacks
+         *            objects that indicate what credential information the
+         *            server's SaslServer requires from the client.
+         * @throws UnsupportedCallbackException
+         */
+        public void handle(Callback[] callbacks) throws UnsupportedCallbackException {
+            for (Callback callback : callbacks) {
+                LOG.info("Kerberos Client Callback Handler got callback: {}", callback.getClass());
+            }
+        }
+    }
+
+}

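KerberosSaslNettyClient logs in through the JAAS section named by its
jaas_section argument and reads a serviceName entry from that section (via
AuthUtils.get above); the server host comes from Config.PACEMAKER_HOST. An
illustrative JAAS section for such a client; the section name, keytab path,
principal, and serviceName below are all placeholder values:

    StormClient {
        com.sun.security.auth.module.Krb5LoginModule required
        useKeyTab=true
        keyTab="/etc/security/keytabs/client.keytab"
        storeKey=true
        useTicketCache=false
        serviceName="pacemaker"
        principal="client@EXAMPLE.COM";
    };
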
http://git-wip-us.apache.org/repos/asf/storm/blob/444ec05e/storm-core/src/jvm/backtype/storm/messaging/netty/KerberosSaslNettyClientState.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/backtype/storm/messaging/netty/KerberosSaslNettyClientState.java b/storm-core/src/jvm/backtype/storm/messaging/netty/KerberosSaslNettyClientState.java
new file mode 100644
index 0000000..1283d9b
--- /dev/null
+++ b/storm-core/src/jvm/backtype/storm/messaging/netty/KerberosSaslNettyClientState.java
@@ -0,0 +1,31 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package backtype.storm.messaging.netty;
+
+import org.jboss.netty.channel.Channel;
+import org.jboss.netty.channel.ChannelLocal;
+
+final class KerberosSaslNettyClientState {
+
+    public static final ChannelLocal<KerberosSaslNettyClient> getKerberosSaslNettyClient = new ChannelLocal<KerberosSaslNettyClient>() {
+        protected KerberosSaslNettyClient initialValue(Channel channel) {
+            return null;
+        }
+    };
+
+}

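ChannelLocal, used by the state holder above (and by its server counterpart
below), is Netty 3's per-channel storage; initialValue supplies the default.
A compact sketch of the get-or-create pattern the handlers build on top of it
(names invented for illustration):

    import org.jboss.netty.channel.Channel;
    import org.jboss.netty.channel.ChannelLocal;

    final class PerChannelState {
        // Defaults to null, like the SASL state: absent until explicitly set.
        static final ChannelLocal<StringBuilder> STATE = new ChannelLocal<StringBuilder>() {
            protected StringBuilder initialValue(Channel channel) {
                return null;
            }
        };

        static StringBuilder getOrCreate(Channel channel) {
            StringBuilder state = STATE.get(channel);
            if (state == null) {
                state = new StringBuilder();
                STATE.set(channel, state);
            }
            return state;
        }
    }
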
http://git-wip-us.apache.org/repos/asf/storm/blob/444ec05e/storm-core/src/jvm/backtype/storm/messaging/netty/KerberosSaslNettyServer.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/backtype/storm/messaging/netty/KerberosSaslNettyServer.java b/storm-core/src/jvm/backtype/storm/messaging/netty/KerberosSaslNettyServer.java
new file mode 100644
index 0000000..a0003c6
--- /dev/null
+++ b/storm-core/src/jvm/backtype/storm/messaging/netty/KerberosSaslNettyServer.java
@@ -0,0 +1,223 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package backtype.storm.messaging.netty;
+
+import backtype.storm.Config;
+import backtype.storm.security.auth.AuthUtils;
+import backtype.storm.security.auth.KerberosPrincipalToLocal;
+import java.io.IOException;
+import java.security.Principal;
+import java.security.PrivilegedActionException;
+import java.security.PrivilegedExceptionAction;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.TreeMap;
+import javax.security.auth.Subject;
+import javax.security.auth.callback.Callback;
+import javax.security.auth.callback.CallbackHandler;
+import javax.security.auth.callback.UnsupportedCallbackException;
+import javax.security.auth.kerberos.KerberosPrincipal;
+import javax.security.auth.kerberos.KerberosTicket;
+import javax.security.auth.login.Configuration;
+import javax.security.auth.login.LoginException;
+import javax.security.sasl.AuthorizeCallback;
+import javax.security.sasl.Sasl;
+import javax.security.sasl.SaslException;
+import javax.security.sasl.SaslServer;
+import org.apache.zookeeper.Login;
+import org.apache.zookeeper.server.auth.KerberosName;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+
+class KerberosSaslNettyServer {
+
+    private static final Logger LOG = LoggerFactory
+        .getLogger(KerberosSaslNettyServer.class);
+
+    private SaslServer saslServer;
+    private Subject subject;
+    private List<String> authorizedUsers;
+    
+    KerberosSaslNettyServer(Map storm_conf, String jaas_section, List<String> authorizedUsers) {
+        this.authorizedUsers = authorizedUsers;
+        LOG.debug("Getting Configuration.");
+        Configuration login_conf;
+        try {
+            login_conf = AuthUtils.GetConfiguration(storm_conf);
+        }
+        catch (Throwable t) {
+            LOG.error("Failed to get login_conf: ", t);
+            throw t;
+        }
+            
+        LOG.debug("KerberosSaslNettyServer: authmethod {}", SaslUtils.KERBEROS);
+
+        KerberosSaslCallbackHandler ch = new KerberosSaslNettyServer.KerberosSaslCallbackHandler(storm_conf, authorizedUsers);
+        
+        //login our principal
+        subject = null;
+        try {
+            LOG.debug("Setting Configuration to login_config: {}", login_conf);
+            //specify a configuration object to be used
+            Configuration.setConfiguration(login_conf); 
+            //now login
+            LOG.debug("Trying to login.");
+            Login login = new Login(jaas_section, ch);
+            subject = login.getSubject();
+            LOG.debug("Got Subject: {}", subject.toString());
+        } catch (LoginException ex) {
+            LOG.error("Server failed to login in principal:", ex);
+            throw new RuntimeException(ex);
+        }
+        
+        //check the credential of our principal
+        if (subject.getPrivateCredentials(KerberosTicket.class).isEmpty()) { 
+            LOG.error("Failed to verifyuser principal.");
+            throw new RuntimeException("Fail to verify user principal with section \""
+                                       + jaas_section
+                                       + "\" in login configuration file "
+                                       + login_conf);
+        }
+
+        try {    
+            LOG.info("Creating Kerberos Server.");
+            final CallbackHandler fch = ch;
+            Principal p = (Principal)subject.getPrincipals().toArray()[0];
+            KerberosName kName = new KerberosName(p.getName());
+            final String fHost = kName.getHostName();
+            final String fServiceName = kName.getServiceName();
+            LOG.debug("Server with host: {}", fHost);
+            saslServer =
+                Subject.doAs(subject, new PrivilegedExceptionAction<SaslServer>() {
+                        public SaslServer run() {
+                            try {
+                                Map<String, String> props = new TreeMap<String,String>();
+                                props.put(Sasl.QOP, "auth");
+                                props.put(Sasl.SERVER_AUTH, "false");
+                                return Sasl.createSaslServer(SaslUtils.KERBEROS,
+                                                             fServiceName,
+                                                             fHost, props, fch);
+                            }
+                            catch (Exception e) {
+                                LOG.error("Subject failed to create sasl server.", e);
+                                return null;
+                            }
+                        }
+                    });
+            LOG.info("Got Server: {}", saslServer);
+                 
+        } catch (PrivilegedActionException e) {
+            LOG.error("KerberosSaslNettyServer: Could not create SaslServer: ", e);
+            throw new RuntimeException(e);
+        }
+    }
+
+    public boolean isComplete() {
+        return saslServer.isComplete();
+    }
+
+    public String getUserName() {
+        return saslServer.getAuthorizationID();
+    }
+
+    private String getPrincipal(Subject subject) {
+        Set<Principal> principals = (Set<Principal>)subject.getPrincipals();
+        if (principals==null || principals.size()<1) {
+            LOG.info("No principal found in login subject");
+            return null;
+        }
+        return ((Principal)(principals.toArray()[0])).getName();
+    }
+
+    /** CallbackHandler for the Kerberos (GSSAPI) SASL mechanism */
+    public static class KerberosSaslCallbackHandler implements CallbackHandler {
+
+        /** Used to authenticate the clients */
+        private Map config;
+        private List<String> authorizedUsers;
+
+        public KerberosSaslCallbackHandler(Map config, List<String> authorizedUsers) {
+            LOG.debug("KerberosSaslCallback: Creating KerberosSaslCallback handler.");
+            this.config = config;
+            this.authorizedUsers = authorizedUsers;
+        }
+
+        @Override
+        public void handle(Callback[] callbacks) throws IOException, UnsupportedCallbackException {
+            for (Callback callback : callbacks) {
+                LOG.info("Kerberos Callback Handler got callback: {}", callback.getClass());
+                if(callback instanceof AuthorizeCallback) {
+                    AuthorizeCallback ac = (AuthorizeCallback)callback;
+                    if(!ac.getAuthenticationID().equals(ac.getAuthorizationID())) {
+                        LOG.debug("{} != {}", ac.getAuthenticationID(), ac.getAuthorizationID());
+                        continue;
+                    }
+
+                    LOG.debug("Authorized Users: {}", authorizedUsers);
+                    LOG.debug("Checking authorization for: {}", ac.getAuthorizationID());
+                    for(String user : authorizedUsers) {
+                        String requester = ac.getAuthorizationID();
+
+                        KerberosPrincipal principal = new KerberosPrincipal(requester);
+                        requester = new KerberosPrincipalToLocal().toLocal(principal);
+
+                        if(requester.equals(user) ) {
+                            ac.setAuthorized(true);
+                            break;
+                        }
+                    }
+                }
+            }
+        }
+    }
+
+    /**
+     * Used by SaslTokenMessage::processToken() to respond to server SASL
+     * tokens.
+     * 
+     * @param token
+     *            Server's SASL token
+     * @return token to send back to the server.
+     */
+    public byte[] response(final byte[] token) {
+        try {
+            byte [] retval = Subject.doAs(subject, new PrivilegedExceptionAction<byte[]>() {
+                    public byte[] run(){
+                        try {
+                            LOG.debug("response: Responding to input token of length: {}",
+                                      token.length);
+                            byte[] retval = saslServer.evaluateResponse(token);
+                            return retval;
+                        } catch (SaslException e) {
+                            LOG.error("response: Failed to evaluate client token of length: {} : {}",
+                                      token.length, e);
+                            throw new RuntimeException(e);
+                        }
+                    }
+                });
+            return retval;
+        }
+        catch (PrivilegedActionException e) {
+            LOG.error("Failed to generate response for token: ", e);
+            throw new RuntimeException(e);
+        }
+    }
+}
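The authorize callback above admits a client only when its Kerberos principal
maps, via KerberosPrincipalToLocal, to a name in the authorizedUsers list. A
condensed sketch of that check; the principal string and user name are
made-up examples:

    import java.util.Arrays;
    import java.util.List;
    import javax.security.auth.kerberos.KerberosPrincipal;
    import backtype.storm.security.auth.KerberosPrincipalToLocal;

    public class AuthorizeSketch {
        public static void main(String[] args) {
            List<String> authorizedUsers = Arrays.asList("pacemaker_client");
            // Map the authenticated principal to a local user name, as the
            // KerberosSaslCallbackHandler above does.
            KerberosPrincipal principal =
                    new KerberosPrincipal("pacemaker_client/host.example.com@EXAMPLE.COM");
            String localUser = new KerberosPrincipalToLocal().toLocal(principal);
            System.out.println("authorized: " + authorizedUsers.contains(localUser));
        }
    }
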

http://git-wip-us.apache.org/repos/asf/storm/blob/444ec05e/storm-core/src/jvm/backtype/storm/messaging/netty/KerberosSaslNettyServerState.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/backtype/storm/messaging/netty/KerberosSaslNettyServerState.java b/storm-core/src/jvm/backtype/storm/messaging/netty/KerberosSaslNettyServerState.java
new file mode 100644
index 0000000..064dc91
--- /dev/null
+++ b/storm-core/src/jvm/backtype/storm/messaging/netty/KerberosSaslNettyServerState.java
@@ -0,0 +1,30 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package backtype.storm.messaging.netty;
+
+import org.jboss.netty.channel.Channel;
+import org.jboss.netty.channel.ChannelLocal;
+
+final class KerberosSaslNettyServerState {
+
+    public static final ChannelLocal<KerberosSaslNettyServer> getKerberosSaslNettyServer = new ChannelLocal<KerberosSaslNettyServer>() {
+        protected KerberosSaslNettyServer initialValue(Channel channel) {
+            return null;
+        }
+    };
+}

http://git-wip-us.apache.org/repos/asf/storm/blob/444ec05e/storm-core/src/jvm/backtype/storm/messaging/netty/KerberosSaslServerHandler.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/backtype/storm/messaging/netty/KerberosSaslServerHandler.java b/storm-core/src/jvm/backtype/storm/messaging/netty/KerberosSaslServerHandler.java
new file mode 100644
index 0000000..3ed3fd7
--- /dev/null
+++ b/storm-core/src/jvm/backtype/storm/messaging/netty/KerberosSaslServerHandler.java
@@ -0,0 +1,132 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package backtype.storm.messaging.netty;
+
+import java.io.IOException;
+import java.util.List;
+import java.util.Map;
+import org.jboss.netty.channel.Channel;
+import org.jboss.netty.channel.ChannelHandlerContext;
+import org.jboss.netty.channel.Channels;
+import org.jboss.netty.channel.ExceptionEvent;
+import org.jboss.netty.channel.MessageEvent;
+import org.jboss.netty.channel.SimpleChannelUpstreamHandler;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+public class KerberosSaslServerHandler extends SimpleChannelUpstreamHandler {
+
+    ISaslServer server;
+    /** Storm configuration map, used when constructing the KerberosSaslNettyServer. */
+    private Map storm_conf;
+    private String jaas_section;
+    private List<String> authorizedUsers;
+    
+    private static final Logger LOG = LoggerFactory
+            .getLogger(KerberosSaslServerHandler.class);
+
+    public KerberosSaslServerHandler(ISaslServer server, Map storm_conf, String jaas_section, List<String> authorizedUsers) throws IOException {
+        this.server = server;
+        this.storm_conf = storm_conf;
+        this.jaas_section = jaas_section;
+        this.authorizedUsers = authorizedUsers;
+    }
+
+    @Override
+    public void messageReceived(ChannelHandlerContext ctx, MessageEvent e)
+            throws Exception {
+        Object msg = e.getMessage();
+        if (msg == null)
+            return;
+
+        Channel channel = ctx.getChannel();
+
+        
+        if (msg instanceof SaslMessageToken) {
+            // initialize server-side SASL functionality, if we haven't yet
+            // (in which case we are looking at the first SASL message from the
+            // client).
+            try {
+                LOG.debug("Got SaslMessageToken!");
+
+                KerberosSaslNettyServer saslNettyServer = KerberosSaslNettyServerState.getKerberosSaslNettyServer
+                    .get(channel);
+                if (saslNettyServer == null) {
+                    LOG.debug("No saslNettyServer for {} yet; creating now.", channel);
+                    try {
+                        saslNettyServer = new KerberosSaslNettyServer(storm_conf, jaas_section, authorizedUsers);
+                    } catch (RuntimeException rte) {
+                        LOG.error("Error occurred while creating saslNettyServer on server {} for client {}",
+                                  channel.getLocalAddress(), channel.getRemoteAddress(), rte);
+                        throw rte;
+                    }
+                    
+                    KerberosSaslNettyServerState.getKerberosSaslNettyServer.set(channel,
+                                                                                saslNettyServer);
+                } else {
+                    LOG.debug("Found existing saslNettyServer on server: {} for client {}",
+                              channel.getLocalAddress(), channel.getRemoteAddress());
+                }
+
+                byte[] responseBytes = saslNettyServer.response(((SaslMessageToken) msg)
+                                                                .getSaslToken());
+                    
+                SaslMessageToken saslTokenMessageRequest = new SaslMessageToken(responseBytes);
+
+                if(saslTokenMessageRequest.getSaslToken() == null) {
+                    channel.write(ControlMessage.SASL_COMPLETE_REQUEST);
+                }
+                else {   
+                    // Send response to client.
+                    channel.write(saslTokenMessageRequest);
+                }
+                    
+                if (saslNettyServer.isComplete()) {
+                    // If authentication of client is complete, we will also send a
+                    // SASL-Complete message to the client.
+                    LOG.info("SASL authentication is complete for client with username: {}",
+                             saslNettyServer.getUserName());
+                    channel.write(ControlMessage.SASL_COMPLETE_REQUEST);
+                    LOG.debug("Removing SaslServerHandler from pipeline since SASL authentication is complete.");
+                    ctx.getPipeline().remove(this);
+                    server.authenticated(channel);
+                }
+                return;
+            }
+            catch (Exception ex) {
+                LOG.error("Failed to handle SaslMessageToken: ", ex);
+                throw ex;
+            }
+        } else {
+            // Client should not be sending other-than-SASL messages before
+            // SaslServerHandler has removed itself from the pipeline. Such
+            // non-SASL requests will be denied by the Authorize channel handler
+            // (the next handler upstream in the server pipeline) if SASL
+            // authentication has not completed.
+            LOG.warn("Sending upstream an unexpected non-SASL message: {}",
+                     msg);
+            Channels.fireMessageReceived(ctx, msg);
+        }
+    }
+
+    @Override
+    public void exceptionCaught(ChannelHandlerContext ctx, ExceptionEvent e) {
+        if(server != null) server.closeChannel(e.getChannel());
+    }
+
+}

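For orientation, a server installs this handler at the front of its Netty pipeline so that SASL negotiation completes before any application traffic flows; the handler removes itself once authentication succeeds. A minimal wiring sketch, where only KerberosSaslServerHandler and AuthUtils.LOGIN_CONTEXT_PACEMAKER_SERVER come from the code in this commit and everything else (class name, user list, application handlers) is illustrative:

    import java.io.IOException;
    import java.util.Arrays;
    import java.util.List;
    import java.util.Map;
    import org.jboss.netty.channel.ChannelPipeline;
    import org.jboss.netty.channel.Channels;
    import backtype.storm.messaging.netty.ISaslServer;
    import backtype.storm.messaging.netty.KerberosSaslServerHandler;
    import backtype.storm.security.auth.AuthUtils;

    public class KerberosPipelineSketch {
        public static ChannelPipeline build(ISaslServer server, Map stormConf)
                throws IOException {
            List<String> authorizedUsers = Arrays.asList("nimbus-user"); // illustrative
            ChannelPipeline pipeline = Channels.pipeline();
            // SASL handler goes first; it removes itself from the pipeline
            // once authentication completes.
            pipeline.addLast("kerberosSaslServerHandler",
                             new KerberosSaslServerHandler(server, stormConf,
                                     AuthUtils.LOGIN_CONTEXT_PACEMAKER_SERVER,
                                     authorizedUsers));
            // Application handlers would be added after this point.
            return pipeline;
        }
    }
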
http://git-wip-us.apache.org/repos/asf/storm/blob/444ec05e/storm-core/src/jvm/backtype/storm/messaging/netty/MessageDecoder.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/backtype/storm/messaging/netty/MessageDecoder.java b/storm-core/src/jvm/backtype/storm/messaging/netty/MessageDecoder.java
index 7d8bf54..8c99e78 100644
--- a/storm-core/src/jvm/backtype/storm/messaging/netty/MessageDecoder.java
+++ b/storm-core/src/jvm/backtype/storm/messaging/netty/MessageDecoder.java
@@ -70,7 +70,7 @@ public class MessageDecoder extends FrameDecoder {
             }
             
             //case 2: SaslTokenMessageRequest
-            if(code==-500) {
+            if(code == SaslMessageToken.IDENTIFIER) {
             	// Make sure that we have received at least an integer (length) 
                 if (buf.readableBytes() < 4) {
                     //need more data
@@ -142,4 +142,4 @@ public class MessageDecoder extends FrameDecoder {
             return ret;
         }
     }
-}
\ No newline at end of file
+}

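For context on the early returns above: Netty's FrameDecoder calls decode() repeatedly as bytes arrive, and returning null is the signal to wait for more data. A stripped-down sketch of just the SASL branch under that contract (the class is illustrative; the real MessageDecoder also handles task messages and control messages):

    import backtype.storm.messaging.netty.SaslMessageToken;
    import org.jboss.netty.buffer.ChannelBuffer;
    import org.jboss.netty.channel.Channel;
    import org.jboss.netty.channel.ChannelHandlerContext;
    import org.jboss.netty.handler.codec.frame.FrameDecoder;

    public class SaslFrameDecoderSketch extends FrameDecoder {
        @Override
        protected Object decode(ChannelHandlerContext ctx, Channel channel,
                                ChannelBuffer buf) throws Exception {
            if (buf.readableBytes() < 2) {
                return null;                        // not even a type code yet
            }
            buf.markReaderIndex();
            short code = buf.readShort();
            if (code == SaslMessageToken.IDENTIFIER) {
                if (buf.readableBytes() < 4) {      // need the 4-byte length field
                    buf.resetReaderIndex();
                    return null;
                }
                int length = buf.readInt();
                if (buf.readableBytes() < length) { // need the whole payload
                    buf.resetReaderIndex();
                    return null;
                }
                byte[] token = new byte[length];
                buf.readBytes(token);
                return new SaslMessageToken(token);
            }
            buf.resetReaderIndex();
            return null; // other message types are elided in this sketch
        }
    }
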
http://git-wip-us.apache.org/repos/asf/storm/blob/444ec05e/storm-core/src/jvm/backtype/storm/messaging/netty/NettyRenameThreadFactory.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/backtype/storm/messaging/netty/NettyRenameThreadFactory.java b/storm-core/src/jvm/backtype/storm/messaging/netty/NettyRenameThreadFactory.java
index 3a91a58..2a1cdea 100644
--- a/storm-core/src/jvm/backtype/storm/messaging/netty/NettyRenameThreadFactory.java
+++ b/storm-core/src/jvm/backtype/storm/messaging/netty/NettyRenameThreadFactory.java
@@ -33,8 +33,9 @@ public class NettyRenameThreadFactory  implements ThreadFactory {
     final ThreadGroup group;
     final AtomicInteger index = new AtomicInteger(1);
     final String name;
+    static final NettyUncaughtExceptionHandler uncaughtExceptionHandler = new NettyUncaughtExceptionHandler();
 
-    NettyRenameThreadFactory(String name) {
+    public NettyRenameThreadFactory(String name) {
         SecurityManager s = System.getSecurityManager();
         group = (s != null)? s.getThreadGroup() :
                              Thread.currentThread().getThreadGroup();
@@ -43,10 +44,13 @@ public class NettyRenameThreadFactory  implements ThreadFactory {
 
     public Thread newThread(Runnable r) {
         Thread t = new Thread(group, r, name + "-" + index.getAndIncrement(), 0);
-        if (t.isDaemon())
+        if (t.isDaemon()) {
             t.setDaemon(false);
-        if (t.getPriority() != Thread.NORM_PRIORITY)
+        }
+        if (t.getPriority() != Thread.NORM_PRIORITY) {
             t.setPriority(Thread.NORM_PRIORITY);
+        }
+        t.setUncaughtExceptionHandler(uncaughtExceptionHandler);
         return t;
     }
 }

http://git-wip-us.apache.org/repos/asf/storm/blob/444ec05e/storm-core/src/jvm/backtype/storm/messaging/netty/NettyUncaughtExceptionHandler.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/backtype/storm/messaging/netty/NettyUncaughtExceptionHandler.java b/storm-core/src/jvm/backtype/storm/messaging/netty/NettyUncaughtExceptionHandler.java
new file mode 100644
index 0000000..3d31544
--- /dev/null
+++ b/storm-core/src/jvm/backtype/storm/messaging/netty/NettyUncaughtExceptionHandler.java
@@ -0,0 +1,35 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package backtype.storm.messaging.netty;
+
+import backtype.storm.utils.Utils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+public class NettyUncaughtExceptionHandler implements Thread.UncaughtExceptionHandler {
+  private static final Logger LOG = LoggerFactory.getLogger(NettyUncaughtExceptionHandler.class);
+  @Override
+  public void uncaughtException(Thread t, Throwable e) {
+    try {
+      Utils.handleUncaughtException(e);
+    } catch (Error error) {
+      LOG.error("Received error in netty thread; terminating server.", error);
+      Runtime.getRuntime().exit(1);
+    }
+  }
+}

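Taken together, the two changes above mean every thread the factory creates is non-daemon, normal priority, named with an incrementing suffix, and carries the shared uncaught-exception handler; the now-public constructor lets other packages use it. A small usage sketch (the pool and task are illustrative):

    import java.util.concurrent.ExecutorService;
    import java.util.concurrent.Executors;
    import backtype.storm.messaging.netty.NettyRenameThreadFactory;

    public class ThreadFactorySketch {
        public static void main(String[] args) {
            // Threads come out named "client-worker-1", "client-worker-2", ...
            ExecutorService pool = Executors.newCachedThreadPool(
                new NettyRenameThreadFactory("client-worker"));
            pool.execute(new Runnable() {
                public void run() {
                    // A Throwable escaping run() reaches NettyUncaughtExceptionHandler;
                    // note execute(), unlike submit(), does not capture it in a Future.
                    System.out.println(Thread.currentThread().getName());
                }
            });
            pool.shutdown();
        }
    }
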
http://git-wip-us.apache.org/repos/asf/storm/blob/444ec05e/storm-core/src/jvm/backtype/storm/messaging/netty/SaslMessageToken.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/backtype/storm/messaging/netty/SaslMessageToken.java b/storm-core/src/jvm/backtype/storm/messaging/netty/SaslMessageToken.java
index d0d3ca1..2fe5c2d 100644
--- a/storm-core/src/jvm/backtype/storm/messaging/netty/SaslMessageToken.java
+++ b/storm-core/src/jvm/backtype/storm/messaging/netty/SaslMessageToken.java
@@ -17,6 +17,7 @@
  */
 package backtype.storm.messaging.netty;
 
+import java.io.IOException;
 import org.jboss.netty.buffer.ChannelBuffer;
 import org.jboss.netty.buffer.ChannelBufferOutputStream;
 import org.jboss.netty.buffer.ChannelBuffers;
@@ -26,7 +27,10 @@ import org.slf4j.LoggerFactory;
 /**
  * Send and receive SASL tokens.
  */
-public class SaslMessageToken {
+
+public class SaslMessageToken implements INettySerializable {
+    public static final short IDENTIFIER = -500;
+
     /** Class logger */
     private static final Logger LOG = LoggerFactory
             .getLogger(SaslMessageToken.class);
@@ -69,7 +73,8 @@ public class SaslMessageToken {
         this.token = token;
     }
 
-    int encodeLength() {
+
+    public int encodeLength() {
         return 2 + 4 + token.length;
     }
 
@@ -80,15 +85,15 @@ public class SaslMessageToken {
      * 
      * @throws Exception
      */
-    ChannelBuffer buffer() throws Exception {
+    public ChannelBuffer buffer() throws IOException {
         ChannelBufferOutputStream bout = new ChannelBufferOutputStream(
                 ChannelBuffers.directBuffer(encodeLength()));
-        short identifier = -500;
         int payload_len = 0;
         if (token != null)
             payload_len = token.length;
 
-        bout.writeShort((short) identifier);
+
+        bout.writeShort(IDENTIFIER);
         bout.writeInt((int) payload_len);
         if (payload_len > 0) {
             bout.write(token);
@@ -96,4 +101,16 @@ public class SaslMessageToken {
         bout.close();
         return bout.buffer();
     }
+    
+    public static SaslMessageToken read(byte[] serial) {
+        ChannelBuffer sm_buffer = ChannelBuffers.copiedBuffer(serial);
+        short identifier = sm_buffer.readShort();
+        int payload_len = sm_buffer.readInt();
+        if (identifier != IDENTIFIER) {
+            return null;
+        }
+        byte[] token = new byte[payload_len];
+        sm_buffer.readBytes(token, 0, payload_len);
+        return new SaslMessageToken(token);
+    }
 }

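The wire format captured by the IDENTIFIER constant above is: a 2-byte message type (-500), a 4-byte payload length, then the token bytes, which is exactly why encodeLength() returns 2 + 4 + token.length. A self-contained round-trip sketch of that layout in plain NIO (illustrative, not Storm code):

    import java.nio.ByteBuffer;
    import java.util.Arrays;

    public class SaslTokenFramingSketch {
        static final short IDENTIFIER = -500;   // mirrors SaslMessageToken.IDENTIFIER

        static byte[] encode(byte[] token) {
            ByteBuffer buf = ByteBuffer.allocate(2 + 4 + token.length);
            buf.putShort(IDENTIFIER);           // 2-byte message type
            buf.putInt(token.length);           // 4-byte payload length
            buf.put(token);                     // the SASL token itself
            return buf.array();
        }

        static byte[] decode(byte[] frame) {
            ByteBuffer buf = ByteBuffer.wrap(frame);
            if (buf.getShort() != IDENTIFIER) {
                return null;                    // not a SASL token frame
            }
            byte[] token = new byte[buf.getInt()];
            buf.get(token);
            return token;
        }

        public static void main(String[] args) {
            byte[] token = "challenge-bytes".getBytes();
            System.out.println(Arrays.equals(token, decode(encode(token)))); // true
        }
    }
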

[35/37] storm git commit: Fixing nits.

Posted by kn...@apache.org.
Fixing nits.


Project: http://git-wip-us.apache.org/repos/asf/storm/repo
Commit: http://git-wip-us.apache.org/repos/asf/storm/commit/e7a8c94c
Tree: http://git-wip-us.apache.org/repos/asf/storm/tree/e7a8c94c
Diff: http://git-wip-us.apache.org/repos/asf/storm/diff/e7a8c94c

Branch: refs/heads/master
Commit: e7a8c94c94d22337dacc65de8ccd6b18809bec63
Parents: 9bfb26c
Author: Kyle Nusbaum <Ky...@gmail.com>
Authored: Mon Nov 23 14:38:32 2015 -0600
Committer: Kyle Nusbaum <Ky...@gmail.com>
Committed: Mon Nov 23 14:38:32 2015 -0600

----------------------------------------------------------------------
 storm-core/src/clj/backtype/storm/util.clj                   | 2 +-
 .../src/jvm/backtype/storm/security/auth/AuthUtils.java      | 8 +++-----
 2 files changed, 4 insertions(+), 6 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/storm/blob/e7a8c94c/storm-core/src/clj/backtype/storm/util.clj
----------------------------------------------------------------------
diff --git a/storm-core/src/clj/backtype/storm/util.clj b/storm-core/src/clj/backtype/storm/util.clj
index 0fab3e7..8835572 100644
--- a/storm-core/src/clj/backtype/storm/util.clj
+++ b/storm-core/src/clj/backtype/storm/util.clj
@@ -1067,7 +1067,7 @@
   [retries task-description f & args]
   (let [res (try {:value (apply f args)}
               (catch Exception e
-                (if (= 0 retries)
+                (if (<= retries 0)
                   (throw e)
                   {:exception e})))]
     (if (:exception res)

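The corrected guard above throws only once the retry budget is exhausted (retries <= 0); otherwise the exception is captured and the function recurs with a decremented count. An equivalent sketch of that control flow in Java (names are illustrative; the real Clojure function also logs each failed attempt):

    import java.util.concurrent.Callable;

    public class RetrySketch {
        static <T> T retryOnException(int retries, String taskDescription, Callable<T> f)
                throws Exception {
            while (true) {
                try {
                    return f.call();
                } catch (Exception e) {
                    if (retries <= 0) {   // budget exhausted: propagate
                        throw e;
                    }
                    retries--;            // consume one retry and try again
                    System.err.println("Failed to " + taskDescription + ", retrying...");
                }
            }
        }

        public static void main(String[] args) throws Exception {
            final int[] calls = {0};
            int result = retryOnException(3, "flaky call", new Callable<Integer>() {
                public Integer call() {
                    if (++calls[0] < 3) {
                        throw new RuntimeException("transient failure");
                    }
                    return calls[0];
                }
            });
            System.out.println(result);  // 3: succeeded on the third attempt
        }
    }
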
http://git-wip-us.apache.org/repos/asf/storm/blob/e7a8c94c/storm-core/src/jvm/backtype/storm/security/auth/AuthUtils.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/backtype/storm/security/auth/AuthUtils.java b/storm-core/src/jvm/backtype/storm/security/auth/AuthUtils.java
index 429c712..6f6969e 100644
--- a/storm-core/src/jvm/backtype/storm/security/auth/AuthUtils.java
+++ b/storm-core/src/jvm/backtype/storm/security/auth/AuthUtils.java
@@ -323,8 +323,7 @@ public class AuthUtils {
             Map<String, ?> results = AuthUtils.PullConfig(login_config, config_section);
             username = (String)results.get(USERNAME);
             password = (String)results.get(PASSWORD);
-        }
-        catch (Exception e) {
+        } catch (Exception e) {
             LOG.error("Failed to pull username/password out of jaas conf", e);
         }
 
@@ -338,11 +337,10 @@ public class AuthUtils {
 
             StringBuilder builder = new StringBuilder();
             for(byte b : output) {
-            builder.append(String.format("%02x", b));
+                builder.append(String.format("%02x", b));
             }
             return builder.toString();
-        }
-        catch(java.security.NoSuchAlgorithmException e) {
+        } catch (java.security.NoSuchAlgorithmException e) {
             LOG.error("Cant run SHA-512 digest. Algorithm not available.", e);
             throw new RuntimeException(e);
         }

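The reindented loop above hex-encodes a SHA-512 digest, two lowercase hex characters per byte. A self-contained sketch of the same idiom (illustrative; the real method feeds in the password pulled from the JAAS config):

    import java.security.MessageDigest;
    import java.security.NoSuchAlgorithmException;

    public class DigestSketch {
        public static String sha512Hex(byte[] input) {
            try {
                byte[] output = MessageDigest.getInstance("SHA-512").digest(input);
                StringBuilder builder = new StringBuilder();
                for (byte b : output) {
                    builder.append(String.format("%02x", b)); // two hex chars per byte
                }
                return builder.toString();
            } catch (NoSuchAlgorithmException e) {
                // Every compliant JRE ships SHA-512, so this is unexpected.
                throw new RuntimeException(e);
            }
        }

        public static void main(String[] args) {
            System.out.println(sha512Hex("password".getBytes())); // 128 hex characters
        }
    }
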

[12/37] storm git commit: Addressing some of the comments on the pull request.

Posted by kn...@apache.org.
Addressing some of the comments on the pull request.


Project: http://git-wip-us.apache.org/repos/asf/storm/repo
Commit: http://git-wip-us.apache.org/repos/asf/storm/commit/2a782ce7
Tree: http://git-wip-us.apache.org/repos/asf/storm/tree/2a782ce7
Diff: http://git-wip-us.apache.org/repos/asf/storm/diff/2a782ce7

Branch: refs/heads/master
Commit: 2a782ce746565561420b60159feebef84761796f
Parents: d59e936
Author: Kyle Nusbaum <Ky...@gmail.com>
Authored: Mon Nov 9 16:26:57 2015 -0600
Committer: Kyle Nusbaum <Ky...@gmail.com>
Committed: Mon Nov 9 16:26:57 2015 -0600

----------------------------------------------------------------------
 conf/defaults.yaml                              |   1 -
 .../cluster_state/zookeeper_state_factory.clj   |  27 +--
 .../org/apache/storm/pacemaker/pacemaker.clj    |  11 +-
 storm-core/src/jvm/backtype/storm/Config.java   |  14 +-
 .../backtype/storm/cluster/ClusterState.java    | 170 ++++++++++++++++++-
 .../pacemaker/codec/ThriftNettyServerCodec.java |   6 +-
 6 files changed, 195 insertions(+), 34 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/storm/blob/2a782ce7/conf/defaults.yaml
----------------------------------------------------------------------
diff --git a/conf/defaults.yaml b/conf/defaults.yaml
index c6fd2e9..d92b140 100644
--- a/conf/defaults.yaml
+++ b/conf/defaults.yaml
@@ -49,7 +49,6 @@ storm.auth.simple-white-list.users: []
 storm.auth.simple-acl.users: []
 storm.auth.simple-acl.users.commands: []
 storm.auth.simple-acl.admins: []
-#storm.cluster.state.store: "org.apache.storm.pacemaker.pacemaker_state_factory"
 storm.cluster.state.store: "backtype.storm.cluster_state.zookeeper_state_factory"
 storm.meta.serialization.delegate: "backtype.storm.serialization.GzipThriftSerializationDelegate"
 storm.codedistributor.class: "backtype.storm.codedistributor.LocalFileSystemCodeDistributor"

http://git-wip-us.apache.org/repos/asf/storm/blob/2a782ce7/storm-core/src/clj/backtype/storm/cluster_state/zookeeper_state_factory.clj
----------------------------------------------------------------------
diff --git a/storm-core/src/clj/backtype/storm/cluster_state/zookeeper_state_factory.clj b/storm-core/src/clj/backtype/storm/cluster_state/zookeeper_state_factory.clj
index 3e656b2..ca45ec4 100644
--- a/storm-core/src/clj/backtype/storm/cluster_state/zookeeper_state_factory.clj
+++ b/storm-core/src/clj/backtype/storm/cluster_state/zookeeper_state_factory.clj
@@ -15,6 +15,7 @@
 ;; limitations under the License.
 
 (ns backtype.storm.cluster-state.zookeeper-state-factory
+  (:import [org.apache.curator.framework.state ConnectionStateListener])
   (:import [org.apache.zookeeper KeeperException KeeperException$NoNodeException ZooDefs ZooDefs$Ids ZooDefs$Perms]
            [backtype.storm.cluster ClusterState ClusterStateContext DaemonType])
   (:use [backtype.storm cluster config log util])
@@ -95,7 +96,7 @@
      (set-worker-hb
        [this path data acls]
        (.set_data this path data acls))
-     
+
      (delete-node
        [this path]
        (zk/delete-node zk-writer path))
@@ -103,7 +104,7 @@
      (delete-worker-hb
        [this path]
        (.delete_node this path))
-     
+
      (get-data
        [this path watch?]
        (zk/get-data zk-reader path watch?))
@@ -112,14 +113,14 @@
        [this path watch?]
        (zk/get-data-with-version zk-reader path watch?))
 
-     (get-version 
+     (get-version
        [this path watch?]
        (zk/get-version zk-reader path watch?))
 
      (get-worker-hb
        [this path watch?]
        (.get_data this path watch?))
-     
+
      (get-children
        [this path watch?]
        (zk/get-children zk-reader path watch?))
@@ -127,7 +128,7 @@
      (get-worker-hb-children
        [this path watch?]
        (.get_children this path watch?))
-     
+
      (mkdirs
        [this path acls]
        (zk/mkdirs zk-writer path acls))
@@ -137,13 +138,17 @@
        (zk/exists-node? zk-reader path watch?))
 
      (add-listener
-        [this listener]
-        (zk/add-listener zk-reader listener))
+       [this listener]
+       (let [curator-listener (reify ConnectionStateListener
+                                (stateChanged
+                                  [this client newState]
+                                  (.stateChanged listener client newState)))]
+         (zk/add-listener zk-reader curator-listener)))
+
+     (sync-path
+       [this path]
+       (zk/sync-path zk-writer path))
 
-      (sync-path
-        [this path]
-        (zk/sync-path zk-writer path))
-     
      (close
        [this]
        (reset! active false)

http://git-wip-us.apache.org/repos/asf/storm/blob/2a782ce7/storm-core/src/clj/org/apache/storm/pacemaker/pacemaker.clj
----------------------------------------------------------------------
diff --git a/storm-core/src/clj/org/apache/storm/pacemaker/pacemaker.clj b/storm-core/src/clj/org/apache/storm/pacemaker/pacemaker.clj
index 1b2ad1b..3770229 100644
--- a/storm-core/src/clj/org/apache/storm/pacemaker/pacemaker.clj
+++ b/storm-core/src/clj/org/apache/storm/pacemaker/pacemaker.clj
@@ -27,16 +27,6 @@
   (:require [clojure.java.jmx :as jmx])
   (:gen-class))
 
-;; This is the old Thrift service that this server is emulating.
-;  void createPath(1: string path) throws (1: HBExecutionException e, 2: HBAuthorizationException aze);
-;  bool exists(1: string path) throws (1: HBExecutionException e, 2: HBAuthorizationException aze);
-;  void sendPulse(1: Pulse pulse) throws (1: HBExecutionException e, 2: HBAuthorizationException aze);
-;  HBRecords getAllPulseForPath(1: string idPrefix) throws (1: HBExecutionException e, 2: HBAuthorizationException aze);
-;  HBNodes getAllNodesForPath(1: string idPrefix) throws (1: HBExecutionException e, 2: HBAuthorizationException aze);
-;  Pulse getPulse(1: string id) throws (1: HBExecutionException e, 2: HBAuthorizationException aze);
-;  void deletePath(1: string idPrefix) throws (1: HBExecutionException e, 2: HBAuthorizationException aze);
-;  void deletePulseId(1: string id) throws (1: HBExecutionException e, 2: HBAuthorizationException aze);
-
 
 ;; Stats Functions
 
@@ -105,6 +95,7 @@
     last-five-s)
    "org.apache.storm.pacemaker.pacemaker:stats=Stats_Last_5_Seconds"))
 
+
 ;; Pacemaker Functions
 
 (defn hb-data [conf]

http://git-wip-us.apache.org/repos/asf/storm/blob/2a782ce7/storm-core/src/jvm/backtype/storm/Config.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/backtype/storm/Config.java b/storm-core/src/jvm/backtype/storm/Config.java
index 0a00ee9..fdbb65f 100644
--- a/storm-core/src/jvm/backtype/storm/Config.java
+++ b/storm-core/src/jvm/backtype/storm/Config.java
@@ -418,6 +418,13 @@ public class Config extends HashMap<String, Object> {
     public static final String NIMBUS_SUPERVISOR_USERS = "nimbus.supervisor.users";
 
     /**
+     * This is the user that the Nimbus daemon process is running as. May be used when security
+     * is enabled to authorize actions in the cluster.
+     */
+    @isString
+    public static final String NIMBUS_DAEMON_USER = "nimbus.daemon.user";
+
+    /**
      * The maximum buffer size thrift should use when reading messages.
      */
     @isInteger
@@ -798,13 +805,6 @@ public class Config extends HashMap<String, Object> {
      */
     @isString
     public static final String PACEMAKER_AUTH_METHOD = "pacemaker.auth.method";
-
-    /**
-     * These are the kerberos users who are authorized to read hearbeats from
-     * Pacemaker.
-     */
-    @isStringList
-    public static final String PACEMAKER_KERBEROS_USERS = "pacemaker.kerberos.users";
     
     /**
      * List of DRPC servers so that the DRPCSpout knows who to talk to.

http://git-wip-us.apache.org/repos/asf/storm/blob/2a782ce7/storm-core/src/jvm/backtype/storm/cluster/ClusterState.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/backtype/storm/cluster/ClusterState.java b/storm-core/src/jvm/backtype/storm/cluster/ClusterState.java
index 638d905..e0ddc06 100644
--- a/storm-core/src/jvm/backtype/storm/cluster/ClusterState.java
+++ b/storm-core/src/jvm/backtype/storm/cluster/ClusterState.java
@@ -22,24 +22,188 @@ import clojure.lang.IFn;
 import java.util.List;
 import org.apache.zookeeper.data.ACL;
 
+/**
+ * ClusterState provides the API for the pluggable state store used by the
+ * Storm daemons. Data is stored in path/value format, and the store supports
+ * listing sub-paths at a given path.
+ * All data should be available across all nodes with eventual consistency.
+ *
+ * IMPORTANT NOTE:
+ * Heartbeats have different api calls used to interact with them. The root
+ * Heartbeats are accessed through a separate set of API calls (the *_hb*
+ * methods), whose root path (/) may or may not be the same as the root path
+ * used by the other API calls.
+ * For example, performing these two calls:
+ *     set_data("/path", data, acls);
+ *     void set_worker_hb("/path", heartbeat, acls);
+ *     set_worker_hb("/path", heartbeat, acls);
+ * Never use the same paths with the *_hb* methods as you do with the others.
+ */
 public interface ClusterState {
+
+    /**
+     * Registers a callback function that gets called when CuratorEvents happen.
+     * @param callback is a clojure IFn that accepts the type - translated to
+     * clojure keyword as in zookeeper.clj - and the path: (callback type path)
+     * @return is an id that can be passed to unregister(...) to unregister the
+     * callback.
+     */
+    String register(IFn callback);
+
+    /**
+     * Unregisters a callback function that was registered with register(...).
+     * @param id is the String id that was returned from register(...).
+     */
     void unregister(String id);
-    void create_sequential(String path, byte[] data, List<ACL> acls);
-    void mkdirs(String path, List<ACL> acls);
+
+    /**
+     * Path will be appended with a monotonically increasing integer, a new node
+     * will be created there, and data will be put at that node.
+     * @param path The path that the monotonically increasing integer suffix will
+     * be added to.
+     * @param data The data that will be written at the suffixed path's node.
+     * @param acls The acls to apply to the path. May be null.
+     * @return The path with the integer suffix appended.
+     */
+    String create_sequential(String path, byte[] data, List<ACL> acls);
+
+    /**
+     * Creates nodes for path and all its parents. Path elements are separated by
+     * a "/", as in *nix filesystem notation. Equivalent to mkdir -p in *nix.
+     * @param path The path to create, along with all its parents.
+     * @param acls The acls to apply to the path. May be null.
+     * @return path
+     */
+    String mkdirs(String path, List<ACL> acls);
+
+    /**
+     * Deletes the node at a given path, and any child nodes that may exist.
+     * @param path The path to delete
+     */
     void delete_node(String path);
+
+    /**
+     * Creates an ephemeral node at path. Ephemeral nodes are destroyed
+     * by the store when the client disconnects.
+     * @param path The path where a node will be created.
+     * @param data The data to be written at the node.
+     * @param acls The acls to apply to the path. May be null.
+     */
     void set_ephemeral_node(String path, byte[] data, List<ACL> acls);
+
+    /**
+     * Gets the 'version' of the node at a path. Optionally sets a watch
+     * on that node. The version should increase whenever a write happens.
+     * @param path The path to get the version of.
+     * @param watch Whether or not to set a watch on the path. Watched paths
+     * emit events which are consumed by functions registered with the
+     * register method. Very useful for catching updates to nodes.
+     * @return The integer version of this node.
+     */
     Integer get_version(String path, boolean watch);
+
+    /**
+     * Check if a node exists and optionally set a watch on the path.
+     * @param path The path to check for the existence of a node.
+     * @param watch Whether or not to set a watch on the path. Watched paths
+     * emit events which are consumed by functions registered with the
+     * register method. Very useful for catching updates to nodes.
+     * @return Whether or not a node exists at path.
+     */
     boolean node_exists(String path, boolean watch);
+
+    /**
+     * Get a list of paths of all the child nodes which exist immediately
+     * under path.
+     * @param path The path to look under
+     * @param watch Whether or not to set a watch on the path. Watched paths
+     * emit events which are consumed by functions registered with the
+     * register method. Very useful for catching updates to nodes.
+     * @return list of string paths under path.
+     */
     List<String> get_children(String path, boolean watch);
+
+    /**
+     * Close the connection to the data store.
+     */
     void close();
+
+    /**
+     * Set the value of the node at path to data.
+     * @param path The path whose node we want to set.
+     * @param data The data to put in the node.
+     * @param acls The acls to apply to the path. May be null.
+     */
     void set_data(String path, byte[] data, List<ACL> acls);
-    String register(IFn callback);
+
+    /**
+     * Get the data from the node at path
+     * @param path The path to look under
+     * @param watch Whether or not to set a watch on the path. Watched paths
+     * emit events which are consumed by functions registered with the
+     * register method. Very useful for catching updates to nodes.
+     * @return The data at the node.
+     */
     byte[] get_data(String path, boolean watch);
+
+    /**
+     * Get the data at the node along with its version. Data is returned
+     * in an APersistentMap with clojure keyword keys :data and :version.
+     * @param path The path to look under
+     * @param watch Whether or not to set a watch on the path. Watched paths
+     * emit events which are consumed by functions registered with the
+     * register method. Very useful for catching updates to nodes.
+     * @return An APersistentMap in the form {:data data :version version}
+     */
     APersistentMap get_data_with_version(String path, boolean watch);
+
+    /**
+     * Write a worker heartbeat at the path.
+     * @param path The path whose node we want to set.
+     * @param data The data to put in the node.
+     * @param acls The acls to apply to the path. May be null.
+     */
     void set_worker_hb(String path, byte[] data, List<ACL> acls);
+
+    /**
+     * Get the heartbeat from the node at path
+     * @param path The path to look under
+     * @param watch Whether or not to set a watch on the path. Watched paths
+     * emit events which are consumed by functions registered with the
+     * register method. Very useful for catching updates to nodes.
+     * @return The heartbeat at the node.
+     */
     byte[] get_worker_hb(String path, boolean watch);
+
+    /**
+     * Get a list of paths of all the child nodes which exist immediately
+     * under path. This is similar to get_children, but must be used for
+     * any nodes written with set_worker_hb (see the heartbeat note above).
+     * @param path The path to look under
+     * @param watch Whether or not to set a watch on the path. Watched paths
+     * emit events which are consumed by functions registered with the
+     * register method. Very useful for catching updates to nodes.
+     * @return list of string paths under path.
+     */
     List<String> get_worker_hb_children(String path, boolean watch);
+
+    /**
+     * Deletes the heartbeat at a given path, and any child nodes that may exist.
+     * @param path The path to delete.
+     */
     void delete_worker_hb(String path);
+
+    /**
+     * Add a ClusterStateListener to the connection.
+     * @param listener A ClusterStateListener to handle changing cluster state
+     * events.
+     */
     void add_listener(ClusterStateListener listener);
+
+    /**
+     * Force consistency on a path. Any writes committed on the path before
+     * this call will be completely propagated when it returns.
+     * @param path The path to synchronize.
+     */
     void sync_path(String path);
 }

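To make the register/watch contract above concrete: registered callbacks receive an event type and a path, and watches are one-shot, so a consumer re-arms them on each read. A hedged usage sketch, assuming some ClusterState implementation has already been constructed by the pluggable factory:

    import backtype.storm.cluster.ClusterState;
    import clojure.lang.AFn;

    public class ClusterStateWatchSketch {
        public static void watch(final ClusterState state, final String path) {
            final String id = state.register(new AFn() {
                @Override
                public Object invoke(Object type, Object eventPath) {
                    if (path.equals(eventPath)) {
                        // Watches are one-shot; passing true re-arms this one.
                        byte[] data = state.get_data(path, true);
                        System.out.println(path + " changed, now "
                            + (data == null ? 0 : data.length) + " bytes");
                    }
                    return null;
                }
            });
            state.node_exists(path, true);  // set the initial watch
            // ... eventually: state.unregister(id); state.close();
        }
    }
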
http://git-wip-us.apache.org/repos/asf/storm/blob/2a782ce7/storm-core/src/jvm/org/apache/storm/pacemaker/codec/ThriftNettyServerCodec.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/org/apache/storm/pacemaker/codec/ThriftNettyServerCodec.java b/storm-core/src/jvm/org/apache/storm/pacemaker/codec/ThriftNettyServerCodec.java
index 7d3018d..c1ca00e 100644
--- a/storm-core/src/jvm/org/apache/storm/pacemaker/codec/ThriftNettyServerCodec.java
+++ b/storm-core/src/jvm/org/apache/storm/pacemaker/codec/ThriftNettyServerCodec.java
@@ -25,7 +25,7 @@ import backtype.storm.messaging.netty.SaslStormServerHandler;
 import backtype.storm.messaging.netty.StormServerHandler;
 import backtype.storm.security.auth.AuthUtils;
 import java.io.IOException;
-import java.util.List;
+import java.util.ArrayList;
 import java.util.Map;
 import org.jboss.netty.channel.ChannelPipeline;
 import org.jboss.netty.channel.ChannelPipelineFactory;
@@ -76,10 +76,12 @@ public class ThriftNettyServerCodec {
                 else if(authMethod == AuthMethod.KERBEROS) {
                     try {
                         LOG.debug("Adding KerberosSaslServerHandler to pacemaker server pipeline.");
+                        ArrayList<String> authorizedUsers = new ArrayList<String>(1);
+                        authorizedUsers.add((String)storm_conf.get(Config.NIMBUS_DAEMON_USER));
                         pipeline.addLast(KERBEROS_HANDLER, new KerberosSaslServerHandler((ISaslServer)server,
                                                                                          storm_conf,
                                                                                          AuthUtils.LOGIN_CONTEXT_PACEMAKER_SERVER,
-                                                                                         (List)storm_conf.get(Config.PACEMAKER_KERBEROS_USERS)));
+                                                                                         authorizedUsers));
                     }
                     catch (IOException e) {
                         throw new RuntimeException(e);


[22/37] storm git commit: Minor tweaks to documentation.

Posted by kn...@apache.org.
Minor tweaks to documentation.


Project: http://git-wip-us.apache.org/repos/asf/storm/repo
Commit: http://git-wip-us.apache.org/repos/asf/storm/commit/921db438
Tree: http://git-wip-us.apache.org/repos/asf/storm/tree/921db438
Diff: http://git-wip-us.apache.org/repos/asf/storm/diff/921db438

Branch: refs/heads/master
Commit: 921db438cc30735f6e0776e2007e5c65935f46e5
Parents: d7b832a
Author: Kyle Nusbaum <Ky...@gmail.com>
Authored: Tue Nov 17 15:24:08 2015 -0600
Committer: Kyle Nusbaum <Ky...@gmail.com>
Committed: Tue Nov 17 15:24:08 2015 -0600

----------------------------------------------------------------------
 docs/documentation/Pacemaker.md | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/storm/blob/921db438/docs/documentation/Pacemaker.md
----------------------------------------------------------------------
diff --git a/docs/documentation/Pacemaker.md b/docs/documentation/Pacemaker.md
index 8acbb36..5084737 100644
--- a/docs/documentation/Pacemaker.md
+++ b/docs/documentation/Pacemaker.md
@@ -1,6 +1,6 @@
 # Pacemaker
 
-### Intro
+## Intro
 Pacemaker is a Storm daemon designed to process heartbeats from workers. As Storm is scaled up, ZooKeeper becomes a bottleneck due to the high volume of writes from workers doing heartbeats. Lots of writes to disk and lots of traffic across the network are generated as ZooKeeper tries to maintain consistency.
 
 Because heartbeats are of an ephemeral nature, they do not need to be persisted to disk or synced across nodes; an in-memory store will do. This is the role of Pacemaker. Pacemaker functions as a simple in-memory key/value store with ZooKeeper-like, directory-style keys and byte array values.
@@ -9,7 +9,7 @@ The corresponding Pacemaker client is a plugin for the `ClusterState` interface,
 
 ------
 
-### Configuration
+## Configuration
 
  - `pacemaker.host` : The host that the Pacemaker daemon is running on
  - `pacemaker.port` : The port that Pacemaker will listen on
@@ -38,7 +38,7 @@ $ storm pacemaker
 
 The Storm cluster should now be pushing all worker heartbeats through Pacemaker.
 
-### Security
+## Security
 
 Currently digest (password-based) and Kerberos security are supported. Security is only enforced on reads, not writes: writes may be performed by anyone, whereas reads may only be performed by authorized and authenticated users. This is an area for future development, as it leaves the cluster open to DoS attacks, but it prevents any sensitive information from reaching unauthorized eyes, which was the main goal.
 

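The Intro above describes Pacemaker as a simple in-memory key/value store with ZooKeeper-like, directory-style keys and byte-array values. A toy model of just that data shape (not Pacemaker's implementation, which also tracks stats and enforces the read-side security described here):

    import java.util.ArrayList;
    import java.util.List;
    import java.util.Map;
    import java.util.concurrent.ConcurrentHashMap;

    public class ToyHeartbeatStore {
        private final Map<String, byte[]> pulses = new ConcurrentHashMap<String, byte[]>();

        public void sendPulse(String path, byte[] details) {
            pulses.put(path, details);           // heartbeats live only in memory
        }

        public byte[] getPulse(String path) {
            return pulses.get(path);
        }

        public List<String> getAllNodesForPath(String prefix) {
            List<String> nodes = new ArrayList<String>();
            for (String path : pulses.keySet()) {
                if (path.startsWith(prefix)) {
                    nodes.add(path);
                }
            }
            return nodes;
        }

        public void deletePath(String prefix) {
            for (String path : getAllNodesForPath(prefix)) {
                pulses.remove(path);
            }
        }
    }
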

[37/37] storm git commit: Adding STORM-885 to CHANGELOG.md

Posted by kn...@apache.org.
Adding STORM-885 to CHANGELOG.md


Project: http://git-wip-us.apache.org/repos/asf/storm/repo
Commit: http://git-wip-us.apache.org/repos/asf/storm/commit/45792ddd
Tree: http://git-wip-us.apache.org/repos/asf/storm/tree/45792ddd
Diff: http://git-wip-us.apache.org/repos/asf/storm/diff/45792ddd

Branch: refs/heads/master
Commit: 45792ddd3296f4cd9acc25ca67c24a4b5805e0e3
Parents: 8d4d72c
Author: Kyle Nusbaum <Ky...@gmail.com>
Authored: Mon Nov 23 15:07:12 2015 -0600
Committer: Kyle Nusbaum <Ky...@gmail.com>
Committed: Mon Nov 23 15:07:12 2015 -0600

----------------------------------------------------------------------
 CHANGELOG.md | 1 +
 1 file changed, 1 insertion(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/storm/blob/45792ddd/CHANGELOG.md
----------------------------------------------------------------------
diff --git a/CHANGELOG.md b/CHANGELOG.md
index a8beeb0..8e0230e 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,4 +1,5 @@
 ## 0.11.0
+ * STORM-885:  Heartbeat Server (Pacemaker)
  * STORM-1221: Create a common interface for all Trident spout.
  * STORM-1198: Web UI to show resource usages and Total Resources on all supervisors
  * STORM-1167: Add windowing support for storm core.


[30/37] storm git commit: Fixing nits.

Posted by kn...@apache.org.
Fixing nits.


Project: http://git-wip-us.apache.org/repos/asf/storm/repo
Commit: http://git-wip-us.apache.org/repos/asf/storm/commit/711e36fa
Tree: http://git-wip-us.apache.org/repos/asf/storm/tree/711e36fa
Diff: http://git-wip-us.apache.org/repos/asf/storm/diff/711e36fa

Branch: refs/heads/master
Commit: 711e36fafcac33f4fb7238001ea8bc678fab2d56
Parents: 387232c
Author: Kyle Nusbaum <Ky...@gmail.com>
Authored: Thu Nov 19 11:08:52 2015 -0600
Committer: Kyle Nusbaum <Ky...@gmail.com>
Committed: Thu Nov 19 11:08:52 2015 -0600

----------------------------------------------------------------------
 docs/documentation/Pacemaker.md | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/storm/blob/711e36fa/docs/documentation/Pacemaker.md
----------------------------------------------------------------------
diff --git a/docs/documentation/Pacemaker.md b/docs/documentation/Pacemaker.md
index f82f23d..24e3e7f 100644
--- a/docs/documentation/Pacemaker.md
+++ b/docs/documentation/Pacemaker.md
@@ -91,9 +91,9 @@ PacemakerServer {
 
 ### Fault Tolerance
 
-Pacemaker runs as a single daemon instance currently. This makes it a potential Single Point of Failure.
+Pacemaker runs as a single daemon instance, making it a potential Single Point of Failure.
 
-If Pacemaker becomes unreachable by Nimbus, through crash or partition, the workers will continue to run and Nimbus will repeatedly attempt to reconnect. Nimbus functionality will be disrupted, but the topologies themselves will continue to run.
+If Pacemaker becomes unreachable by Nimbus, through crash or network partition, the workers will continue to run, and Nimbus will repeatedly attempt to reconnect. Nimbus functionality will be disrupted, but the topologies themselves will continue to run.
 In case of partition of the cluster where Nimbus and Pacemaker are on the same side of the partition, the workers that are on the other side of the partition will not be able to heartbeat, and Nimbus will reschedule the tasks elsewhere. This is probably what we want to happen anyway.
 
 


[07/37] storm git commit: Merge remote-tracking branch 'asf/master' into STORM-855

Posted by kn...@apache.org.
Merge remote-tracking branch 'asf/master' into STORM-855


Project: http://git-wip-us.apache.org/repos/asf/storm/repo
Commit: http://git-wip-us.apache.org/repos/asf/storm/commit/7f9d00db
Tree: http://git-wip-us.apache.org/repos/asf/storm/tree/7f9d00db
Diff: http://git-wip-us.apache.org/repos/asf/storm/diff/7f9d00db

Branch: refs/heads/master
Commit: 7f9d00dba940addb854f7f7f765cb711102857e0
Parents: 444ec05 ffb5d03
Author: Kyle Nusbaum <Ky...@gmail.com>
Authored: Fri Oct 30 17:31:13 2015 -0500
Committer: Kyle Nusbaum <Ky...@gmail.com>
Committed: Fri Oct 30 17:31:13 2015 -0500

----------------------------------------------------------------------
 CHANGELOG.md                                    |    2 +
 STORM-UI-REST-API.md                            |   19 +-
 conf/defaults.yaml                              |    2 +
 .../storm/starter/FastWordCountTopology.java    |   11 +-
 .../src/clj/backtype/storm/daemon/executor.clj  |  104 +-
 .../src/clj/backtype/storm/daemon/nimbus.clj    |   26 +-
 .../src/clj/backtype/storm/daemon/task.clj      |   10 +-
 .../src/clj/backtype/storm/daemon/worker.clj    |   12 +-
 storm-core/src/clj/backtype/storm/disruptor.clj |   25 +-
 storm-core/src/clj/backtype/storm/ui/core.clj   |   16 +-
 storm-core/src/jvm/backtype/storm/Config.java   |   18 +
 .../backtype/storm/generated/TopologyInfo.java  |  602 ++++++++-
 .../storm/generated/TopologyPageInfo.java       |  604 ++++++++-
 .../storm/generated/TopologySummary.java        |  604 ++++++++-
 .../jvm/backtype/storm/scheduler/Cluster.java   |   16 +-
 .../storm/scheduler/TopologyDetails.java        |   51 +
 .../resource/ResourceAwareScheduler.java        |   38 +-
 .../backtype/storm/utils/DisruptorQueue.java    |  411 ++++---
 storm-core/src/py/storm/ttypes.py               | 1145 +++++++++++++++---
 storm-core/src/storm.thrift                     |   18 +
 storm-core/src/ui/public/index.html             |    6 +-
 .../public/templates/index-page-template.html   |   18 +-
 .../templates/topology-page-template.html       |   60 +
 storm-core/src/ui/public/topology.html          |    8 +
 .../scheduler/resource_aware_scheduler_test.clj |   38 +-
 .../utils/DisruptorQueueBackpressureTest.java   |   11 +-
 .../storm/utils/DisruptorQueueTest.java         |   57 +-
 27 files changed, 3343 insertions(+), 589 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/storm/blob/7f9d00db/conf/defaults.yaml
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/storm/blob/7f9d00db/storm-core/src/clj/backtype/storm/daemon/worker.clj
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/storm/blob/7f9d00db/storm-core/src/jvm/backtype/storm/Config.java
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/storm/blob/7f9d00db/storm-core/src/jvm/backtype/storm/generated/TopologyInfo.java
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/storm/blob/7f9d00db/storm-core/src/jvm/backtype/storm/generated/TopologyPageInfo.java
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/storm/blob/7f9d00db/storm-core/src/jvm/backtype/storm/generated/TopologySummary.java
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/storm/blob/7f9d00db/storm-core/src/py/storm/ttypes.py
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/storm/blob/7f9d00db/storm-core/src/storm.thrift
----------------------------------------------------------------------


[09/37] storm git commit: Reverting date change.

Posted by kn...@apache.org.
Reverting date change.


Project: http://git-wip-us.apache.org/repos/asf/storm/repo
Commit: http://git-wip-us.apache.org/repos/asf/storm/commit/34b1373d
Tree: http://git-wip-us.apache.org/repos/asf/storm/tree/34b1373d
Diff: http://git-wip-us.apache.org/repos/asf/storm/diff/34b1373d

Branch: refs/heads/master
Commit: 34b1373d4a621ea0b9a18598a800c566e9fc25be
Parents: a8ceb1c
Author: Kyle Nusbaum <Ky...@gmail.com>
Authored: Fri Oct 30 17:38:14 2015 -0500
Committer: Kyle Nusbaum <Ky...@gmail.com>
Committed: Fri Oct 30 17:38:14 2015 -0500

----------------------------------------------------------------------
 storm-core/src/jvm/backtype/storm/generated/TopologyPageInfo.java | 2 +-
 storm-core/src/jvm/backtype/storm/generated/TopologySummary.java  | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/storm/blob/34b1373d/storm-core/src/jvm/backtype/storm/generated/TopologyPageInfo.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/backtype/storm/generated/TopologyPageInfo.java b/storm-core/src/jvm/backtype/storm/generated/TopologyPageInfo.java
index 99eddc3..257ec9d 100644
--- a/storm-core/src/jvm/backtype/storm/generated/TopologyPageInfo.java
+++ b/storm-core/src/jvm/backtype/storm/generated/TopologyPageInfo.java
@@ -51,7 +51,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-9")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-29")
 public class TopologyPageInfo implements org.apache.thrift.TBase<TopologyPageInfo, TopologyPageInfo._Fields>, java.io.Serializable, Cloneable, Comparable<TopologyPageInfo> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TopologyPageInfo");
 

http://git-wip-us.apache.org/repos/asf/storm/blob/34b1373d/storm-core/src/jvm/backtype/storm/generated/TopologySummary.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/backtype/storm/generated/TopologySummary.java b/storm-core/src/jvm/backtype/storm/generated/TopologySummary.java
index 3770f4d..7bb2e44 100644
--- a/storm-core/src/jvm/backtype/storm/generated/TopologySummary.java
+++ b/storm-core/src/jvm/backtype/storm/generated/TopologySummary.java
@@ -51,7 +51,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-9")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-29")
 public class TopologySummary implements org.apache.thrift.TBase<TopologySummary, TopologySummary._Fields>, java.io.Serializable, Cloneable, Comparable<TopologySummary> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TopologySummary");
 


[10/37] storm git commit: Fix spacing with Client

Posted by kn...@apache.org.
Fix spacing with Client


Project: http://git-wip-us.apache.org/repos/asf/storm/repo
Commit: http://git-wip-us.apache.org/repos/asf/storm/commit/33903de8
Tree: http://git-wip-us.apache.org/repos/asf/storm/tree/33903de8
Diff: http://git-wip-us.apache.org/repos/asf/storm/diff/33903de8

Branch: refs/heads/master
Commit: 33903de82d30021885e73e16a2d479c504e0b163
Parents: 34b1373
Author: Kyle Nusbaum <Ky...@gmail.com>
Authored: Fri Oct 30 17:48:00 2015 -0500
Committer: Kyle Nusbaum <Ky...@gmail.com>
Committed: Fri Oct 30 17:48:00 2015 -0500

----------------------------------------------------------------------
 .../backtype/storm/messaging/netty/Client.java  | 96 ++++++++++----------
 1 file changed, 48 insertions(+), 48 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/storm/blob/33903de8/storm-core/src/jvm/backtype/storm/messaging/netty/Client.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/backtype/storm/messaging/netty/Client.java b/storm-core/src/jvm/backtype/storm/messaging/netty/Client.java
index 7ecd770..a23b699 100644
--- a/storm-core/src/jvm/backtype/storm/messaging/netty/Client.java
+++ b/storm-core/src/jvm/backtype/storm/messaging/netty/Client.java
@@ -217,7 +217,7 @@ public class Client extends ConnectionWithStatus implements IStatefulObject, ISa
     @Override
     public Iterator<TaskMessage> recv(int flags, int clientId) {
         throw new UnsupportedOperationException("Client connection should not receive any messages");
-        }
+    }
 
     @Override
     public void send(int taskId, byte[] payload) {
@@ -235,12 +235,12 @@ public class Client extends ConnectionWithStatus implements IStatefulObject, ISa
         if (closing) {
             int numMessages = iteratorSize(msgs);
             LOG.error("discarding {} messages because the Netty client to {} is being closed", numMessages,
-                    dstAddressPrefixedName);
+                      dstAddressPrefixedName);
             return;
         }
 
         if (!hasMessages(msgs)) {
-          return;
+            return;
         }
 
         Channel channel = getConnectedChannel();
@@ -281,7 +281,7 @@ public class Client extends ConnectionWithStatus implements IStatefulObject, ISa
             // We can rely on `notifyInterestChanged` to push these messages as soon as there is space in Netty's buffer
             // because we know `Channel.isWritable` was false after the messages were already in the buffer.
         }
-        }
+    }
 
     private Channel getConnectedChannel() {
         Channel channel = channelRef.get();
@@ -296,7 +296,7 @@ public class Client extends ConnectionWithStatus implements IStatefulObject, ISa
             }
             return null;
         }
-        }
+    }
 
     public InetSocketAddress getDstAddress() {
         return dstAddress;
@@ -311,7 +311,7 @@ public class Client extends ConnectionWithStatus implements IStatefulObject, ISa
         // We consume the iterator by traversing and thus "emptying" it.
         int msgCount = iteratorSize(msgs);
         messagesLost.getAndAdd(msgCount);
-                    }
+    }
 
     private int iteratorSize(Iterator<TaskMessage> msgs) {
         int size = 0;
@@ -319,8 +319,8 @@ public class Client extends ConnectionWithStatus implements IStatefulObject, ISa
             while (msgs.hasNext()) {
                 size++;
                 msgs.next();
-                }
             }
+        }
         return size;
     }
 
@@ -340,21 +340,21 @@ public class Client extends ConnectionWithStatus implements IStatefulObject, ISa
 
         ChannelFuture future = channel.write(batch);
         future.addListener(new ChannelFutureListener() {
-            public void operationComplete(ChannelFuture future) throws Exception {
-                pendingMessages.addAndGet(0 - numMessages);
-                if (future.isSuccess()) {
-                    LOG.debug("sent {} messages to {}", numMessages, dstAddressPrefixedName);
-                    messagesSent.getAndAdd(batch.size());
-                } else {
-                    LOG.error("failed to send {} messages to {}: {}", numMessages, dstAddressPrefixedName,
-                            future.getCause());
-                    closeChannelAndReconnect(future.getChannel());
-                    messagesLost.getAndAdd(numMessages);
+                public void operationComplete(ChannelFuture future) throws Exception {
+                    pendingMessages.addAndGet(0 - numMessages);
+                    if (future.isSuccess()) {
+                        LOG.debug("sent {} messages to {}", numMessages, dstAddressPrefixedName);
+                        messagesSent.getAndAdd(batch.size());
+                    } else {
+                        LOG.error("failed to send {} messages to {}: {}", numMessages, dstAddressPrefixedName,
+                                  future.getCause());
+                        closeChannelAndReconnect(future.getChannel());
+                        messagesLost.getAndAdd(numMessages);
+                    }
                 }
-            }
 
-        });
-        }
+            });
+    }
 
     /**
      * Schedule a reconnect if we closed a non-null channel, and acquired the right to
@@ -390,23 +390,23 @@ public class Client extends ConnectionWithStatus implements IStatefulObject, ISa
 
     private void waitForPendingMessagesToBeSent() {
         LOG.info("waiting up to {} ms to send {} pending messages to {}",
-                PENDING_MESSAGES_FLUSH_TIMEOUT_MS, pendingMessages.get(), dstAddressPrefixedName);
+                 PENDING_MESSAGES_FLUSH_TIMEOUT_MS, pendingMessages.get(), dstAddressPrefixedName);
         long totalPendingMsgs = pendingMessages.get();
         long startMs = System.currentTimeMillis();
         while (pendingMessages.get() != 0) {
-        try {
+            try {
                 long deltaMs = System.currentTimeMillis() - startMs;
                 if (deltaMs > PENDING_MESSAGES_FLUSH_TIMEOUT_MS) {
                     LOG.error("failed to send all pending messages to {} within timeout, {} of {} messages were not " +
-                            "sent", dstAddressPrefixedName, pendingMessages.get(), totalPendingMsgs);
+                              "sent", dstAddressPrefixedName, pendingMessages.get(), totalPendingMsgs);
                     break;
                 }
                 Thread.sleep(PENDING_MESSAGES_FLUSH_INTERVAL_MS);
             }
             catch (InterruptedException e) {
                 break;
+            }
         }
-    }
 
     }
 
@@ -441,7 +441,7 @@ public class Client extends ConnectionWithStatus implements IStatefulObject, ISa
     /** ISaslClient interface **/
     public void channelConnected(Channel channel) {
 //        setChannel(channel);
-        }
+    }
 
     public void channelReady() {
         saslChannelReady.set(true);
@@ -502,7 +502,7 @@ public class Client extends ConnectionWithStatus implements IStatefulObject, ISa
 
         private void reschedule(Throwable t) {
             String baseMsg = String.format("connection attempt %s to %s failed", connectionAttempts,
-                    dstAddressPrefixedName);
+                                           dstAddressPrefixedName);
             String failureMsg = (t == null) ? baseMsg : baseMsg + ": " + t.toString();
             LOG.error(failureMsg);
             long nextDelayMs = retryPolicy.getSleepTimeMs(connectionAttempts.get(), 0);
@@ -519,34 +519,34 @@ public class Client extends ConnectionWithStatus implements IStatefulObject, ISa
                 LOG.debug("connecting to {} [attempt {}]", address.toString(), connectionAttempt);
                 ChannelFuture future = bootstrap.connect(address);
                 future.addListener(new ChannelFutureListener() {
-                    @Override
-                    public void operationComplete(ChannelFuture future) throws Exception {
-                        // This call returns immediately
-                        Channel newChannel = future.getChannel();
-
-                        if (future.isSuccess() && connectionEstablished(newChannel)) {
-                            boolean setChannel = channelRef.compareAndSet(null, newChannel);
-                            checkState(setChannel);
-                            LOG.debug("successfully connected to {}, {} [attempt {}]", address.toString(), newChannel.toString(),
-                                    connectionAttempt);
-                            if (messagesLost.get() > 0) {
-                                LOG.warn("Re-connection to {} was successful but {} messages has been lost so far", address.toString(), messagesLost.get());
-    }
-                        } else {
-                            Throwable cause = future.getCause();
-                            reschedule(cause);
-                            if (newChannel != null) {
-                                newChannel.close();
+                        @Override
+                        public void operationComplete(ChannelFuture future) throws Exception {
+                            // This call returns immediately
+                            Channel newChannel = future.getChannel();
+
+                            if (future.isSuccess() && connectionEstablished(newChannel)) {
+                                boolean setChannel = channelRef.compareAndSet(null, newChannel);
+                                checkState(setChannel);
+                                LOG.debug("successfully connected to {}, {} [attempt {}]", address.toString(), newChannel.toString(),
+                                          connectionAttempt);
+                                if (messagesLost.get() > 0) {
+                                    LOG.warn("Re-connection to {} was successful but {} messages have been lost so far", address.toString(), messagesLost.get());
+                                }
+                            } else {
+                                Throwable cause = future.getCause();
+                                reschedule(cause);
+                                if (newChannel != null) {
+                                    newChannel.close();
+                                }
                             }
                         }
-                    }
-                });
+                    });
             } else {
                 close();
                 throw new RuntimeException("Giving up to scheduleConnect to " + dstAddressPrefixedName + " after " +
-                        connectionAttempts + " failed attempts. " + messagesLost.get() + " messages were lost");
+                                           connectionAttempts + " failed attempts. " + messagesLost.get() + " messages were lost");
 
-    }
+            }
         }
     }
 }
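
The hunk above is Storm's client reconnect path: the ChannelFutureListener inspects each completed connect attempt, publishes the new channel on success, and otherwise calls reschedule(), which asks the retry policy for the next sleep and eventually gives up. A framework-free sketch of that loop follows; openChannel, MAX_ATTEMPTS, and BASE_DELAY_MS are hypothetical stand-ins for the bootstrap and retry policy, not Storm's actual names.

    import java.util.concurrent.Executors;
    import java.util.concurrent.ScheduledExecutorService;
    import java.util.concurrent.TimeUnit;
    import java.util.concurrent.atomic.AtomicInteger;

    public class ReconnectSketch {
        private static final int MAX_ATTEMPTS = 10;     // stand-in for the policy's ceiling
        private static final long BASE_DELAY_MS = 100L; // stand-in for the policy's base sleep

        private final ScheduledExecutorService timer =
                Executors.newSingleThreadScheduledExecutor();
        private final AtomicInteger attempts = new AtomicInteger();

        public void scheduleConnect(long delayMs) {
            timer.schedule(this::connectOnce, delayMs, TimeUnit.MILLISECONDS);
        }

        private void connectOnce() {
            int attempt = attempts.incrementAndGet();
            try {
                openChannel(); // hypothetical: succeeds or throws, like the ChannelFuture above
            } catch (Exception e) {
                if (attempt >= MAX_ATTEMPTS) {
                    throw new RuntimeException("giving up after " + attempt + " attempts", e);
                }
                // Capped exponential backoff: the shape retryPolicy.getSleepTimeMs() produces.
                long next = Math.min(BASE_DELAY_MS << Math.min(attempt, 10), 10_000L);
                scheduleConnect(next);
            }
        }

        private void openChannel() throws Exception { /* elided: the real connect */ }
    }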


[13/37] storm git commit: Formatting issues.

Posted by kn...@apache.org.
Formatting issues.


Project: http://git-wip-us.apache.org/repos/asf/storm/repo
Commit: http://git-wip-us.apache.org/repos/asf/storm/commit/31479552
Tree: http://git-wip-us.apache.org/repos/asf/storm/tree/31479552
Diff: http://git-wip-us.apache.org/repos/asf/storm/diff/31479552

Branch: refs/heads/master
Commit: 3147955254247b5008427002ca788d8c72c61f13
Parents: 2a782ce
Author: Kyle Nusbaum <Ky...@gmail.com>
Authored: Mon Nov 9 16:50:13 2015 -0600
Committer: Kyle Nusbaum <Ky...@gmail.com>
Committed: Mon Nov 9 16:50:13 2015 -0600

----------------------------------------------------------------------
 .../backtype/storm/messaging/netty/Context.java |  6 +++--
 .../netty/KerberosSaslClientHandler.java        | 17 ++++++------
 .../netty/KerberosSaslNettyClient.java          | 28 ++++++++++----------
 .../netty/KerberosSaslNettyClientState.java     | 10 +++----
 .../netty/KerberosSaslNettyServer.java          | 18 ++++++-------
 .../netty/KerberosSaslNettyServerState.java     |  2 +-
 .../netty/KerberosSaslServerHandler.java        | 19 +++++++------
 .../netty/NettyUncaughtExceptionHandler.java    | 18 ++++++-------
 8 files changed, 59 insertions(+), 59 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/storm/blob/31479552/storm-core/src/jvm/backtype/storm/messaging/netty/Context.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/backtype/storm/messaging/netty/Context.java b/storm-core/src/jvm/backtype/storm/messaging/netty/Context.java
index 10c5059..5d27a16 100644
--- a/storm-core/src/jvm/backtype/storm/messaging/netty/Context.java
+++ b/storm-core/src/jvm/backtype/storm/messaging/netty/Context.java
@@ -21,9 +21,10 @@ import org.jboss.netty.channel.socket.nio.NioClientSocketChannelFactory;
 import org.jboss.netty.util.HashedWheelTimer;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
-
 import java.util.concurrent.Executors;
+import java.util.concurrent.ScheduledExecutorService;
 import java.util.concurrent.ThreadFactory;
+import java.util.concurrent.TimeUnit;
 import java.util.HashMap;
 import java.util.Map;
 
@@ -54,7 +55,6 @@ public class Context implements IContext {
         int maxWorkers = Utils.getInt(storm_conf.get(Config.STORM_MESSAGING_NETTY_CLIENT_WORKER_THREADS));
 		ThreadFactory bossFactory = new NettyRenameThreadFactory("client" + "-boss");
         ThreadFactory workerFactory = new NettyRenameThreadFactory("client" + "-worker");
-        // TODO investigate impact of having one worker
         if (maxWorkers > 0) {
             clientChannelFactory = new NioClientSocketChannelFactory(Executors.newCachedThreadPool(bossFactory),
                     Executors.newCachedThreadPool(workerFactory), maxWorkers);
@@ -103,10 +103,12 @@ public class Context implements IContext {
         for (IConnection conn : connections.values()) {
             conn.close();
         }
+
         connections = null;
 
         //we need to release resources associated with client channel factory
         clientChannelFactory.releaseExternalResources();
+
     }
 
     private String key(String host, int port) {

http://git-wip-us.apache.org/repos/asf/storm/blob/31479552/storm-core/src/jvm/backtype/storm/messaging/netty/KerberosSaslClientHandler.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/backtype/storm/messaging/netty/KerberosSaslClientHandler.java b/storm-core/src/jvm/backtype/storm/messaging/netty/KerberosSaslClientHandler.java
index 9ae34fe..ee0e41d 100644
--- a/storm-core/src/jvm/backtype/storm/messaging/netty/KerberosSaslClientHandler.java
+++ b/storm-core/src/jvm/backtype/storm/messaging/netty/KerberosSaslClientHandler.java
@@ -31,7 +31,7 @@ import org.slf4j.LoggerFactory;
 public class KerberosSaslClientHandler extends SimpleChannelUpstreamHandler {
 
     private static final Logger LOG = LoggerFactory
-            .getLogger(KerberosSaslClientHandler.class);
+        .getLogger(KerberosSaslClientHandler.class);
     private ISaslClient client;
     long start_time;
     /** Used for client or server's token to send or receive from each other. */
@@ -47,7 +47,7 @@ public class KerberosSaslClientHandler extends SimpleChannelUpstreamHandler {
 
     @Override
     public void channelConnected(ChannelHandlerContext ctx,
-            ChannelStateEvent event) {
+                                 ChannelStateEvent event) {
         // register the newly established channel
         Channel channel = ctx.getChannel();
         client.channelConnected(channel);
@@ -57,14 +57,14 @@ public class KerberosSaslClientHandler extends SimpleChannelUpstreamHandler {
 
         try {
             KerberosSaslNettyClient saslNettyClient = KerberosSaslNettyClientState.getKerberosSaslNettyClient
-                    .get(channel);
+                .get(channel);
 
             if (saslNettyClient == null) {
                 LOG.debug("Creating saslNettyClient now for channel: {}",
                           channel);
                 saslNettyClient = new KerberosSaslNettyClient(storm_conf, jaas_section);
                 KerberosSaslNettyClientState.getKerberosSaslNettyClient.set(channel,
-                        saslNettyClient);
+                                                                            saslNettyClient);
             }
             LOG.debug("Going to initiate Kerberos negotiations.");
             byte[] initialChallenge = saslNettyClient.saslResponse(new SaslMessageToken(new byte[0]));
@@ -80,15 +80,15 @@ public class KerberosSaslClientHandler extends SimpleChannelUpstreamHandler {
 
     @Override
     public void messageReceived(ChannelHandlerContext ctx, MessageEvent event)
-            throws Exception {
+        throws Exception {
         LOG.debug("send/recv time (ms): {}",
-                (System.currentTimeMillis() - start_time));
+                  (System.currentTimeMillis() - start_time));
 
         Channel channel = ctx.getChannel();
 
         // Generate SASL response to server using Channel-local SASL client.
         KerberosSaslNettyClient saslNettyClient = KerberosSaslNettyClientState.getKerberosSaslNettyClient
-                .get(channel);
+            .get(channel);
         if (saslNettyClient == null) {
             throw new Exception("saslNettyClient was unexpectedly null for channel:" + channel);
         }
@@ -111,8 +111,7 @@ public class KerberosSaslClientHandler extends SimpleChannelUpstreamHandler {
                 // perform this request. The client's request will now proceed
                 // to the next pipeline component namely StormClientHandler.
                 Channels.fireMessageReceived(ctx, msg);
-            }
-            else {
+            } else {
                 LOG.warn("Unexpected control message: {}", msg);
             }
             return;
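
KerberosSaslClientHandler drives one half of a standard SASL challenge/response exchange: channelConnected() sends an initial empty token, and messageReceived() feeds each server token back through the SaslClient until it reports completion. Stripped of the Netty plumbing, the loop looks roughly like the sketch below; the protocol and server names are placeholders, and the real client additionally runs under the Kerberos login's Subject (see KerberosSaslNettyClient).

    import javax.security.sasl.Sasl;
    import javax.security.sasl.SaslClient;
    import javax.security.sasl.SaslException;

    public class SaslClientLoopSketch {
        public static void run() throws SaslException {
            SaslClient sc = Sasl.createSaslClient(
                    new String[] { "GSSAPI" },           // Kerberos mechanism
                    null, "storm", "pacemaker-host",     // placeholder protocol/server
                    null, null);

            // Kick off negotiation, mirroring channelConnected() above.
            byte[] token = sc.hasInitialResponse()
                    ? sc.evaluateChallenge(new byte[0])
                    : new byte[0];
            send(token);

            // Mirror of messageReceived(): evaluate each server token in turn.
            while (!sc.isComplete()) {
                byte[] response = sc.evaluateChallenge(receive());
                if (response != null) {
                    send(response); // wrapped as a SaslMessageToken on the wire
                }
            }
        }

        private static void send(byte[] token) { /* elided: channel.write(...) */ }
        private static byte[] receive() { return new byte[0]; /* elided: read token */ }
    }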

http://git-wip-us.apache.org/repos/asf/storm/blob/31479552/storm-core/src/jvm/backtype/storm/messaging/netty/KerberosSaslNettyClient.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/backtype/storm/messaging/netty/KerberosSaslNettyClient.java b/storm-core/src/jvm/backtype/storm/messaging/netty/KerberosSaslNettyClient.java
index 32afab0..e540a4c 100644
--- a/storm-core/src/jvm/backtype/storm/messaging/netty/KerberosSaslNettyClient.java
+++ b/storm-core/src/jvm/backtype/storm/messaging/netty/KerberosSaslNettyClient.java
@@ -46,7 +46,7 @@ import org.slf4j.LoggerFactory;
 public class KerberosSaslNettyClient {
 
     private static final Logger LOG = LoggerFactory
-            .getLogger(KerberosSaslNettyClient.class);
+        .getLogger(KerberosSaslNettyClient.class);
 
     /**
      * Used to respond to server's counterpart, SaslServer with SASL tokens
@@ -55,16 +55,16 @@ public class KerberosSaslNettyClient {
     private SaslClient saslClient;
     private Subject subject;
     private String jaas_section;
-    
+
     /**
      * Create a KerberosSaslNettyClient for authentication with servers.
      */
     public KerberosSaslNettyClient(Map storm_conf, String jaas_section) {
         LOG.debug("KerberosSaslNettyClient: Creating SASL {} client to authenticate to server ",
                   SaslUtils.KERBEROS);
-        
+
         LOG.info("Creating Kerberos Client.");
-        
+
         Configuration login_conf;
         try {
             login_conf = AuthUtils.GetConfiguration(storm_conf);
@@ -74,14 +74,14 @@ public class KerberosSaslNettyClient {
             throw t;
         }
         LOG.debug("KerberosSaslNettyClient: authmethod {}", SaslUtils.KERBEROS);
-        
+
         SaslClientCallbackHandler ch = new SaslClientCallbackHandler();
-        
+
         subject = null;
         try {
             LOG.debug("Setting Configuration to login_config: {}", login_conf);
             //specify a configuration object to be used
-            Configuration.setConfiguration(login_conf); 
+            Configuration.setConfiguration(login_conf);
             //now login
             LOG.debug("Trying to login.");
             Login login = new Login(jaas_section, ch);
@@ -91,9 +91,9 @@ public class KerberosSaslNettyClient {
             LOG.error("Client failed to login in principal:" + ex, ex);
             throw new RuntimeException(ex);
         }
-        
+
         //check the credential of our principal
-        if (subject.getPrivateCredentials(KerberosTicket.class).isEmpty()) { 
+        if (subject.getPrivateCredentials(KerberosTicket.class).isEmpty()) {
             LOG.error("Failed to verify user principal.");
             throw new RuntimeException("Fail to verify user principal with section \"" +
                                        jaas_section +
@@ -138,12 +138,12 @@ public class KerberosSaslNettyClient {
                     }
                 });
             LOG.info("Got Client: {}", saslClient);
-            
+
         } catch (PrivilegedActionException e) {
             LOG.error("KerberosSaslNettyClient: Could not create Sasl Netty Client.");
             throw new RuntimeException(e);
         }
-}
+    }
 
     public boolean isComplete() {
         return saslClient.isComplete();
@@ -151,7 +151,7 @@ public class KerberosSaslNettyClient {
 
     /**
      * Respond to server's SASL token.
-     * 
+     *
      * @param saslTokenMessage
      *            contains server's SASL token
      * @return client's response SASL token
@@ -188,7 +188,7 @@ public class KerberosSaslNettyClient {
 
         /**
          * Set private members using topology token.
-         * 
+         *
          * @param topologyToken
          */
         public SaslClientCallbackHandler() {
@@ -196,7 +196,7 @@ public class KerberosSaslNettyClient {
 
         /**
          * Implementation used to respond to SASL tokens from server.
-         * 
+         *
          * @param callbacks
          *            objects that indicate what credential information the
          *            server's SaslServer requires from the client.

http://git-wip-us.apache.org/repos/asf/storm/blob/31479552/storm-core/src/jvm/backtype/storm/messaging/netty/KerberosSaslNettyClientState.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/backtype/storm/messaging/netty/KerberosSaslNettyClientState.java b/storm-core/src/jvm/backtype/storm/messaging/netty/KerberosSaslNettyClientState.java
index 1283d9b..2546aa5 100644
--- a/storm-core/src/jvm/backtype/storm/messaging/netty/KerberosSaslNettyClientState.java
+++ b/storm-core/src/jvm/backtype/storm/messaging/netty/KerberosSaslNettyClientState.java
@@ -22,10 +22,10 @@ import org.jboss.netty.channel.ChannelLocal;
 
 final class KerberosSaslNettyClientState {
 
-	public static final ChannelLocal<KerberosSaslNettyClient> getKerberosSaslNettyClient = new ChannelLocal<KerberosSaslNettyClient>() {
-		protected KerberosSaslNettyClient initialValue(Channel channel) {
-			return null;
-		}
-	};
+    public static final ChannelLocal<KerberosSaslNettyClient> getKerberosSaslNettyClient = new ChannelLocal<KerberosSaslNettyClient>() {
+        protected KerberosSaslNettyClient initialValue(Channel channel) {
+            return null;
+        }
+    };
 
 }

http://git-wip-us.apache.org/repos/asf/storm/blob/31479552/storm-core/src/jvm/backtype/storm/messaging/netty/KerberosSaslNettyServer.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/backtype/storm/messaging/netty/KerberosSaslNettyServer.java b/storm-core/src/jvm/backtype/storm/messaging/netty/KerberosSaslNettyServer.java
index a0003c6..a935608 100644
--- a/storm-core/src/jvm/backtype/storm/messaging/netty/KerberosSaslNettyServer.java
+++ b/storm-core/src/jvm/backtype/storm/messaging/netty/KerberosSaslNettyServer.java
@@ -55,7 +55,7 @@ class KerberosSaslNettyServer {
     private Subject subject;
     private String jaas_section;
     private List<String> authorizedUsers;
-    
+
     KerberosSaslNettyServer(Map storm_conf, String jaas_section, List<String> authorizedUsers) {
         this.authorizedUsers = authorizedUsers;
         LOG.debug("Getting Configuration.");
@@ -67,17 +67,17 @@ class KerberosSaslNettyServer {
             LOG.error("Failed to get login_conf: ", t);
             throw t;
         }
-            
+
         LOG.debug("KerberosSaslNettyServer: authmethod {}", SaslUtils.KERBEROS);
 
         KerberosSaslCallbackHandler ch = new KerberosSaslNettyServer.KerberosSaslCallbackHandler(storm_conf, authorizedUsers);
-        
+
         //login our principal
         subject = null;
         try {
             LOG.debug("Setting Configuration to login_config: {}", login_conf);
             //specify a configuration object to be used
-            Configuration.setConfiguration(login_conf); 
+            Configuration.setConfiguration(login_conf);
             //now login
             LOG.debug("Trying to login.");
             Login login = new Login(jaas_section, ch);
@@ -87,9 +87,9 @@ class KerberosSaslNettyServer {
             LOG.error("Server failed to login in principal:", ex);
             throw new RuntimeException(ex);
         }
-        
+
         //check the credential of our principal
-        if (subject.getPrivateCredentials(KerberosTicket.class).isEmpty()) { 
+        if (subject.getPrivateCredentials(KerberosTicket.class).isEmpty()) {
             LOG.error("Failed to verifyuser principal.");
             throw new RuntimeException("Fail to verify user principal with section \""
                                        + jaas_section
@@ -97,7 +97,7 @@ class KerberosSaslNettyServer {
                                        + login_conf);
         }
 
-        try {    
+        try {
             LOG.info("Creating Kerberos Server.");
             final CallbackHandler fch = ch;
             Principal p = (Principal)subject.getPrincipals().toArray()[0];
@@ -123,7 +123,7 @@ class KerberosSaslNettyServer {
                         }
                     });
             LOG.info("Got Server: {}", saslServer);
-                 
+
         } catch (PrivilegedActionException e) {
             LOG.error("KerberosSaslNettyServer: Could not create SaslServer: ", e);
             throw new RuntimeException(e);
@@ -192,7 +192,7 @@ class KerberosSaslNettyServer {
     /**
      * Used by SaslTokenMessage::processToken() to respond to server SASL
      * tokens.
-     * 
+     *
      * @param token
      *            Server's SASL token
      * @return token to send back to the server.

http://git-wip-us.apache.org/repos/asf/storm/blob/31479552/storm-core/src/jvm/backtype/storm/messaging/netty/KerberosSaslNettyServerState.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/backtype/storm/messaging/netty/KerberosSaslNettyServerState.java b/storm-core/src/jvm/backtype/storm/messaging/netty/KerberosSaslNettyServerState.java
index 064dc91..e7a127e 100644
--- a/storm-core/src/jvm/backtype/storm/messaging/netty/KerberosSaslNettyServerState.java
+++ b/storm-core/src/jvm/backtype/storm/messaging/netty/KerberosSaslNettyServerState.java
@@ -26,5 +26,5 @@ final class KerberosSaslNettyServerState {
             protected KerberosSaslNettyServer initialValue(Channel channel) {
                 return null;
             }
-	};
+        };
 }

http://git-wip-us.apache.org/repos/asf/storm/blob/31479552/storm-core/src/jvm/backtype/storm/messaging/netty/KerberosSaslServerHandler.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/backtype/storm/messaging/netty/KerberosSaslServerHandler.java b/storm-core/src/jvm/backtype/storm/messaging/netty/KerberosSaslServerHandler.java
index 3ed3fd7..e4a6e29 100644
--- a/storm-core/src/jvm/backtype/storm/messaging/netty/KerberosSaslServerHandler.java
+++ b/storm-core/src/jvm/backtype/storm/messaging/netty/KerberosSaslServerHandler.java
@@ -36,9 +36,9 @@ public class KerberosSaslServerHandler extends SimpleChannelUpstreamHandler {
     private Map storm_conf;
     private String jaas_section;
     private List<String> authorizedUsers;
-    
+
     private static final Logger LOG = LoggerFactory
-            .getLogger(KerberosSaslServerHandler.class);
+        .getLogger(KerberosSaslServerHandler.class);
 
     public KerberosSaslServerHandler(ISaslServer server, Map storm_conf, String jaas_section, List<String> authorizedUsers) throws IOException {
         this.server = server;
@@ -49,14 +49,14 @@ public class KerberosSaslServerHandler extends SimpleChannelUpstreamHandler {
 
     @Override
     public void messageReceived(ChannelHandlerContext ctx, MessageEvent e)
-            throws Exception {
+        throws Exception {
         Object msg = e.getMessage();
         if (msg == null)
             return;
 
         Channel channel = ctx.getChannel();
 
-        
+
         if (msg instanceof SaslMessageToken) {
             // initialize server-side SASL functionality, if we haven't yet
             // (in which case we are looking at the first SASL message from the
@@ -71,11 +71,11 @@ public class KerberosSaslServerHandler extends SimpleChannelUpstreamHandler {
                     try {
                         saslNettyServer = new KerberosSaslNettyServer(storm_conf, jaas_section, authorizedUsers);
                     } catch (RuntimeException ioe) {
-                        LOG.error("Error occurred while creating saslNettyServer on server {} for client {}", 
+                        LOG.error("Error occurred while creating saslNettyServer on server {} for client {}",
                                   channel.getLocalAddress(), channel.getRemoteAddress());
                         saslNettyServer = null;
                     }
-                    
+
                     KerberosSaslNettyServerState.getKerberosSaslNettyServer.set(channel,
                                                                                 saslNettyServer);
                 } else {
@@ -85,17 +85,16 @@ public class KerberosSaslServerHandler extends SimpleChannelUpstreamHandler {
 
                 byte[] responseBytes = saslNettyServer.response(((SaslMessageToken) msg)
                                                                 .getSaslToken());
-                    
+
                 SaslMessageToken saslTokenMessageRequest = new SaslMessageToken(responseBytes);
 
                 if(saslTokenMessageRequest.getSaslToken() == null) {
                     channel.write(ControlMessage.SASL_COMPLETE_REQUEST);
-                }
-                else {   
+                } else {
                     // Send response to client.
                     channel.write(saslTokenMessageRequest);
                 }
-                    
+
                 if (saslNettyServer.isComplete()) {
                     // If authentication of client is complete, we will also send a
                     // SASL-Complete message to the client.

http://git-wip-us.apache.org/repos/asf/storm/blob/31479552/storm-core/src/jvm/backtype/storm/messaging/netty/NettyUncaughtExceptionHandler.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/backtype/storm/messaging/netty/NettyUncaughtExceptionHandler.java b/storm-core/src/jvm/backtype/storm/messaging/netty/NettyUncaughtExceptionHandler.java
index 3d31544..ad8b5d9 100644
--- a/storm-core/src/jvm/backtype/storm/messaging/netty/NettyUncaughtExceptionHandler.java
+++ b/storm-core/src/jvm/backtype/storm/messaging/netty/NettyUncaughtExceptionHandler.java
@@ -22,14 +22,14 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 public class NettyUncaughtExceptionHandler implements Thread.UncaughtExceptionHandler {
-  private static final Logger LOG = LoggerFactory.getLogger(NettyUncaughtExceptionHandler.class);
-  @Override
-  public void uncaughtException(Thread t, Throwable e) {
-    try {
-      Utils.handleUncaughtException(e);
-    } catch (Error error) {
-      LOG.info("Received error in netty thread.. terminating server...");
-      Runtime.getRuntime().exit(1);
+    private static final Logger LOG = LoggerFactory.getLogger(NettyUncaughtExceptionHandler.class);
+    @Override
+    public void uncaughtException(Thread t, Throwable e) {
+        try {
+            Utils.handleUncaughtException(e);
+        } catch (Error error) {
+            LOG.info("Received error in netty thread... terminating server...");
+            Runtime.getRuntime().exit(1);
+        }
     }
-  }
 }
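
NettyUncaughtExceptionHandler exists so that an Error escaping a netty thread takes the whole process down instead of silently killing one thread. It only applies to threads that install it; below is a sketch of that wiring, with illustrative class and method names rather than Storm's exact factory.

    import java.util.concurrent.ThreadFactory;
    import backtype.storm.messaging.netty.NettyUncaughtExceptionHandler;

    public class HandlerWiringSketch {
        public static ThreadFactory nettyWorkerFactory() {
            return runnable -> {
                Thread t = new Thread(runnable, "netty-worker");
                // Errors thrown on this thread now reach uncaughtException(),
                // which logs and terminates the JVM as shown above.
                t.setUncaughtExceptionHandler(new NettyUncaughtExceptionHandler());
                return t;
            };
        }
    }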


[21/37] storm git commit: Minor tweaks to documentation.

Posted by kn...@apache.org.
Minor tweaks to documentation.


Project: http://git-wip-us.apache.org/repos/asf/storm/repo
Commit: http://git-wip-us.apache.org/repos/asf/storm/commit/d7b832a4
Tree: http://git-wip-us.apache.org/repos/asf/storm/tree/d7b832a4
Diff: http://git-wip-us.apache.org/repos/asf/storm/diff/d7b832a4

Branch: refs/heads/master
Commit: d7b832a44f03ec5e868bc83275fe9eef27e806ed
Parents: c93a0ff
Author: Kyle Nusbaum <Ky...@gmail.com>
Authored: Tue Nov 17 15:23:08 2015 -0600
Committer: Kyle Nusbaum <Ky...@gmail.com>
Committed: Tue Nov 17 15:23:08 2015 -0600

----------------------------------------------------------------------
 docs/documentation/Pacemaker.md | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/storm/blob/d7b832a4/docs/documentation/Pacemaker.md
----------------------------------------------------------------------
diff --git a/docs/documentation/Pacemaker.md b/docs/documentation/Pacemaker.md
index cd7a0a6..8acbb36 100644
--- a/docs/documentation/Pacemaker.md
+++ b/docs/documentation/Pacemaker.md
@@ -1,5 +1,4 @@
 # Pacemaker
-------
 
 ### Intro
 Pacemaker is a Storm daemon designed to process heartbeats from workers. As Storm scales up, ZooKeeper becomes a bottleneck under the high volume of heartbeat writes from workers, generating heavy disk writes and network traffic as it tries to maintain consistency.
@@ -8,6 +7,7 @@ Because heartbeats are of an ephemeral nature, they do not need to be persisted
 
 The corresponding Pacemaker client is a plugin for the `ClusterState` interface, `org.apache.storm.pacemaker.pacemaker_state_factory`. Heartbeat calls are funneled by the `ClusterState` produced by `pacemaker_state_factory` into the Pacemaker daemon, while other set/get operations are forwarded to ZooKeeper.
 
+------
 
 ### Configuration
 


[32/37] storm git commit: Fixing ClusterState.

Posted by kn...@apache.org.
Fixing ClusterState.


Project: http://git-wip-us.apache.org/repos/asf/storm/repo
Commit: http://git-wip-us.apache.org/repos/asf/storm/commit/61921bba
Tree: http://git-wip-us.apache.org/repos/asf/storm/tree/61921bba
Diff: http://git-wip-us.apache.org/repos/asf/storm/diff/61921bba

Branch: refs/heads/master
Commit: 61921bbadd24cfa438bd4a744e103ccb50afaa83
Parents: aee4864
Author: Kyle Nusbaum <Ky...@gmail.com>
Authored: Fri Nov 20 22:36:58 2015 -0600
Committer: Kyle Nusbaum <Ky...@gmail.com>
Committed: Fri Nov 20 22:36:58 2015 -0600

----------------------------------------------------------------------
 storm-core/src/jvm/backtype/storm/cluster/ClusterState.java | 3 +--
 1 file changed, 1 insertion(+), 2 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/storm/blob/61921bba/storm-core/src/jvm/backtype/storm/cluster/ClusterState.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/backtype/storm/cluster/ClusterState.java b/storm-core/src/jvm/backtype/storm/cluster/ClusterState.java
index bca085f..1bcc645 100644
--- a/storm-core/src/jvm/backtype/storm/cluster/ClusterState.java
+++ b/storm-core/src/jvm/backtype/storm/cluster/ClusterState.java
@@ -87,9 +87,8 @@ public interface ClusterState {
      * @param path The path where a node will be created.
      * @param data The data to be written at the node.
      * @param acls The acls to apply to the path. May be null.
-     * @return path
      */
-    String set_ephemeral_node(String path, byte[] data, List<ACL> acls);
+    void set_ephemeral_node(String path, byte[] data, List<ACL> acls);
 
     /**
      * Gets the 'version' of the node at a path. Optionally sets a watch
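
With the return value dropped, set_ephemeral_node implementations simply create or refresh the node and return nothing. Below is a minimal sketch of a conforming ZooKeeper-backed implementation via Curator; the zk field and its connection setup are hypothetical, and error handling is reduced to a rethrow.

    import java.util.List;
    import backtype.storm.cluster.ClusterState;
    import org.apache.curator.framework.CuratorFramework;
    import org.apache.zookeeper.CreateMode;
    import org.apache.zookeeper.data.ACL;

    public abstract class ZkClusterStateSketch implements ClusterState {
        private CuratorFramework zk; // assumed connected elsewhere

        @Override
        public void set_ephemeral_node(String path, byte[] data, List<ACL> acls) {
            try {
                if (zk.checkExists().forPath(path) == null) {
                    // Node is absent: create it as ephemeral, with parents as needed.
                    zk.create().creatingParentsIfNeeded()
                      .withMode(CreateMode.EPHEMERAL)
                      .withACL(acls)
                      .forPath(path, data);
                } else {
                    zk.setData().forPath(path, data); // refresh an existing ephemeral node
                }
            } catch (Exception e) {
                throw new RuntimeException(e);
            }
        }
    }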


[17/37] storm git commit: Merge remote-tracking branch 'asf/master' into STORM-855

Posted by kn...@apache.org.
http://git-wip-us.apache.org/repos/asf/storm/blob/4645c190/storm-core/src/jvm/backtype/storm/generated/HBNodes.java
----------------------------------------------------------------------
diff --cc storm-core/src/jvm/backtype/storm/generated/HBNodes.java
index a6dc65d,0000000..b6fc526
mode 100644,000000..100644
--- a/storm-core/src/jvm/backtype/storm/generated/HBNodes.java
+++ b/storm-core/src/jvm/backtype/storm/generated/HBNodes.java
@@@ -1,461 -1,0 +1,461 @@@
 +/**
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + * http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing, software
 + * distributed under the License is distributed on an "AS IS" BASIS,
 + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 + * See the License for the specific language governing permissions and
 + * limitations under the License.
 + */
 +/**
-  * Autogenerated by Thrift Compiler (0.9.2)
++ * Autogenerated by Thrift Compiler (0.9.3)
 + *
 + * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
 + *  @generated
 + */
 +package backtype.storm.generated;
 +
 +import org.apache.thrift.scheme.IScheme;
 +import org.apache.thrift.scheme.SchemeFactory;
 +import org.apache.thrift.scheme.StandardScheme;
 +
 +import org.apache.thrift.scheme.TupleScheme;
 +import org.apache.thrift.protocol.TTupleProtocol;
 +import org.apache.thrift.protocol.TProtocolException;
 +import org.apache.thrift.EncodingUtils;
 +import org.apache.thrift.TException;
 +import org.apache.thrift.async.AsyncMethodCallback;
 +import org.apache.thrift.server.AbstractNonblockingServer.*;
 +import java.util.List;
 +import java.util.ArrayList;
 +import java.util.Map;
 +import java.util.HashMap;
 +import java.util.EnumMap;
 +import java.util.Set;
 +import java.util.HashSet;
 +import java.util.EnumSet;
 +import java.util.Collections;
 +import java.util.BitSet;
 +import java.nio.ByteBuffer;
 +import java.util.Arrays;
 +import javax.annotation.Generated;
 +import org.slf4j.Logger;
 +import org.slf4j.LoggerFactory;
 +
 +@SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
- @Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-9")
++@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
 +public class HBNodes implements org.apache.thrift.TBase<HBNodes, HBNodes._Fields>, java.io.Serializable, Cloneable, Comparable<HBNodes> {
 +  private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("HBNodes");
 +
 +  private static final org.apache.thrift.protocol.TField PULSE_IDS_FIELD_DESC = new org.apache.thrift.protocol.TField("pulseIds", org.apache.thrift.protocol.TType.LIST, (short)1);
 +
 +  private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>();
 +  static {
 +    schemes.put(StandardScheme.class, new HBNodesStandardSchemeFactory());
 +    schemes.put(TupleScheme.class, new HBNodesTupleSchemeFactory());
 +  }
 +
 +  private List<String> pulseIds; // required
 +
 +  /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
 +  public enum _Fields implements org.apache.thrift.TFieldIdEnum {
 +    PULSE_IDS((short)1, "pulseIds");
 +
 +    private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();
 +
 +    static {
 +      for (_Fields field : EnumSet.allOf(_Fields.class)) {
 +        byName.put(field.getFieldName(), field);
 +      }
 +    }
 +
 +    /**
 +     * Find the _Fields constant that matches fieldId, or null if it's not found.
 +     */
 +    public static _Fields findByThriftId(int fieldId) {
 +      switch(fieldId) {
 +        case 1: // PULSE_IDS
 +          return PULSE_IDS;
 +        default:
 +          return null;
 +      }
 +    }
 +
 +    /**
 +     * Find the _Fields constant that matches fieldId, throwing an exception
 +     * if it is not found.
 +     */
 +    public static _Fields findByThriftIdOrThrow(int fieldId) {
 +      _Fields fields = findByThriftId(fieldId);
 +      if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
 +      return fields;
 +    }
 +
 +    /**
 +     * Find the _Fields constant that matches name, or null if it's not found.
 +     */
 +    public static _Fields findByName(String name) {
 +      return byName.get(name);
 +    }
 +
 +    private final short _thriftId;
 +    private final String _fieldName;
 +
 +    _Fields(short thriftId, String fieldName) {
 +      _thriftId = thriftId;
 +      _fieldName = fieldName;
 +    }
 +
 +    public short getThriftFieldId() {
 +      return _thriftId;
 +    }
 +
 +    public String getFieldName() {
 +      return _fieldName;
 +    }
 +  }
 +
 +  // isset id assignments
 +  public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
 +  static {
 +    Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
 +    tmpMap.put(_Fields.PULSE_IDS, new org.apache.thrift.meta_data.FieldMetaData("pulseIds", org.apache.thrift.TFieldRequirementType.DEFAULT, 
 +        new org.apache.thrift.meta_data.ListMetaData(org.apache.thrift.protocol.TType.LIST, 
 +            new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING))));
 +    metaDataMap = Collections.unmodifiableMap(tmpMap);
 +    org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(HBNodes.class, metaDataMap);
 +  }
 +
 +  public HBNodes() {
 +  }
 +
 +  public HBNodes(
 +    List<String> pulseIds)
 +  {
 +    this();
 +    this.pulseIds = pulseIds;
 +  }
 +
 +  /**
 +   * Performs a deep copy on <i>other</i>.
 +   */
 +  public HBNodes(HBNodes other) {
 +    if (other.is_set_pulseIds()) {
 +      List<String> __this__pulseIds = new ArrayList<String>(other.pulseIds);
 +      this.pulseIds = __this__pulseIds;
 +    }
 +  }
 +
 +  public HBNodes deepCopy() {
 +    return new HBNodes(this);
 +  }
 +
 +  @Override
 +  public void clear() {
 +    this.pulseIds = null;
 +  }
 +
 +  public int get_pulseIds_size() {
 +    return (this.pulseIds == null) ? 0 : this.pulseIds.size();
 +  }
 +
 +  public java.util.Iterator<String> get_pulseIds_iterator() {
 +    return (this.pulseIds == null) ? null : this.pulseIds.iterator();
 +  }
 +
 +  public void add_to_pulseIds(String elem) {
 +    if (this.pulseIds == null) {
 +      this.pulseIds = new ArrayList<String>();
 +    }
 +    this.pulseIds.add(elem);
 +  }
 +
 +  public List<String> get_pulseIds() {
 +    return this.pulseIds;
 +  }
 +
 +  public void set_pulseIds(List<String> pulseIds) {
 +    this.pulseIds = pulseIds;
 +  }
 +
 +  public void unset_pulseIds() {
 +    this.pulseIds = null;
 +  }
 +
 +  /** Returns true if field pulseIds is set (has been assigned a value) and false otherwise */
 +  public boolean is_set_pulseIds() {
 +    return this.pulseIds != null;
 +  }
 +
 +  public void set_pulseIds_isSet(boolean value) {
 +    if (!value) {
 +      this.pulseIds = null;
 +    }
 +  }
 +
 +  public void setFieldValue(_Fields field, Object value) {
 +    switch (field) {
 +    case PULSE_IDS:
 +      if (value == null) {
 +        unset_pulseIds();
 +      } else {
 +        set_pulseIds((List<String>)value);
 +      }
 +      break;
 +
 +    }
 +  }
 +
 +  public Object getFieldValue(_Fields field) {
 +    switch (field) {
 +    case PULSE_IDS:
 +      return get_pulseIds();
 +
 +    }
 +    throw new IllegalStateException();
 +  }
 +
 +  /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
 +  public boolean isSet(_Fields field) {
 +    if (field == null) {
 +      throw new IllegalArgumentException();
 +    }
 +
 +    switch (field) {
 +    case PULSE_IDS:
 +      return is_set_pulseIds();
 +    }
 +    throw new IllegalStateException();
 +  }
 +
 +  @Override
 +  public boolean equals(Object that) {
 +    if (that == null)
 +      return false;
 +    if (that instanceof HBNodes)
 +      return this.equals((HBNodes)that);
 +    return false;
 +  }
 +
 +  public boolean equals(HBNodes that) {
 +    if (that == null)
 +      return false;
 +
 +    boolean this_present_pulseIds = true && this.is_set_pulseIds();
 +    boolean that_present_pulseIds = true && that.is_set_pulseIds();
 +    if (this_present_pulseIds || that_present_pulseIds) {
 +      if (!(this_present_pulseIds && that_present_pulseIds))
 +        return false;
 +      if (!this.pulseIds.equals(that.pulseIds))
 +        return false;
 +    }
 +
 +    return true;
 +  }
 +
 +  @Override
 +  public int hashCode() {
 +    List<Object> list = new ArrayList<Object>();
 +
 +    boolean present_pulseIds = true && (is_set_pulseIds());
 +    list.add(present_pulseIds);
 +    if (present_pulseIds)
 +      list.add(pulseIds);
 +
 +    return list.hashCode();
 +  }
 +
 +  @Override
 +  public int compareTo(HBNodes other) {
 +    if (!getClass().equals(other.getClass())) {
 +      return getClass().getName().compareTo(other.getClass().getName());
 +    }
 +
 +    int lastComparison = 0;
 +
 +    lastComparison = Boolean.valueOf(is_set_pulseIds()).compareTo(other.is_set_pulseIds());
 +    if (lastComparison != 0) {
 +      return lastComparison;
 +    }
 +    if (is_set_pulseIds()) {
 +      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.pulseIds, other.pulseIds);
 +      if (lastComparison != 0) {
 +        return lastComparison;
 +      }
 +    }
 +    return 0;
 +  }
 +
 +  public _Fields fieldForId(int fieldId) {
 +    return _Fields.findByThriftId(fieldId);
 +  }
 +
 +  public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException {
 +    schemes.get(iprot.getScheme()).getScheme().read(iprot, this);
 +  }
 +
 +  public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {
 +    schemes.get(oprot.getScheme()).getScheme().write(oprot, this);
 +  }
 +
 +  @Override
 +  public String toString() {
 +    StringBuilder sb = new StringBuilder("HBNodes(");
 +    boolean first = true;
 +
 +    sb.append("pulseIds:");
 +    if (this.pulseIds == null) {
 +      sb.append("null");
 +    } else {
 +      sb.append(this.pulseIds);
 +    }
 +    first = false;
 +    sb.append(")");
 +    return sb.toString();
 +  }
 +
 +  public void validate() throws org.apache.thrift.TException {
 +    // check for required fields
 +    // check for sub-struct validity
 +  }
 +
 +  private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
 +    try {
 +      write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out)));
 +    } catch (org.apache.thrift.TException te) {
 +      throw new java.io.IOException(te);
 +    }
 +  }
 +
 +  private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
 +    try {
 +      read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));
 +    } catch (org.apache.thrift.TException te) {
 +      throw new java.io.IOException(te);
 +    }
 +  }
 +
 +  private static class HBNodesStandardSchemeFactory implements SchemeFactory {
 +    public HBNodesStandardScheme getScheme() {
 +      return new HBNodesStandardScheme();
 +    }
 +  }
 +
 +  private static class HBNodesStandardScheme extends StandardScheme<HBNodes> {
 +
 +    public void read(org.apache.thrift.protocol.TProtocol iprot, HBNodes struct) throws org.apache.thrift.TException {
 +      org.apache.thrift.protocol.TField schemeField;
 +      iprot.readStructBegin();
 +      while (true)
 +      {
 +        schemeField = iprot.readFieldBegin();
 +        if (schemeField.type == org.apache.thrift.protocol.TType.STOP) { 
 +          break;
 +        }
 +        switch (schemeField.id) {
 +          case 1: // PULSE_IDS
 +            if (schemeField.type == org.apache.thrift.protocol.TType.LIST) {
 +              {
-                 org.apache.thrift.protocol.TList _list674 = iprot.readListBegin();
-                 struct.pulseIds = new ArrayList<String>(_list674.size);
-                 String _elem675;
-                 for (int _i676 = 0; _i676 < _list674.size; ++_i676)
++                org.apache.thrift.protocol.TList _list706 = iprot.readListBegin();
++                struct.pulseIds = new ArrayList<String>(_list706.size);
++                String _elem707;
++                for (int _i708 = 0; _i708 < _list706.size; ++_i708)
 +                {
-                   _elem675 = iprot.readString();
-                   struct.pulseIds.add(_elem675);
++                  _elem707 = iprot.readString();
++                  struct.pulseIds.add(_elem707);
 +                }
 +                iprot.readListEnd();
 +              }
 +              struct.set_pulseIds_isSet(true);
 +            } else { 
 +              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
 +            }
 +            break;
 +          default:
 +            org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
 +        }
 +        iprot.readFieldEnd();
 +      }
 +      iprot.readStructEnd();
 +      struct.validate();
 +    }
 +
 +    public void write(org.apache.thrift.protocol.TProtocol oprot, HBNodes struct) throws org.apache.thrift.TException {
 +      struct.validate();
 +
 +      oprot.writeStructBegin(STRUCT_DESC);
 +      if (struct.pulseIds != null) {
 +        oprot.writeFieldBegin(PULSE_IDS_FIELD_DESC);
 +        {
 +          oprot.writeListBegin(new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRING, struct.pulseIds.size()));
-           for (String _iter677 : struct.pulseIds)
++          for (String _iter709 : struct.pulseIds)
 +          {
-             oprot.writeString(_iter677);
++            oprot.writeString(_iter709);
 +          }
 +          oprot.writeListEnd();
 +        }
 +        oprot.writeFieldEnd();
 +      }
 +      oprot.writeFieldStop();
 +      oprot.writeStructEnd();
 +    }
 +
 +  }
 +
 +  private static class HBNodesTupleSchemeFactory implements SchemeFactory {
 +    public HBNodesTupleScheme getScheme() {
 +      return new HBNodesTupleScheme();
 +    }
 +  }
 +
 +  private static class HBNodesTupleScheme extends TupleScheme<HBNodes> {
 +
 +    @Override
 +    public void write(org.apache.thrift.protocol.TProtocol prot, HBNodes struct) throws org.apache.thrift.TException {
 +      TTupleProtocol oprot = (TTupleProtocol) prot;
 +      BitSet optionals = new BitSet();
 +      if (struct.is_set_pulseIds()) {
 +        optionals.set(0);
 +      }
 +      oprot.writeBitSet(optionals, 1);
 +      if (struct.is_set_pulseIds()) {
 +        {
 +          oprot.writeI32(struct.pulseIds.size());
-           for (String _iter678 : struct.pulseIds)
++          for (String _iter710 : struct.pulseIds)
 +          {
-             oprot.writeString(_iter678);
++            oprot.writeString(_iter710);
 +          }
 +        }
 +      }
 +    }
 +
 +    @Override
 +    public void read(org.apache.thrift.protocol.TProtocol prot, HBNodes struct) throws org.apache.thrift.TException {
 +      TTupleProtocol iprot = (TTupleProtocol) prot;
 +      BitSet incoming = iprot.readBitSet(1);
 +      if (incoming.get(0)) {
 +        {
-           org.apache.thrift.protocol.TList _list679 = new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRING, iprot.readI32());
-           struct.pulseIds = new ArrayList<String>(_list679.size);
-           String _elem680;
-           for (int _i681 = 0; _i681 < _list679.size; ++_i681)
++          org.apache.thrift.protocol.TList _list711 = new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRING, iprot.readI32());
++          struct.pulseIds = new ArrayList<String>(_list711.size);
++          String _elem712;
++          for (int _i713 = 0; _i713 < _list711.size; ++_i713)
 +          {
-             _elem680 = iprot.readString();
-             struct.pulseIds.add(_elem680);
++            _elem712 = iprot.readString();
++            struct.pulseIds.add(_elem712);
 +          }
 +        }
 +        struct.set_pulseIds_isSet(true);
 +      }
 +    }
 +  }
 +
 +}
 +
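
For all the generated bulk, using HBNodes is small: construct, populate, and hand serialization to a protocol. A sketch of a binary round trip using Thrift's stock TSerializer/TDeserializer helpers:

    import java.util.Arrays;
    import org.apache.thrift.TDeserializer;
    import org.apache.thrift.TSerializer;
    import org.apache.thrift.protocol.TBinaryProtocol;
    import backtype.storm.generated.HBNodes;

    public class HBNodesRoundTrip {
        public static void main(String[] args) throws Exception {
            HBNodes nodes = new HBNodes(Arrays.asList("/pulses/worker-1", "/pulses/worker-2"));

            // Encode with Thrift's binary protocol.
            byte[] wire = new TSerializer(new TBinaryProtocol.Factory()).serialize(nodes);

            // Decode into a fresh struct and confirm the field survived.
            HBNodes copy = new HBNodes();
            new TDeserializer(new TBinaryProtocol.Factory()).deserialize(copy, wire);
            assert copy.get_pulseIds().equals(nodes.get_pulseIds());
        }
    }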

http://git-wip-us.apache.org/repos/asf/storm/blob/4645c190/storm-core/src/jvm/backtype/storm/generated/HBPulse.java
----------------------------------------------------------------------
diff --cc storm-core/src/jvm/backtype/storm/generated/HBPulse.java
index e9afbfc,0000000..aa244f5
mode 100644,000000..100644
--- a/storm-core/src/jvm/backtype/storm/generated/HBPulse.java
+++ b/storm-core/src/jvm/backtype/storm/generated/HBPulse.java
@@@ -1,522 -1,0 +1,522 @@@
 +/**
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + * http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing, software
 + * distributed under the License is distributed on an "AS IS" BASIS,
 + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 + * See the License for the specific language governing permissions and
 + * limitations under the License.
 + */
 +/**
-  * Autogenerated by Thrift Compiler (0.9.2)
++ * Autogenerated by Thrift Compiler (0.9.3)
 + *
 + * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
 + *  @generated
 + */
 +package backtype.storm.generated;
 +
 +import org.apache.thrift.scheme.IScheme;
 +import org.apache.thrift.scheme.SchemeFactory;
 +import org.apache.thrift.scheme.StandardScheme;
 +
 +import org.apache.thrift.scheme.TupleScheme;
 +import org.apache.thrift.protocol.TTupleProtocol;
 +import org.apache.thrift.protocol.TProtocolException;
 +import org.apache.thrift.EncodingUtils;
 +import org.apache.thrift.TException;
 +import org.apache.thrift.async.AsyncMethodCallback;
 +import org.apache.thrift.server.AbstractNonblockingServer.*;
 +import java.util.List;
 +import java.util.ArrayList;
 +import java.util.Map;
 +import java.util.HashMap;
 +import java.util.EnumMap;
 +import java.util.Set;
 +import java.util.HashSet;
 +import java.util.EnumSet;
 +import java.util.Collections;
 +import java.util.BitSet;
 +import java.nio.ByteBuffer;
 +import java.util.Arrays;
 +import javax.annotation.Generated;
 +import org.slf4j.Logger;
 +import org.slf4j.LoggerFactory;
 +
 +@SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
- @Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-9")
++@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
 +public class HBPulse implements org.apache.thrift.TBase<HBPulse, HBPulse._Fields>, java.io.Serializable, Cloneable, Comparable<HBPulse> {
 +  private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("HBPulse");
 +
 +  private static final org.apache.thrift.protocol.TField ID_FIELD_DESC = new org.apache.thrift.protocol.TField("id", org.apache.thrift.protocol.TType.STRING, (short)1);
 +  private static final org.apache.thrift.protocol.TField DETAILS_FIELD_DESC = new org.apache.thrift.protocol.TField("details", org.apache.thrift.protocol.TType.STRING, (short)2);
 +
 +  private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>();
 +  static {
 +    schemes.put(StandardScheme.class, new HBPulseStandardSchemeFactory());
 +    schemes.put(TupleScheme.class, new HBPulseTupleSchemeFactory());
 +  }
 +
 +  private String id; // required
 +  private ByteBuffer details; // required
 +
 +  /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
 +  public enum _Fields implements org.apache.thrift.TFieldIdEnum {
 +    ID((short)1, "id"),
 +    DETAILS((short)2, "details");
 +
 +    private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();
 +
 +    static {
 +      for (_Fields field : EnumSet.allOf(_Fields.class)) {
 +        byName.put(field.getFieldName(), field);
 +      }
 +    }
 +
 +    /**
 +     * Find the _Fields constant that matches fieldId, or null if it's not found.
 +     */
 +    public static _Fields findByThriftId(int fieldId) {
 +      switch(fieldId) {
 +        case 1: // ID
 +          return ID;
 +        case 2: // DETAILS
 +          return DETAILS;
 +        default:
 +          return null;
 +      }
 +    }
 +
 +    /**
 +     * Find the _Fields constant that matches fieldId, throwing an exception
 +     * if it is not found.
 +     */
 +    public static _Fields findByThriftIdOrThrow(int fieldId) {
 +      _Fields fields = findByThriftId(fieldId);
 +      if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
 +      return fields;
 +    }
 +
 +    /**
 +     * Find the _Fields constant that matches name, or null if it's not found.
 +     */
 +    public static _Fields findByName(String name) {
 +      return byName.get(name);
 +    }
 +
 +    private final short _thriftId;
 +    private final String _fieldName;
 +
 +    _Fields(short thriftId, String fieldName) {
 +      _thriftId = thriftId;
 +      _fieldName = fieldName;
 +    }
 +
 +    public short getThriftFieldId() {
 +      return _thriftId;
 +    }
 +
 +    public String getFieldName() {
 +      return _fieldName;
 +    }
 +  }
 +
 +  // isset id assignments
 +  public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
 +  static {
 +    Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
 +    tmpMap.put(_Fields.ID, new org.apache.thrift.meta_data.FieldMetaData("id", org.apache.thrift.TFieldRequirementType.REQUIRED, 
 +        new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)));
 +    tmpMap.put(_Fields.DETAILS, new org.apache.thrift.meta_data.FieldMetaData("details", org.apache.thrift.TFieldRequirementType.DEFAULT, 
 +        new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING        , true)));
 +    metaDataMap = Collections.unmodifiableMap(tmpMap);
 +    org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(HBPulse.class, metaDataMap);
 +  }
 +
 +  public HBPulse() {
 +  }
 +
 +  public HBPulse(
 +    String id,
 +    ByteBuffer details)
 +  {
 +    this();
 +    this.id = id;
 +    this.details = org.apache.thrift.TBaseHelper.copyBinary(details);
 +  }
 +
 +  /**
 +   * Performs a deep copy on <i>other</i>.
 +   */
 +  public HBPulse(HBPulse other) {
 +    if (other.is_set_id()) {
 +      this.id = other.id;
 +    }
 +    if (other.is_set_details()) {
 +      this.details = org.apache.thrift.TBaseHelper.copyBinary(other.details);
 +    }
 +  }
 +
 +  public HBPulse deepCopy() {
 +    return new HBPulse(this);
 +  }
 +
 +  @Override
 +  public void clear() {
 +    this.id = null;
 +    this.details = null;
 +  }
 +
 +  public String get_id() {
 +    return this.id;
 +  }
 +
 +  public void set_id(String id) {
 +    this.id = id;
 +  }
 +
 +  public void unset_id() {
 +    this.id = null;
 +  }
 +
 +  /** Returns true if field id is set (has been assigned a value) and false otherwise */
 +  public boolean is_set_id() {
 +    return this.id != null;
 +  }
 +
 +  public void set_id_isSet(boolean value) {
 +    if (!value) {
 +      this.id = null;
 +    }
 +  }
 +
 +  public byte[] get_details() {
 +    set_details(org.apache.thrift.TBaseHelper.rightSize(details));
 +    return details == null ? null : details.array();
 +  }
 +
 +  public ByteBuffer buffer_for_details() {
 +    return org.apache.thrift.TBaseHelper.copyBinary(details);
 +  }
 +
 +  public void set_details(byte[] details) {
 +    this.details = details == null ? (ByteBuffer)null : ByteBuffer.wrap(Arrays.copyOf(details, details.length));
 +  }
 +
 +  public void set_details(ByteBuffer details) {
 +    this.details = org.apache.thrift.TBaseHelper.copyBinary(details);
 +  }
 +
 +  public void unset_details() {
 +    this.details = null;
 +  }
 +
 +  /** Returns true if field details is set (has been assigned a value) and false otherwise */
 +  public boolean is_set_details() {
 +    return this.details != null;
 +  }
 +
 +  public void set_details_isSet(boolean value) {
 +    if (!value) {
 +      this.details = null;
 +    }
 +  }
 +
 +  public void setFieldValue(_Fields field, Object value) {
 +    switch (field) {
 +    case ID:
 +      if (value == null) {
 +        unset_id();
 +      } else {
 +        set_id((String)value);
 +      }
 +      break;
 +
 +    case DETAILS:
 +      if (value == null) {
 +        unset_details();
 +      } else {
 +        set_details((ByteBuffer)value);
 +      }
 +      break;
 +
 +    }
 +  }
 +
 +  public Object getFieldValue(_Fields field) {
 +    switch (field) {
 +    case ID:
 +      return get_id();
 +
 +    case DETAILS:
 +      return get_details();
 +
 +    }
 +    throw new IllegalStateException();
 +  }
 +
 +  /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
 +  public boolean isSet(_Fields field) {
 +    if (field == null) {
 +      throw new IllegalArgumentException();
 +    }
 +
 +    switch (field) {
 +    case ID:
 +      return is_set_id();
 +    case DETAILS:
 +      return is_set_details();
 +    }
 +    throw new IllegalStateException();
 +  }
 +
 +  @Override
 +  public boolean equals(Object that) {
 +    if (that == null)
 +      return false;
 +    if (that instanceof HBPulse)
 +      return this.equals((HBPulse)that);
 +    return false;
 +  }
 +
 +  public boolean equals(HBPulse that) {
 +    if (that == null)
 +      return false;
 +
 +    boolean this_present_id = true && this.is_set_id();
 +    boolean that_present_id = true && that.is_set_id();
 +    if (this_present_id || that_present_id) {
 +      if (!(this_present_id && that_present_id))
 +        return false;
 +      if (!this.id.equals(that.id))
 +        return false;
 +    }
 +
 +    boolean this_present_details = true && this.is_set_details();
 +    boolean that_present_details = true && that.is_set_details();
 +    if (this_present_details || that_present_details) {
 +      if (!(this_present_details && that_present_details))
 +        return false;
 +      if (!this.details.equals(that.details))
 +        return false;
 +    }
 +
 +    return true;
 +  }
 +
 +  @Override
 +  public int hashCode() {
 +    List<Object> list = new ArrayList<Object>();
 +
 +    boolean present_id = true && (is_set_id());
 +    list.add(present_id);
 +    if (present_id)
 +      list.add(id);
 +
 +    boolean present_details = true && (is_set_details());
 +    list.add(present_details);
 +    if (present_details)
 +      list.add(details);
 +
 +    return list.hashCode();
 +  }
 +
 +  @Override
 +  public int compareTo(HBPulse other) {
 +    if (!getClass().equals(other.getClass())) {
 +      return getClass().getName().compareTo(other.getClass().getName());
 +    }
 +
 +    int lastComparison = 0;
 +
 +    lastComparison = Boolean.valueOf(is_set_id()).compareTo(other.is_set_id());
 +    if (lastComparison != 0) {
 +      return lastComparison;
 +    }
 +    if (is_set_id()) {
 +      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.id, other.id);
 +      if (lastComparison != 0) {
 +        return lastComparison;
 +      }
 +    }
 +    lastComparison = Boolean.valueOf(is_set_details()).compareTo(other.is_set_details());
 +    if (lastComparison != 0) {
 +      return lastComparison;
 +    }
 +    if (is_set_details()) {
 +      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.details, other.details);
 +      if (lastComparison != 0) {
 +        return lastComparison;
 +      }
 +    }
 +    return 0;
 +  }
 +
 +  public _Fields fieldForId(int fieldId) {
 +    return _Fields.findByThriftId(fieldId);
 +  }
 +
 +  public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException {
 +    schemes.get(iprot.getScheme()).getScheme().read(iprot, this);
 +  }
 +
 +  public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {
 +    schemes.get(oprot.getScheme()).getScheme().write(oprot, this);
 +  }
 +
 +  @Override
 +  public String toString() {
 +    StringBuilder sb = new StringBuilder("HBPulse(");
 +    boolean first = true;
 +
 +    sb.append("id:");
 +    if (this.id == null) {
 +      sb.append("null");
 +    } else {
 +      sb.append(this.id);
 +    }
 +    first = false;
 +    if (!first) sb.append(", ");
 +    sb.append("details:");
 +    if (this.details == null) {
 +      sb.append("null");
 +    } else {
 +      org.apache.thrift.TBaseHelper.toString(this.details, sb);
 +    }
 +    first = false;
 +    sb.append(")");
 +    return sb.toString();
 +  }
 +
 +  public void validate() throws org.apache.thrift.TException {
 +    // check for required fields
 +    if (!is_set_id()) {
 +      throw new org.apache.thrift.protocol.TProtocolException("Required field 'id' is unset! Struct:" + toString());
 +    }
 +
 +    // check for sub-struct validity
 +  }
 +
 +  private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
 +    try {
 +      write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out)));
 +    } catch (org.apache.thrift.TException te) {
 +      throw new java.io.IOException(te);
 +    }
 +  }
 +
 +  private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
 +    try {
 +      read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));
 +    } catch (org.apache.thrift.TException te) {
 +      throw new java.io.IOException(te);
 +    }
 +  }
 +
 +  private static class HBPulseStandardSchemeFactory implements SchemeFactory {
 +    public HBPulseStandardScheme getScheme() {
 +      return new HBPulseStandardScheme();
 +    }
 +  }
 +
 +  private static class HBPulseStandardScheme extends StandardScheme<HBPulse> {
 +
 +    public void read(org.apache.thrift.protocol.TProtocol iprot, HBPulse struct) throws org.apache.thrift.TException {
 +      org.apache.thrift.protocol.TField schemeField;
 +      iprot.readStructBegin();
 +      while (true)
 +      {
 +        schemeField = iprot.readFieldBegin();
 +        if (schemeField.type == org.apache.thrift.protocol.TType.STOP) { 
 +          break;
 +        }
 +        switch (schemeField.id) {
 +          case 1: // ID
 +            if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {
 +              struct.id = iprot.readString();
 +              struct.set_id_isSet(true);
 +            } else { 
 +              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
 +            }
 +            break;
 +          case 2: // DETAILS
 +            if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {
 +              struct.details = iprot.readBinary();
 +              struct.set_details_isSet(true);
 +            } else { 
 +              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
 +            }
 +            break;
 +          default:
 +            org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
 +        }
 +        iprot.readFieldEnd();
 +      }
 +      iprot.readStructEnd();
 +      struct.validate();
 +    }
 +
 +    public void write(org.apache.thrift.protocol.TProtocol oprot, HBPulse struct) throws org.apache.thrift.TException {
 +      struct.validate();
 +
 +      oprot.writeStructBegin(STRUCT_DESC);
 +      if (struct.id != null) {
 +        oprot.writeFieldBegin(ID_FIELD_DESC);
 +        oprot.writeString(struct.id);
 +        oprot.writeFieldEnd();
 +      }
 +      if (struct.details != null) {
 +        oprot.writeFieldBegin(DETAILS_FIELD_DESC);
 +        oprot.writeBinary(struct.details);
 +        oprot.writeFieldEnd();
 +      }
 +      oprot.writeFieldStop();
 +      oprot.writeStructEnd();
 +    }
 +
 +  }
 +
 +  private static class HBPulseTupleSchemeFactory implements SchemeFactory {
 +    public HBPulseTupleScheme getScheme() {
 +      return new HBPulseTupleScheme();
 +    }
 +  }
 +
 +  private static class HBPulseTupleScheme extends TupleScheme<HBPulse> {
 +
 +    @Override
 +    public void write(org.apache.thrift.protocol.TProtocol prot, HBPulse struct) throws org.apache.thrift.TException {
 +      TTupleProtocol oprot = (TTupleProtocol) prot;
 +      oprot.writeString(struct.id);
 +      BitSet optionals = new BitSet();
 +      if (struct.is_set_details()) {
 +        optionals.set(0);
 +      }
 +      oprot.writeBitSet(optionals, 1);
 +      if (struct.is_set_details()) {
 +        oprot.writeBinary(struct.details);
 +      }
 +    }
 +
 +    @Override
 +    public void read(org.apache.thrift.protocol.TProtocol prot, HBPulse struct) throws org.apache.thrift.TException {
 +      TTupleProtocol iprot = (TTupleProtocol) prot;
 +      struct.id = iprot.readString();
 +      struct.set_id_isSet(true);
 +      BitSet incoming = iprot.readBitSet(1);
 +      if (incoming.get(0)) {
 +        struct.details = iprot.readBinary();
 +        struct.set_details_isSet(true);
 +      }
 +    }
 +  }
 +
 +}
 +
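
A quick illustration of the generated HBPulse API above: 'id' is a required string and 'details' an optional binary payload, and the standard scheme's write() calls validate(), so serialization fails while 'id' is unset. A minimal round-trip sketch, assuming libthrift's TSerializer/TDeserializer and this generated class on the classpath (the id string and payload bytes are illustrative, not part of the commit):

    import java.nio.ByteBuffer;
    import org.apache.thrift.TSerializer;
    import org.apache.thrift.TDeserializer;
    import org.apache.thrift.protocol.TBinaryProtocol;
    import backtype.storm.generated.HBPulse;

    public class HBPulseRoundTrip {
        public static void main(String[] args) throws Exception {
            HBPulse pulse = new HBPulse();
            pulse.set_id("/pulses/worker-1");                            // required field
            pulse.set_details(ByteBuffer.wrap("hb".getBytes("UTF-8")));  // optional binary blob

            // write() calls validate(), which throws TProtocolException if 'id' is unset.
            byte[] wire = new TSerializer(new TBinaryProtocol.Factory()).serialize(pulse);

            HBPulse copy = new HBPulse();
            new TDeserializer(new TBinaryProtocol.Factory()).deserialize(copy, wire);
            assert pulse.equals(copy); // equals() compares set-ness and value of both fields
        }
    }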

http://git-wip-us.apache.org/repos/asf/storm/blob/4645c190/storm-core/src/jvm/backtype/storm/generated/HBRecords.java
----------------------------------------------------------------------
diff --cc storm-core/src/jvm/backtype/storm/generated/HBRecords.java
index 90ee8bd,0000000..4767068
mode 100644,000000..100644
--- a/storm-core/src/jvm/backtype/storm/generated/HBRecords.java
+++ b/storm-core/src/jvm/backtype/storm/generated/HBRecords.java
@@@ -1,466 -1,0 +1,466 @@@
 +/**
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + * http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing, software
 + * distributed under the License is distributed on an "AS IS" BASIS,
 + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 + * See the License for the specific language governing permissions and
 + * limitations under the License.
 + */
 +/**
-  * Autogenerated by Thrift Compiler (0.9.2)
++ * Autogenerated by Thrift Compiler (0.9.3)
 + *
 + * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
 + *  @generated
 + */
 +package backtype.storm.generated;
 +
 +import org.apache.thrift.scheme.IScheme;
 +import org.apache.thrift.scheme.SchemeFactory;
 +import org.apache.thrift.scheme.StandardScheme;
 +
 +import org.apache.thrift.scheme.TupleScheme;
 +import org.apache.thrift.protocol.TTupleProtocol;
 +import org.apache.thrift.protocol.TProtocolException;
 +import org.apache.thrift.EncodingUtils;
 +import org.apache.thrift.TException;
 +import org.apache.thrift.async.AsyncMethodCallback;
 +import org.apache.thrift.server.AbstractNonblockingServer.*;
 +import java.util.List;
 +import java.util.ArrayList;
 +import java.util.Map;
 +import java.util.HashMap;
 +import java.util.EnumMap;
 +import java.util.Set;
 +import java.util.HashSet;
 +import java.util.EnumSet;
 +import java.util.Collections;
 +import java.util.BitSet;
 +import java.nio.ByteBuffer;
 +import java.util.Arrays;
 +import javax.annotation.Generated;
 +import org.slf4j.Logger;
 +import org.slf4j.LoggerFactory;
 +
 +@SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
- @Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-9")
++@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
 +public class HBRecords implements org.apache.thrift.TBase<HBRecords, HBRecords._Fields>, java.io.Serializable, Cloneable, Comparable<HBRecords> {
 +  private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("HBRecords");
 +
 +  private static final org.apache.thrift.protocol.TField PULSES_FIELD_DESC = new org.apache.thrift.protocol.TField("pulses", org.apache.thrift.protocol.TType.LIST, (short)1);
 +
 +  private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>();
 +  static {
 +    schemes.put(StandardScheme.class, new HBRecordsStandardSchemeFactory());
 +    schemes.put(TupleScheme.class, new HBRecordsTupleSchemeFactory());
 +  }
 +
 +  private List<HBPulse> pulses; // required
 +
 +  /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
 +  public enum _Fields implements org.apache.thrift.TFieldIdEnum {
 +    PULSES((short)1, "pulses");
 +
 +    private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();
 +
 +    static {
 +      for (_Fields field : EnumSet.allOf(_Fields.class)) {
 +        byName.put(field.getFieldName(), field);
 +      }
 +    }
 +
 +    /**
 +     * Find the _Fields constant that matches fieldId, or null if it's not found.
 +     */
 +    public static _Fields findByThriftId(int fieldId) {
 +      switch(fieldId) {
 +        case 1: // PULSES
 +          return PULSES;
 +        default:
 +          return null;
 +      }
 +    }
 +
 +    /**
 +     * Find the _Fields constant that matches fieldId, throwing an exception
 +     * if it is not found.
 +     */
 +    public static _Fields findByThriftIdOrThrow(int fieldId) {
 +      _Fields fields = findByThriftId(fieldId);
 +      if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
 +      return fields;
 +    }
 +
 +    /**
 +     * Find the _Fields constant that matches name, or null if it's not found.
 +     */
 +    public static _Fields findByName(String name) {
 +      return byName.get(name);
 +    }
 +
 +    private final short _thriftId;
 +    private final String _fieldName;
 +
 +    _Fields(short thriftId, String fieldName) {
 +      _thriftId = thriftId;
 +      _fieldName = fieldName;
 +    }
 +
 +    public short getThriftFieldId() {
 +      return _thriftId;
 +    }
 +
 +    public String getFieldName() {
 +      return _fieldName;
 +    }
 +  }
 +
 +  // isset id assignments
 +  public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
 +  static {
 +    Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
 +    tmpMap.put(_Fields.PULSES, new org.apache.thrift.meta_data.FieldMetaData("pulses", org.apache.thrift.TFieldRequirementType.DEFAULT, 
 +        new org.apache.thrift.meta_data.ListMetaData(org.apache.thrift.protocol.TType.LIST, 
 +            new org.apache.thrift.meta_data.StructMetaData(org.apache.thrift.protocol.TType.STRUCT, HBPulse.class))));
 +    metaDataMap = Collections.unmodifiableMap(tmpMap);
 +    org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(HBRecords.class, metaDataMap);
 +  }
 +
 +  public HBRecords() {
 +  }
 +
 +  public HBRecords(
 +    List<HBPulse> pulses)
 +  {
 +    this();
 +    this.pulses = pulses;
 +  }
 +
 +  /**
 +   * Performs a deep copy on <i>other</i>.
 +   */
 +  public HBRecords(HBRecords other) {
 +    if (other.is_set_pulses()) {
 +      List<HBPulse> __this__pulses = new ArrayList<HBPulse>(other.pulses.size());
 +      for (HBPulse other_element : other.pulses) {
 +        __this__pulses.add(new HBPulse(other_element));
 +      }
 +      this.pulses = __this__pulses;
 +    }
 +  }
 +
 +  public HBRecords deepCopy() {
 +    return new HBRecords(this);
 +  }
 +
 +  @Override
 +  public void clear() {
 +    this.pulses = null;
 +  }
 +
 +  public int get_pulses_size() {
 +    return (this.pulses == null) ? 0 : this.pulses.size();
 +  }
 +
 +  public java.util.Iterator<HBPulse> get_pulses_iterator() {
 +    return (this.pulses == null) ? null : this.pulses.iterator();
 +  }
 +
 +  public void add_to_pulses(HBPulse elem) {
 +    if (this.pulses == null) {
 +      this.pulses = new ArrayList<HBPulse>();
 +    }
 +    this.pulses.add(elem);
 +  }
 +
 +  public List<HBPulse> get_pulses() {
 +    return this.pulses;
 +  }
 +
 +  public void set_pulses(List<HBPulse> pulses) {
 +    this.pulses = pulses;
 +  }
 +
 +  public void unset_pulses() {
 +    this.pulses = null;
 +  }
 +
 +  /** Returns true if field pulses is set (has been assigned a value) and false otherwise */
 +  public boolean is_set_pulses() {
 +    return this.pulses != null;
 +  }
 +
 +  public void set_pulses_isSet(boolean value) {
 +    if (!value) {
 +      this.pulses = null;
 +    }
 +  }
 +
 +  public void setFieldValue(_Fields field, Object value) {
 +    switch (field) {
 +    case PULSES:
 +      if (value == null) {
 +        unset_pulses();
 +      } else {
 +        set_pulses((List<HBPulse>)value);
 +      }
 +      break;
 +
 +    }
 +  }
 +
 +  public Object getFieldValue(_Fields field) {
 +    switch (field) {
 +    case PULSES:
 +      return get_pulses();
 +
 +    }
 +    throw new IllegalStateException();
 +  }
 +
 +  /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
 +  public boolean isSet(_Fields field) {
 +    if (field == null) {
 +      throw new IllegalArgumentException();
 +    }
 +
 +    switch (field) {
 +    case PULSES:
 +      return is_set_pulses();
 +    }
 +    throw new IllegalStateException();
 +  }
 +
 +  @Override
 +  public boolean equals(Object that) {
 +    if (that == null)
 +      return false;
 +    if (that instanceof HBRecords)
 +      return this.equals((HBRecords)that);
 +    return false;
 +  }
 +
 +  public boolean equals(HBRecords that) {
 +    if (that == null)
 +      return false;
 +
 +    boolean this_present_pulses = true && this.is_set_pulses();
 +    boolean that_present_pulses = true && that.is_set_pulses();
 +    if (this_present_pulses || that_present_pulses) {
 +      if (!(this_present_pulses && that_present_pulses))
 +        return false;
 +      if (!this.pulses.equals(that.pulses))
 +        return false;
 +    }
 +
 +    return true;
 +  }
 +
 +  @Override
 +  public int hashCode() {
 +    List<Object> list = new ArrayList<Object>();
 +
 +    boolean present_pulses = true && (is_set_pulses());
 +    list.add(present_pulses);
 +    if (present_pulses)
 +      list.add(pulses);
 +
 +    return list.hashCode();
 +  }
 +
 +  @Override
 +  public int compareTo(HBRecords other) {
 +    if (!getClass().equals(other.getClass())) {
 +      return getClass().getName().compareTo(other.getClass().getName());
 +    }
 +
 +    int lastComparison = 0;
 +
 +    lastComparison = Boolean.valueOf(is_set_pulses()).compareTo(other.is_set_pulses());
 +    if (lastComparison != 0) {
 +      return lastComparison;
 +    }
 +    if (is_set_pulses()) {
 +      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.pulses, other.pulses);
 +      if (lastComparison != 0) {
 +        return lastComparison;
 +      }
 +    }
 +    return 0;
 +  }
 +
 +  public _Fields fieldForId(int fieldId) {
 +    return _Fields.findByThriftId(fieldId);
 +  }
 +
 +  public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException {
 +    schemes.get(iprot.getScheme()).getScheme().read(iprot, this);
 +  }
 +
 +  public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {
 +    schemes.get(oprot.getScheme()).getScheme().write(oprot, this);
 +  }
 +
 +  @Override
 +  public String toString() {
 +    StringBuilder sb = new StringBuilder("HBRecords(");
 +    boolean first = true;
 +
 +    sb.append("pulses:");
 +    if (this.pulses == null) {
 +      sb.append("null");
 +    } else {
 +      sb.append(this.pulses);
 +    }
 +    first = false;
 +    sb.append(")");
 +    return sb.toString();
 +  }
 +
 +  public void validate() throws org.apache.thrift.TException {
 +    // check for required fields
 +    // check for sub-struct validity
 +  }
 +
 +  private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
 +    try {
 +      write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out)));
 +    } catch (org.apache.thrift.TException te) {
 +      throw new java.io.IOException(te);
 +    }
 +  }
 +
 +  private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
 +    try {
 +      read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));
 +    } catch (org.apache.thrift.TException te) {
 +      throw new java.io.IOException(te);
 +    }
 +  }
 +
 +  private static class HBRecordsStandardSchemeFactory implements SchemeFactory {
 +    public HBRecordsStandardScheme getScheme() {
 +      return new HBRecordsStandardScheme();
 +    }
 +  }
 +
 +  private static class HBRecordsStandardScheme extends StandardScheme<HBRecords> {
 +
 +    public void read(org.apache.thrift.protocol.TProtocol iprot, HBRecords struct) throws org.apache.thrift.TException {
 +      org.apache.thrift.protocol.TField schemeField;
 +      iprot.readStructBegin();
 +      while (true)
 +      {
 +        schemeField = iprot.readFieldBegin();
 +        if (schemeField.type == org.apache.thrift.protocol.TType.STOP) { 
 +          break;
 +        }
 +        switch (schemeField.id) {
 +          case 1: // PULSES
 +            if (schemeField.type == org.apache.thrift.protocol.TType.LIST) {
 +              {
-                 org.apache.thrift.protocol.TList _list666 = iprot.readListBegin();
-                 struct.pulses = new ArrayList<HBPulse>(_list666.size);
-                 HBPulse _elem667;
-                 for (int _i668 = 0; _i668 < _list666.size; ++_i668)
++                org.apache.thrift.protocol.TList _list698 = iprot.readListBegin();
++                struct.pulses = new ArrayList<HBPulse>(_list698.size);
++                HBPulse _elem699;
++                for (int _i700 = 0; _i700 < _list698.size; ++_i700)
 +                {
-                   _elem667 = new HBPulse();
-                   _elem667.read(iprot);
-                   struct.pulses.add(_elem667);
++                  _elem699 = new HBPulse();
++                  _elem699.read(iprot);
++                  struct.pulses.add(_elem699);
 +                }
 +                iprot.readListEnd();
 +              }
 +              struct.set_pulses_isSet(true);
 +            } else { 
 +              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
 +            }
 +            break;
 +          default:
 +            org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
 +        }
 +        iprot.readFieldEnd();
 +      }
 +      iprot.readStructEnd();
 +      struct.validate();
 +    }
 +
 +    public void write(org.apache.thrift.protocol.TProtocol oprot, HBRecords struct) throws org.apache.thrift.TException {
 +      struct.validate();
 +
 +      oprot.writeStructBegin(STRUCT_DESC);
 +      if (struct.pulses != null) {
 +        oprot.writeFieldBegin(PULSES_FIELD_DESC);
 +        {
 +          oprot.writeListBegin(new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRUCT, struct.pulses.size()));
-           for (HBPulse _iter669 : struct.pulses)
++          for (HBPulse _iter701 : struct.pulses)
 +          {
-             _iter669.write(oprot);
++            _iter701.write(oprot);
 +          }
 +          oprot.writeListEnd();
 +        }
 +        oprot.writeFieldEnd();
 +      }
 +      oprot.writeFieldStop();
 +      oprot.writeStructEnd();
 +    }
 +
 +  }
 +
 +  private static class HBRecordsTupleSchemeFactory implements SchemeFactory {
 +    public HBRecordsTupleScheme getScheme() {
 +      return new HBRecordsTupleScheme();
 +    }
 +  }
 +
 +  private static class HBRecordsTupleScheme extends TupleScheme<HBRecords> {
 +
 +    @Override
 +    public void write(org.apache.thrift.protocol.TProtocol prot, HBRecords struct) throws org.apache.thrift.TException {
 +      TTupleProtocol oprot = (TTupleProtocol) prot;
 +      BitSet optionals = new BitSet();
 +      if (struct.is_set_pulses()) {
 +        optionals.set(0);
 +      }
 +      oprot.writeBitSet(optionals, 1);
 +      if (struct.is_set_pulses()) {
 +        {
 +          oprot.writeI32(struct.pulses.size());
-           for (HBPulse _iter670 : struct.pulses)
++          for (HBPulse _iter702 : struct.pulses)
 +          {
-             _iter670.write(oprot);
++            _iter702.write(oprot);
 +          }
 +        }
 +      }
 +    }
 +
 +    @Override
 +    public void read(org.apache.thrift.protocol.TProtocol prot, HBRecords struct) throws org.apache.thrift.TException {
 +      TTupleProtocol iprot = (TTupleProtocol) prot;
 +      BitSet incoming = iprot.readBitSet(1);
 +      if (incoming.get(0)) {
 +        {
-           org.apache.thrift.protocol.TList _list671 = new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRUCT, iprot.readI32());
-           struct.pulses = new ArrayList<HBPulse>(_list671.size);
-           HBPulse _elem672;
-           for (int _i673 = 0; _i673 < _list671.size; ++_i673)
++          org.apache.thrift.protocol.TList _list703 = new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRUCT, iprot.readI32());
++          struct.pulses = new ArrayList<HBPulse>(_list703.size);
++          HBPulse _elem704;
++          for (int _i705 = 0; _i705 < _list703.size; ++_i705)
 +          {
-             _elem672 = new HBPulse();
-             _elem672.read(iprot);
-             struct.pulses.add(_elem672);
++            _elem704 = new HBPulse();
++            _elem704.read(iprot);
++            struct.pulses.add(_elem704);
 +          }
 +        }
 +        struct.set_pulses_isSet(true);
 +      }
 +    }
 +  }
 +
 +}
 +
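
HBRecords above is a thin wrapper around a list of HBPulse: 'pulses' is a DEFAULT (non-required) field, add_to_pulses() lazily allocates the backing list, and the copy constructor deep-copies every element. A short sketch (class name and pulse id are illustrative):

    import backtype.storm.generated.HBPulse;
    import backtype.storm.generated.HBRecords;

    public class HBRecordsDemo {
        public static void main(String[] args) {
            HBRecords records = new HBRecords();      // 'pulses' starts unset (null)
            HBPulse pulse = new HBPulse();
            pulse.set_id("/pulses/worker-1");
            records.add_to_pulses(pulse);             // allocates the ArrayList on first add
            HBRecords copy = new HBRecords(records);  // deep copy: each HBPulse is cloned
            assert copy.get_pulses_size() == 1 && copy.equals(records);
        }
    }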

http://git-wip-us.apache.org/repos/asf/storm/blob/4645c190/storm-core/src/jvm/backtype/storm/generated/HBServerMessageType.java
----------------------------------------------------------------------
diff --cc storm-core/src/jvm/backtype/storm/generated/HBServerMessageType.java
index 6f940c2,0000000..3cafc14
mode 100644,000000..100644
--- a/storm-core/src/jvm/backtype/storm/generated/HBServerMessageType.java
+++ b/storm-core/src/jvm/backtype/storm/generated/HBServerMessageType.java
@@@ -1,113 -1,0 +1,113 @@@
 +/**
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + * http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing, software
 + * distributed under the License is distributed on an "AS IS" BASIS,
 + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 + * See the License for the specific language governing permissions and
 + * limitations under the License.
 + */
 +/**
-  * Autogenerated by Thrift Compiler (0.9.2)
++ * Autogenerated by Thrift Compiler (0.9.3)
 + *
 + * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
 + *  @generated
 + */
 +package backtype.storm.generated;
 +
 +
 +import java.util.Map;
 +import java.util.HashMap;
 +import org.apache.thrift.TEnum;
 +
 +public enum HBServerMessageType implements org.apache.thrift.TEnum {
 +  CREATE_PATH(0),
 +  CREATE_PATH_RESPONSE(1),
 +  EXISTS(2),
 +  EXISTS_RESPONSE(3),
 +  SEND_PULSE(4),
 +  SEND_PULSE_RESPONSE(5),
 +  GET_ALL_PULSE_FOR_PATH(6),
 +  GET_ALL_PULSE_FOR_PATH_RESPONSE(7),
 +  GET_ALL_NODES_FOR_PATH(8),
 +  GET_ALL_NODES_FOR_PATH_RESPONSE(9),
 +  GET_PULSE(10),
 +  GET_PULSE_RESPONSE(11),
 +  DELETE_PATH(12),
 +  DELETE_PATH_RESPONSE(13),
 +  DELETE_PULSE_ID(14),
 +  DELETE_PULSE_ID_RESPONSE(15),
 +  CONTROL_MESSAGE(16),
 +  SASL_MESSAGE_TOKEN(17),
 +  NOT_AUTHORIZED(18);
 +
 +  private final int value;
 +
 +  private HBServerMessageType(int value) {
 +    this.value = value;
 +  }
 +
 +  /**
 +   * Get the integer value of this enum value, as defined in the Thrift IDL.
 +   */
 +  public int getValue() {
 +    return value;
 +  }
 +
 +  /**
 +   * Find the enum type by its integer value, as defined in the Thrift IDL.
 +   * @return null if the value is not found.
 +   */
 +  public static HBServerMessageType findByValue(int value) { 
 +    switch (value) {
 +      case 0:
 +        return CREATE_PATH;
 +      case 1:
 +        return CREATE_PATH_RESPONSE;
 +      case 2:
 +        return EXISTS;
 +      case 3:
 +        return EXISTS_RESPONSE;
 +      case 4:
 +        return SEND_PULSE;
 +      case 5:
 +        return SEND_PULSE_RESPONSE;
 +      case 6:
 +        return GET_ALL_PULSE_FOR_PATH;
 +      case 7:
 +        return GET_ALL_PULSE_FOR_PATH_RESPONSE;
 +      case 8:
 +        return GET_ALL_NODES_FOR_PATH;
 +      case 9:
 +        return GET_ALL_NODES_FOR_PATH_RESPONSE;
 +      case 10:
 +        return GET_PULSE;
 +      case 11:
 +        return GET_PULSE_RESPONSE;
 +      case 12:
 +        return DELETE_PATH;
 +      case 13:
 +        return DELETE_PATH_RESPONSE;
 +      case 14:
 +        return DELETE_PULSE_ID;
 +      case 15:
 +        return DELETE_PULSE_ID_RESPONSE;
 +      case 16:
 +        return CONTROL_MESSAGE;
 +      case 17:
 +        return SASL_MESSAGE_TOKEN;
 +      case 18:
 +        return NOT_AUTHORIZED;
 +      default:
 +        return null;
 +    }
 +  }
 +}
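
Note that findByValue() above returns null for an unrecognized ordinal rather than throwing, so decoders should check explicitly, for instance when a newer peer sends a message type this build does not know about. A hedged sketch (the wire value is illustrative):

    import backtype.storm.generated.HBServerMessageType;

    public class DecodeMessageType {
        public static void main(String[] args) {
            int wireValue = 4; // illustrative: as read from an incoming frame
            HBServerMessageType type = HBServerMessageType.findByValue(wireValue);
            if (type == null) {
                // Unknown ordinal: findByValue() returns null instead of throwing.
                throw new IllegalArgumentException("Unknown HBServerMessageType: " + wireValue);
            }
            assert type == HBServerMessageType.SEND_PULSE && type.getValue() == 4;
        }
    }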

http://git-wip-us.apache.org/repos/asf/storm/blob/4645c190/storm-core/src/jvm/backtype/storm/generated/Nimbus.java
----------------------------------------------------------------------
diff --cc storm-core/src/jvm/backtype/storm/generated/Nimbus.java
index b0d007b,f8a94ab..360dba5
--- a/storm-core/src/jvm/backtype/storm/generated/Nimbus.java
+++ b/storm-core/src/jvm/backtype/storm/generated/Nimbus.java
@@@ -15611,14 -15611,14 +15611,14 @@@ public class Nimbus 
              case 0: // SUCCESS
                if (schemeField.type == org.apache.thrift.protocol.TType.LIST) {
                  {
--                  org.apache.thrift.protocol.TList _list698 = iprot.readListBegin();
--                  struct.success = new ArrayList<ProfileRequest>(_list698.size);
--                  ProfileRequest _elem699;
--                  for (int _i700 = 0; _i700 < _list698.size; ++_i700)
++                  org.apache.thrift.protocol.TList _list714 = iprot.readListBegin();
++                  struct.success = new ArrayList<ProfileRequest>(_list714.size);
++                  ProfileRequest _elem715;
++                  for (int _i716 = 0; _i716 < _list714.size; ++_i716)
                    {
--                    _elem699 = new ProfileRequest();
--                    _elem699.read(iprot);
--                    struct.success.add(_elem699);
++                    _elem715 = new ProfileRequest();
++                    _elem715.read(iprot);
++                    struct.success.add(_elem715);
                    }
                    iprot.readListEnd();
                  }
@@@ -15644,9 -15644,9 +15644,9 @@@
            oprot.writeFieldBegin(SUCCESS_FIELD_DESC);
            {
              oprot.writeListBegin(new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRUCT, struct.success.size()));
--            for (ProfileRequest _iter701 : struct.success)
++            for (ProfileRequest _iter717 : struct.success)
              {
--              _iter701.write(oprot);
++              _iter717.write(oprot);
              }
              oprot.writeListEnd();
            }
@@@ -15677,9 -15677,9 +15677,9 @@@
          if (struct.is_set_success()) {
            {
              oprot.writeI32(struct.success.size());
--            for (ProfileRequest _iter702 : struct.success)
++            for (ProfileRequest _iter718 : struct.success)
              {
--              _iter702.write(oprot);
++              _iter718.write(oprot);
              }
            }
          }
@@@ -15691,14 -15691,14 +15691,14 @@@
          BitSet incoming = iprot.readBitSet(1);
          if (incoming.get(0)) {
            {
--            org.apache.thrift.protocol.TList _list703 = new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRUCT, iprot.readI32());
--            struct.success = new ArrayList<ProfileRequest>(_list703.size);
--            ProfileRequest _elem704;
--            for (int _i705 = 0; _i705 < _list703.size; ++_i705)
++            org.apache.thrift.protocol.TList _list719 = new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRUCT, iprot.readI32());
++            struct.success = new ArrayList<ProfileRequest>(_list719.size);
++            ProfileRequest _elem720;
++            for (int _i721 = 0; _i721 < _list719.size; ++_i721)
              {
--              _elem704 = new ProfileRequest();
--              _elem704.read(iprot);
--              struct.success.add(_elem704);
++              _elem720 = new ProfileRequest();
++              _elem720.read(iprot);
++              struct.success.add(_elem720);
              }
            }
            struct.set_success_isSet(true);

http://git-wip-us.apache.org/repos/asf/storm/blob/4645c190/storm-core/src/jvm/backtype/storm/messaging/netty/Client.java
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/storm/blob/4645c190/storm-core/src/jvm/backtype/storm/messaging/netty/Server.java
----------------------------------------------------------------------
diff --cc storm-core/src/jvm/backtype/storm/messaging/netty/Server.java
index 5f23064,bca3936..684e9ea
--- a/storm-core/src/jvm/backtype/storm/messaging/netty/Server.java
+++ b/storm-core/src/jvm/backtype/storm/messaging/netty/Server.java
@@@ -47,8 -32,25 +47,14 @@@ import java.util.concurrent.LinkedBlock
  import java.util.concurrent.ThreadFactory;
  import java.io.IOException;
  
 -import org.jboss.netty.bootstrap.ServerBootstrap;
 -import org.jboss.netty.channel.Channel;
 -import org.jboss.netty.channel.ChannelFactory;
 -import org.jboss.netty.channel.group.ChannelGroup;
 -import org.jboss.netty.channel.group.DefaultChannelGroup;
 -import org.jboss.netty.channel.socket.nio.NioServerSocketChannelFactory;
 -
 -import org.slf4j.Logger;
 -import org.slf4j.LoggerFactory;
  
 -import backtype.storm.Config;
 -import backtype.storm.grouping.Load;
  import backtype.storm.messaging.ConnectionWithStatus;
+ import backtype.storm.messaging.IConnection;
+ import backtype.storm.messaging.IConnectionCallback;
+ import backtype.storm.messaging.TaskMessage;
+ import backtype.storm.metric.api.IStatefulObject;
+ import backtype.storm.serialization.KryoValuesSerializer;
+ import backtype.storm.utils.Utils;
  
  class Server extends ConnectionWithStatus implements IStatefulObject, ISaslServer {
  
@@@ -124,48 -107,7 +111,7 @@@
          Channel channel = bootstrap.bind(new InetSocketAddress(port));
          allChannels.add(channel);
      }
- 
-     private ArrayList<TaskMessage>[] groupMessages(List<TaskMessage> msgs) {
-         ArrayList<TaskMessage> messageGroups[] = new ArrayList[queueCount];
- 
-         for (TaskMessage message : msgs) {
-             int task = message.task();
- 
-             if (task == -1) {
-                 closing = true;
-                 return null;
-             }
- 
-             Integer queueId = getMessageQueueId(task);
- 
-             if (null == messageGroups[queueId]) {
-                 messageGroups[queueId] = new ArrayList<>();
-             }
-             messageGroups[queueId].add(message);
-         }
-         return messageGroups;
-     }
- 
-     private Integer getMessageQueueId(int task) {
-         // try to construct the map from taskId -> queueId in round robin manner.
-         Integer queueId = taskToQueueId.get(task);
-         if (null == queueId) {
-             synchronized (this) {
-                 queueId = taskToQueueId.get(task);
-                 if (queueId == null) {
-                     queueId = roundRobinQueueId++;
-                     if (roundRobinQueueId == queueCount) {
-                         roundRobinQueueId = 0;
-                     }
-                     HashMap<Integer, Integer> newRef = new HashMap<>(taskToQueueId);
-                     newRef.put(task, queueId);
-                     taskToQueueId = newRef;
-                 }
-             }
-         }
-         return queueId;
-     }
- 
 - 
++    
      private void addReceiveCount(String from, int amount) {
          //This is possibly lossy in the case where a value is deleted
          // because it has received no messages over the metrics collection
@@@ -208,35 -140,11 +144,11 @@@
          }
      }
  
-     public Iterator<TaskMessage> recv(int flags, int receiverId)  {
-         if (closing) {
-             return closeMessage.iterator();
-         }
- 
-         ArrayList<TaskMessage> ret;
-         int queueId = receiverId % queueCount;
-         if ((flags & 0x01) == 0x01) {
-             //non-blocking
-             ret = message_queue[queueId].poll();
-         } else {
-             try {
-                 ArrayList<TaskMessage> request = message_queue[queueId].take();
-                 LOG.debug("request to be processed: {}", request);
-                 ret = request;
-             } catch (InterruptedException e) {
-                 LOG.info("exception within msg receiving", e);
-                 ret = null;
-             }
-         }
- 
-         if (null != ret) {
-             messagesDequeued.addAndGet(ret.size());
-             pendingMessages[queueId].addAndGet(0 - ret.size());
-             return ret.iterator();
-         }
-         return null;
+     @Override
+     public void registerRecv(IConnectionCallback cb) {
+         _cb = cb;
      }
 -   
 +
      /**
       * register a newly created channel
       * @param channel newly created channel

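
The Server.java hunk above swaps the pull-style recv(flags, receiverId) loop for a push-style registerRecv(IConnectionCallback). The callback interface itself is not shown in this diff; assuming it declares a single recv(List<TaskMessage>) method, a minimal implementation might look like the sketch below, wired up via server.registerRecv(new CountingCallback()):

    import java.util.List;
    import backtype.storm.messaging.IConnectionCallback;
    import backtype.storm.messaging.TaskMessage;

    // Sketch only: assumes IConnectionCallback exposes the single method below.
    public class CountingCallback implements IConnectionCallback {
        private long received = 0;

        @Override
        public void recv(List<TaskMessage> batch) {
            // The server now pushes each decoded batch here, instead of
            // callers draining message_queue through recv().
            received += batch.size();
            for (TaskMessage m : batch) {
                System.out.println("task " + m.task() + ": " + m.message().length + " bytes");
            }
        }
    }
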
http://git-wip-us.apache.org/repos/asf/storm/blob/4645c190/storm-core/src/py/storm/Nimbus.py
----------------------------------------------------------------------
diff --cc storm-core/src/py/storm/Nimbus.py
index a2d4c0b,14e5f2e..c1e1b02
--- a/storm-core/src/py/storm/Nimbus.py
+++ b/storm-core/src/py/storm/Nimbus.py
@@@ -3566,11 -3811,11 +3811,11 @@@ class getComponentPendingProfileActions
        if fid == 0:
          if ftype == TType.LIST:
            self.success = []
--          (_etype627, _size624) = iprot.readListBegin()
--          for _i628 in xrange(_size624):
--            _elem629 = ProfileRequest()
--            _elem629.read(iprot)
--            self.success.append(_elem629)
++          (_etype641, _size638) = iprot.readListBegin()
++          for _i642 in xrange(_size638):
++            _elem643 = ProfileRequest()
++            _elem643.read(iprot)
++            self.success.append(_elem643)
            iprot.readListEnd()
          else:
            iprot.skip(ftype)
@@@ -3587,8 -3832,8 +3832,8 @@@
      if self.success is not None:
        oprot.writeFieldBegin('success', TType.LIST, 0)
        oprot.writeListBegin(TType.STRUCT, len(self.success))
--      for iter630 in self.success:
--        iter630.write(oprot)
++      for iter644 in self.success:
++        iter644.write(oprot)
        oprot.writeListEnd()
        oprot.writeFieldEnd()
      oprot.writeFieldStop()

http://git-wip-us.apache.org/repos/asf/storm/blob/4645c190/storm-core/src/py/storm/ttypes.py
----------------------------------------------------------------------
diff --cc storm-core/src/py/storm/ttypes.py
index c6b2816,23a82e1..4d8120c
--- a/storm-core/src/py/storm/ttypes.py
+++ b/storm-core/src/py/storm/ttypes.py
@@@ -9441,596 -9376,3 +9441,596 @@@ class DRPCExecutionException(TException
  
    def __ne__(self, other):
      return not (self == other)
 +
 +class HBMessageData:
 +  """
 +  Attributes:
 +   - path
 +   - pulse
 +   - boolval
 +   - records
 +   - nodes
 +   - message_blob
 +  """
 +
 +  thrift_spec = (
 +    None, # 0
 +    (1, TType.STRING, 'path', None, None, ), # 1
 +    (2, TType.STRUCT, 'pulse', (HBPulse, HBPulse.thrift_spec), None, ), # 2
 +    (3, TType.BOOL, 'boolval', None, None, ), # 3
 +    (4, TType.STRUCT, 'records', (HBRecords, HBRecords.thrift_spec), None, ), # 4
 +    (5, TType.STRUCT, 'nodes', (HBNodes, HBNodes.thrift_spec), None, ), # 5
 +    None, # 6
 +    (7, TType.STRING, 'message_blob', None, None, ), # 7
 +  )
 +
 +  def __init__(self, path=None, pulse=None, boolval=None, records=None, nodes=None, message_blob=None,):
 +    self.path = path
 +    self.pulse = pulse
 +    self.boolval = boolval
 +    self.records = records
 +    self.nodes = nodes
 +    self.message_blob = message_blob
 +
 +  def read(self, iprot):
 +    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
 +      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
 +      return
 +    iprot.readStructBegin()
 +    while True:
 +      (fname, ftype, fid) = iprot.readFieldBegin()
 +      if ftype == TType.STOP:
 +        break
 +      if fid == 1:
 +        if ftype == TType.STRING:
 +          self.path = iprot.readString().decode('utf-8')
 +        else:
 +          iprot.skip(ftype)
 +      elif fid == 2:
 +        if ftype == TType.STRUCT:
 +          self.pulse = HBPulse()
 +          self.pulse.read(iprot)
 +        else:
 +          iprot.skip(ftype)
 +      elif fid == 3:
 +        if ftype == TType.BOOL:
-           self.boolval = iprot.readBool();
++          self.boolval = iprot.readBool()
 +        else:
 +          iprot.skip(ftype)
 +      elif fid == 4:
 +        if ftype == TType.STRUCT:
 +          self.records = HBRecords()
 +          self.records.read(iprot)
 +        else:
 +          iprot.skip(ftype)
 +      elif fid == 5:
 +        if ftype == TType.STRUCT:
 +          self.nodes = HBNodes()
 +          self.nodes.read(iprot)
 +        else:
 +          iprot.skip(ftype)
 +      elif fid == 7:
 +        if ftype == TType.STRING:
-           self.message_blob = iprot.readString();
++          self.message_blob = iprot.readString()
 +        else:
 +          iprot.skip(ftype)
 +      else:
 +        iprot.skip(ftype)
 +      iprot.readFieldEnd()
 +    iprot.readStructEnd()
 +
 +  def write(self, oprot):
 +    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
 +      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
 +      return
 +    oprot.writeStructBegin('HBMessageData')
 +    if self.path is not None:
 +      oprot.writeFieldBegin('path', TType.STRING, 1)
 +      oprot.writeString(self.path.encode('utf-8'))
 +      oprot.writeFieldEnd()
 +    if self.pulse is not None:
 +      oprot.writeFieldBegin('pulse', TType.STRUCT, 2)
 +      self.pulse.write(oprot)
 +      oprot.writeFieldEnd()
 +    if self.boolval is not None:
 +      oprot.writeFieldBegin('boolval', TType.BOOL, 3)
 +      oprot.writeBool(self.boolval)
 +      oprot.writeFieldEnd()
 +    if self.records is not None:
 +      oprot.writeFieldBegin('records', TType.STRUCT, 4)
 +      self.records.write(oprot)
 +      oprot.writeFieldEnd()
 +    if self.nodes is not None:
 +      oprot.writeFieldBegin('nodes', TType.STRUCT, 5)
 +      self.nodes.write(oprot)
 +      oprot.writeFieldEnd()
 +    if self.message_blob is not None:
 +      oprot.writeFieldBegin('message_blob', TType.STRING, 7)
 +      oprot.writeString(self.message_blob)
 +      oprot.writeFieldEnd()
 +    oprot.writeFieldStop()
 +    oprot.writeStructEnd()
 +
 +  def validate(self):
 +    return
 +
 +
 +  def __hash__(self):
 +    value = 17
 +    value = (value * 31) ^ hash(self.path)
 +    value = (value * 31) ^ hash(self.pulse)
 +    value = (value * 31) ^ hash(self.boolval)
 +    value = (value * 31) ^ hash(self.records)
 +    value = (value * 31) ^ hash(self.nodes)
 +    value = (value * 31) ^ hash(self.message_blob)
 +    return value
 +
 +  def __repr__(self):
 +    L = ['%s=%r' % (key, value)
 +      for key, value in self.__dict__.iteritems()]
 +    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
 +
 +  def __eq__(self, other):
 +    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
 +
 +  def __ne__(self, other):
 +    return not (self == other)
 +
 +class HBMessage:
 +  """
 +  Attributes:
 +   - type
 +   - data
 +   - message_id
 +  """
 +
 +  thrift_spec = (
 +    None, # 0
 +    (1, TType.I32, 'type', None, None, ), # 1
 +    (2, TType.STRUCT, 'data', (HBMessageData, HBMessageData.thrift_spec), None, ), # 2
 +    (3, TType.I32, 'message_id', None, -1, ), # 3
 +  )
 +
 +  def __init__(self, type=None, data=None, message_id=thrift_spec[3][4],):
 +    self.type = type
 +    self.data = data
 +    self.message_id = message_id
 +
 +  def read(self, iprot):
 +    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
 +      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
 +      return
 +    iprot.readStructBegin()
 +    while True:
 +      (fname, ftype, fid) = iprot.readFieldBegin()
 +      if ftype == TType.STOP:
 +        break
 +      if fid == 1:
 +        if ftype == TType.I32:
-           self.type = iprot.readI32();
++          self.type = iprot.readI32()
 +        else:
 +          iprot.skip(ftype)
 +      elif fid == 2:
 +        if ftype == TType.STRUCT:
 +          self.data = HBMessageData()
 +          self.data.read(iprot)
 +        else:
 +          iprot.skip(ftype)
 +      elif fid == 3:
 +        if ftype == TType.I32:
-           self.message_id = iprot.readI32();
++          self.message_id = iprot.readI32()
 +        else:
 +          iprot.skip(ftype)
 +      else:
 +        iprot.skip(ftype)
 +      iprot.readFieldEnd()
 +    iprot.readStructEnd()
 +
 +  def write(self, oprot):
 +    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
 +      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
 +      return
 +    oprot.writeStructBegin('HBMessage')
 +    if self.type is not None:
 +      oprot.writeFieldBegin('type', TType.I32, 1)
 +      oprot.writeI32(self.type)
 +      oprot.writeFieldEnd()
 +    if self.data is not None:
 +      oprot.writeFieldBegin('data', TType.STRUCT, 2)
 +      self.data.write(oprot)
 +      oprot.writeFieldEnd()
 +    if self.message_id is not None:
 +      oprot.writeFieldBegin('message_id', TType.I32, 3)
 +      oprot.writeI32(self.message_id)
 +      oprot.writeFieldEnd()
 +    oprot.writeFieldStop()
 +    oprot.writeStructEnd()
 +
 +  def validate(self):
 +    return
 +
 +
 +  def __hash__(self):
 +    value = 17
 +    value = (value * 31) ^ hash(self.type)
 +    value = (value * 31) ^ hash(self.data)
 +    value = (value * 31) ^ hash(self.message_id)
 +    return value
 +
 +  def __repr__(self):
 +    L = ['%s=%r' % (key, value)
 +      for key, value in self.__dict__.iteritems()]
 +    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
 +
 +  def __eq__(self, other):
 +    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
 +
 +  def __ne__(self, other):
 +    return not (self == other)
 +
 +class HBAuthorizationException(TException):
 +  """
 +  Attributes:
 +   - msg
 +  """
 +
 +  thrift_spec = (
 +    None, # 0
 +    (1, TType.STRING, 'msg', None, None, ), # 1
 +  )
 +
 +  def __init__(self, msg=None,):
 +    self.msg = msg
 +
 +  def read(self, iprot):
 +    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
 +      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
 +      return
 +    iprot.readStructBegin()
 +    while True:
 +      (fname, ftype, fid) = iprot.readFieldBegin()
 +      if ftype == TType.STOP:
 +        break
 +      if fid == 1:
 +        if ftype == TType.STRING:
 +          self.msg = iprot.readString().decode('utf-8')
 +        else:
 +          iprot.skip(ftype)
 +      else:
 +        iprot.skip(ftype)
 +      iprot.readFieldEnd()
 +    iprot.readStructEnd()
 +
 +  def write(self, oprot):
 +    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
 +      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
 +      return
 +    oprot.writeStructBegin('HBAuthorizationException')
 +    if self.msg is not None:
 +      oprot.writeFieldBegin('msg', TType.STRING, 1)
 +      oprot.writeString(self.msg.encode('utf-8'))
 +      oprot.writeFieldEnd()
 +    oprot.writeFieldStop()
 +    oprot.writeStructEnd()
 +
 +  def validate(self):
 +    if self.msg is None:
 +      raise TProtocol.TProtocolException(message='Required field msg is unset!')
 +    return
 +
 +
 +  def __str__(self):
 +    return repr(self)
 +
 +  def __hash__(self):
 +    value = 17
 +    value = (value * 31) ^ hash(self.msg)
 +    return value
 +
 +  def __repr__(self):
 +    L = ['%s=%r' % (key, value)
 +      for key, value in self.__dict__.iteritems()]
 +    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
 +
 +  def __eq__(self, other):
 +    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
 +
 +  def __ne__(self, other):
 +    return not (self == other)
 +
 +class HBExecutionException(TException):
 +  """
 +  Attributes:
 +   - msg
 +  """
 +
 +  thrift_spec = (
 +    None, # 0
 +    (1, TType.STRING, 'msg', None, None, ), # 1
 +  )
 +
 +  def __init__(self, msg=None,):
 +    self.msg = msg
 +
 +  def read(self, iprot):
 +    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
 +      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
 +      return
 +    iprot.readStructBegin()
 +    while True:
 +      (fname, ftype, fid) = iprot.readFieldBegin()
 +      if ftype == TType.STOP:
 +        break
 +      if fid == 1:
 +        if ftype == TType.STRING:
 +          self.msg = iprot.readString().decode('utf-8')
 +        else:
 +          iprot.skip(ftype)
 +      else:
 +        iprot.skip(ftype)
 +      iprot.readFieldEnd()
 +    iprot.readStructEnd()
 +
 +  def write(self, oprot):
 +    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
 +      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
 +      return
 +    oprot.writeStructBegin('HBExecutionException')
 +    if self.msg is not None:
 +      oprot.writeFieldBegin('msg', TType.STRING, 1)
 +      oprot.writeString(self.msg.encode('utf-8'))
 +      oprot.writeFieldEnd()
 +    oprot.writeFieldStop()
 +    oprot.writeStructEnd()
 +
 +  def validate(self):
 +    if self.msg is None:
 +      raise TProtocol.TProtocolException(message='Required field msg is unset!')
 +    return
 +
 +
 +  def __str__(self):
 +    return repr(self)
 +
 +  def __hash__(self):
 +    value = 17
 +    value = (value * 31) ^ hash(self.msg)
 +    return value
 +
 +  def __repr__(self):
 +    L = ['%s=%r' % (key, value)
 +      for key, value in self.__dict__.iteritems()]
 +    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
 +
 +  def __eq__(self, other):
 +    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
 +
 +  def __ne__(self, other):
 +    return not (self == other)
 +
 +class HBPulse:
 +  """
 +  Attributes:
 +   - id
 +   - details
 +  """
 +
 +  thrift_spec = (
 +    None, # 0
 +    (1, TType.STRING, 'id', None, None, ), # 1
 +    (2, TType.STRING, 'details', None, None, ), # 2
 +  )
 +
 +  def __init__(self, id=None, details=None,):
 +    self.id = id
 +    self.details = details
 +
 +  def read(self, iprot):
 +    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
 +      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
 +      return
 +    iprot.readStructBegin()
 +    while True:
 +      (fname, ftype, fid) = iprot.readFieldBegin()
 +      if ftype == TType.STOP:
 +        break
 +      if fid == 1:
 +        if ftype == TType.STRING:
 +          self.id = iprot.readString().decode('utf-8')
 +        else:
 +          iprot.skip(ftype)
 +      elif fid == 2:
 +        if ftype == TType.STRING:
-           self.details = iprot.readString();
++          self.details = iprot.readString()
 +        else:
 +          iprot.skip(ftype)
 +      else:
 +        iprot.skip(ftype)
 +      iprot.readFieldEnd()
 +    iprot.readStructEnd()
 +
 +  def write(self, oprot):
 +    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
 +      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
 +      return
 +    oprot.writeStructBegin('HBPulse')
 +    if self.id is not None:
 +      oprot.writeFieldBegin('id', TType.STRING, 1)
 +      oprot.writeString(self.id.encode('utf-8'))
 +      oprot.writeFieldEnd()
 +    if self.details is not None:
 +      oprot.writeFieldBegin('details', TType.STRING, 2)
 +      oprot.writeString(self.details)
 +      oprot.writeFieldEnd()
 +    oprot.writeFieldStop()
 +    oprot.writeStructEnd()
 +
 +  def validate(self):
 +    if self.id is None:
 +      raise TProtocol.TProtocolException(message='Required field id is unset!')
 +    return
 +
 +
 +  def __hash__(self):
 +    value = 17
 +    value = (value * 31) ^ hash(self.id)
 +    value = (value * 31) ^ hash(self.details)
 +    return value
 +
 +  def __repr__(self):
 +    L = ['%s=%r' % (key, value)
 +      for key, value in self.__dict__.iteritems()]
 +    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
 +
 +  def __eq__(self, other):
 +    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
 +
 +  def __ne__(self, other):
 +    return not (self == other)
 +
 +class HBRecords:
 +  """
 +  Attributes:
 +   - pulses
 +  """
 +
 +  thrift_spec = (
 +    None, # 0
 +    (1, TType.LIST, 'pulses', (TType.STRUCT,(HBPulse, HBPulse.thrift_spec)), None, ), # 1
 +  )
 +
 +  def __init__(self, pulses=None,):
 +    self.pulses = pulses
 +
 +  def read(self, iprot):
 +    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
 +      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
 +      return
 +    iprot.readStructBegin()
 +    while True:
 +      (fname, ftype, fid) = iprot.readFieldBegin()
 +      if ftype == TType.STOP:
 +        break
 +      if fid == 1:
 +        if ftype == TType.LIST:
 +          self.pulses = []
-           (_etype599, _size596) = iprot.readListBegin()
-           for _i600 in xrange(_size596):
-             _elem601 = HBPulse()
-             _elem601.read(iprot)
-             self.pulses.append(_elem601)
++          (_etype627, _size624) = iprot.readListBegin()
++          for _i628 in xrange(_size624):
++            _elem629 = HBPulse()
++            _elem629.read(iprot)
++            self.pulses.append(_elem629)
 +          iprot.readListEnd()
 +        else:
 +          iprot.skip(ftype)
 +      else:
 +        iprot.skip(ftype)
 +      iprot.readFieldEnd()
 +    iprot.readStructEnd()
 +
 +  def write(self, oprot):
 +    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
 +      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
 +      return
 +    oprot.writeStructBegin('HBRecords')
 +    if self.pulses is not None:
 +      oprot.writeFieldBegin('pulses', TType.LIST, 1)
 +      oprot.writeListBegin(TType.STRUCT, len(self.pulses))
-       for iter602 in self.pulses:
-         iter602.write(oprot)
++      for iter630 in self.pulses:
++        iter630.write(oprot)
 +      oprot.writeListEnd()
 +      oprot.writeFieldEnd()
 +    oprot.writeFieldStop()
 +    oprot.writeStructEnd()
 +
 +  def validate(self):
 +    return
 +
 +
 +  def __hash__(self):
 +    value = 17
 +    value = (value * 31) ^ hash(self.pulses)
 +    return value
 +
 +  def __repr__(self):
 +    L = ['%s=%r' % (key, value)
 +      for key, value in self.__dict__.iteritems()]
 +    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
 +
 +  def __eq__(self, other):
 +    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
 +
 +  def __ne__(self, other):
 +    return not (self == other)
 +
 +class HBNodes:
 +  """
 +  Attributes:
 +   - pulseIds
 +  """
 +
 +  thrift_spec = (
 +    None, # 0
 +    (1, TType.LIST, 'pulseIds', (TType.STRING,None), None, ), # 1
 +  )
 +
 +  def __init__(self, pulseIds=None,):
 +    self.pulseIds = pulseIds
 +
 +  def read(self, iprot):
 +    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
 +      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
 +      return
 +    iprot.readStructBegin()
 +    while True:
 +      (fname, ftype, fid) = iprot.readFieldBegin()
 +      if ftype == TType.STOP:
 +        break
 +      if fid == 1:
 +        if ftype == TType.LIST:
 +          self.pulseIds = []
-           (_etype606, _size603) = iprot.readListBegin()
-           for _i607 in xrange(_size603):
-             _elem608 = iprot.readString().decode('utf-8')
-             self.pulseIds.append(_elem608)
++          (_etype634, _size631) = iprot.readListBegin()
++          for _i635 in xrange(_size631):
++            _elem636 = iprot.readString().decode('utf-8')
++            self.pulseIds.append(_elem636)
 +          iprot.readListEnd()
 +        else:
 +          iprot.skip(ftype)
 +      else:
 +        iprot.skip(ftype)
 +      iprot.readFieldEnd()
 +    iprot.readStructEnd()
 +
 +  def write(self, oprot):
 +    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
 +      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
 +      return
 +    oprot.writeStructBegin('HBNodes')
 +    if self.pulseIds is not None:
 +      oprot.writeFieldBegin('pulseIds', TType.LIST, 1)
 +      oprot.writeListBegin(TType.STRING, len(self.pulseIds))
-       for iter609 in self.pulseIds:
-         oprot.writeString(iter609.encode('utf-8'))
++      for iter637 in self.pulseIds:
++        oprot.writeString(iter637.encode('utf-8'))
 +      oprot.writeListEnd()
 +      oprot.writeFieldEnd()
 +    oprot.writeFieldStop()
 +    oprot.writeStructEnd()
 +
 +  def validate(self):
 +    return
 +
 +
 +  def __hash__(self):
 +    value = 17
 +    value = (value * 31) ^ hash(self.pulseIds)
 +    return value
 +
 +  def __repr__(self):
 +    L = ['%s=%r' % (key, value)
 +      for key, value in self.__dict__.iteritems()]
 +    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
 +
 +  def __eq__(self, other):
 +    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
 +
 +  def __ne__(self, other):
 +    return not (self == other)


[18/37] storm git commit: Merge remote-tracking branch 'asf/master' into STORM-855

Posted by kn...@apache.org.
Merge remote-tracking branch 'asf/master' into STORM-855


Project: http://git-wip-us.apache.org/repos/asf/storm/repo
Commit: http://git-wip-us.apache.org/repos/asf/storm/commit/4645c190
Tree: http://git-wip-us.apache.org/repos/asf/storm/tree/4645c190
Diff: http://git-wip-us.apache.org/repos/asf/storm/diff/4645c190

Branch: refs/heads/master
Commit: 4645c190d094d334894acbdea0a51c67cdef7c14
Parents: 62d725a 77015f0
Author: Kyle Nusbaum <Ky...@gmail.com>
Authored: Thu Nov 12 16:11:11 2015 -0600
Committer: Kyle Nusbaum <Ky...@gmail.com>
Committed: Thu Nov 12 16:11:11 2015 -0600

----------------------------------------------------------------------
 CHANGELOG.md                                    |   6 +
 DEVELOPER.md                                    |   2 +
 README.markdown                                 |   2 +
 bin/storm.py                                    |  14 +-
 conf/defaults.yaml                              |   2 +
 dev-tools/travis/travis-script.sh               |   4 +-
 .../documentation/Setting-up-a-Storm-cluster.md |  19 +
 pom.xml                                         |   2 +-
 .../clj/backtype/storm/command/healthcheck.clj  |  88 ++++
 storm-core/src/clj/backtype/storm/config.clj    |   7 +
 .../src/clj/backtype/storm/daemon/executor.clj  | 103 ++---
 .../src/clj/backtype/storm/daemon/nimbus.clj    |  86 ++--
 .../clj/backtype/storm/daemon/supervisor.clj    |  11 +
 .../src/clj/backtype/storm/daemon/worker.clj    |  59 ++-
 .../src/clj/backtype/storm/messaging/loader.clj |  76 +--
 .../src/clj/backtype/storm/messaging/local.clj  |  72 +--
 storm-core/src/genthrift.sh                     |   2 +-
 storm-core/src/jvm/backtype/storm/Config.java   |  26 +-
 .../storm/generated/AlreadyAliveException.java  |  11 +-
 .../backtype/storm/generated/Assignment.java    |   4 +-
 .../storm/generated/AuthorizationException.java |   4 +-
 .../src/jvm/backtype/storm/generated/Bolt.java  |   4 +-
 .../storm/generated/BoltAggregateStats.java     |  12 +-
 .../jvm/backtype/storm/generated/BoltStats.java |   4 +-
 .../storm/generated/ClusterSummary.java         |   6 +-
 .../storm/generated/ClusterWorkerHeartbeat.java |   8 +-
 .../storm/generated/CommonAggregateStats.java   |  16 +-
 .../generated/ComponentAggregateStats.java      |   4 +-
 .../storm/generated/ComponentCommon.java        |   6 +-
 .../storm/generated/ComponentObject.java        |   2 +-
 .../storm/generated/ComponentPageInfo.java      |  10 +-
 .../backtype/storm/generated/ComponentType.java |   2 +-
 .../backtype/storm/generated/Credentials.java   |   4 +-
 .../storm/generated/DRPCExecutionException.java |   4 +-
 .../backtype/storm/generated/DRPCRequest.java   |   4 +-
 .../backtype/storm/generated/DebugOptions.java  |   8 +-
 .../storm/generated/DistributedRPC.java         |   4 +-
 .../generated/DistributedRPCInvocations.java    |   4 +-
 .../jvm/backtype/storm/generated/ErrorInfo.java |   8 +-
 .../storm/generated/ExecutorAggregateStats.java |   4 +-
 .../backtype/storm/generated/ExecutorInfo.java  |   8 +-
 .../storm/generated/ExecutorSpecificStats.java  |   2 +-
 .../backtype/storm/generated/ExecutorStats.java |   6 +-
 .../storm/generated/ExecutorSummary.java        |   8 +-
 .../storm/generated/GetInfoOptions.java         |   4 +-
 .../storm/generated/GlobalStreamId.java         |   4 +-
 .../jvm/backtype/storm/generated/Grouping.java  |   2 +-
 .../generated/HBAuthorizationException.java     |   4 +-
 .../storm/generated/HBExecutionException.java   |   4 +-
 .../jvm/backtype/storm/generated/HBMessage.java |   6 +-
 .../backtype/storm/generated/HBMessageData.java |   2 +-
 .../jvm/backtype/storm/generated/HBNodes.java   |  36 +-
 .../jvm/backtype/storm/generated/HBPulse.java   |   4 +-
 .../jvm/backtype/storm/generated/HBRecords.java |  40 +-
 .../storm/generated/HBServerMessageType.java    |   2 +-
 .../generated/InvalidTopologyException.java     |   4 +-
 .../backtype/storm/generated/JavaObject.java    |   4 +-
 .../backtype/storm/generated/JavaObjectArg.java |   2 +-
 .../backtype/storm/generated/KillOptions.java   |   6 +-
 .../storm/generated/LSApprovedWorkers.java      |   4 +-
 .../generated/LSSupervisorAssignments.java      |   4 +-
 .../storm/generated/LSSupervisorId.java         |   4 +-
 .../backtype/storm/generated/LSTopoHistory.java |   6 +-
 .../storm/generated/LSTopoHistoryList.java      |   4 +-
 .../storm/generated/LSWorkerHeartbeat.java      |   8 +-
 .../storm/generated/LocalAssignment.java        |   4 +-
 .../storm/generated/LocalStateData.java         |   4 +-
 .../jvm/backtype/storm/generated/LogConfig.java |  52 +--
 .../jvm/backtype/storm/generated/LogLevel.java  |   8 +-
 .../storm/generated/LogLevelAction.java         |   2 +-
 .../jvm/backtype/storm/generated/Nimbus.java    |  48 +-
 .../backtype/storm/generated/NimbusSummary.java |  10 +-
 .../jvm/backtype/storm/generated/NodeInfo.java  |   4 +-
 .../storm/generated/NotAliveException.java      |   4 +-
 .../backtype/storm/generated/NullStruct.java    |   4 +-
 .../storm/generated/NumErrorsChoice.java        |   2 +-
 .../backtype/storm/generated/ProfileAction.java |   2 +-
 .../storm/generated/ProfileRequest.java         |   6 +-
 .../storm/generated/RebalanceOptions.java       |   8 +-
 .../storm/generated/ShellComponent.java         |   4 +-
 .../storm/generated/SpecificAggregateStats.java |   2 +-
 .../storm/generated/SpoutAggregateStats.java    |   6 +-
 .../jvm/backtype/storm/generated/SpoutSpec.java |   4 +-
 .../backtype/storm/generated/SpoutStats.java    |   4 +-
 .../storm/generated/StateSpoutSpec.java         |   4 +-
 .../jvm/backtype/storm/generated/StormBase.java |   8 +-
 .../backtype/storm/generated/StormTopology.java |   4 +-
 .../backtype/storm/generated/StreamInfo.java    |   6 +-
 .../backtype/storm/generated/SubmitOptions.java |   4 +-
 .../storm/generated/SupervisorInfo.java         |   8 +-
 .../storm/generated/SupervisorSummary.java      |  10 +-
 .../storm/generated/ThriftSerializedObject.java |   4 +-
 .../storm/generated/TopologyActionOptions.java  |   2 +-
 .../storm/generated/TopologyHistoryInfo.java    |   4 +-
 .../backtype/storm/generated/TopologyInfo.java  |  20 +-
 .../storm/generated/TopologyInitialStatus.java  |   2 +-
 .../storm/generated/TopologyPageInfo.java       |  26 +-
 .../backtype/storm/generated/TopologyStats.java |   4 +-
 .../storm/generated/TopologyStatus.java         |   2 +-
 .../storm/generated/TopologySummary.java        |  26 +-
 .../storm/generated/WorkerResources.java        |  10 +-
 .../storm/messaging/AddressedTuple.java         |  46 ++
 .../DeserializingConnectionCallback.java        |  60 +++
 .../backtype/storm/messaging/IConnection.java   |  10 +-
 .../storm/messaging/IConnectionCallback.java    |  31 ++
 .../backtype/storm/messaging/local/Context.java | 164 +++++++
 .../backtype/storm/messaging/netty/Client.java  |   3 +-
 .../backtype/storm/messaging/netty/Server.java  | 127 +-----
 .../nimbus/ITopologyActionNotifierPlugin.java   |  43 ++
 .../security/auth/SimpleTransportPlugin.java    |   2 +-
 .../backtype/storm/tuple/AddressedTuple.java    |  48 ++
 storm-core/src/py/storm/DistributedRPC-remote   |   2 +-
 storm-core/src/py/storm/DistributedRPC.py       |  20 +-
 .../py/storm/DistributedRPCInvocations-remote   |   2 +-
 .../src/py/storm/DistributedRPCInvocations.py   |  41 +-
 storm-core/src/py/storm/Nimbus-remote           |   2 +-
 storm-core/src/py/storm/Nimbus.py               | 457 ++++++++++++++-----
 storm-core/src/py/storm/constants.py            |   2 +-
 storm-core/src/py/storm/ttypes.py               | 284 ++++++------
 .../storm/messaging/netty_unit_test.clj         | 122 ++---
 .../test/clj/backtype/storm/messaging_test.clj  |  25 -
 .../test/clj/backtype/storm/nimbus_test.clj     |  97 ++--
 .../nimbus/InMemoryTopologyActionNotifier.java  |  53 +++
 123 files changed, 1790 insertions(+), 1065 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/storm/blob/4645c190/bin/storm.py
----------------------------------------------------------------------
diff --cc bin/storm.py
index 947c92b,42d0573..497691d
--- a/bin/storm.py
+++ b/bin/storm.py
@@@ -641,8 -620,8 +652,9 @@@ COMMANDS = {"jar": jar, "kill": kill, "
              "remoteconfvalue": print_remoteconfvalue, "repl": repl, "classpath": print_classpath,
              "activate": activate, "deactivate": deactivate, "rebalance": rebalance, "help": print_usage,
              "list": listtopos, "dev-zookeeper": dev_zookeeper, "version": version, "monitor": monitor,
 -            "upload-credentials": upload_credentials, "get-errors": get_errors, "set_log_level": set_log_level,
 -            "kill_workers": kill_workers, "node-health-check": healthcheck}
 +            "upload-credentials": upload_credentials, "pacemaker": pacemaker, "heartbeats": heartbeats,
-             "get-errors": get_errors, "set_log_level": set_log_level, "kill_workers": kill_workers }
++            "get-errors": get_errors, "set_log_level": set_log_level, "kill_workers": kill_workers,
++            "node-health-check": healthcheck}
  
  def parse_config(config_list):
      global CONFIG_OPTS

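This hunk resolves the COMMANDS-table merge in bin/storm.py: the STORM-855 side contributes "pacemaker" and "heartbeats", the master side contributes "node-health-check", and the merged table keeps all of them. The launcher is a plain table dispatch on the first CLI token; a self-contained sketch of that idiom follows (the function bodies are hypothetical stand-ins for the real ones in bin/storm.py):

    import sys

    def pacemaker(*args):
        print("stand-in: would launch the Pacemaker server")

    def healthcheck(*args):
        print("stand-in: would run the node health checks")

    def unknown_command(*args):
        print("unknown command; try 'help'")

    COMMANDS = {"pacemaker": pacemaker, "node-health-check": healthcheck}

    def main(argv):
        # Dispatch on the first token; anything unrecognized falls through.
        name = argv[1] if len(argv) > 1 else "help"
        COMMANDS.get(name, unknown_command)(*argv[2:])

    if __name__ == "__main__":
        main(sys.argv)
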
http://git-wip-us.apache.org/repos/asf/storm/blob/4645c190/conf/defaults.yaml
----------------------------------------------------------------------
diff --cc conf/defaults.yaml
index 583ca2e,b498571..d0d05a9
--- a/conf/defaults.yaml
+++ b/conf/defaults.yaml
@@@ -49,9 -49,10 +49,11 @@@ storm.auth.simple-white-list.users: [
  storm.auth.simple-acl.users: []
  storm.auth.simple-acl.users.commands: []
  storm.auth.simple-acl.admins: []
 +storm.cluster.state.store: "backtype.storm.cluster_state.zookeeper_state_factory"
  storm.meta.serialization.delegate: "backtype.storm.serialization.GzipThriftSerializationDelegate"
  storm.codedistributor.class: "backtype.storm.codedistributor.LocalFileSystemCodeDistributor"
+ storm.health.check.dir: "healthchecks"
+ storm.health.check.timeout.ms: 5000
  
  ### nimbus.* configs are for the master
  nimbus.seeds : ["localhost"]

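The defaults.yaml side of this merge adds two node-health-check settings alongside the STORM-855 cluster-state key: storm.health.check.dir (a directory of health scripts) and storm.health.check.timeout.ms (5000 ms). The actual consumer is the new backtype/storm/command/healthcheck.clj listed in the diffstat; the Python sketch below only illustrates the general shape such a per-script timeout governs (run each executable in a directory, count a timeout as a failure) and is not a port of that Clojure code:

    import os
    import subprocess

    def run_health_scripts(check_dir="healthchecks", timeout_ms=5000):
        """Run every executable in check_dir; report the ones that fail."""
        failures = []
        for name in sorted(os.listdir(check_dir)):
            path = os.path.join(check_dir, name)
            if not (os.path.isfile(path) and os.access(path, os.X_OK)):
                continue  # skip anything that is not an executable script
            try:
                # Python 3's timeout kwarg; illustrative only.
                subprocess.check_call([path], timeout=timeout_ms / 1000.0)
            except (subprocess.CalledProcessError, subprocess.TimeoutExpired):
                failures.append(name)
        return failures
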
http://git-wip-us.apache.org/repos/asf/storm/blob/4645c190/pom.xml
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/storm/blob/4645c190/storm-core/src/clj/backtype/storm/config.clj
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/storm/blob/4645c190/storm-core/src/clj/backtype/storm/daemon/worker.clj
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/storm/blob/4645c190/storm-core/src/jvm/backtype/storm/Config.java
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/storm/blob/4645c190/storm-core/src/jvm/backtype/storm/generated/HBAuthorizationException.java
----------------------------------------------------------------------
diff --cc storm-core/src/jvm/backtype/storm/generated/HBAuthorizationException.java
index cd5fed7,0000000..db21af4
mode 100644,000000..100644
--- a/storm-core/src/jvm/backtype/storm/generated/HBAuthorizationException.java
+++ b/storm-core/src/jvm/backtype/storm/generated/HBAuthorizationException.java
@@@ -1,406 -1,0 +1,406 @@@
 +/**
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + * http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing, software
 + * distributed under the License is distributed on an "AS IS" BASIS,
 + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 + * See the License for the specific language governing permissions and
 + * limitations under the License.
 + */
 +/**
-  * Autogenerated by Thrift Compiler (0.9.2)
++ * Autogenerated by Thrift Compiler (0.9.3)
 + *
 + * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
 + *  @generated
 + */
 +package backtype.storm.generated;
 +
 +import org.apache.thrift.scheme.IScheme;
 +import org.apache.thrift.scheme.SchemeFactory;
 +import org.apache.thrift.scheme.StandardScheme;
 +
 +import org.apache.thrift.scheme.TupleScheme;
 +import org.apache.thrift.protocol.TTupleProtocol;
 +import org.apache.thrift.protocol.TProtocolException;
 +import org.apache.thrift.EncodingUtils;
 +import org.apache.thrift.TException;
 +import org.apache.thrift.async.AsyncMethodCallback;
 +import org.apache.thrift.server.AbstractNonblockingServer.*;
 +import java.util.List;
 +import java.util.ArrayList;
 +import java.util.Map;
 +import java.util.HashMap;
 +import java.util.EnumMap;
 +import java.util.Set;
 +import java.util.HashSet;
 +import java.util.EnumSet;
 +import java.util.Collections;
 +import java.util.BitSet;
 +import java.nio.ByteBuffer;
 +import java.util.Arrays;
 +import javax.annotation.Generated;
 +import org.slf4j.Logger;
 +import org.slf4j.LoggerFactory;
 +
 +@SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
- @Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-9")
++@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
 +public class HBAuthorizationException extends TException implements org.apache.thrift.TBase<HBAuthorizationException, HBAuthorizationException._Fields>, java.io.Serializable, Cloneable, Comparable<HBAuthorizationException> {
 +  private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("HBAuthorizationException");
 +
 +  private static final org.apache.thrift.protocol.TField MSG_FIELD_DESC = new org.apache.thrift.protocol.TField("msg", org.apache.thrift.protocol.TType.STRING, (short)1);
 +
 +  private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>();
 +  static {
 +    schemes.put(StandardScheme.class, new HBAuthorizationExceptionStandardSchemeFactory());
 +    schemes.put(TupleScheme.class, new HBAuthorizationExceptionTupleSchemeFactory());
 +  }
 +
 +  private String msg; // required
 +
 +  /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
 +  public enum _Fields implements org.apache.thrift.TFieldIdEnum {
 +    MSG((short)1, "msg");
 +
 +    private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();
 +
 +    static {
 +      for (_Fields field : EnumSet.allOf(_Fields.class)) {
 +        byName.put(field.getFieldName(), field);
 +      }
 +    }
 +
 +    /**
 +     * Find the _Fields constant that matches fieldId, or null if its not found.
 +     */
 +    public static _Fields findByThriftId(int fieldId) {
 +      switch(fieldId) {
 +        case 1: // MSG
 +          return MSG;
 +        default:
 +          return null;
 +      }
 +    }
 +
 +    /**
 +     * Find the _Fields constant that matches fieldId, throwing an exception
 +     * if it is not found.
 +     */
 +    public static _Fields findByThriftIdOrThrow(int fieldId) {
 +      _Fields fields = findByThriftId(fieldId);
 +      if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
 +      return fields;
 +    }
 +
 +    /**
 +     * Find the _Fields constant that matches name, or null if its not found.
 +     */
 +    public static _Fields findByName(String name) {
 +      return byName.get(name);
 +    }
 +
 +    private final short _thriftId;
 +    private final String _fieldName;
 +
 +    _Fields(short thriftId, String fieldName) {
 +      _thriftId = thriftId;
 +      _fieldName = fieldName;
 +    }
 +
 +    public short getThriftFieldId() {
 +      return _thriftId;
 +    }
 +
 +    public String getFieldName() {
 +      return _fieldName;
 +    }
 +  }
 +
 +  // isset id assignments
 +  public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
 +  static {
 +    Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
 +    tmpMap.put(_Fields.MSG, new org.apache.thrift.meta_data.FieldMetaData("msg", org.apache.thrift.TFieldRequirementType.REQUIRED, 
 +        new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)));
 +    metaDataMap = Collections.unmodifiableMap(tmpMap);
 +    org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(HBAuthorizationException.class, metaDataMap);
 +  }
 +
 +  public HBAuthorizationException() {
 +  }
 +
 +  public HBAuthorizationException(
 +    String msg)
 +  {
 +    this();
 +    this.msg = msg;
 +  }
 +
 +  /**
 +   * Performs a deep copy on <i>other</i>.
 +   */
 +  public HBAuthorizationException(HBAuthorizationException other) {
 +    if (other.is_set_msg()) {
 +      this.msg = other.msg;
 +    }
 +  }
 +
 +  public HBAuthorizationException deepCopy() {
 +    return new HBAuthorizationException(this);
 +  }
 +
 +  @Override
 +  public void clear() {
 +    this.msg = null;
 +  }
 +
 +  public String get_msg() {
 +    return this.msg;
 +  }
 +
 +  public void set_msg(String msg) {
 +    this.msg = msg;
 +  }
 +
 +  public void unset_msg() {
 +    this.msg = null;
 +  }
 +
 +  /** Returns true if field msg is set (has been assigned a value) and false otherwise */
 +  public boolean is_set_msg() {
 +    return this.msg != null;
 +  }
 +
 +  public void set_msg_isSet(boolean value) {
 +    if (!value) {
 +      this.msg = null;
 +    }
 +  }
 +
 +  public void setFieldValue(_Fields field, Object value) {
 +    switch (field) {
 +    case MSG:
 +      if (value == null) {
 +        unset_msg();
 +      } else {
 +        set_msg((String)value);
 +      }
 +      break;
 +
 +    }
 +  }
 +
 +  public Object getFieldValue(_Fields field) {
 +    switch (field) {
 +    case MSG:
 +      return get_msg();
 +
 +    }
 +    throw new IllegalStateException();
 +  }
 +
 +  /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
 +  public boolean isSet(_Fields field) {
 +    if (field == null) {
 +      throw new IllegalArgumentException();
 +    }
 +
 +    switch (field) {
 +    case MSG:
 +      return is_set_msg();
 +    }
 +    throw new IllegalStateException();
 +  }
 +
 +  @Override
 +  public boolean equals(Object that) {
 +    if (that == null)
 +      return false;
 +    if (that instanceof HBAuthorizationException)
 +      return this.equals((HBAuthorizationException)that);
 +    return false;
 +  }
 +
 +  public boolean equals(HBAuthorizationException that) {
 +    if (that == null)
 +      return false;
 +
 +    boolean this_present_msg = true && this.is_set_msg();
 +    boolean that_present_msg = true && that.is_set_msg();
 +    if (this_present_msg || that_present_msg) {
 +      if (!(this_present_msg && that_present_msg))
 +        return false;
 +      if (!this.msg.equals(that.msg))
 +        return false;
 +    }
 +
 +    return true;
 +  }
 +
 +  @Override
 +  public int hashCode() {
 +    List<Object> list = new ArrayList<Object>();
 +
 +    boolean present_msg = true && (is_set_msg());
 +    list.add(present_msg);
 +    if (present_msg)
 +      list.add(msg);
 +
 +    return list.hashCode();
 +  }
 +
 +  @Override
 +  public int compareTo(HBAuthorizationException other) {
 +    if (!getClass().equals(other.getClass())) {
 +      return getClass().getName().compareTo(other.getClass().getName());
 +    }
 +
 +    int lastComparison = 0;
 +
 +    lastComparison = Boolean.valueOf(is_set_msg()).compareTo(other.is_set_msg());
 +    if (lastComparison != 0) {
 +      return lastComparison;
 +    }
 +    if (is_set_msg()) {
 +      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.msg, other.msg);
 +      if (lastComparison != 0) {
 +        return lastComparison;
 +      }
 +    }
 +    return 0;
 +  }
 +
 +  public _Fields fieldForId(int fieldId) {
 +    return _Fields.findByThriftId(fieldId);
 +  }
 +
 +  public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException {
 +    schemes.get(iprot.getScheme()).getScheme().read(iprot, this);
 +  }
 +
 +  public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {
 +    schemes.get(oprot.getScheme()).getScheme().write(oprot, this);
 +  }
 +
 +  @Override
 +  public String toString() {
 +    StringBuilder sb = new StringBuilder("HBAuthorizationException(");
 +    boolean first = true;
 +
 +    sb.append("msg:");
 +    if (this.msg == null) {
 +      sb.append("null");
 +    } else {
 +      sb.append(this.msg);
 +    }
 +    first = false;
 +    sb.append(")");
 +    return sb.toString();
 +  }
 +
 +  public void validate() throws org.apache.thrift.TException {
 +    // check for required fields
 +    if (!is_set_msg()) {
 +      throw new org.apache.thrift.protocol.TProtocolException("Required field 'msg' is unset! Struct:" + toString());
 +    }
 +
 +    // check for sub-struct validity
 +  }
 +
 +  private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
 +    try {
 +      write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out)));
 +    } catch (org.apache.thrift.TException te) {
 +      throw new java.io.IOException(te);
 +    }
 +  }
 +
 +  private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
 +    try {
 +      read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));
 +    } catch (org.apache.thrift.TException te) {
 +      throw new java.io.IOException(te);
 +    }
 +  }
 +
 +  private static class HBAuthorizationExceptionStandardSchemeFactory implements SchemeFactory {
 +    public HBAuthorizationExceptionStandardScheme getScheme() {
 +      return new HBAuthorizationExceptionStandardScheme();
 +    }
 +  }
 +
 +  private static class HBAuthorizationExceptionStandardScheme extends StandardScheme<HBAuthorizationException> {
 +
 +    public void read(org.apache.thrift.protocol.TProtocol iprot, HBAuthorizationException struct) throws org.apache.thrift.TException {
 +      org.apache.thrift.protocol.TField schemeField;
 +      iprot.readStructBegin();
 +      while (true)
 +      {
 +        schemeField = iprot.readFieldBegin();
 +        if (schemeField.type == org.apache.thrift.protocol.TType.STOP) { 
 +          break;
 +        }
 +        switch (schemeField.id) {
 +          case 1: // MSG
 +            if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {
 +              struct.msg = iprot.readString();
 +              struct.set_msg_isSet(true);
 +            } else { 
 +              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
 +            }
 +            break;
 +          default:
 +            org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
 +        }
 +        iprot.readFieldEnd();
 +      }
 +      iprot.readStructEnd();
 +      struct.validate();
 +    }
 +
 +    public void write(org.apache.thrift.protocol.TProtocol oprot, HBAuthorizationException struct) throws org.apache.thrift.TException {
 +      struct.validate();
 +
 +      oprot.writeStructBegin(STRUCT_DESC);
 +      if (struct.msg != null) {
 +        oprot.writeFieldBegin(MSG_FIELD_DESC);
 +        oprot.writeString(struct.msg);
 +        oprot.writeFieldEnd();
 +      }
 +      oprot.writeFieldStop();
 +      oprot.writeStructEnd();
 +    }
 +
 +  }
 +
 +  private static class HBAuthorizationExceptionTupleSchemeFactory implements SchemeFactory {
 +    public HBAuthorizationExceptionTupleScheme getScheme() {
 +      return new HBAuthorizationExceptionTupleScheme();
 +    }
 +  }
 +
 +  private static class HBAuthorizationExceptionTupleScheme extends TupleScheme<HBAuthorizationException> {
 +
 +    @Override
 +    public void write(org.apache.thrift.protocol.TProtocol prot, HBAuthorizationException struct) throws org.apache.thrift.TException {
 +      TTupleProtocol oprot = (TTupleProtocol) prot;
 +      oprot.writeString(struct.msg);
 +    }
 +
 +    @Override
 +    public void read(org.apache.thrift.protocol.TProtocol prot, HBAuthorizationException struct) throws org.apache.thrift.TException {
 +      TTupleProtocol iprot = (TTupleProtocol) prot;
 +      struct.msg = iprot.readString();
 +      struct.set_msg_isSet(true);
 +    }
 +  }
 +
 +}
 +

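HBAuthorizationException above, and HBExecutionException just below it, are generated with an identical shape: a single required msg field, and a validate() that refuses to serialize the struct while msg is unset. The same contract should hold for the Python classes generated into storm/ttypes.py from the same IDL (an assumption based on this series regenerating both sides); a small sketch of it:

    from thrift.protocol.TProtocol import TProtocolException
    from storm.ttypes import HBAuthorizationException

    ok = HBAuthorizationException(msg="not authorized to send pulses")
    ok.validate()  # passes: the required field is set

    try:
        HBAuthorizationException().validate()  # msg left unset
    except TProtocolException as e:
        print("rejected as expected: %s" % e)
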
http://git-wip-us.apache.org/repos/asf/storm/blob/4645c190/storm-core/src/jvm/backtype/storm/generated/HBExecutionException.java
----------------------------------------------------------------------
diff --cc storm-core/src/jvm/backtype/storm/generated/HBExecutionException.java
index bbc0ef4,0000000..bf391fe
mode 100644,000000..100644
--- a/storm-core/src/jvm/backtype/storm/generated/HBExecutionException.java
+++ b/storm-core/src/jvm/backtype/storm/generated/HBExecutionException.java
@@@ -1,406 -1,0 +1,406 @@@
 +/**
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + * http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing, software
 + * distributed under the License is distributed on an "AS IS" BASIS,
 + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 + * See the License for the specific language governing permissions and
 + * limitations under the License.
 + */
 +/**
-  * Autogenerated by Thrift Compiler (0.9.2)
++ * Autogenerated by Thrift Compiler (0.9.3)
 + *
 + * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
 + *  @generated
 + */
 +package backtype.storm.generated;
 +
 +import org.apache.thrift.scheme.IScheme;
 +import org.apache.thrift.scheme.SchemeFactory;
 +import org.apache.thrift.scheme.StandardScheme;
 +
 +import org.apache.thrift.scheme.TupleScheme;
 +import org.apache.thrift.protocol.TTupleProtocol;
 +import org.apache.thrift.protocol.TProtocolException;
 +import org.apache.thrift.EncodingUtils;
 +import org.apache.thrift.TException;
 +import org.apache.thrift.async.AsyncMethodCallback;
 +import org.apache.thrift.server.AbstractNonblockingServer.*;
 +import java.util.List;
 +import java.util.ArrayList;
 +import java.util.Map;
 +import java.util.HashMap;
 +import java.util.EnumMap;
 +import java.util.Set;
 +import java.util.HashSet;
 +import java.util.EnumSet;
 +import java.util.Collections;
 +import java.util.BitSet;
 +import java.nio.ByteBuffer;
 +import java.util.Arrays;
 +import javax.annotation.Generated;
 +import org.slf4j.Logger;
 +import org.slf4j.LoggerFactory;
 +
 +@SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
- @Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-9")
++@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
 +public class HBExecutionException extends TException implements org.apache.thrift.TBase<HBExecutionException, HBExecutionException._Fields>, java.io.Serializable, Cloneable, Comparable<HBExecutionException> {
 +  private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("HBExecutionException");
 +
 +  private static final org.apache.thrift.protocol.TField MSG_FIELD_DESC = new org.apache.thrift.protocol.TField("msg", org.apache.thrift.protocol.TType.STRING, (short)1);
 +
 +  private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>();
 +  static {
 +    schemes.put(StandardScheme.class, new HBExecutionExceptionStandardSchemeFactory());
 +    schemes.put(TupleScheme.class, new HBExecutionExceptionTupleSchemeFactory());
 +  }
 +
 +  private String msg; // required
 +
 +  /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
 +  public enum _Fields implements org.apache.thrift.TFieldIdEnum {
 +    MSG((short)1, "msg");
 +
 +    private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();
 +
 +    static {
 +      for (_Fields field : EnumSet.allOf(_Fields.class)) {
 +        byName.put(field.getFieldName(), field);
 +      }
 +    }
 +
 +    /**
 +     * Find the _Fields constant that matches fieldId, or null if its not found.
 +     */
 +    public static _Fields findByThriftId(int fieldId) {
 +      switch(fieldId) {
 +        case 1: // MSG
 +          return MSG;
 +        default:
 +          return null;
 +      }
 +    }
 +
 +    /**
 +     * Find the _Fields constant that matches fieldId, throwing an exception
 +     * if it is not found.
 +     */
 +    public static _Fields findByThriftIdOrThrow(int fieldId) {
 +      _Fields fields = findByThriftId(fieldId);
 +      if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
 +      return fields;
 +    }
 +
 +    /**
 +     * Find the _Fields constant that matches name, or null if its not found.
 +     */
 +    public static _Fields findByName(String name) {
 +      return byName.get(name);
 +    }
 +
 +    private final short _thriftId;
 +    private final String _fieldName;
 +
 +    _Fields(short thriftId, String fieldName) {
 +      _thriftId = thriftId;
 +      _fieldName = fieldName;
 +    }
 +
 +    public short getThriftFieldId() {
 +      return _thriftId;
 +    }
 +
 +    public String getFieldName() {
 +      return _fieldName;
 +    }
 +  }
 +
 +  // isset id assignments
 +  public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
 +  static {
 +    Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
 +    tmpMap.put(_Fields.MSG, new org.apache.thrift.meta_data.FieldMetaData("msg", org.apache.thrift.TFieldRequirementType.REQUIRED, 
 +        new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)));
 +    metaDataMap = Collections.unmodifiableMap(tmpMap);
 +    org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(HBExecutionException.class, metaDataMap);
 +  }
 +
 +  public HBExecutionException() {
 +  }
 +
 +  public HBExecutionException(
 +    String msg)
 +  {
 +    this();
 +    this.msg = msg;
 +  }
 +
 +  /**
 +   * Performs a deep copy on <i>other</i>.
 +   */
 +  public HBExecutionException(HBExecutionException other) {
 +    if (other.is_set_msg()) {
 +      this.msg = other.msg;
 +    }
 +  }
 +
 +  public HBExecutionException deepCopy() {
 +    return new HBExecutionException(this);
 +  }
 +
 +  @Override
 +  public void clear() {
 +    this.msg = null;
 +  }
 +
 +  public String get_msg() {
 +    return this.msg;
 +  }
 +
 +  public void set_msg(String msg) {
 +    this.msg = msg;
 +  }
 +
 +  public void unset_msg() {
 +    this.msg = null;
 +  }
 +
 +  /** Returns true if field msg is set (has been assigned a value) and false otherwise */
 +  public boolean is_set_msg() {
 +    return this.msg != null;
 +  }
 +
 +  public void set_msg_isSet(boolean value) {
 +    if (!value) {
 +      this.msg = null;
 +    }
 +  }
 +
 +  public void setFieldValue(_Fields field, Object value) {
 +    switch (field) {
 +    case MSG:
 +      if (value == null) {
 +        unset_msg();
 +      } else {
 +        set_msg((String)value);
 +      }
 +      break;
 +
 +    }
 +  }
 +
 +  public Object getFieldValue(_Fields field) {
 +    switch (field) {
 +    case MSG:
 +      return get_msg();
 +
 +    }
 +    throw new IllegalStateException();
 +  }
 +
 +  /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
 +  public boolean isSet(_Fields field) {
 +    if (field == null) {
 +      throw new IllegalArgumentException();
 +    }
 +
 +    switch (field) {
 +    case MSG:
 +      return is_set_msg();
 +    }
 +    throw new IllegalStateException();
 +  }
 +
 +  @Override
 +  public boolean equals(Object that) {
 +    if (that == null)
 +      return false;
 +    if (that instanceof HBExecutionException)
 +      return this.equals((HBExecutionException)that);
 +    return false;
 +  }
 +
 +  public boolean equals(HBExecutionException that) {
 +    if (that == null)
 +      return false;
 +
 +    boolean this_present_msg = true && this.is_set_msg();
 +    boolean that_present_msg = true && that.is_set_msg();
 +    if (this_present_msg || that_present_msg) {
 +      if (!(this_present_msg && that_present_msg))
 +        return false;
 +      if (!this.msg.equals(that.msg))
 +        return false;
 +    }
 +
 +    return true;
 +  }
 +
 +  @Override
 +  public int hashCode() {
 +    List<Object> list = new ArrayList<Object>();
 +
 +    boolean present_msg = true && (is_set_msg());
 +    list.add(present_msg);
 +    if (present_msg)
 +      list.add(msg);
 +
 +    return list.hashCode();
 +  }
 +
 +  @Override
 +  public int compareTo(HBExecutionException other) {
 +    if (!getClass().equals(other.getClass())) {
 +      return getClass().getName().compareTo(other.getClass().getName());
 +    }
 +
 +    int lastComparison = 0;
 +
 +    lastComparison = Boolean.valueOf(is_set_msg()).compareTo(other.is_set_msg());
 +    if (lastComparison != 0) {
 +      return lastComparison;
 +    }
 +    if (is_set_msg()) {
 +      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.msg, other.msg);
 +      if (lastComparison != 0) {
 +        return lastComparison;
 +      }
 +    }
 +    return 0;
 +  }
 +
 +  public _Fields fieldForId(int fieldId) {
 +    return _Fields.findByThriftId(fieldId);
 +  }
 +
 +  public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException {
 +    schemes.get(iprot.getScheme()).getScheme().read(iprot, this);
 +  }
 +
 +  public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {
 +    schemes.get(oprot.getScheme()).getScheme().write(oprot, this);
 +  }
 +
 +  @Override
 +  public String toString() {
 +    StringBuilder sb = new StringBuilder("HBExecutionException(");
 +    boolean first = true;
 +
 +    sb.append("msg:");
 +    if (this.msg == null) {
 +      sb.append("null");
 +    } else {
 +      sb.append(this.msg);
 +    }
 +    first = false;
 +    sb.append(")");
 +    return sb.toString();
 +  }
 +
 +  public void validate() throws org.apache.thrift.TException {
 +    // check for required fields
 +    if (!is_set_msg()) {
 +      throw new org.apache.thrift.protocol.TProtocolException("Required field 'msg' is unset! Struct:" + toString());
 +    }
 +
 +    // check for sub-struct validity
 +  }
 +
 +  private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
 +    try {
 +      write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out)));
 +    } catch (org.apache.thrift.TException te) {
 +      throw new java.io.IOException(te);
 +    }
 +  }
 +
 +  private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
 +    try {
 +      read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));
 +    } catch (org.apache.thrift.TException te) {
 +      throw new java.io.IOException(te);
 +    }
 +  }
 +
 +  private static class HBExecutionExceptionStandardSchemeFactory implements SchemeFactory {
 +    public HBExecutionExceptionStandardScheme getScheme() {
 +      return new HBExecutionExceptionStandardScheme();
 +    }
 +  }
 +
 +  private static class HBExecutionExceptionStandardScheme extends StandardScheme<HBExecutionException> {
 +
 +    public void read(org.apache.thrift.protocol.TProtocol iprot, HBExecutionException struct) throws org.apache.thrift.TException {
 +      org.apache.thrift.protocol.TField schemeField;
 +      iprot.readStructBegin();
 +      while (true)
 +      {
 +        schemeField = iprot.readFieldBegin();
 +        if (schemeField.type == org.apache.thrift.protocol.TType.STOP) { 
 +          break;
 +        }
 +        switch (schemeField.id) {
 +          case 1: // MSG
 +            if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {
 +              struct.msg = iprot.readString();
 +              struct.set_msg_isSet(true);
 +            } else { 
 +              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
 +            }
 +            break;
 +          default:
 +            org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
 +        }
 +        iprot.readFieldEnd();
 +      }
 +      iprot.readStructEnd();
 +      struct.validate();
 +    }
 +
 +    public void write(org.apache.thrift.protocol.TProtocol oprot, HBExecutionException struct) throws org.apache.thrift.TException {
 +      struct.validate();
 +
 +      oprot.writeStructBegin(STRUCT_DESC);
 +      if (struct.msg != null) {
 +        oprot.writeFieldBegin(MSG_FIELD_DESC);
 +        oprot.writeString(struct.msg);
 +        oprot.writeFieldEnd();
 +      }
 +      oprot.writeFieldStop();
 +      oprot.writeStructEnd();
 +    }
 +
 +  }
 +
 +  private static class HBExecutionExceptionTupleSchemeFactory implements SchemeFactory {
 +    public HBExecutionExceptionTupleScheme getScheme() {
 +      return new HBExecutionExceptionTupleScheme();
 +    }
 +  }
 +
 +  private static class HBExecutionExceptionTupleScheme extends TupleScheme<HBExecutionException> {
 +
 +    @Override
 +    public void write(org.apache.thrift.protocol.TProtocol prot, HBExecutionException struct) throws org.apache.thrift.TException {
 +      TTupleProtocol oprot = (TTupleProtocol) prot;
 +      oprot.writeString(struct.msg);
 +    }
 +
 +    @Override
 +    public void read(org.apache.thrift.protocol.TProtocol prot, HBExecutionException struct) throws org.apache.thrift.TException {
 +      TTupleProtocol iprot = (TTupleProtocol) prot;
 +      struct.msg = iprot.readString();
 +      struct.set_msg_isSet(true);
 +    }
 +  }
 +
 +}
 +

http://git-wip-us.apache.org/repos/asf/storm/blob/4645c190/storm-core/src/jvm/backtype/storm/generated/HBMessage.java
----------------------------------------------------------------------
diff --cc storm-core/src/jvm/backtype/storm/generated/HBMessage.java
index 25bbd16,0000000..b3a2147
mode 100644,000000..100644
--- a/storm-core/src/jvm/backtype/storm/generated/HBMessage.java
+++ b/storm-core/src/jvm/backtype/storm/generated/HBMessage.java
@@@ -1,636 -1,0 +1,636 @@@
 +/**
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + * http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing, software
 + * distributed under the License is distributed on an "AS IS" BASIS,
 + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 + * See the License for the specific language governing permissions and
 + * limitations under the License.
 + */
 +/**
-  * Autogenerated by Thrift Compiler (0.9.2)
++ * Autogenerated by Thrift Compiler (0.9.3)
 + *
 + * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
 + *  @generated
 + */
 +package backtype.storm.generated;
 +
 +import org.apache.thrift.scheme.IScheme;
 +import org.apache.thrift.scheme.SchemeFactory;
 +import org.apache.thrift.scheme.StandardScheme;
 +
 +import org.apache.thrift.scheme.TupleScheme;
 +import org.apache.thrift.protocol.TTupleProtocol;
 +import org.apache.thrift.protocol.TProtocolException;
 +import org.apache.thrift.EncodingUtils;
 +import org.apache.thrift.TException;
 +import org.apache.thrift.async.AsyncMethodCallback;
 +import org.apache.thrift.server.AbstractNonblockingServer.*;
 +import java.util.List;
 +import java.util.ArrayList;
 +import java.util.Map;
 +import java.util.HashMap;
 +import java.util.EnumMap;
 +import java.util.Set;
 +import java.util.HashSet;
 +import java.util.EnumSet;
 +import java.util.Collections;
 +import java.util.BitSet;
 +import java.nio.ByteBuffer;
 +import java.util.Arrays;
 +import javax.annotation.Generated;
 +import org.slf4j.Logger;
 +import org.slf4j.LoggerFactory;
 +
 +@SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
- @Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-10-9")
++@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
 +public class HBMessage implements org.apache.thrift.TBase<HBMessage, HBMessage._Fields>, java.io.Serializable, Cloneable, Comparable<HBMessage> {
 +  private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("HBMessage");
 +
 +  private static final org.apache.thrift.protocol.TField TYPE_FIELD_DESC = new org.apache.thrift.protocol.TField("type", org.apache.thrift.protocol.TType.I32, (short)1);
 +  private static final org.apache.thrift.protocol.TField DATA_FIELD_DESC = new org.apache.thrift.protocol.TField("data", org.apache.thrift.protocol.TType.STRUCT, (short)2);
 +  private static final org.apache.thrift.protocol.TField MESSAGE_ID_FIELD_DESC = new org.apache.thrift.protocol.TField("message_id", org.apache.thrift.protocol.TType.I32, (short)3);
 +
 +  private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>();
 +  static {
 +    schemes.put(StandardScheme.class, new HBMessageStandardSchemeFactory());
 +    schemes.put(TupleScheme.class, new HBMessageTupleSchemeFactory());
 +  }
 +
 +  private HBServerMessageType type; // required
 +  private HBMessageData data; // required
 +  private int message_id; // optional
 +
 +  /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
 +  public enum _Fields implements org.apache.thrift.TFieldIdEnum {
 +    /**
 +     * 
 +     * @see HBServerMessageType
 +     */
 +    TYPE((short)1, "type"),
 +    DATA((short)2, "data"),
 +    MESSAGE_ID((short)3, "message_id");
 +
 +    private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();
 +
 +    static {
 +      for (_Fields field : EnumSet.allOf(_Fields.class)) {
 +        byName.put(field.getFieldName(), field);
 +      }
 +    }
 +
 +    /**
 +     * Find the _Fields constant that matches fieldId, or null if its not found.
 +     */
 +    public static _Fields findByThriftId(int fieldId) {
 +      switch(fieldId) {
 +        case 1: // TYPE
 +          return TYPE;
 +        case 2: // DATA
 +          return DATA;
 +        case 3: // MESSAGE_ID
 +          return MESSAGE_ID;
 +        default:
 +          return null;
 +      }
 +    }
 +
 +    /**
 +     * Find the _Fields constant that matches fieldId, throwing an exception
 +     * if it is not found.
 +     */
 +    public static _Fields findByThriftIdOrThrow(int fieldId) {
 +      _Fields fields = findByThriftId(fieldId);
 +      if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
 +      return fields;
 +    }
 +
 +    /**
 +     * Find the _Fields constant that matches name, or null if its not found.
 +     */
 +    public static _Fields findByName(String name) {
 +      return byName.get(name);
 +    }
 +
 +    private final short _thriftId;
 +    private final String _fieldName;
 +
 +    _Fields(short thriftId, String fieldName) {
 +      _thriftId = thriftId;
 +      _fieldName = fieldName;
 +    }
 +
 +    public short getThriftFieldId() {
 +      return _thriftId;
 +    }
 +
 +    public String getFieldName() {
 +      return _fieldName;
 +    }
 +  }
 +
 +  // isset id assignments
 +  private static final int __MESSAGE_ID_ISSET_ID = 0;
 +  private byte __isset_bitfield = 0;
 +  private static final _Fields optionals[] = {_Fields.MESSAGE_ID};
 +  public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
 +  static {
 +    Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
 +    tmpMap.put(_Fields.TYPE, new org.apache.thrift.meta_data.FieldMetaData("type", org.apache.thrift.TFieldRequirementType.DEFAULT, 
 +        new org.apache.thrift.meta_data.EnumMetaData(org.apache.thrift.protocol.TType.ENUM, HBServerMessageType.class)));
 +    tmpMap.put(_Fields.DATA, new org.apache.thrift.meta_data.FieldMetaData("data", org.apache.thrift.TFieldRequirementType.DEFAULT, 
 +        new org.apache.thrift.meta_data.StructMetaData(org.apache.thrift.protocol.TType.STRUCT, HBMessageData.class)));
 +    tmpMap.put(_Fields.MESSAGE_ID, new org.apache.thrift.meta_data.FieldMetaData("message_id", org.apache.thrift.TFieldRequirementType.OPTIONAL, 
 +        new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.I32)));
 +    metaDataMap = Collections.unmodifiableMap(tmpMap);
 +    org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(HBMessage.class, metaDataMap);
 +  }
 +
 +  public HBMessage() {
 +    this.message_id = -1;
 +
 +  }
 +
 +  public HBMessage(
 +    HBServerMessageType type,
 +    HBMessageData data)
 +  {
 +    this();
 +    this.type = type;
 +    this.data = data;
 +  }
 +
 +  /**
 +   * Performs a deep copy on <i>other</i>.
 +   */
 +  public HBMessage(HBMessage other) {
 +    __isset_bitfield = other.__isset_bitfield;
 +    if (other.is_set_type()) {
 +      this.type = other.type;
 +    }
 +    if (other.is_set_data()) {
 +      this.data = new HBMessageData(other.data);
 +    }
 +    this.message_id = other.message_id;
 +  }
 +
 +  public HBMessage deepCopy() {
 +    return new HBMessage(this);
 +  }
 +
 +  @Override
 +  public void clear() {
 +    this.type = null;
 +    this.data = null;
 +    this.message_id = -1;
 +
 +  }
 +
 +  /**
 +   * 
 +   * @see HBServerMessageType
 +   */
 +  public HBServerMessageType get_type() {
 +    return this.type;
 +  }
 +
 +  /**
 +   * 
 +   * @see HBServerMessageType
 +   */
 +  public void set_type(HBServerMessageType type) {
 +    this.type = type;
 +  }
 +
 +  public void unset_type() {
 +    this.type = null;
 +  }
 +
 +  /** Returns true if field type is set (has been assigned a value) and false otherwise */
 +  public boolean is_set_type() {
 +    return this.type != null;
 +  }
 +
 +  public void set_type_isSet(boolean value) {
 +    if (!value) {
 +      this.type = null;
 +    }
 +  }
 +
 +  public HBMessageData get_data() {
 +    return this.data;
 +  }
 +
 +  public void set_data(HBMessageData data) {
 +    this.data = data;
 +  }
 +
 +  public void unset_data() {
 +    this.data = null;
 +  }
 +
 +  /** Returns true if field data is set (has been assigned a value) and false otherwise */
 +  public boolean is_set_data() {
 +    return this.data != null;
 +  }
 +
 +  public void set_data_isSet(boolean value) {
 +    if (!value) {
 +      this.data = null;
 +    }
 +  }
 +
 +  public int get_message_id() {
 +    return this.message_id;
 +  }
 +
 +  public void set_message_id(int message_id) {
 +    this.message_id = message_id;
 +    set_message_id_isSet(true);
 +  }
 +
 +  public void unset_message_id() {
 +    __isset_bitfield = EncodingUtils.clearBit(__isset_bitfield, __MESSAGE_ID_ISSET_ID);
 +  }
 +
 +  /** Returns true if field message_id is set (has been assigned a value) and false otherwise */
 +  public boolean is_set_message_id() {
 +    return EncodingUtils.testBit(__isset_bitfield, __MESSAGE_ID_ISSET_ID);
 +  }
 +
 +  public void set_message_id_isSet(boolean value) {
 +    __isset_bitfield = EncodingUtils.setBit(__isset_bitfield, __MESSAGE_ID_ISSET_ID, value);
 +  }
 +
 +  public void setFieldValue(_Fields field, Object value) {
 +    switch (field) {
 +    case TYPE:
 +      if (value == null) {
 +        unset_type();
 +      } else {
 +        set_type((HBServerMessageType)value);
 +      }
 +      break;
 +
 +    case DATA:
 +      if (value == null) {
 +        unset_data();
 +      } else {
 +        set_data((HBMessageData)value);
 +      }
 +      break;
 +
 +    case MESSAGE_ID:
 +      if (value == null) {
 +        unset_message_id();
 +      } else {
 +        set_message_id((Integer)value);
 +      }
 +      break;
 +
 +    }
 +  }
 +
 +  public Object getFieldValue(_Fields field) {
 +    switch (field) {
 +    case TYPE:
 +      return get_type();
 +
 +    case DATA:
 +      return get_data();
 +
 +    case MESSAGE_ID:
-       return Integer.valueOf(get_message_id());
++      return get_message_id();
 +
 +    }
 +    throw new IllegalStateException();
 +  }
 +
 +  /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
 +  public boolean isSet(_Fields field) {
 +    if (field == null) {
 +      throw new IllegalArgumentException();
 +    }
 +
 +    switch (field) {
 +    case TYPE:
 +      return is_set_type();
 +    case DATA:
 +      return is_set_data();
 +    case MESSAGE_ID:
 +      return is_set_message_id();
 +    }
 +    throw new IllegalStateException();
 +  }
 +
 +  @Override
 +  public boolean equals(Object that) {
 +    if (that == null)
 +      return false;
 +    if (that instanceof HBMessage)
 +      return this.equals((HBMessage)that);
 +    return false;
 +  }
 +
 +  public boolean equals(HBMessage that) {
 +    if (that == null)
 +      return false;
 +
 +    boolean this_present_type = true && this.is_set_type();
 +    boolean that_present_type = true && that.is_set_type();
 +    if (this_present_type || that_present_type) {
 +      if (!(this_present_type && that_present_type))
 +        return false;
 +      if (!this.type.equals(that.type))
 +        return false;
 +    }
 +
 +    boolean this_present_data = true && this.is_set_data();
 +    boolean that_present_data = true && that.is_set_data();
 +    if (this_present_data || that_present_data) {
 +      if (!(this_present_data && that_present_data))
 +        return false;
 +      if (!this.data.equals(that.data))
 +        return false;
 +    }
 +
 +    boolean this_present_message_id = true && this.is_set_message_id();
 +    boolean that_present_message_id = true && that.is_set_message_id();
 +    if (this_present_message_id || that_present_message_id) {
 +      if (!(this_present_message_id && that_present_message_id))
 +        return false;
 +      if (this.message_id != that.message_id)
 +        return false;
 +    }
 +
 +    return true;
 +  }
 +
 +  @Override
 +  public int hashCode() {
 +    List<Object> list = new ArrayList<Object>();
 +
 +    boolean present_type = true && (is_set_type());
 +    list.add(present_type);
 +    if (present_type)
 +      list.add(type.getValue());
 +
 +    boolean present_data = true && (is_set_data());
 +    list.add(present_data);
 +    if (present_data)
 +      list.add(data);
 +
 +    boolean present_message_id = true && (is_set_message_id());
 +    list.add(present_message_id);
 +    if (present_message_id)
 +      list.add(message_id);
 +
 +    return list.hashCode();
 +  }
 +
 +  @Override
 +  public int compareTo(HBMessage other) {
 +    if (!getClass().equals(other.getClass())) {
 +      return getClass().getName().compareTo(other.getClass().getName());
 +    }
 +
 +    int lastComparison = 0;
 +
 +    lastComparison = Boolean.valueOf(is_set_type()).compareTo(other.is_set_type());
 +    if (lastComparison != 0) {
 +      return lastComparison;
 +    }
 +    if (is_set_type()) {
 +      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.type, other.type);
 +      if (lastComparison != 0) {
 +        return lastComparison;
 +      }
 +    }
 +    lastComparison = Boolean.valueOf(is_set_data()).compareTo(other.is_set_data());
 +    if (lastComparison != 0) {
 +      return lastComparison;
 +    }
 +    if (is_set_data()) {
 +      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.data, other.data);
 +      if (lastComparison != 0) {
 +        return lastComparison;
 +      }
 +    }
 +    lastComparison = Boolean.valueOf(is_set_message_id()).compareTo(other.is_set_message_id());
 +    if (lastComparison != 0) {
 +      return lastComparison;
 +    }
 +    if (is_set_message_id()) {
 +      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.message_id, other.message_id);
 +      if (lastComparison != 0) {
 +        return lastComparison;
 +      }
 +    }
 +    return 0;
 +  }
 +
 +  public _Fields fieldForId(int fieldId) {
 +    return _Fields.findByThriftId(fieldId);
 +  }
 +
 +  public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException {
 +    schemes.get(iprot.getScheme()).getScheme().read(iprot, this);
 +  }
 +
 +  public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {
 +    schemes.get(oprot.getScheme()).getScheme().write(oprot, this);
 +  }
 +
 +  @Override
 +  public String toString() {
 +    StringBuilder sb = new StringBuilder("HBMessage(");
 +    boolean first = true;
 +
 +    sb.append("type:");
 +    if (this.type == null) {
 +      sb.append("null");
 +    } else {
 +      sb.append(this.type);
 +    }
 +    first = false;
 +    if (!first) sb.append(", ");
 +    sb.append("data:");
 +    if (this.data == null) {
 +      sb.append("null");
 +    } else {
 +      sb.append(this.data);
 +    }
 +    first = false;
 +    if (is_set_message_id()) {
 +      if (!first) sb.append(", ");
 +      sb.append("message_id:");
 +      sb.append(this.message_id);
 +      first = false;
 +    }
 +    sb.append(")");
 +    return sb.toString();
 +  }
 +
 +  public void validate() throws org.apache.thrift.TException {
 +    // check for required fields
 +    // check for sub-struct validity
 +  }
 +
 +  private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
 +    try {
 +      write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out)));
 +    } catch (org.apache.thrift.TException te) {
 +      throw new java.io.IOException(te);
 +    }
 +  }
 +
 +  private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
 +    try {
 +      // it doesn't seem like you should have to do this, but java serialization is wacky, and doesn't call the default constructor.
 +      __isset_bitfield = 0;
 +      read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));
 +    } catch (org.apache.thrift.TException te) {
 +      throw new java.io.IOException(te);
 +    }
 +  }
 +
 +  private static class HBMessageStandardSchemeFactory implements SchemeFactory {
 +    public HBMessageStandardScheme getScheme() {
 +      return new HBMessageStandardScheme();
 +    }
 +  }
 +
 +  private static class HBMessageStandardScheme extends StandardScheme<HBMessage> {
 +
 +    public void read(org.apache.thrift.protocol.TProtocol iprot, HBMessage struct) throws org.apache.thrift.TException {
 +      org.apache.thrift.protocol.TField schemeField;
 +      iprot.readStructBegin();
 +      while (true)
 +      {
 +        schemeField = iprot.readFieldBegin();
 +        if (schemeField.type == org.apache.thrift.protocol.TType.STOP) { 
 +          break;
 +        }
 +        switch (schemeField.id) {
 +          case 1: // TYPE
 +            if (schemeField.type == org.apache.thrift.protocol.TType.I32) {
 +              struct.type = backtype.storm.generated.HBServerMessageType.findByValue(iprot.readI32());
 +              struct.set_type_isSet(true);
 +            } else { 
 +              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
 +            }
 +            break;
 +          case 2: // DATA
 +            if (schemeField.type == org.apache.thrift.protocol.TType.STRUCT) {
 +              struct.data = new HBMessageData();
 +              struct.data.read(iprot);
 +              struct.set_data_isSet(true);
 +            } else { 
 +              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
 +            }
 +            break;
 +          case 3: // MESSAGE_ID
 +            if (schemeField.type == org.apache.thrift.protocol.TType.I32) {
 +              struct.message_id = iprot.readI32();
 +              struct.set_message_id_isSet(true);
 +            } else { 
 +              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
 +            }
 +            break;
 +          default:
 +            org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
 +        }
 +        iprot.readFieldEnd();
 +      }
 +      iprot.readStructEnd();
 +      struct.validate();
 +    }
 +
 +    public void write(org.apache.thrift.protocol.TProtocol oprot, HBMessage struct) throws org.apache.thrift.TException {
 +      struct.validate();
 +
 +      oprot.writeStructBegin(STRUCT_DESC);
 +      if (struct.type != null) {
 +        oprot.writeFieldBegin(TYPE_FIELD_DESC);
 +        oprot.writeI32(struct.type.getValue());
 +        oprot.writeFieldEnd();
 +      }
 +      if (struct.data != null) {
 +        oprot.writeFieldBegin(DATA_FIELD_DESC);
 +        struct.data.write(oprot);
 +        oprot.writeFieldEnd();
 +      }
 +      if (struct.is_set_message_id()) {
 +        oprot.writeFieldBegin(MESSAGE_ID_FIELD_DESC);
 +        oprot.writeI32(struct.message_id);
 +        oprot.writeFieldEnd();
 +      }
 +      oprot.writeFieldStop();
 +      oprot.writeStructEnd();
 +    }
 +
 +  }
 +
 +  private static class HBMessageTupleSchemeFactory implements SchemeFactory {
 +    public HBMessageTupleScheme getScheme() {
 +      return new HBMessageTupleScheme();
 +    }
 +  }
 +
 +  private static class HBMessageTupleScheme extends TupleScheme<HBMessage> {
 +
 +    @Override
 +    public void write(org.apache.thrift.protocol.TProtocol prot, HBMessage struct) throws org.apache.thrift.TException {
 +      TTupleProtocol oprot = (TTupleProtocol) prot;
 +      BitSet optionals = new BitSet();
 +      if (struct.is_set_type()) {
 +        optionals.set(0);
 +      }
 +      if (struct.is_set_data()) {
 +        optionals.set(1);
 +      }
 +      if (struct.is_set_message_id()) {
 +        optionals.set(2);
 +      }
 +      oprot.writeBitSet(optionals, 3);
 +      if (struct.is_set_type()) {
 +        oprot.writeI32(struct.type.getValue());
 +      }
 +      if (struct.is_set_data()) {
 +        struct.data.write(oprot);
 +      }
 +      if (struct.is_set_message_id()) {
 +        oprot.writeI32(struct.message_id);
 +      }
 +    }
 +
 +    @Override
 +    public void read(org.apache.thrift.protocol.TProtocol prot, HBMessage struct) throws org.apache.thrift.TException {
 +      TTupleProtocol iprot = (TTupleProtocol) prot;
 +      BitSet incoming = iprot.readBitSet(3);
 +      if (incoming.get(0)) {
 +        struct.type = backtype.storm.generated.HBServerMessageType.findByValue(iprot.readI32());
 +        struct.set_type_isSet(true);
 +      }
 +      if (incoming.get(1)) {
 +        struct.data = new HBMessageData();
 +        struct.data.read(iprot);
 +        struct.set_data_isSet(true);
 +      }
 +      if (incoming.get(2)) {
 +        struct.message_id = iprot.readI32();
 +        struct.set_message_id_isSet(true);
 +      }
 +    }
 +  }
 +
 +}
 +
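As a usage note for the two serialization schemes above (the field-by-field standard scheme and the BitSet-driven tuple scheme): Thrift picks the scheme from the protocol, so a plain binary protocol exercises the standard scheme shown here. Below is a minimal round-trip sketch in Java. It assumes the generated two-argument constructor HBMessage(HBServerMessageType, HBMessageData) and the usual Thrift accessors, which are not shown in this excerpt; EXISTS stands in for any HBServerMessageType constant, and the path value is hypothetical.

```
import org.apache.thrift.TDeserializer;
import org.apache.thrift.TSerializer;
import org.apache.thrift.protocol.TBinaryProtocol;

import backtype.storm.generated.HBMessage;
import backtype.storm.generated.HBMessageData;
import backtype.storm.generated.HBServerMessageType;

public class HBMessageRoundTrip {
  public static void main(String[] args) throws Exception {
    // Assumed generated constructor HBMessage(type, data); the path is a
    // hypothetical example key.
    HBMessage request = new HBMessage(HBServerMessageType.EXISTS,
                                      HBMessageData.path("/pulses/worker-1"));
    request.set_message_id(42); // optional field, written only when set

    // TBinaryProtocol selects the standard scheme; TTupleProtocol would
    // select the tuple scheme defined above.
    byte[] wire = new TSerializer(new TBinaryProtocol.Factory()).serialize(request);

    HBMessage decoded = new HBMessage();
    new TDeserializer(new TBinaryProtocol.Factory()).deserialize(decoded, wire);
    System.out.println(decoded); // rendered by the generated toString() above
  }
}
```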

http://git-wip-us.apache.org/repos/asf/storm/blob/4645c190/storm-core/src/jvm/backtype/storm/generated/HBMessageData.java
----------------------------------------------------------------------
diff --cc storm-core/src/jvm/backtype/storm/generated/HBMessageData.java
index 6724f3d,0000000..dba2f8b
mode 100644,000000..100644
--- a/storm-core/src/jvm/backtype/storm/generated/HBMessageData.java
+++ b/storm-core/src/jvm/backtype/storm/generated/HBMessageData.java
@@@ -1,640 -1,0 +1,640 @@@
 +/**
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + * http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing, software
 + * distributed under the License is distributed on an "AS IS" BASIS,
 + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 + * See the License for the specific language governing permissions and
 + * limitations under the License.
 + */
 +/**
-  * Autogenerated by Thrift Compiler (0.9.2)
++ * Autogenerated by Thrift Compiler (0.9.3)
 + *
 + * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
 + *  @generated
 + */
 +package backtype.storm.generated;
 +
 +import org.apache.thrift.scheme.IScheme;
 +import org.apache.thrift.scheme.SchemeFactory;
 +import org.apache.thrift.scheme.StandardScheme;
 +
 +import org.apache.thrift.scheme.TupleScheme;
 +import org.apache.thrift.protocol.TTupleProtocol;
 +import org.apache.thrift.protocol.TProtocolException;
 +import org.apache.thrift.EncodingUtils;
 +import org.apache.thrift.TException;
 +import org.apache.thrift.async.AsyncMethodCallback;
 +import org.apache.thrift.server.AbstractNonblockingServer.*;
 +import java.util.List;
 +import java.util.ArrayList;
 +import java.util.Map;
 +import java.util.HashMap;
 +import java.util.EnumMap;
 +import java.util.Set;
 +import java.util.HashSet;
 +import java.util.EnumSet;
 +import java.util.Collections;
 +import java.util.BitSet;
 +import java.nio.ByteBuffer;
 +import java.util.Arrays;
 +import javax.annotation.Generated;
 +import org.slf4j.Logger;
 +import org.slf4j.LoggerFactory;
 +
 +@SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
 +public class HBMessageData extends org.apache.thrift.TUnion<HBMessageData, HBMessageData._Fields> {
 +  private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("HBMessageData");
 +  private static final org.apache.thrift.protocol.TField PATH_FIELD_DESC = new org.apache.thrift.protocol.TField("path", org.apache.thrift.protocol.TType.STRING, (short)1);
 +  private static final org.apache.thrift.protocol.TField PULSE_FIELD_DESC = new org.apache.thrift.protocol.TField("pulse", org.apache.thrift.protocol.TType.STRUCT, (short)2);
 +  private static final org.apache.thrift.protocol.TField BOOLVAL_FIELD_DESC = new org.apache.thrift.protocol.TField("boolval", org.apache.thrift.protocol.TType.BOOL, (short)3);
 +  private static final org.apache.thrift.protocol.TField RECORDS_FIELD_DESC = new org.apache.thrift.protocol.TField("records", org.apache.thrift.protocol.TType.STRUCT, (short)4);
 +  private static final org.apache.thrift.protocol.TField NODES_FIELD_DESC = new org.apache.thrift.protocol.TField("nodes", org.apache.thrift.protocol.TType.STRUCT, (short)5);
 +  private static final org.apache.thrift.protocol.TField MESSAGE_BLOB_FIELD_DESC = new org.apache.thrift.protocol.TField("message_blob", org.apache.thrift.protocol.TType.STRING, (short)7);
 +
 +  /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
 +  public enum _Fields implements org.apache.thrift.TFieldIdEnum {
 +    PATH((short)1, "path"),
 +    PULSE((short)2, "pulse"),
 +    BOOLVAL((short)3, "boolval"),
 +    RECORDS((short)4, "records"),
 +    NODES((short)5, "nodes"),
 +    MESSAGE_BLOB((short)7, "message_blob");
 +
 +    private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();
 +
 +    static {
 +      for (_Fields field : EnumSet.allOf(_Fields.class)) {
 +        byName.put(field.getFieldName(), field);
 +      }
 +    }
 +
 +    /**
 +     * Find the _Fields constant that matches fieldId, or null if it's not found.
 +     */
 +    public static _Fields findByThriftId(int fieldId) {
 +      switch(fieldId) {
 +        case 1: // PATH
 +          return PATH;
 +        case 2: // PULSE
 +          return PULSE;
 +        case 3: // BOOLVAL
 +          return BOOLVAL;
 +        case 4: // RECORDS
 +          return RECORDS;
 +        case 5: // NODES
 +          return NODES;
 +        case 7: // MESSAGE_BLOB
 +          return MESSAGE_BLOB;
 +        default:
 +          return null;
 +      }
 +    }
 +
 +    /**
 +     * Find the _Fields constant that matches fieldId, throwing an exception
 +     * if it is not found.
 +     */
 +    public static _Fields findByThriftIdOrThrow(int fieldId) {
 +      _Fields fields = findByThriftId(fieldId);
 +      if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
 +      return fields;
 +    }
 +
 +    /**
 +     * Find the _Fields constant that matches name, or null if it's not found.
 +     */
 +    public static _Fields findByName(String name) {
 +      return byName.get(name);
 +    }
 +
 +    private final short _thriftId;
 +    private final String _fieldName;
 +
 +    _Fields(short thriftId, String fieldName) {
 +      _thriftId = thriftId;
 +      _fieldName = fieldName;
 +    }
 +
 +    public short getThriftFieldId() {
 +      return _thriftId;
 +    }
 +
 +    public String getFieldName() {
 +      return _fieldName;
 +    }
 +  }
 +
 +  public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
 +  static {
 +    Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
 +    tmpMap.put(_Fields.PATH, new org.apache.thrift.meta_data.FieldMetaData("path", org.apache.thrift.TFieldRequirementType.DEFAULT, 
 +        new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)));
 +    tmpMap.put(_Fields.PULSE, new org.apache.thrift.meta_data.FieldMetaData("pulse", org.apache.thrift.TFieldRequirementType.DEFAULT, 
 +        new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRUCT        , "HBPulse")));
 +    tmpMap.put(_Fields.BOOLVAL, new org.apache.thrift.meta_data.FieldMetaData("boolval", org.apache.thrift.TFieldRequirementType.DEFAULT, 
 +        new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.BOOL)));
 +    tmpMap.put(_Fields.RECORDS, new org.apache.thrift.meta_data.FieldMetaData("records", org.apache.thrift.TFieldRequirementType.DEFAULT, 
 +        new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRUCT        , "HBRecords")));
 +    tmpMap.put(_Fields.NODES, new org.apache.thrift.meta_data.FieldMetaData("nodes", org.apache.thrift.TFieldRequirementType.DEFAULT, 
 +        new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRUCT        , "HBNodes")));
 +    tmpMap.put(_Fields.MESSAGE_BLOB, new org.apache.thrift.meta_data.FieldMetaData("message_blob", org.apache.thrift.TFieldRequirementType.OPTIONAL, 
 +        new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING        , true)));
 +    metaDataMap = Collections.unmodifiableMap(tmpMap);
 +    org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(HBMessageData.class, metaDataMap);
 +  }
 +
 +  public HBMessageData() {
 +    super();
 +  }
 +
 +  public HBMessageData(_Fields setField, Object value) {
 +    super(setField, value);
 +  }
 +
 +  public HBMessageData(HBMessageData other) {
 +    super(other);
 +  }
 +  public HBMessageData deepCopy() {
 +    return new HBMessageData(this);
 +  }
 +
 +  public static HBMessageData path(String value) {
 +    HBMessageData x = new HBMessageData();
 +    x.set_path(value);
 +    return x;
 +  }
 +
 +  public static HBMessageData pulse(HBPulse value) {
 +    HBMessageData x = new HBMessageData();
 +    x.set_pulse(value);
 +    return x;
 +  }
 +
 +  public static HBMessageData boolval(boolean value) {
 +    HBMessageData x = new HBMessageData();
 +    x.set_boolval(value);
 +    return x;
 +  }
 +
 +  public static HBMessageData records(HBRecords value) {
 +    HBMessageData x = new HBMessageData();
 +    x.set_records(value);
 +    return x;
 +  }
 +
 +  public static HBMessageData nodes(HBNodes value) {
 +    HBMessageData x = new HBMessageData();
 +    x.set_nodes(value);
 +    return x;
 +  }
 +
 +  public static HBMessageData message_blob(ByteBuffer value) {
 +    HBMessageData x = new HBMessageData();
 +    x.set_message_blob(value);
 +    return x;
 +  }
 +
 +  public static HBMessageData message_blob(byte[] value) {
 +    HBMessageData x = new HBMessageData();
 +    x.set_message_blob(ByteBuffer.wrap(Arrays.copyOf(value, value.length)));
 +    return x;
 +  }
 +
 +
 +  @Override
 +  protected void checkType(_Fields setField, Object value) throws ClassCastException {
 +    switch (setField) {
 +      case PATH:
 +        if (value instanceof String) {
 +          break;
 +        }
 +        throw new ClassCastException("Was expecting value of type String for field 'path', but got " + value.getClass().getSimpleName());
 +      case PULSE:
 +        if (value instanceof HBPulse) {
 +          break;
 +        }
 +        throw new ClassCastException("Was expecting value of type HBPulse for field 'pulse', but got " + value.getClass().getSimpleName());
 +      case BOOLVAL:
 +        if (value instanceof Boolean) {
 +          break;
 +        }
 +        throw new ClassCastException("Was expecting value of type Boolean for field 'boolval', but got " + value.getClass().getSimpleName());
 +      case RECORDS:
 +        if (value instanceof HBRecords) {
 +          break;
 +        }
 +        throw new ClassCastException("Was expecting value of type HBRecords for field 'records', but got " + value.getClass().getSimpleName());
 +      case NODES:
 +        if (value instanceof HBNodes) {
 +          break;
 +        }
 +        throw new ClassCastException("Was expecting value of type HBNodes for field 'nodes', but got " + value.getClass().getSimpleName());
 +      case MESSAGE_BLOB:
 +        if (value instanceof ByteBuffer) {
 +          break;
 +        }
 +        throw new ClassCastException("Was expecting value of type ByteBuffer for field 'message_blob', but got " + value.getClass().getSimpleName());
 +      default:
 +        throw new IllegalArgumentException("Unknown field id " + setField);
 +    }
 +  }
 +
 +  @Override
 +  protected Object standardSchemeReadValue(org.apache.thrift.protocol.TProtocol iprot, org.apache.thrift.protocol.TField field) throws org.apache.thrift.TException {
 +    _Fields setField = _Fields.findByThriftId(field.id);
 +    if (setField != null) {
 +      switch (setField) {
 +        case PATH:
 +          if (field.type == PATH_FIELD_DESC.type) {
 +            String path;
 +            path = iprot.readString();
 +            return path;
 +          } else {
 +            org.apache.thrift.protocol.TProtocolUtil.skip(iprot, field.type);
 +            return null;
 +          }
 +        case PULSE:
 +          if (field.type == PULSE_FIELD_DESC.type) {
 +            HBPulse pulse;
 +            pulse = new HBPulse();
 +            pulse.read(iprot);
 +            return pulse;
 +          } else {
 +            org.apache.thrift.protocol.TProtocolUtil.skip(iprot, field.type);
 +            return null;
 +          }
 +        case BOOLVAL:
 +          if (field.type == BOOLVAL_FIELD_DESC.type) {
 +            Boolean boolval;
 +            boolval = iprot.readBool();
 +            return boolval;
 +          } else {
 +            org.apache.thrift.protocol.TProtocolUtil.skip(iprot, field.type);
 +            return null;
 +          }
 +        case RECORDS:
 +          if (field.type == RECORDS_FIELD_DESC.type) {
 +            HBRecords records;
 +            records = new HBRecords();
 +            records.read(iprot);
 +            return records;
 +          } else {
 +            org.apache.thrift.protocol.TProtocolUtil.skip(iprot, field.type);
 +            return null;
 +          }
 +        case NODES:
 +          if (field.type == NODES_FIELD_DESC.type) {
 +            HBNodes nodes;
 +            nodes = new HBNodes();
 +            nodes.read(iprot);
 +            return nodes;
 +          } else {
 +            org.apache.thrift.protocol.TProtocolUtil.skip(iprot, field.type);
 +            return null;
 +          }
 +        case MESSAGE_BLOB:
 +          if (field.type == MESSAGE_BLOB_FIELD_DESC.type) {
 +            ByteBuffer message_blob;
 +            message_blob = iprot.readBinary();
 +            return message_blob;
 +          } else {
 +            org.apache.thrift.protocol.TProtocolUtil.skip(iprot, field.type);
 +            return null;
 +          }
 +        default:
 +          throw new IllegalStateException("setField wasn't null, but didn't match any of the case statements!");
 +      }
 +    } else {
 +      org.apache.thrift.protocol.TProtocolUtil.skip(iprot, field.type);
 +      return null;
 +    }
 +  }
 +
 +  @Override
 +  protected void standardSchemeWriteValue(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {
 +    switch (setField_) {
 +      case PATH:
 +        String path = (String)value_;
 +        oprot.writeString(path);
 +        return;
 +      case PULSE:
 +        HBPulse pulse = (HBPulse)value_;
 +        pulse.write(oprot);
 +        return;
 +      case BOOLVAL:
 +        Boolean boolval = (Boolean)value_;
 +        oprot.writeBool(boolval);
 +        return;
 +      case RECORDS:
 +        HBRecords records = (HBRecords)value_;
 +        records.write(oprot);
 +        return;
 +      case NODES:
 +        HBNodes nodes = (HBNodes)value_;
 +        nodes.write(oprot);
 +        return;
 +      case MESSAGE_BLOB:
 +        ByteBuffer message_blob = (ByteBuffer)value_;
 +        oprot.writeBinary(message_blob);
 +        return;
 +      default:
 +        throw new IllegalStateException("Cannot write union with unknown field " + setField_);
 +    }
 +  }
 +
 +  @Override
 +  protected Object tupleSchemeReadValue(org.apache.thrift.protocol.TProtocol iprot, short fieldID) throws org.apache.thrift.TException {
 +    _Fields setField = _Fields.findByThriftId(fieldID);
 +    if (setField != null) {
 +      switch (setField) {
 +        case PATH:
 +          String path;
 +          path = iprot.readString();
 +          return path;
 +        case PULSE:
 +          HBPulse pulse;
 +          pulse = new HBPulse();
 +          pulse.read(iprot);
 +          return pulse;
 +        case BOOLVAL:
 +          Boolean boolval;
 +          boolval = iprot.readBool();
 +          return boolval;
 +        case RECORDS:
 +          HBRecords records;
 +          records = new HBRecords();
 +          records.read(iprot);
 +          return records;
 +        case NODES:
 +          HBNodes nodes;
 +          nodes = new HBNodes();
 +          nodes.read(iprot);
 +          return nodes;
 +        case MESSAGE_BLOB:
 +          ByteBuffer message_blob;
 +          message_blob = iprot.readBinary();
 +          return message_blob;
 +        default:
 +          throw new IllegalStateException("setField wasn't null, but didn't match any of the case statements!");
 +      }
 +    } else {
 +      throw new TProtocolException("Couldn't find a field with field id " + fieldID);
 +    }
 +  }
 +
 +  @Override
 +  protected void tupleSchemeWriteValue(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {
 +    switch (setField_) {
 +      case PATH:
 +        String path = (String)value_;
 +        oprot.writeString(path);
 +        return;
 +      case PULSE:
 +        HBPulse pulse = (HBPulse)value_;
 +        pulse.write(oprot);
 +        return;
 +      case BOOLVAL:
 +        Boolean boolval = (Boolean)value_;
 +        oprot.writeBool(boolval);
 +        return;
 +      case RECORDS:
 +        HBRecords records = (HBRecords)value_;
 +        records.write(oprot);
 +        return;
 +      case NODES:
 +        HBNodes nodes = (HBNodes)value_;
 +        nodes.write(oprot);
 +        return;
 +      case MESSAGE_BLOB:
 +        ByteBuffer message_blob = (ByteBuffer)value_;
 +        oprot.writeBinary(message_blob);
 +        return;
 +      default:
 +        throw new IllegalStateException("Cannot write union with unknown field " + setField_);
 +    }
 +  }
 +
 +  @Override
 +  protected org.apache.thrift.protocol.TField getFieldDesc(_Fields setField) {
 +    switch (setField) {
 +      case PATH:
 +        return PATH_FIELD_DESC;
 +      case PULSE:
 +        return PULSE_FIELD_DESC;
 +      case BOOLVAL:
 +        return BOOLVAL_FIELD_DESC;
 +      case RECORDS:
 +        return RECORDS_FIELD_DESC;
 +      case NODES:
 +        return NODES_FIELD_DESC;
 +      case MESSAGE_BLOB:
 +        return MESSAGE_BLOB_FIELD_DESC;
 +      default:
 +        throw new IllegalArgumentException("Unknown field id " + setField);
 +    }
 +  }
 +
 +  @Override
 +  protected org.apache.thrift.protocol.TStruct getStructDesc() {
 +    return STRUCT_DESC;
 +  }
 +
 +  @Override
 +  protected _Fields enumForId(short id) {
 +    return _Fields.findByThriftIdOrThrow(id);
 +  }
 +
 +  public _Fields fieldForId(int fieldId) {
 +    return _Fields.findByThriftId(fieldId);
 +  }
 +
 +
 +  public String get_path() {
 +    if (getSetField() == _Fields.PATH) {
 +      return (String)getFieldValue();
 +    } else {
 +      throw new RuntimeException("Cannot get field 'path' because union is currently set to " + getFieldDesc(getSetField()).name);
 +    }
 +  }
 +
 +  public void set_path(String value) {
 +    if (value == null) throw new NullPointerException();
 +    setField_ = _Fields.PATH;
 +    value_ = value;
 +  }
 +
 +  public HBPulse get_pulse() {
 +    if (getSetField() == _Fields.PULSE) {
 +      return (HBPulse)getFieldValue();
 +    } else {
 +      throw new RuntimeException("Cannot get field 'pulse' because union is currently set to " + getFieldDesc(getSetField()).name);
 +    }
 +  }
 +
 +  public void set_pulse(HBPulse value) {
 +    if (value == null) throw new NullPointerException();
 +    setField_ = _Fields.PULSE;
 +    value_ = value;
 +  }
 +
 +  public boolean get_boolval() {
 +    if (getSetField() == _Fields.BOOLVAL) {
 +      return (Boolean)getFieldValue();
 +    } else {
 +      throw new RuntimeException("Cannot get field 'boolval' because union is currently set to " + getFieldDesc(getSetField()).name);
 +    }
 +  }
 +
 +  public void set_boolval(boolean value) {
 +    setField_ = _Fields.BOOLVAL;
 +    value_ = value;
 +  }
 +
 +  public HBRecords get_records() {
 +    if (getSetField() == _Fields.RECORDS) {
 +      return (HBRecords)getFieldValue();
 +    } else {
 +      throw new RuntimeException("Cannot get field 'records' because union is currently set to " + getFieldDesc(getSetField()).name);
 +    }
 +  }
 +
 +  public void set_records(HBRecords value) {
 +    if (value == null) throw new NullPointerException();
 +    setField_ = _Fields.RECORDS;
 +    value_ = value;
 +  }
 +
 +  public HBNodes get_nodes() {
 +    if (getSetField() == _Fields.NODES) {
 +      return (HBNodes)getFieldValue();
 +    } else {
 +      throw new RuntimeException("Cannot get field 'nodes' because union is currently set to " + getFieldDesc(getSetField()).name);
 +    }
 +  }
 +
 +  public void set_nodes(HBNodes value) {
 +    if (value == null) throw new NullPointerException();
 +    setField_ = _Fields.NODES;
 +    value_ = value;
 +  }
 +
 +  public byte[] get_message_blob() {
 +    set_message_blob(org.apache.thrift.TBaseHelper.rightSize(buffer_for_message_blob()));
 +    ByteBuffer b = buffer_for_message_blob();
 +    return b == null ? null : b.array();
 +  }
 +
 +  public ByteBuffer buffer_for_message_blob() {
 +    if (getSetField() == _Fields.MESSAGE_BLOB) {
 +      return org.apache.thrift.TBaseHelper.copyBinary((ByteBuffer)getFieldValue());
 +    } else {
 +      throw new RuntimeException("Cannot get field 'message_blob' because union is currently set to " + getFieldDesc(getSetField()).name);
 +    }
 +  }
 +
 +  public void set_message_blob(byte[] value) {
 +    set_message_blob(ByteBuffer.wrap(Arrays.copyOf(value, value.length)));
 +  }
 +
 +  public void set_message_blob(ByteBuffer value) {
 +    if (value == null) throw new NullPointerException();
 +    setField_ = _Fields.MESSAGE_BLOB;
 +    value_ = value;
 +  }
 +
 +  public boolean is_set_path() {
 +    return setField_ == _Fields.PATH;
 +  }
 +
 +
 +  public boolean is_set_pulse() {
 +    return setField_ == _Fields.PULSE;
 +  }
 +
 +
 +  public boolean is_set_boolval() {
 +    return setField_ == _Fields.BOOLVAL;
 +  }
 +
 +
 +  public boolean is_set_records() {
 +    return setField_ == _Fields.RECORDS;
 +  }
 +
 +
 +  public boolean is_set_nodes() {
 +    return setField_ == _Fields.NODES;
 +  }
 +
 +
 +  public boolean is_set_message_blob() {
 +    return setField_ == _Fields.MESSAGE_BLOB;
 +  }
 +
 +
 +  public boolean equals(Object other) {
 +    if (other instanceof HBMessageData) {
 +      return equals((HBMessageData)other);
 +    } else {
 +      return false;
 +    }
 +  }
 +
 +  public boolean equals(HBMessageData other) {
 +    return other != null && getSetField() == other.getSetField() && getFieldValue().equals(other.getFieldValue());
 +  }
 +
 +  @Override
 +  public int compareTo(HBMessageData other) {
 +    int lastComparison = org.apache.thrift.TBaseHelper.compareTo(getSetField(), other.getSetField());
 +    if (lastComparison == 0) {
 +      return org.apache.thrift.TBaseHelper.compareTo(getFieldValue(), other.getFieldValue());
 +    }
 +    return lastComparison;
 +  }
 +
 +
 +  @Override
 +  public int hashCode() {
 +    List<Object> list = new ArrayList<Object>();
 +    list.add(this.getClass().getName());
 +    org.apache.thrift.TFieldIdEnum setField = getSetField();
 +    if (setField != null) {
 +      list.add(setField.getThriftFieldId());
 +      Object value = getFieldValue();
 +      if (value instanceof org.apache.thrift.TEnum) {
 +        list.add(((org.apache.thrift.TEnum)getFieldValue()).getValue());
 +      } else {
 +        list.add(value);
 +      }
 +    }
 +    return list.hashCode();
 +  }
 +  private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
 +    try {
 +      write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out)));
 +    } catch (org.apache.thrift.TException te) {
 +      throw new java.io.IOException(te);
 +    }
 +  }
 +
 +
 +  private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
 +    try {
 +      read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));
 +    } catch (org.apache.thrift.TException te) {
 +      throw new java.io.IOException(te);
 +    }
 +  }
 +
 +
 +}
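HBMessageData is a TUnion, so exactly one field is set at a time: the static factory methods choose the field, the set_* methods replace it, and reading a field other than the one currently set throws. A short sketch using only names defined in the class above (the path value is a made-up example):

```
import java.nio.ByteBuffer;

import backtype.storm.generated.HBMessageData;

public class HBMessageDataDemo {
  public static void main(String[] args) {
    // The factory sets the PATH field of the union.
    HBMessageData data = HBMessageData.path("/pulses/worker-1");
    System.out.println(data.is_set_path()); // true
    System.out.println(data.get_path());    // "/pulses/worker-1"

    // Setting another field replaces the current one (union semantics).
    data.set_message_blob(ByteBuffer.wrap(new byte[] {1, 2, 3}));
    System.out.println(data.is_set_path());         // false
    System.out.println(data.is_set_message_blob()); // true
    // data.get_path() would now throw a RuntimeException, per get_path() above.
  }
}
```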


[25/37] storm git commit: Minor tweaks to documentation.

Posted by kn...@apache.org.
Minor tweaks to documentation.


Project: http://git-wip-us.apache.org/repos/asf/storm/repo
Commit: http://git-wip-us.apache.org/repos/asf/storm/commit/37768ef2
Tree: http://git-wip-us.apache.org/repos/asf/storm/tree/37768ef2
Diff: http://git-wip-us.apache.org/repos/asf/storm/diff/37768ef2

Branch: refs/heads/master
Commit: 37768ef219591a60cf7483b72a58be8552bffac1
Parents: b8f4056
Author: Kyle Nusbaum <Ky...@gmail.com>
Authored: Tue Nov 17 15:42:59 2015 -0600
Committer: Kyle Nusbaum <Ky...@gmail.com>
Committed: Tue Nov 17 15:42:59 2015 -0600

----------------------------------------------------------------------
 docs/documentation/Pacemaker.md | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/storm/blob/37768ef2/docs/documentation/Pacemaker.md
----------------------------------------------------------------------
diff --git a/docs/documentation/Pacemaker.md b/docs/documentation/Pacemaker.md
index 6a85b8e..06f2fa1 100644
--- a/docs/documentation/Pacemaker.md
+++ b/docs/documentation/Pacemaker.md
@@ -19,12 +19,12 @@ The corresponding Pacemaker client is a plugin for the `ClusterState` interface,
 
 #### Example
 
-To get Pacemaker up and running, set the following option in the cluster config:
+To get Pacemaker up and running, set the following option in the cluster config on all nodes:
 ```
 storm.cluster.state.store: "org.apache.storm.pacemaker.pacemaker_state_factory"
 ```
 
-The Pacemaker host also needs to be set:
+The Pacemaker host also needs to be set on all nodes:
 ```
 pacemaker.host: somehost.mycompany.com
 ```
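Taken together, the change above means every node's cluster config carries both Pacemaker settings. Combined, the two options shown in this file look like the following (the hostname is the doc's own placeholder):

```
storm.cluster.state.store: "org.apache.storm.pacemaker.pacemaker_state_factory"
pacemaker.host: somehost.mycompany.com
```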


[24/37] storm git commit: Minor tweaks to documentation.

Posted by kn...@apache.org.
Minor tweaks to documentation.


Project: http://git-wip-us.apache.org/repos/asf/storm/repo
Commit: http://git-wip-us.apache.org/repos/asf/storm/commit/b8f4056c
Tree: http://git-wip-us.apache.org/repos/asf/storm/tree/b8f4056c
Diff: http://git-wip-us.apache.org/repos/asf/storm/diff/b8f4056c

Branch: refs/heads/master
Commit: b8f4056c7a2a706c149dde2038b7cce1fb747942
Parents: 099dc72
Author: Kyle Nusbaum <Ky...@gmail.com>
Authored: Tue Nov 17 15:39:56 2015 -0600
Committer: Kyle Nusbaum <Ky...@gmail.com>
Committed: Tue Nov 17 15:39:56 2015 -0600

----------------------------------------------------------------------
 docs/documentation/Pacemaker.md | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/storm/blob/b8f4056c/docs/documentation/Pacemaker.md
----------------------------------------------------------------------
diff --git a/docs/documentation/Pacemaker.md b/docs/documentation/Pacemaker.md
index 8acbb36..6a85b8e 100644
--- a/docs/documentation/Pacemaker.md
+++ b/docs/documentation/Pacemaker.md
@@ -1,6 +1,6 @@
 # Pacemaker
 
-### Intro
+### Introduction
 Pacemaker is a storm daemon designed to process heartbeats from workers. As Storm is scaled up, ZooKeeper begins to become a bottleneck due to high volumes of writes from workers doing heartbeats. Lots of writes to disk and traffic across the network is generated as ZooKeeper tries to maintain consistency.
 
 Because heartbeats are of an ephemeral nature, they do not need to be persisted to disk or synced across nodes; an in-memory store will do. This is the role of Pacemaker. Pacemaker functions as a simple in-memory key/value store with ZooKeeper-like, directory-style keys and byte array values.
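To make the key/value model above concrete, here is a hedged Java sketch that builds a heartbeat message out of the generated types from this patch series. It assumes HBPulse exposes set_id(String) for the directory-style key and set_details(byte[]) for the byte-array value (neither accessor is shown in this excerpt), that SEND_PULSE is the matching HBServerMessageType constant, and that the two-argument HBMessage constructor exists.

```
import backtype.storm.generated.HBMessage;
import backtype.storm.generated.HBMessageData;
import backtype.storm.generated.HBPulse;
import backtype.storm.generated.HBServerMessageType;

public class PulseSketch {
  // workerKey plays the ZooKeeper-like directory-style key and
  // heartbeatBytes the opaque byte-array value described above.
  public static HBMessage buildHeartbeat(String workerKey, byte[] heartbeatBytes) {
    HBPulse pulse = new HBPulse();
    pulse.set_id(workerKey);           // assumed accessor; key layout is hypothetical
    pulse.set_details(heartbeatBytes); // assumed accessor for the binary payload
    return new HBMessage(HBServerMessageType.SEND_PULSE, HBMessageData.pulse(pulse));
  }
}
```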


[27/37] storm git commit: Minor tweaks to documentation.

Posted by kn...@apache.org.
Minor tweaks to documentation.


Project: http://git-wip-us.apache.org/repos/asf/storm/repo
Commit: http://git-wip-us.apache.org/repos/asf/storm/commit/98159865
Tree: http://git-wip-us.apache.org/repos/asf/storm/tree/98159865
Diff: http://git-wip-us.apache.org/repos/asf/storm/diff/98159865

Branch: refs/heads/master
Commit: 98159865deada8ff4c7e02264a2ed4f42c5c1c6d
Parents: 8c1ad3f
Author: Kyle Nusbaum <Ky...@gmail.com>
Authored: Tue Nov 17 17:07:26 2015 -0600
Committer: Kyle Nusbaum <Ky...@gmail.com>
Committed: Tue Nov 17 17:07:26 2015 -0600

----------------------------------------------------------------------
 docs/documentation/Pacemaker.md | 12 ++++++------
 1 file changed, 6 insertions(+), 6 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/storm/blob/98159865/docs/documentation/Pacemaker.md
----------------------------------------------------------------------
diff --git a/docs/documentation/Pacemaker.md b/docs/documentation/Pacemaker.md
index a9df08d..e877541 100644
--- a/docs/documentation/Pacemaker.md
+++ b/docs/documentation/Pacemaker.md
@@ -1,7 +1,7 @@
 # Pacemaker
 
 ### Introduction
-Pacemaker is a storm daemon designed to process heartbeats from workers. As Storm is scaled up, ZooKeeper begins to become a bottleneck due to high volumes of writes from workers doing heartbeats. Lots of writes to disk and traffic across the network is generated as ZooKeeper tries to maintain consistency.
 +Pacemaker is a Storm daemon designed to process heartbeats from workers. As Storm is scaled up, ZooKeeper becomes a bottleneck due to the high volume of writes from workers sending heartbeats. Lots of writes to disk and too much traffic across the network are generated as ZooKeeper tries to maintain consistency.
 
 Because heartbeats are of an ephemeral nature, they do not need to be persisted to disk or synced across nodes; an in-memory store will do. This is the role of Pacemaker. Pacemaker functions as a simple in-memory key/value store with ZooKeeper-like, directory-style keys and byte array values.
 
@@ -44,11 +44,11 @@ Currently digest (password-based) and Kerberos security are supported. Security
 
 #### Digest
 To configure digest authentication, set `pacemaker.auth.method: DIGEST` in the cluster config on the nodes hosting Nimbus and Pacemaker.
-The nodes must also have `java.security.auth.login.config` set to point to a jaas config file containing the following structure:
+The nodes must also have `java.security.auth.login.config` set to point to a JAAS config file containing the following structure:
 ```
 PacemakerDigest {
     username="some username"
-    password="some password"
+    password="some password";
 };
 ```
 
@@ -57,9 +57,9 @@ Worker nodes need not have these configs set, and may keep `pacemaker.auth.metho
 
 #### Kerberos
 To configure Kerberos authentication, set `pacemaker.auth.method: KERBEROS` in the cluster config on the nodes hosting Nimbus and Pacemaker.
-The nodes must also have `java.security.auth.login.config` set to point to a jaas config.
+The nodes must also have `java.security.auth.login.config` set to point to a JAAS config.
 
-The jaas config on Nimbus must look something like this:
+The JAAS config on Nimbus must look something like this:
 ```
 PacemakerClient {
     com.sun.security.auth.module.Krb5LoginModule required
@@ -73,7 +73,7 @@ PacemakerClient {
                          
 ```
 
-The jaas config on Pacemaker must look something like this:
+The JAAS config on Pacemaker must look something like this:
 ```
 PacemakerServer {
    com.sun.security.auth.module.Krb5LoginModule required