You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@storm.apache.org by bo...@apache.org on 2015/10/05 21:22:25 UTC

[1/9] storm git commit: Adding thrift changes for Dynamic Logging

Repository: storm
Updated Branches:
  refs/heads/master 7cf4d2596 -> 9ac9eb5a2


http://git-wip-us.apache.org/repos/asf/storm/blob/d13cfe2d/storm-core/src/jvm/backtype/storm/generated/Nimbus.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/backtype/storm/generated/Nimbus.java b/storm-core/src/jvm/backtype/storm/generated/Nimbus.java
index 3a5d5df..1d0114e 100644
--- a/storm-core/src/jvm/backtype/storm/generated/Nimbus.java
+++ b/storm-core/src/jvm/backtype/storm/generated/Nimbus.java
@@ -51,7 +51,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-8-24")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-9-24")
 public class Nimbus {
 
   public interface Iface {
@@ -70,6 +70,10 @@ public class Nimbus {
 
     public void rebalance(String name, RebalanceOptions options) throws NotAliveException, InvalidTopologyException, AuthorizationException, org.apache.thrift.TException;
 
+    public void setLogConfig(String name, LogConfig config) throws org.apache.thrift.TException;
+
+    public LogConfig getLogConfig(String name) throws org.apache.thrift.TException;
+
     /**
      * Enable/disable logging the tuples generated in topology via an internal EventLogger bolt. The component name is optional
      * and if null or empty, the debug flag will apply to the entire topology.
@@ -138,6 +142,10 @@ public class Nimbus {
 
     public void rebalance(String name, RebalanceOptions options, org.apache.thrift.async.AsyncMethodCallback resultHandler) throws org.apache.thrift.TException;
 
+    public void setLogConfig(String name, LogConfig config, org.apache.thrift.async.AsyncMethodCallback resultHandler) throws org.apache.thrift.TException;
+
+    public void getLogConfig(String name, org.apache.thrift.async.AsyncMethodCallback resultHandler) throws org.apache.thrift.TException;
+
     public void debug(String name, String component, boolean enable, double samplingPercentage, org.apache.thrift.async.AsyncMethodCallback resultHandler) throws org.apache.thrift.TException;
 
     public void uploadNewCredentials(String name, Credentials creds, org.apache.thrift.async.AsyncMethodCallback resultHandler) throws org.apache.thrift.TException;
@@ -388,6 +396,50 @@ public class Nimbus {
       return;
     }
 
+    public void setLogConfig(String name, LogConfig config) throws org.apache.thrift.TException
+    {
+      send_setLogConfig(name, config);
+      recv_setLogConfig();
+    }
+
+    public void send_setLogConfig(String name, LogConfig config) throws org.apache.thrift.TException
+    {
+      setLogConfig_args args = new setLogConfig_args();
+      args.set_name(name);
+      args.set_config(config);
+      sendBase("setLogConfig", args);
+    }
+
+    public void recv_setLogConfig() throws org.apache.thrift.TException
+    {
+      setLogConfig_result result = new setLogConfig_result();
+      receiveBase(result, "setLogConfig");
+      return;
+    }
+
+    public LogConfig getLogConfig(String name) throws org.apache.thrift.TException
+    {
+      send_getLogConfig(name);
+      return recv_getLogConfig();
+    }
+
+    public void send_getLogConfig(String name) throws org.apache.thrift.TException
+    {
+      getLogConfig_args args = new getLogConfig_args();
+      args.set_name(name);
+      sendBase("getLogConfig", args);
+    }
+
+    public LogConfig recv_getLogConfig() throws org.apache.thrift.TException
+    {
+      getLogConfig_result result = new getLogConfig_result();
+      receiveBase(result, "getLogConfig");
+      if (result.is_set_success()) {
+        return result.success;
+      }
+      throw new org.apache.thrift.TApplicationException(org.apache.thrift.TApplicationException.MISSING_RESULT, "getLogConfig failed: unknown result");
+    }
+
     public void debug(String name, String component, boolean enable, double samplingPercentage) throws NotAliveException, AuthorizationException, org.apache.thrift.TException
     {
       send_debug(name, component, enable, samplingPercentage);
@@ -1036,6 +1088,73 @@ public class Nimbus {
       }
     }
 
+    public void setLogConfig(String name, LogConfig config, org.apache.thrift.async.AsyncMethodCallback resultHandler) throws org.apache.thrift.TException {
+      checkReady();
+      setLogConfig_call method_call = new setLogConfig_call(name, config, resultHandler, this, ___protocolFactory, ___transport);
+      this.___currentMethod = method_call;
+      ___manager.call(method_call);
+    }
+
+    public static class setLogConfig_call extends org.apache.thrift.async.TAsyncMethodCall {
+      private String name;
+      private LogConfig config;
+      public setLogConfig_call(String name, LogConfig config, org.apache.thrift.async.AsyncMethodCallback resultHandler, org.apache.thrift.async.TAsyncClient client, org.apache.thrift.protocol.TProtocolFactory protocolFactory, org.apache.thrift.transport.TNonblockingTransport transport) throws org.apache.thrift.TException {
+        super(client, protocolFactory, transport, resultHandler, false);
+        this.name = name;
+        this.config = config;
+      }
+
+      public void write_args(org.apache.thrift.protocol.TProtocol prot) throws org.apache.thrift.TException {
+        prot.writeMessageBegin(new org.apache.thrift.protocol.TMessage("setLogConfig", org.apache.thrift.protocol.TMessageType.CALL, 0));
+        setLogConfig_args args = new setLogConfig_args();
+        args.set_name(name);
+        args.set_config(config);
+        args.write(prot);
+        prot.writeMessageEnd();
+      }
+
+      public void getResult() throws org.apache.thrift.TException {
+        if (getState() != org.apache.thrift.async.TAsyncMethodCall.State.RESPONSE_READ) {
+          throw new IllegalStateException("Method call not finished!");
+        }
+        org.apache.thrift.transport.TMemoryInputTransport memoryTransport = new org.apache.thrift.transport.TMemoryInputTransport(getFrameBuffer().array());
+        org.apache.thrift.protocol.TProtocol prot = client.getProtocolFactory().getProtocol(memoryTransport);
+        (new Client(prot)).recv_setLogConfig();
+      }
+    }
+
+    public void getLogConfig(String name, org.apache.thrift.async.AsyncMethodCallback resultHandler) throws org.apache.thrift.TException {
+      checkReady();
+      getLogConfig_call method_call = new getLogConfig_call(name, resultHandler, this, ___protocolFactory, ___transport);
+      this.___currentMethod = method_call;
+      ___manager.call(method_call);
+    }
+
+    public static class getLogConfig_call extends org.apache.thrift.async.TAsyncMethodCall {
+      private String name;
+      public getLogConfig_call(String name, org.apache.thrift.async.AsyncMethodCallback resultHandler, org.apache.thrift.async.TAsyncClient client, org.apache.thrift.protocol.TProtocolFactory protocolFactory, org.apache.thrift.transport.TNonblockingTransport transport) throws org.apache.thrift.TException {
+        super(client, protocolFactory, transport, resultHandler, false);
+        this.name = name;
+      }
+
+      public void write_args(org.apache.thrift.protocol.TProtocol prot) throws org.apache.thrift.TException {
+        prot.writeMessageBegin(new org.apache.thrift.protocol.TMessage("getLogConfig", org.apache.thrift.protocol.TMessageType.CALL, 0));
+        getLogConfig_args args = new getLogConfig_args();
+        args.set_name(name);
+        args.write(prot);
+        prot.writeMessageEnd();
+      }
+
+      public LogConfig getResult() throws org.apache.thrift.TException {
+        if (getState() != org.apache.thrift.async.TAsyncMethodCall.State.RESPONSE_READ) {
+          throw new IllegalStateException("Method call not finished!");
+        }
+        org.apache.thrift.transport.TMemoryInputTransport memoryTransport = new org.apache.thrift.transport.TMemoryInputTransport(getFrameBuffer().array());
+        org.apache.thrift.protocol.TProtocol prot = client.getProtocolFactory().getProtocol(memoryTransport);
+        return (new Client(prot)).recv_getLogConfig();
+      }
+    }
+
     public void debug(String name, String component, boolean enable, double samplingPercentage, org.apache.thrift.async.AsyncMethodCallback resultHandler) throws org.apache.thrift.TException {
       checkReady();
       debug_call method_call = new debug_call(name, component, enable, samplingPercentage, resultHandler, this, ___protocolFactory, ___transport);
@@ -1513,6 +1632,8 @@ public class Nimbus {
       processMap.put("activate", new activate());
       processMap.put("deactivate", new deactivate());
       processMap.put("rebalance", new rebalance());
+      processMap.put("setLogConfig", new setLogConfig());
+      processMap.put("getLogConfig", new getLogConfig());
       processMap.put("debug", new debug());
       processMap.put("uploadNewCredentials", new uploadNewCredentials());
       processMap.put("beginFileUpload", new beginFileUpload());
@@ -1718,6 +1839,46 @@ public class Nimbus {
       }
     }
 
+    public static class setLogConfig<I extends Iface> extends org.apache.thrift.ProcessFunction<I, setLogConfig_args> {
+      public setLogConfig() {
+        super("setLogConfig");
+      }
+
+      public setLogConfig_args getEmptyArgsInstance() {
+        return new setLogConfig_args();
+      }
+
+      protected boolean isOneway() {
+        return false;
+      }
+
+      public setLogConfig_result getResult(I iface, setLogConfig_args args) throws org.apache.thrift.TException {
+        setLogConfig_result result = new setLogConfig_result();
+        iface.setLogConfig(args.name, args.config);
+        return result;
+      }
+    }
+
+    public static class getLogConfig<I extends Iface> extends org.apache.thrift.ProcessFunction<I, getLogConfig_args> {
+      public getLogConfig() {
+        super("getLogConfig");
+      }
+
+      public getLogConfig_args getEmptyArgsInstance() {
+        return new getLogConfig_args();
+      }
+
+      protected boolean isOneway() {
+        return false;
+      }
+
+      public getLogConfig_result getResult(I iface, getLogConfig_args args) throws org.apache.thrift.TException {
+        getLogConfig_result result = new getLogConfig_result();
+        result.success = iface.getLogConfig(args.name);
+        return result;
+      }
+    }
+
     public static class debug<I extends Iface> extends org.apache.thrift.ProcessFunction<I, debug_args> {
       public debug() {
         super("debug");
@@ -2090,6 +2251,8 @@ public class Nimbus {
       processMap.put("activate", new activate());
       processMap.put("deactivate", new deactivate());
       processMap.put("rebalance", new rebalance());
+      processMap.put("setLogConfig", new setLogConfig());
+      processMap.put("getLogConfig", new getLogConfig());
       processMap.put("debug", new debug());
       processMap.put("uploadNewCredentials", new uploadNewCredentials());
       processMap.put("beginFileUpload", new beginFileUpload());
@@ -2549,6 +2712,107 @@ public class Nimbus {
       }
     }
 
+    public static class setLogConfig<I extends AsyncIface> extends org.apache.thrift.AsyncProcessFunction<I, setLogConfig_args, Void> {
+      public setLogConfig() {
+        super("setLogConfig");
+      }
+
+      public setLogConfig_args getEmptyArgsInstance() {
+        return new setLogConfig_args();
+      }
+
+      public AsyncMethodCallback<Void> getResultHandler(final AsyncFrameBuffer fb, final int seqid) {
+        final org.apache.thrift.AsyncProcessFunction fcall = this;
+        return new AsyncMethodCallback<Void>() { 
+          public void onComplete(Void o) {
+            setLogConfig_result result = new setLogConfig_result();
+            try {
+              fcall.sendResponse(fb,result, org.apache.thrift.protocol.TMessageType.REPLY,seqid);
+              return;
+            } catch (Exception e) {
+              LOGGER.error("Exception writing to internal frame buffer", e);
+            }
+            fb.close();
+          }
+          public void onError(Exception e) {
+            byte msgType = org.apache.thrift.protocol.TMessageType.REPLY;
+            org.apache.thrift.TBase msg;
+            setLogConfig_result result = new setLogConfig_result();
+            {
+              msgType = org.apache.thrift.protocol.TMessageType.EXCEPTION;
+              msg = (org.apache.thrift.TBase)new org.apache.thrift.TApplicationException(org.apache.thrift.TApplicationException.INTERNAL_ERROR, e.getMessage());
+            }
+            try {
+              fcall.sendResponse(fb,msg,msgType,seqid);
+              return;
+            } catch (Exception ex) {
+              LOGGER.error("Exception writing to internal frame buffer", ex);
+            }
+            fb.close();
+          }
+        };
+      }
+
+      protected boolean isOneway() {
+        return false;
+      }
+
+      public void start(I iface, setLogConfig_args args, org.apache.thrift.async.AsyncMethodCallback<Void> resultHandler) throws TException {
+        iface.setLogConfig(args.name, args.config,resultHandler);
+      }
+    }
+
+    public static class getLogConfig<I extends AsyncIface> extends org.apache.thrift.AsyncProcessFunction<I, getLogConfig_args, LogConfig> {
+      public getLogConfig() {
+        super("getLogConfig");
+      }
+
+      public getLogConfig_args getEmptyArgsInstance() {
+        return new getLogConfig_args();
+      }
+
+      public AsyncMethodCallback<LogConfig> getResultHandler(final AsyncFrameBuffer fb, final int seqid) {
+        final org.apache.thrift.AsyncProcessFunction fcall = this;
+        return new AsyncMethodCallback<LogConfig>() { 
+          public void onComplete(LogConfig o) {
+            getLogConfig_result result = new getLogConfig_result();
+            result.success = o;
+            try {
+              fcall.sendResponse(fb,result, org.apache.thrift.protocol.TMessageType.REPLY,seqid);
+              return;
+            } catch (Exception e) {
+              LOGGER.error("Exception writing to internal frame buffer", e);
+            }
+            fb.close();
+          }
+          public void onError(Exception e) {
+            byte msgType = org.apache.thrift.protocol.TMessageType.REPLY;
+            org.apache.thrift.TBase msg;
+            getLogConfig_result result = new getLogConfig_result();
+            {
+              msgType = org.apache.thrift.protocol.TMessageType.EXCEPTION;
+              msg = (org.apache.thrift.TBase)new org.apache.thrift.TApplicationException(org.apache.thrift.TApplicationException.INTERNAL_ERROR, e.getMessage());
+            }
+            try {
+              fcall.sendResponse(fb,msg,msgType,seqid);
+              return;
+            } catch (Exception ex) {
+              LOGGER.error("Exception writing to internal frame buffer", ex);
+            }
+            fb.close();
+          }
+        };
+      }
+
+      protected boolean isOneway() {
+        return false;
+      }
+
+      public void start(I iface, getLogConfig_args args, org.apache.thrift.async.AsyncMethodCallback<LogConfig> resultHandler) throws TException {
+        iface.getLogConfig(args.name,resultHandler);
+      }
+    }
+
     public static class debug<I extends AsyncIface> extends org.apache.thrift.AsyncProcessFunction<I, debug_args, Void> {
       public debug() {
         super("debug");
@@ -10432,6 +10696,1440 @@ public class Nimbus {
 
   }
 
+  public static class setLogConfig_args implements org.apache.thrift.TBase<setLogConfig_args, setLogConfig_args._Fields>, java.io.Serializable, Cloneable, Comparable<setLogConfig_args>   {
+    private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("setLogConfig_args");
+
+    private static final org.apache.thrift.protocol.TField NAME_FIELD_DESC = new org.apache.thrift.protocol.TField("name", org.apache.thrift.protocol.TType.STRING, (short)1);
+    private static final org.apache.thrift.protocol.TField CONFIG_FIELD_DESC = new org.apache.thrift.protocol.TField("config", org.apache.thrift.protocol.TType.STRUCT, (short)2);
+
+    private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>();
+    static {
+      schemes.put(StandardScheme.class, new setLogConfig_argsStandardSchemeFactory());
+      schemes.put(TupleScheme.class, new setLogConfig_argsTupleSchemeFactory());
+    }
+
+    private String name; // required
+    private LogConfig config; // required
+
+    /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
+    public enum _Fields implements org.apache.thrift.TFieldIdEnum {
+      NAME((short)1, "name"),
+      CONFIG((short)2, "config");
+
+      private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();
+
+      static {
+        for (_Fields field : EnumSet.allOf(_Fields.class)) {
+          byName.put(field.getFieldName(), field);
+        }
+      }
+
+      /**
+       * Find the _Fields constant that matches fieldId, or null if its not found.
+       */
+      public static _Fields findByThriftId(int fieldId) {
+        switch(fieldId) {
+          case 1: // NAME
+            return NAME;
+          case 2: // CONFIG
+            return CONFIG;
+          default:
+            return null;
+        }
+      }
+
+      /**
+       * Find the _Fields constant that matches fieldId, throwing an exception
+       * if it is not found.
+       */
+      public static _Fields findByThriftIdOrThrow(int fieldId) {
+        _Fields fields = findByThriftId(fieldId);
+        if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
+        return fields;
+      }
+
+      /**
+       * Find the _Fields constant that matches name, or null if its not found.
+       */
+      public static _Fields findByName(String name) {
+        return byName.get(name);
+      }
+
+      private final short _thriftId;
+      private final String _fieldName;
+
+      _Fields(short thriftId, String fieldName) {
+        _thriftId = thriftId;
+        _fieldName = fieldName;
+      }
+
+      public short getThriftFieldId() {
+        return _thriftId;
+      }
+
+      public String getFieldName() {
+        return _fieldName;
+      }
+    }
+
+    // isset id assignments
+    public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
+    static {
+      Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
+      tmpMap.put(_Fields.NAME, new org.apache.thrift.meta_data.FieldMetaData("name", org.apache.thrift.TFieldRequirementType.DEFAULT, 
+          new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)));
+      tmpMap.put(_Fields.CONFIG, new org.apache.thrift.meta_data.FieldMetaData("config", org.apache.thrift.TFieldRequirementType.DEFAULT, 
+          new org.apache.thrift.meta_data.StructMetaData(org.apache.thrift.protocol.TType.STRUCT, LogConfig.class)));
+      metaDataMap = Collections.unmodifiableMap(tmpMap);
+      org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(setLogConfig_args.class, metaDataMap);
+    }
+
+    public setLogConfig_args() {
+    }
+
+    public setLogConfig_args(
+      String name,
+      LogConfig config)
+    {
+      this();
+      this.name = name;
+      this.config = config;
+    }
+
+    /**
+     * Performs a deep copy on <i>other</i>.
+     */
+    public setLogConfig_args(setLogConfig_args other) {
+      if (other.is_set_name()) {
+        this.name = other.name;
+      }
+      if (other.is_set_config()) {
+        this.config = new LogConfig(other.config);
+      }
+    }
+
+    public setLogConfig_args deepCopy() {
+      return new setLogConfig_args(this);
+    }
+
+    @Override
+    public void clear() {
+      this.name = null;
+      this.config = null;
+    }
+
+    public String get_name() {
+      return this.name;
+    }
+
+    public void set_name(String name) {
+      this.name = name;
+    }
+
+    public void unset_name() {
+      this.name = null;
+    }
+
+    /** Returns true if field name is set (has been assigned a value) and false otherwise */
+    public boolean is_set_name() {
+      return this.name != null;
+    }
+
+    public void set_name_isSet(boolean value) {
+      if (!value) {
+        this.name = null;
+      }
+    }
+
+    public LogConfig get_config() {
+      return this.config;
+    }
+
+    public void set_config(LogConfig config) {
+      this.config = config;
+    }
+
+    public void unset_config() {
+      this.config = null;
+    }
+
+    /** Returns true if field config is set (has been assigned a value) and false otherwise */
+    public boolean is_set_config() {
+      return this.config != null;
+    }
+
+    public void set_config_isSet(boolean value) {
+      if (!value) {
+        this.config = null;
+      }
+    }
+
+    public void setFieldValue(_Fields field, Object value) {
+      switch (field) {
+      case NAME:
+        if (value == null) {
+          unset_name();
+        } else {
+          set_name((String)value);
+        }
+        break;
+
+      case CONFIG:
+        if (value == null) {
+          unset_config();
+        } else {
+          set_config((LogConfig)value);
+        }
+        break;
+
+      }
+    }
+
+    public Object getFieldValue(_Fields field) {
+      switch (field) {
+      case NAME:
+        return get_name();
+
+      case CONFIG:
+        return get_config();
+
+      }
+      throw new IllegalStateException();
+    }
+
+    /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
+    public boolean isSet(_Fields field) {
+      if (field == null) {
+        throw new IllegalArgumentException();
+      }
+
+      switch (field) {
+      case NAME:
+        return is_set_name();
+      case CONFIG:
+        return is_set_config();
+      }
+      throw new IllegalStateException();
+    }
+
+    @Override
+    public boolean equals(Object that) {
+      if (that == null)
+        return false;
+      if (that instanceof setLogConfig_args)
+        return this.equals((setLogConfig_args)that);
+      return false;
+    }
+
+    public boolean equals(setLogConfig_args that) {
+      if (that == null)
+        return false;
+
+      boolean this_present_name = true && this.is_set_name();
+      boolean that_present_name = true && that.is_set_name();
+      if (this_present_name || that_present_name) {
+        if (!(this_present_name && that_present_name))
+          return false;
+        if (!this.name.equals(that.name))
+          return false;
+      }
+
+      boolean this_present_config = true && this.is_set_config();
+      boolean that_present_config = true && that.is_set_config();
+      if (this_present_config || that_present_config) {
+        if (!(this_present_config && that_present_config))
+          return false;
+        if (!this.config.equals(that.config))
+          return false;
+      }
+
+      return true;
+    }
+
+    @Override
+    public int hashCode() {
+      List<Object> list = new ArrayList<Object>();
+
+      boolean present_name = true && (is_set_name());
+      list.add(present_name);
+      if (present_name)
+        list.add(name);
+
+      boolean present_config = true && (is_set_config());
+      list.add(present_config);
+      if (present_config)
+        list.add(config);
+
+      return list.hashCode();
+    }
+
+    @Override
+    public int compareTo(setLogConfig_args other) {
+      if (!getClass().equals(other.getClass())) {
+        return getClass().getName().compareTo(other.getClass().getName());
+      }
+
+      int lastComparison = 0;
+
+      lastComparison = Boolean.valueOf(is_set_name()).compareTo(other.is_set_name());
+      if (lastComparison != 0) {
+        return lastComparison;
+      }
+      if (is_set_name()) {
+        lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.name, other.name);
+        if (lastComparison != 0) {
+          return lastComparison;
+        }
+      }
+      lastComparison = Boolean.valueOf(is_set_config()).compareTo(other.is_set_config());
+      if (lastComparison != 0) {
+        return lastComparison;
+      }
+      if (is_set_config()) {
+        lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.config, other.config);
+        if (lastComparison != 0) {
+          return lastComparison;
+        }
+      }
+      return 0;
+    }
+
+    public _Fields fieldForId(int fieldId) {
+      return _Fields.findByThriftId(fieldId);
+    }
+
+    public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException {
+      schemes.get(iprot.getScheme()).getScheme().read(iprot, this);
+    }
+
+    public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {
+      schemes.get(oprot.getScheme()).getScheme().write(oprot, this);
+    }
+
+    @Override
+    public String toString() {
+      StringBuilder sb = new StringBuilder("setLogConfig_args(");
+      boolean first = true;
+
+      sb.append("name:");
+      if (this.name == null) {
+        sb.append("null");
+      } else {
+        sb.append(this.name);
+      }
+      first = false;
+      if (!first) sb.append(", ");
+      sb.append("config:");
+      if (this.config == null) {
+        sb.append("null");
+      } else {
+        sb.append(this.config);
+      }
+      first = false;
+      sb.append(")");
+      return sb.toString();
+    }
+
+    public void validate() throws org.apache.thrift.TException {
+      // check for required fields
+      // check for sub-struct validity
+      if (config != null) {
+        config.validate();
+      }
+    }
+
+    private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
+      try {
+        write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out)));
+      } catch (org.apache.thrift.TException te) {
+        throw new java.io.IOException(te);
+      }
+    }
+
+    private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
+      try {
+        read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));
+      } catch (org.apache.thrift.TException te) {
+        throw new java.io.IOException(te);
+      }
+    }
+
+    private static class setLogConfig_argsStandardSchemeFactory implements SchemeFactory {
+      public setLogConfig_argsStandardScheme getScheme() {
+        return new setLogConfig_argsStandardScheme();
+      }
+    }
+
+    private static class setLogConfig_argsStandardScheme extends StandardScheme<setLogConfig_args> {
+
+      public void read(org.apache.thrift.protocol.TProtocol iprot, setLogConfig_args struct) throws org.apache.thrift.TException {
+        org.apache.thrift.protocol.TField schemeField;
+        iprot.readStructBegin();
+        while (true)
+        {
+          schemeField = iprot.readFieldBegin();
+          if (schemeField.type == org.apache.thrift.protocol.TType.STOP) { 
+            break;
+          }
+          switch (schemeField.id) {
+            case 1: // NAME
+              if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {
+                struct.name = iprot.readString();
+                struct.set_name_isSet(true);
+              } else { 
+                org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+              }
+              break;
+            case 2: // CONFIG
+              if (schemeField.type == org.apache.thrift.protocol.TType.STRUCT) {
+                struct.config = new LogConfig();
+                struct.config.read(iprot);
+                struct.set_config_isSet(true);
+              } else { 
+                org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+              }
+              break;
+            default:
+              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+          }
+          iprot.readFieldEnd();
+        }
+        iprot.readStructEnd();
+        struct.validate();
+      }
+
+      public void write(org.apache.thrift.protocol.TProtocol oprot, setLogConfig_args struct) throws org.apache.thrift.TException {
+        struct.validate();
+
+        oprot.writeStructBegin(STRUCT_DESC);
+        if (struct.name != null) {
+          oprot.writeFieldBegin(NAME_FIELD_DESC);
+          oprot.writeString(struct.name);
+          oprot.writeFieldEnd();
+        }
+        if (struct.config != null) {
+          oprot.writeFieldBegin(CONFIG_FIELD_DESC);
+          struct.config.write(oprot);
+          oprot.writeFieldEnd();
+        }
+        oprot.writeFieldStop();
+        oprot.writeStructEnd();
+      }
+
+    }
+
+    private static class setLogConfig_argsTupleSchemeFactory implements SchemeFactory {
+      public setLogConfig_argsTupleScheme getScheme() {
+        return new setLogConfig_argsTupleScheme();
+      }
+    }
+
+    private static class setLogConfig_argsTupleScheme extends TupleScheme<setLogConfig_args> {
+
+      @Override
+      public void write(org.apache.thrift.protocol.TProtocol prot, setLogConfig_args struct) throws org.apache.thrift.TException {
+        TTupleProtocol oprot = (TTupleProtocol) prot;
+        BitSet optionals = new BitSet();
+        if (struct.is_set_name()) {
+          optionals.set(0);
+        }
+        if (struct.is_set_config()) {
+          optionals.set(1);
+        }
+        oprot.writeBitSet(optionals, 2);
+        if (struct.is_set_name()) {
+          oprot.writeString(struct.name);
+        }
+        if (struct.is_set_config()) {
+          struct.config.write(oprot);
+        }
+      }
+
+      @Override
+      public void read(org.apache.thrift.protocol.TProtocol prot, setLogConfig_args struct) throws org.apache.thrift.TException {
+        TTupleProtocol iprot = (TTupleProtocol) prot;
+        BitSet incoming = iprot.readBitSet(2);
+        if (incoming.get(0)) {
+          struct.name = iprot.readString();
+          struct.set_name_isSet(true);
+        }
+        if (incoming.get(1)) {
+          struct.config = new LogConfig();
+          struct.config.read(iprot);
+          struct.set_config_isSet(true);
+        }
+      }
+    }
+
+  }
+
+  public static class setLogConfig_result implements org.apache.thrift.TBase<setLogConfig_result, setLogConfig_result._Fields>, java.io.Serializable, Cloneable, Comparable<setLogConfig_result>   {
+    private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("setLogConfig_result");
+
+
+    private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>();
+    static {
+      schemes.put(StandardScheme.class, new setLogConfig_resultStandardSchemeFactory());
+      schemes.put(TupleScheme.class, new setLogConfig_resultTupleSchemeFactory());
+    }
+
+
+    /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
+    public enum _Fields implements org.apache.thrift.TFieldIdEnum {
+;
+
+      private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();
+
+      static {
+        for (_Fields field : EnumSet.allOf(_Fields.class)) {
+          byName.put(field.getFieldName(), field);
+        }
+      }
+
+      /**
+       * Find the _Fields constant that matches fieldId, or null if its not found.
+       */
+      public static _Fields findByThriftId(int fieldId) {
+        switch(fieldId) {
+          default:
+            return null;
+        }
+      }
+
+      /**
+       * Find the _Fields constant that matches fieldId, throwing an exception
+       * if it is not found.
+       */
+      public static _Fields findByThriftIdOrThrow(int fieldId) {
+        _Fields fields = findByThriftId(fieldId);
+        if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
+        return fields;
+      }
+
+      /**
+       * Find the _Fields constant that matches name, or null if its not found.
+       */
+      public static _Fields findByName(String name) {
+        return byName.get(name);
+      }
+
+      private final short _thriftId;
+      private final String _fieldName;
+
+      _Fields(short thriftId, String fieldName) {
+        _thriftId = thriftId;
+        _fieldName = fieldName;
+      }
+
+      public short getThriftFieldId() {
+        return _thriftId;
+      }
+
+      public String getFieldName() {
+        return _fieldName;
+      }
+    }
+    public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
+    static {
+      Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
+      metaDataMap = Collections.unmodifiableMap(tmpMap);
+      org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(setLogConfig_result.class, metaDataMap);
+    }
+
+    public setLogConfig_result() {
+    }
+
+    /**
+     * Performs a deep copy on <i>other</i>.
+     */
+    public setLogConfig_result(setLogConfig_result other) {
+    }
+
+    public setLogConfig_result deepCopy() {
+      return new setLogConfig_result(this);
+    }
+
+    @Override
+    public void clear() {
+    }
+
+    public void setFieldValue(_Fields field, Object value) {
+      switch (field) {
+      }
+    }
+
+    public Object getFieldValue(_Fields field) {
+      switch (field) {
+      }
+      throw new IllegalStateException();
+    }
+
+    /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
+    public boolean isSet(_Fields field) {
+      if (field == null) {
+        throw new IllegalArgumentException();
+      }
+
+      switch (field) {
+      }
+      throw new IllegalStateException();
+    }
+
+    @Override
+    public boolean equals(Object that) {
+      if (that == null)
+        return false;
+      if (that instanceof setLogConfig_result)
+        return this.equals((setLogConfig_result)that);
+      return false;
+    }
+
+    public boolean equals(setLogConfig_result that) {
+      if (that == null)
+        return false;
+
+      return true;
+    }
+
+    @Override
+    public int hashCode() {
+      List<Object> list = new ArrayList<Object>();
+
+      return list.hashCode();
+    }
+
+    @Override
+    public int compareTo(setLogConfig_result other) {
+      if (!getClass().equals(other.getClass())) {
+        return getClass().getName().compareTo(other.getClass().getName());
+      }
+
+      int lastComparison = 0;
+
+      return 0;
+    }
+
+    public _Fields fieldForId(int fieldId) {
+      return _Fields.findByThriftId(fieldId);
+    }
+
+    public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException {
+      schemes.get(iprot.getScheme()).getScheme().read(iprot, this);
+    }
+
+    public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {
+      schemes.get(oprot.getScheme()).getScheme().write(oprot, this);
+      }
+
+    @Override
+    public String toString() {
+      StringBuilder sb = new StringBuilder("setLogConfig_result(");
+      boolean first = true;
+
+      sb.append(")");
+      return sb.toString();
+    }
+
+    public void validate() throws org.apache.thrift.TException {
+      // check for required fields
+      // check for sub-struct validity
+    }
+
+    private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
+      try {
+        write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out)));
+      } catch (org.apache.thrift.TException te) {
+        throw new java.io.IOException(te);
+      }
+    }
+
+    private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
+      try {
+        read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));
+      } catch (org.apache.thrift.TException te) {
+        throw new java.io.IOException(te);
+      }
+    }
+
+    private static class setLogConfig_resultStandardSchemeFactory implements SchemeFactory {
+      public setLogConfig_resultStandardScheme getScheme() {
+        return new setLogConfig_resultStandardScheme();
+      }
+    }
+
+    private static class setLogConfig_resultStandardScheme extends StandardScheme<setLogConfig_result> {
+
+      public void read(org.apache.thrift.protocol.TProtocol iprot, setLogConfig_result struct) throws org.apache.thrift.TException {
+        org.apache.thrift.protocol.TField schemeField;
+        iprot.readStructBegin();
+        while (true)
+        {
+          schemeField = iprot.readFieldBegin();
+          if (schemeField.type == org.apache.thrift.protocol.TType.STOP) { 
+            break;
+          }
+          switch (schemeField.id) {
+            default:
+              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+          }
+          iprot.readFieldEnd();
+        }
+        iprot.readStructEnd();
+        struct.validate();
+      }
+
+      public void write(org.apache.thrift.protocol.TProtocol oprot, setLogConfig_result struct) throws org.apache.thrift.TException {
+        struct.validate();
+
+        oprot.writeStructBegin(STRUCT_DESC);
+        oprot.writeFieldStop();
+        oprot.writeStructEnd();
+      }
+
+    }
+
+    private static class setLogConfig_resultTupleSchemeFactory implements SchemeFactory {
+      public setLogConfig_resultTupleScheme getScheme() {
+        return new setLogConfig_resultTupleScheme();
+      }
+    }
+
+    private static class setLogConfig_resultTupleScheme extends TupleScheme<setLogConfig_result> {
+
+      @Override
+      public void write(org.apache.thrift.protocol.TProtocol prot, setLogConfig_result struct) throws org.apache.thrift.TException {
+        TTupleProtocol oprot = (TTupleProtocol) prot;
+      }
+
+      @Override
+      public void read(org.apache.thrift.protocol.TProtocol prot, setLogConfig_result struct) throws org.apache.thrift.TException {
+        TTupleProtocol iprot = (TTupleProtocol) prot;
+      }
+    }
+
+  }
+
+  public static class getLogConfig_args implements org.apache.thrift.TBase<getLogConfig_args, getLogConfig_args._Fields>, java.io.Serializable, Cloneable, Comparable<getLogConfig_args>   {
+    private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("getLogConfig_args");
+
+    private static final org.apache.thrift.protocol.TField NAME_FIELD_DESC = new org.apache.thrift.protocol.TField("name", org.apache.thrift.protocol.TType.STRING, (short)1);
+
+    private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>();
+    static {
+      schemes.put(StandardScheme.class, new getLogConfig_argsStandardSchemeFactory());
+      schemes.put(TupleScheme.class, new getLogConfig_argsTupleSchemeFactory());
+    }
+
+    private String name; // required
+
+    /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
+    public enum _Fields implements org.apache.thrift.TFieldIdEnum {
+      NAME((short)1, "name");
+
+      private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();
+
+      static {
+        for (_Fields field : EnumSet.allOf(_Fields.class)) {
+          byName.put(field.getFieldName(), field);
+        }
+      }
+
+      /**
+       * Find the _Fields constant that matches fieldId, or null if its not found.
+       */
+      public static _Fields findByThriftId(int fieldId) {
+        switch(fieldId) {
+          case 1: // NAME
+            return NAME;
+          default:
+            return null;
+        }
+      }
+
+      /**
+       * Find the _Fields constant that matches fieldId, throwing an exception
+       * if it is not found.
+       */
+      public static _Fields findByThriftIdOrThrow(int fieldId) {
+        _Fields fields = findByThriftId(fieldId);
+        if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
+        return fields;
+      }
+
+      /**
+       * Find the _Fields constant that matches name, or null if its not found.
+       */
+      public static _Fields findByName(String name) {
+        return byName.get(name);
+      }
+
+      private final short _thriftId;
+      private final String _fieldName;
+
+      _Fields(short thriftId, String fieldName) {
+        _thriftId = thriftId;
+        _fieldName = fieldName;
+      }
+
+      public short getThriftFieldId() {
+        return _thriftId;
+      }
+
+      public String getFieldName() {
+        return _fieldName;
+      }
+    }
+
+    // isset id assignments
+    public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
+    static {
+      Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
+      tmpMap.put(_Fields.NAME, new org.apache.thrift.meta_data.FieldMetaData("name", org.apache.thrift.TFieldRequirementType.DEFAULT, 
+          new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)));
+      metaDataMap = Collections.unmodifiableMap(tmpMap);
+      org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(getLogConfig_args.class, metaDataMap);
+    }
+
+    public getLogConfig_args() {
+    }
+
+    public getLogConfig_args(
+      String name)
+    {
+      this();
+      this.name = name;
+    }
+
+    /**
+     * Performs a deep copy on <i>other</i>.
+     */
+    public getLogConfig_args(getLogConfig_args other) {
+      if (other.is_set_name()) {
+        this.name = other.name;
+      }
+    }
+
+    public getLogConfig_args deepCopy() {
+      return new getLogConfig_args(this);
+    }
+
+    @Override
+    public void clear() {
+      this.name = null;
+    }
+
+    public String get_name() {
+      return this.name;
+    }
+
+    public void set_name(String name) {
+      this.name = name;
+    }
+
+    public void unset_name() {
+      this.name = null;
+    }
+
+    /** Returns true if field name is set (has been assigned a value) and false otherwise */
+    public boolean is_set_name() {
+      return this.name != null;
+    }
+
+    public void set_name_isSet(boolean value) {
+      if (!value) {
+        this.name = null;
+      }
+    }
+
+    public void setFieldValue(_Fields field, Object value) {
+      switch (field) {
+      case NAME:
+        if (value == null) {
+          unset_name();
+        } else {
+          set_name((String)value);
+        }
+        break;
+
+      }
+    }
+
+    public Object getFieldValue(_Fields field) {
+      switch (field) {
+      case NAME:
+        return get_name();
+
+      }
+      throw new IllegalStateException();
+    }
+
+    /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
+    public boolean isSet(_Fields field) {
+      if (field == null) {
+        throw new IllegalArgumentException();
+      }
+
+      switch (field) {
+      case NAME:
+        return is_set_name();
+      }
+      throw new IllegalStateException();
+    }
+
+    @Override
+    public boolean equals(Object that) {
+      if (that == null)
+        return false;
+      if (that instanceof getLogConfig_args)
+        return this.equals((getLogConfig_args)that);
+      return false;
+    }
+
+    public boolean equals(getLogConfig_args that) {
+      if (that == null)
+        return false;
+
+      boolean this_present_name = true && this.is_set_name();
+      boolean that_present_name = true && that.is_set_name();
+      if (this_present_name || that_present_name) {
+        if (!(this_present_name && that_present_name))
+          return false;
+        if (!this.name.equals(that.name))
+          return false;
+      }
+
+      return true;
+    }
+
+    @Override
+    public int hashCode() {
+      List<Object> list = new ArrayList<Object>();
+
+      boolean present_name = true && (is_set_name());
+      list.add(present_name);
+      if (present_name)
+        list.add(name);
+
+      return list.hashCode();
+    }
+
+    @Override
+    public int compareTo(getLogConfig_args other) {
+      if (!getClass().equals(other.getClass())) {
+        return getClass().getName().compareTo(other.getClass().getName());
+      }
+
+      int lastComparison = 0;
+
+      lastComparison = Boolean.valueOf(is_set_name()).compareTo(other.is_set_name());
+      if (lastComparison != 0) {
+        return lastComparison;
+      }
+      if (is_set_name()) {
+        lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.name, other.name);
+        if (lastComparison != 0) {
+          return lastComparison;
+        }
+      }
+      return 0;
+    }
+
+    public _Fields fieldForId(int fieldId) {
+      return _Fields.findByThriftId(fieldId);
+    }
+
+    public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException {
+      schemes.get(iprot.getScheme()).getScheme().read(iprot, this);
+    }
+
+    public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {
+      schemes.get(oprot.getScheme()).getScheme().write(oprot, this);
+    }
+
+    @Override
+    public String toString() {
+      StringBuilder sb = new StringBuilder("getLogConfig_args(");
+      boolean first = true;
+
+      sb.append("name:");
+      if (this.name == null) {
+        sb.append("null");
+      } else {
+        sb.append(this.name);
+      }
+      first = false;
+      sb.append(")");
+      return sb.toString();
+    }
+
+    public void validate() throws org.apache.thrift.TException {
+      // check for required fields
+      // check for sub-struct validity
+    }
+
+    private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
+      try {
+        write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out)));
+      } catch (org.apache.thrift.TException te) {
+        throw new java.io.IOException(te);
+      }
+    }
+
+    private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
+      try {
+        read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));
+      } catch (org.apache.thrift.TException te) {
+        throw new java.io.IOException(te);
+      }
+    }
+
+    private static class getLogConfig_argsStandardSchemeFactory implements SchemeFactory {
+      public getLogConfig_argsStandardScheme getScheme() {
+        return new getLogConfig_argsStandardScheme();
+      }
+    }
+
+    private static class getLogConfig_argsStandardScheme extends StandardScheme<getLogConfig_args> {
+
+      public void read(org.apache.thrift.protocol.TProtocol iprot, getLogConfig_args struct) throws org.apache.thrift.TException {
+        org.apache.thrift.protocol.TField schemeField;
+        iprot.readStructBegin();
+        while (true)
+        {
+          schemeField = iprot.readFieldBegin();
+          if (schemeField.type == org.apache.thrift.protocol.TType.STOP) { 
+            break;
+          }
+          switch (schemeField.id) {
+            case 1: // NAME
+              if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {
+                struct.name = iprot.readString();
+                struct.set_name_isSet(true);
+              } else { 
+                org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+              }
+              break;
+            default:
+              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+          }
+          iprot.readFieldEnd();
+        }
+        iprot.readStructEnd();
+        struct.validate();
+      }
+
+      public void write(org.apache.thrift.protocol.TProtocol oprot, getLogConfig_args struct) throws org.apache.thrift.TException {
+        struct.validate();
+
+        oprot.writeStructBegin(STRUCT_DESC);
+        if (struct.name != null) {
+          oprot.writeFieldBegin(NAME_FIELD_DESC);
+          oprot.writeString(struct.name);
+          oprot.writeFieldEnd();
+        }
+        oprot.writeFieldStop();
+        oprot.writeStructEnd();
+      }
+
+    }
+
+    private static class getLogConfig_argsTupleSchemeFactory implements SchemeFactory {
+      public getLogConfig_argsTupleScheme getScheme() {
+        return new getLogConfig_argsTupleScheme();
+      }
+    }
+
+    private static class getLogConfig_argsTupleScheme extends TupleScheme<getLogConfig_args> {
+
+      @Override
+      public void write(org.apache.thrift.protocol.TProtocol prot, getLogConfig_args struct) throws org.apache.thrift.TException {
+        TTupleProtocol oprot = (TTupleProtocol) prot;
+        BitSet optionals = new BitSet();
+        if (struct.is_set_name()) {
+          optionals.set(0);
+        }
+        oprot.writeBitSet(optionals, 1);
+        if (struct.is_set_name()) {
+          oprot.writeString(struct.name);
+        }
+      }
+
+      @Override
+      public void read(org.apache.thrift.protocol.TProtocol prot, getLogConfig_args struct) throws org.apache.thrift.TException {
+        TTupleProtocol iprot = (TTupleProtocol) prot;
+        BitSet incoming = iprot.readBitSet(1);
+        if (incoming.get(0)) {
+          struct.name = iprot.readString();
+          struct.set_name_isSet(true);
+        }
+      }
+    }
+
+  }
+
+  public static class getLogConfig_result implements org.apache.thrift.TBase<getLogConfig_result, getLogConfig_result._Fields>, java.io.Serializable, Cloneable, Comparable<getLogConfig_result>   {
+    private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("getLogConfig_result");
+
+    private static final org.apache.thrift.protocol.TField SUCCESS_FIELD_DESC = new org.apache.thrift.protocol.TField("success", org.apache.thrift.protocol.TType.STRUCT, (short)0);
+
+    private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>();
+    static {
+      schemes.put(StandardScheme.class, new getLogConfig_resultStandardSchemeFactory());
+      schemes.put(TupleScheme.class, new getLogConfig_resultTupleSchemeFactory());
+    }
+
+    private LogConfig success; // required
+
+    /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
+    public enum _Fields implements org.apache.thrift.TFieldIdEnum {
+      SUCCESS((short)0, "success");
+
+      private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();
+
+      static {
+        for (_Fields field : EnumSet.allOf(_Fields.class)) {
+          byName.put(field.getFieldName(), field);
+        }
+      }
+
+      /**
+       * Find the _Fields constant that matches fieldId, or null if its not found.
+       */
+      public static _Fields findByThriftId(int fieldId) {
+        switch(fieldId) {
+          case 0: // SUCCESS
+            return SUCCESS;
+          default:
+            return null;
+        }
+      }
+
+      /**
+       * Find the _Fields constant that matches fieldId, throwing an exception
+       * if it is not found.
+       */
+      public static _Fields findByThriftIdOrThrow(int fieldId) {
+        _Fields fields = findByThriftId(fieldId);
+        if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
+        return fields;
+      }
+
+      /**
+       * Find the _Fields constant that matches name, or null if its not found.
+       */
+      public static _Fields findByName(String name) {
+        return byName.get(name);
+      }
+
+      private final short _thriftId;
+      private final String _fieldName;
+
+      _Fields(short thriftId, String fieldName) {
+        _thriftId = thriftId;
+        _fieldName = fieldName;
+      }
+
+      public short getThriftFieldId() {
+        return _thriftId;
+      }
+
+      public String getFieldName() {
+        return _fieldName;
+      }
+    }
+
+    // isset id assignments
+    public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
+    static {
+      Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
+      tmpMap.put(_Fields.SUCCESS, new org.apache.thrift.meta_data.FieldMetaData("success", org.apache.thrift.TFieldRequirementType.DEFAULT, 
+          new org.apache.thrift.meta_data.StructMetaData(org.apache.thrift.protocol.TType.STRUCT, LogConfig.class)));
+      metaDataMap = Collections.unmodifiableMap(tmpMap);
+      org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(getLogConfig_result.class, metaDataMap);
+    }
+
+    public getLogConfig_result() {
+    }
+
+    public getLogConfig_result(
+      LogConfig success)
+    {
+      this();
+      this.success = success;
+    }
+
+    /**
+     * Performs a deep copy on <i>other</i>.
+     */
+    public getLogConfig_result(getLogConfig_result other) {
+      if (other.is_set_success()) {
+        this.success = new LogConfig(other.success);
+      }
+    }
+
+    public getLogConfig_result deepCopy() {
+      return new getLogConfig_result(this);
+    }
+
+    @Override
+    public void clear() {
+      this.success = null;
+    }
+
+    public LogConfig get_success() {
+      return this.success;
+    }
+
+    public void set_success(LogConfig success) {
+      this.success = success;
+    }
+
+    public void unset_success() {
+      this.success = null;
+    }
+
+    /** Returns true if field success is set (has been assigned a value) and false otherwise */
+    public boolean is_set_success() {
+      return this.success != null;
+    }
+
+    public void set_success_isSet(boolean value) {
+      if (!value) {
+        this.success = null;
+      }
+    }
+
+    public void setFieldValue(_Fields field, Object value) {
+      switch (field) {
+      case SUCCESS:
+        if (value == null) {
+          unset_success();
+        } else {
+          set_success((LogConfig)value);
+        }
+        break;
+
+      }
+    }
+
+    public Object getFieldValue(_Fields field) {
+      switch (field) {
+      case SUCCESS:
+        return get_success();
+
+      }
+      throw new IllegalStateException();
+    }
+
+    /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
+    public boolean isSet(_Fields field) {
+      if (field == null) {
+        throw new IllegalArgumentException();
+      }
+
+      switch (field) {
+      case SUCCESS:
+        return is_set_success();
+      }
+      throw new IllegalStateException();
+    }
+
+    @Override
+    public boolean equals(Object that) {
+      if (that == null)
+        return false;
+      if (that instanceof getLogConfig_result)
+        return this.equals((getLogConfig_result)that);
+      return false;
+    }
+
+    public boolean equals(getLogConfig_result that) {
+      if (that == null)
+        return false;
+
+      boolean this_present_success = true && this.is_set_success();
+      boolean that_present_success = true && that.is_set_success();
+      if (this_present_success || that_present_success) {
+        if (!(this_present_success && that_present_success))
+          return false;
+        if (!this.success.equals(that.success))
+          return false;
+      }
+
+      return true;
+    }
+
+    @Override
+    public int hashCode() {
+      List<Object> list = new ArrayList<Object>();
+
+      boolean present_success = true && (is_set_success());
+      list.add(present_success);
+      if (present_success)
+        list.add(success);
+
+      return list.hashCode();
+    }
+
+    @Override
+    public int compareTo(getLogConfig_result other) {
+      if (!getClass().equals(other.getClass())) {
+        return getClass().getName().compareTo(other.getClass().getName());
+      }
+
+      int lastComparison = 0;
+
+      lastComparison = Boolean.valueOf(is_set_success()).compareTo(other.is_set_success());
+      if (lastComparison != 0) {
+        return lastComparison;
+      }
+      if (is_set_success()) {
+        lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.success, other.success);
+        if (lastComparison != 0) {
+          return lastComparison;
+        }
+      }
+      return 0;
+    }
+
+    public _Fields fieldForId(int fieldId) {
+      return _Fields.findByThriftId(fieldId);
+    }
+
+    public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException {
+      schemes.get(iprot.getScheme()).getScheme().read(iprot, this);
+    }
+
+    public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {
+      schemes.get(oprot.getScheme()).getScheme().write(oprot, this);
+      }
+
+    @Override
+    public String toString() {
+      StringBuilder sb = new StringBuilder("getLogConfig_result(");
+      boolean first = true;
+
+      sb.append("success:");
+      if (this.success == null) {
+        sb.append("null");
+      } else {
+        sb.append(this.success);
+      }
+      first = false;
+      sb.append(")");
+      return sb.toString();
+    }
+
+    public void validate() throws org.apache.thrift.TException {
+      // check for required fields
+      // check for sub-struct validity
+      if (success != null) {
+        success.validate();
+      }
+    }
+
+    private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
+      try {
+        write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out)));
+      } catch (org.apache.thrift.TException te) {
+        throw new java.io.IOException(te);
+      }
+    }
+
+    private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
+      try {
+        read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));
+      } catch (org.apache.thrift.TException te) {
+        throw new java.io.IOException(te);
+      }
+    }
+
+    private static class getLogConfig_resultStandardSchemeFactory implements SchemeFactory {
+      public getLogConfig_resultStandardScheme getScheme() {
+        return new getLogConfig_resultStandardScheme();
+      }
+    }
+
+    private static class getLogConfig_resultStandardScheme extends StandardScheme<getLogConfig_result> {
+
+      public void read(org.apache.thrift.protocol.TProtocol iprot, getLogConfig_result struct) throws org.apache.thrift.TException {
+        org.apache.thrift.protocol.TField schemeField;
+        iprot.readStructBegin();
+        while (true)
+        {
+          schemeField = iprot.readFieldBegin();
+          if (schemeField.type == org.apache.thrift.protocol.TType.STOP) { 
+            break;
+          }
+          switch (schemeField.id) {
+            case 0: // SUCCESS
+              if (schemeField.type == org.apache.thrift.protocol.TType.STRUCT) {
+                struct.success = new LogConfig();
+                struct.success.read(iprot);
+                struct.set_success_isSet(true);
+              } else { 
+                org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+              }
+              break;
+            default:
+              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+          }
+          iprot.readFieldEnd();
+        }
+        iprot.readStructEnd();
+        struct.validate();
+      }
+
+      public void write(org.apache.thrift.protocol.TProtocol oprot, getLogConfig_result struct) throws org.apache.thrift.TException {
+        struct.validate();
+
+        oprot.writeStructBegin(STRUCT_DESC);
+        if (struct.success != null) {
+          oprot.writeFieldBegin(SUCCESS_FIELD_DESC);
+          struct.success.write(oprot);
+          oprot.writeFieldEnd();
+        }
+        oprot.writeFieldStop();
+        oprot.writeStructEnd();
+      }
+
+    }
+
+    private static class getLogConfig_resultTupleSchemeFactory implements SchemeFactory {
+      public getLogConfig_resultTupleScheme getScheme() {
+        return new getLogConfig_resultTupleScheme();
+      }
+    }
+
+    private static class getLogConfig_resultTupleScheme extends TupleScheme<getLogConfig_result> {
+
+      @Override
+      public void write(org.apache.thrift.protocol.TProtocol prot, getLogConfig_result struct) throws org.apache.thrift.TException {
+        TTupleProtocol oprot = (TTupleProtocol) prot;
+        BitSet optionals = new BitSet();
+        if (struct.is_set_success()) {
+          optionals.set(0);
+        }
+        oprot.writeBitSet(optionals, 1);
+        if (struct.is_set_success()) {
+          struct.success.write(oprot);
+        }
+      }
+
+      @Override
+      public void read(org.apache.thrift.protocol.TProtocol prot, getLogConfig_result struct) throws org.apache.thrift.TException {
+        TTupleProtocol iprot = (TTupleProtocol) prot;
+        BitSet incoming = iprot.readBitSet(1);
+        if (incoming.get(0)) {
+          struct.success = new LogConfig();
+          struct.success.read(iprot);
+          struct.set_success_isSet(true);
+        }
+      }
+    }
+
+  }
+
   public static class debug_args implements org.apache.thrift.TBase<debug_args, debug_args._Fields>, java.io.Serializable, Cloneable, Comparable<debug_args>   {
     private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("debug_args");
 

http://git-wip-us.apache.org/repos/asf/storm/blob/d13cfe2d/storm-core/src/py/storm/Nimbus-remote
----------------------------------------------------------------------
diff --git a/storm-core/src/py/storm/Nimbus-remote b/storm-core/src/py/storm/Nimbus-remote
index fff63f9..c5b4b0c 100644
--- a/storm-core/src/py/storm/Nimbus-remote
+++ b/storm-core/src/py/storm/Nimbus-remote
@@ -49,6 +49,8 @@ if len(sys.argv) <= 1 or sys.argv[1] == '--help':
   print('  void activate(string name)')
   print('  void deactivate(string name)')
   print('  void rebalance(string name, RebalanceOptions options)')
+  print('  void setLogConfig(string name, LogConfig config)')
+  print('  LogConfig getLogConfig(string name)')
   print('  void debug(string name, string component, bool enable, double samplingPercentage)')
   print('  void uploadNewCredentials(string name, Credentials creds)')
   print('  string beginFileUpload()')
@@ -161,6 +163,18 @@ elif cmd == 'rebalance':
     sys.exit(1)
   pp.pprint(client.rebalance(args[0],eval(args[1]),))
 
+elif cmd == 'setLogConfig':
+  if len(args) != 2:
+    print('setLogConfig requires 2 args')
+    sys.exit(1)
+  pp.pprint(client.setLogConfig(args[0],eval(args[1]),))
+
+elif cmd == 'getLogConfig':
+  if len(args) != 1:
+    print('getLogConfig requires 1 args')
+    sys.exit(1)
+  pp.pprint(client.getLogConfig(args[0],))
+
 elif cmd == 'debug':
   if len(args) != 4:
     print('debug requires 4 args')

http://git-wip-us.apache.org/repos/asf/storm/blob/d13cfe2d/storm-core/src/py/storm/Nimbus.py
----------------------------------------------------------------------
diff --git a/storm-core/src/py/storm/Nimbus.py b/storm-core/src/py/storm/Nimbus.py
index 0caeed0..cf7adbf 100644
--- a/storm-core/src/py/storm/Nimbus.py
+++ b/storm-core/src/py/storm/Nimbus.py
@@ -94,6 +94,21 @@ class Iface:
     """
     pass
 
+  def setLogConfig(self, name, config):
+    """
+    Parameters:
+     - name
+     - config
+    """
+    pass
+
+  def getLogConfig(self, name):
+    """
+    Parameters:
+     - name
+    """
+    pass
+
   def debug(self, name, component, enable, samplingPercentage):
     """
     Enable/disable logging the tuples generated in topology via an internal EventLogger bolt. The component name is optional
@@ -102,7 +117,6 @@ class Iface:
    The 'samplingPercentage' will limit logging to a percentage of generated tuples.
 
 
-
     Parameters:
      - name
      - component
@@ -460,12 +474,74 @@ class Client(Iface):
       raise result.aze
     return
 
+  def setLogConfig(self, name, config):
+    """
+    Parameters:
+     - name
+     - config
+    """
+    self.send_setLogConfig(name, config)
+    self.recv_setLogConfig()
+
+  def send_setLogConfig(self, name, config):
+    self._oprot.writeMessageBegin('setLogConfig', TMessageType.CALL, self._seqid)
+    args = setLogConfig_args()
+    args.name = name
+    args.config = config
+    args.write(self._oprot)
+    self._oprot.writeMessageEnd()
+    self._oprot.trans.flush()
+
+  def recv_setLogConfig(self):
+    iprot = self._iprot
+    (fname, mtype, rseqid) = iprot.readMessageBegin()
+    if mtype == TMessageType.EXCEPTION:
+      x = TApplicationException()
+      x.read(iprot)
+      iprot.readMessageEnd()
+      raise x
+    result = setLogConfig_result()
+    result.read(iprot)
+    iprot.readMessageEnd()
+    return
+
+  def getLogConfig(self, name):
+    """
+    Parameters:
+     - name
+    """
+    self.send_getLogConfig(name)
+    return self.recv_getLogConfig()
+
+  def send_getLogConfig(self, name):
+    self._oprot.writeMessageBegin('getLogConfig', TMessageType.CALL, self._seqid)
+    args = getLogConfig_args()
+    args.name = name
+    args.write(self._oprot)
+    self._oprot.writeMessageEnd()
+    self._oprot.trans.flush()
+
+  def recv_getLogConfig(self):
+    iprot = self._iprot
+    (fname, mtype, rseqid) = iprot.readMessageBegin()
+    if mtype == TMessageType.EXCEPTION:
+      x = TApplicationException()
+      x.read(iprot)
+      iprot.readMessageEnd()
+      raise x
+    result = getLogConfig_result()
+    result.read(iprot)
+    iprot.readMessageEnd()
+    if result.success is not None:
+      return result.success
+    raise TApplicationException(TApplicationException.MISSING_RESULT, "getLogConfig failed: unknown result");
+
   def debug(self, name, component, enable, samplingPercentage):
     """
     Enable/disable logging the tuples generated in topology via an internal EventLogger bolt. The component name is optional
     and if null or empty, the debug flag will apply to the entire topology.
 
-    If 'samplingPercentage' is specified, it will limit loggging to a percentage of generated tuples. The default is to log all (100 pct).
+    The 'samplingPercentage' will limit logging to a percentage of generated tuples.
 
 
     Parameters:
@@ -949,6 +1025,8 @@ class Processor(Iface, TProcessor):
     self._processMap["activate"] = Processor.process_activate
     self._processMap["deactivate"] = Processor.process_deactivate
     self._processMap["rebalance"] = Processor.process_rebalance
+    self._processMap["setLogConfig"] = Processor.process_setLogConfig
+    self._processMap["getLogConfig"] = Processor.process_getLogConfig
     self._processMap["debug"] = Processor.process_debug
     self._processMap["uploadNewCredentials"] = Processor.process_uploadNewCredentials
     self._processMap["beginFileUpload"] = Processor.process_beginFileUpload
@@ -1097,6 +1175,28 @@ class Processor(Iface, TProcessor):
     oprot.writeMessageEnd()
     oprot.trans.flush()
 
+  def process_setLogConfig(self, seqid, iprot, oprot):
+    args = setLogConfig_args()
+    args.read(iprot)
+    iprot.readMessageEnd()
+    result = setLogConfig_result()
+    self._handler.setLogConfig(args.name, args.config)
+    oprot.writeMessageBegin("setLogConfig", TMessageType.REPLY, seqid)
+    result.write(oprot)
+    oprot.writeMessageEnd()
+    oprot.trans.flush()
+
+  def process_getLogConfig(self, seqid, iprot, oprot):
+    args = getLogConfig_args()
+    args.read(iprot)
+    iprot.readMessageEnd()
+    result = getLogConfig_result()
+    result.success = self._handler.getLogConfig(args.name)
+    oprot.writeMessageBegin("getLogConfig", TMessageType.REPLY, seqid)
+    result.write(oprot)
+    oprot.writeMessageEnd()
+    oprot.trans.flush()
+
   def process_debug(self, seqid, iprot, oprot):
     args = debug_args()
     args.read(iprot)
@@ -2491,6 +2591,261 @@ class rebalance_result:
   def __ne__(self, other):
     return not (self == other)
 
+class setLogConfig_args:
+  """
+  Attributes:
+   - name
+   - config
+  """
+
+  thrift_spec = (
+    None, # 0
+    (1, TType.STRING, 'name', None, None, ), # 1
+    (2, TType.STRUCT, 'config', (LogConfig, LogConfig.thrift_spec), None, ), # 2
+  )
+
+  def __init__(self, name=None, config=None,):
+    self.name = name
+    self.config = config
+
+  def read(self, iprot):
+    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
+      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
+      return
+    iprot.readStructBegin()
+    while True:
+      (fname, ftype, fid) = iprot.readFieldBegin()
+      if ftype == TType.STOP:
+        break
+      if fid == 1:
+        if ftype == TType.STRING:
+          self.name = iprot.readString().decode('utf-8')
+        else:
+          iprot.skip(ftype)
+      elif fid == 2:
+        if ftype == TType.STRUCT:
+          self.config = LogConfig()
+          self.config.read(iprot)
+        else:
+          iprot.skip(ftype)
+      else:
+        iprot.skip(ftype)
+      iprot.readFieldEnd()
+    iprot.readStructEnd()
+
+  def write(self, oprot):
+    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
+      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
+      return
+    oprot.writeStructBegin('setLogConfig_args')
+    if self.name is not None:
+      oprot.writeFieldBegin('name', TType.STRING, 1)
+      oprot.writeString(self.name.encode('utf-8'))
+      oprot.writeFieldEnd()
+    if self.config is not None:
+      oprot.writeFieldBegin('config', TType.STRUCT, 2)
+      self.config.write(oprot)
+      oprot.writeFieldEnd()
+    oprot.writeFieldStop()
+    oprot.writeStructEnd()
+
+  def validate(self):
+    return
+
+
+  def __hash__(self):
+    value = 17
+    value = (value * 31) ^ hash(self.name)
+    value = (value * 31) ^ hash(self.config)
+    return value
+
+  def __repr__(self):
+    L = ['%s=%r' % (key, value)
+      for key, value in self.__dict__.iteritems()]
+    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
+
+  def __eq__(self, other):
+    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
+
+  def __ne__(self, other):
+    return not (self == other)
+
+class setLogConfig_result:
+
+  thrift_spec = (
+  )
+
+  def read(self, iprot):
+    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
+      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
+      return
+    iprot.readStructBegin()
+    while True:
+      (fname, ftype, fid) = iprot.readFieldBegin()
+      if ftype == TType.STOP:
+        break
+      else:
+        iprot.skip(ftype)
+      iprot.readFieldEnd()
+    iprot.readStructEnd()
+
+  def write(self, oprot):
+    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
+      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
+      return
+    oprot.writeStructBegin('setLogConfig_result')
+    oprot.writeFieldStop()
+    oprot.writeStructEnd()
+
+  def validate(self):
+    return
+
+
+  def __hash__(self):
+    value = 17
+    return value
+
+  def __repr__(self):
+    L = ['%s=%r' % (key, value)
+      for key, value in self.__dict__.iteritems()]
+    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
+
+  def __eq__(self, other):
+    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
+
+  def __ne__(self, other):
+    return not (self == other)
+
+class getLogConfig_args:
+  """
+  Attributes:
+   - name
+  """
+
+  thrift_spec = (
+    None, # 0
+    (1, TType.STRING, 'name', None, None, ), # 1
+  )
+
+  def __init__(self, name=None,):
+    self.name = name
+
+  def read(self, iprot):
+    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
+      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
+      return
+    iprot.readStructBegin()
+    while True:
+      (fname, ftype, fid) = iprot.readFieldBegin()
+      if ftype == TType.STOP:
+        break
+      if fid == 1:
+        if ftype == TType.STRING:
+          self.name = iprot.readString().decode('utf-8')
+        else:
+          iprot.skip(ftype)
+      else:
+        iprot.skip(ftype)
+      iprot.readFieldEnd()
+    iprot.readStructEnd()
+
+  def write(self, oprot):
+    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
+      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
+      return
+    oprot.writeStructBegin('getLogConfig_args')
+    if self.name is not None:
+      oprot.writeFieldBegin('name', TType.STRING, 1)
+      oprot.writeString(self.name.encode('utf-8'))
+      oprot.writeFieldEnd()
+    oprot.writeFieldStop()
+    oprot.writeStructEnd()
+
+  def validate(self):
+    return
+
+
+  def __hash__(self):
+    value = 17
+    value = (value * 31) ^ hash(self.name)
+    return value
+
+  def __repr__(self):
+    L = ['%s=%r' % (key, value)
+      for key, value in self.__dict__.iteritems()]
+    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
+
+  def __eq__(self, other):
+    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
+
+  def __ne__(self, other):
+    return not (self == other)
+
+class getLogConfig_result:
+  """
+  Attributes:
+   - success
+  """
+
+  thrift_spec = (
+    (0, TType.STRUCT, 'success', (LogConfig, LogConfig.thrift_spec), None, ), # 0
+  )
+
+  def __init__(self, success=None,):
+    self.success = success
+
+  def read(self, iprot):
+    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
+      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
+      return
+    iprot.readStructBegin()
+    while True:
+      (fname, ftype, fid) = iprot.readFieldBegin()
+      if ftype == TType.STOP:
+        break
+      if fid == 0:
+        if ftype == TType.STRUCT:
+          self.success = LogConfig()
+          self.success.read(iprot)
+        else:
+          iprot.skip(ftype)
+      else:
+        iprot.skip(ftype)
+      iprot.readFieldEnd()
+    iprot.readStructEnd()
+
+  def write(self, oprot):
+    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
+      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
+      return
+    oprot.writeStructBegin('getLogConfig_result')
+    if self.success is not None:
+      oprot.writeFieldBegin('success', TType.STRUCT, 0)
+      self.success.write(oprot)
+      oprot.writeFieldEnd()
+    oprot.writeFieldStop()
+    oprot.writeStructEnd()
+
+  def validate(self):
+    return
+
+
+  def __hash__(self):
+    value = 17
+    value = (value * 31) ^ hash(self.success)
+    return value
+
+  def __repr__(self):
+    L = ['%s=%r' % (key, value)
+      for key, value in self.__dict__.iteritems()]
+    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
+
+  def __eq__(self, other):
+    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
+
+  def __ne__(self, other):
+    return not (self == other)
+
 class debug_args:
   """
   Attributes:

http://git-wip-us.apache.org/repos/asf/storm/blob/d13cfe2d/storm-core/src/py/storm/ttypes.py
----------------------------------------------------------------------
diff --git a/storm-core/src/py/storm/ttypes.py b/storm-core/src/py/storm/ttypes.py
index 4f48449..1881858 100644
--- a/storm-core/src/py/storm/ttypes.py
+++ b/storm-core/src/py/storm/ttypes.py
@@ -85,6 +85,23 @@ class NumErrorsChoice:
     "ONE": 2,
   }
 
+class LogLevelAction:
+  UNCHANGED = 1
+  UPDATE = 2
+  REMOVE = 3
+
+  _VALUES_TO_NAMES = {
+    1: "UNCHANGED",
+    2: "UPDATE",
+    3: "REMOVE",
+  }
+
+  _NAMES_TO_VALUES = {
+    "UNCHANGED": 1,
+    "UPDATE": 2,
+    "REMOVE": 3,
+  }
+
 
 class JavaObjectArg:
   """
@@ -6348,6 +6365,202 @@ class GetInfoOptions:
   def __ne__(self, other):
     return not (self == other)
 
+class LogLevel:
+  """
+  Attributes:
+   - action
+   - target_log_level
+   - reset_log_level_timeout_secs
+   - reset_log_level_timeout_epoch
+   - reset_log_level
+  """
+
+  thrift_spec = (
+    None, # 0
+    (1, TType.I32, 'action', None, None, ), # 1
+    (2, TType.STRING, 'target_log_level', None, None, ), # 2
+    (3, TType.I32, 'reset_log_level_timeout_secs', None, None, ), # 3
+    (4, TType.I64, 'reset_log_level_timeout_epoch', None, None, ), # 4
+    (5, TType.STRING, 'reset_log_level', None, None, ), # 5
+  )
+
+  def __init__(self, action=None, target_log_level=None, reset_log_level_timeout_secs=None, reset_log_level_timeout_epoch=None, reset_log_level=None,):
+    self.action = action
+    self.target_log_level = target_log_level
+    self.reset_log_level_timeout_secs = reset_log_level_timeout_secs
+    self.reset_log_level_timeout_epoch = reset_log_level_timeout_epoch
+    self.reset_log_level = reset_log_level
+
+  def read(self, iprot):
+    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
+      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
+      return
+    iprot.readStructBegin()
+    while True:
+      (fname, ftype, fid) = iprot.readFieldBegin()
+      if ftype == TType.STOP:
+        break
+      if fid == 1:
+        if ftype == TType.I32:
+          self.action = iprot.readI32();
+        else:
+          iprot.skip(ftype)
+      elif fid == 2:
+        if ftype == TType.STRING:
+          self.target_log_level = iprot.readString().decode('utf-8')
+        else:
+          iprot.skip(ftype)
+      elif fid == 3:
+        if ftype == TType.I32:
+          self.reset_log_level_timeout_secs = iprot.readI32();
+        else:
+          iprot.skip(ftype)
+      elif fid == 4:
+        if ftype == TType.I64:
+          self.reset_log_level_timeout_epoch = iprot.readI64();
+        else:
+          iprot.skip(ftype)
+      elif fid == 5:
+        if ftype == TType.STRING:
+          self.reset_log_level = iprot.readString().decode('utf-8')
+        else:
+          iprot.skip(ftype)
+      else:
+        iprot.skip(ftype)
+      iprot.readFieldEnd()
+    iprot.readStructEnd()
+
+  def write(self, oprot):
+    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
+      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
+      return
+    oprot.writeStructBegin('LogLevel')
+    if self.action is not None:
+      oprot.writeFieldBegin('action', TType.I32, 1)
+      oprot.writeI32(self.action)
+      oprot.writeFieldEnd()
+    if self.target_log_level is not None:
+      oprot.writeFieldBegin('target_log_level', TType.STRING, 2)
+      oprot.writeString(self.target_log_level.encode('utf-8'))
+      oprot.writeFieldEnd()
+    if self.reset_log_level_timeout_secs is not None:
+      oprot.writeFieldBegin('reset_log_level_timeout_secs', TType.I32, 3)
+      oprot.writeI32(self.reset_log_level_timeout_secs)
+      oprot.writeFieldEnd()
+    if self.reset_log_level_timeout_epoch is not None:
+      oprot.writeFieldBegin('reset_log_level_timeout_epoch', TType.I64, 4)
+      oprot.writeI64(self.reset_log_level_timeout_epoch)
+      oprot.writeFieldEnd()
+    if self.reset_log_level is not None:
+      oprot.writeFieldBegin('reset_log_level', TType.STRING, 5)
+      oprot.writeString(self.reset_log_level.encode('utf-8'))
+      oprot.writeFieldEnd()
+    oprot.writeFieldStop()
+    oprot.writeStructEnd()
+
+  def validate(self):
+    if self.action is None:
+      raise TProtocol.TProtocolException(message='Required field action is unset!')
+    return
+
+
+  def __hash__(self):
+    value = 17
+    value = (value * 31) ^ hash(self.action)
+    value = (value * 31) ^ hash(self.target_log_level)
+    value = (value * 31) ^ hash(self.reset_log_level_timeout_secs)
+    value = (value * 31) ^ hash(self.reset_log_level_timeout_epoch)
+    value = (value * 31) ^ hash(self.reset_log_level)
+    return value
+
+  def __repr__(self):
+    L = ['%s=%r' % (key, value)
+      for key, value in self.__dict__.iteritems()]
+    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
+
+  def __eq__(self, other):
+    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
+
+  def __ne__(self, other):
+    return not (self == other)
+
+class LogConfig:
+  """
+  Attributes:
+   - named_logger_level
+  """
+
+  thrift_spec = (
+    None, # 0
+    None, # 1
+    (2, TType.MAP, 'named_logger_level', (TType.STRING,None,TType.STRUCT,(LogLevel, LogLevel.thrift_spec)), None, ), # 2
+  )
+
+  def __init__(self, named_logger_level=None,):
+    self.named_logger_level = named_logger_level
+
+  def read(self, iprot):
+    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
+      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
+      return
+    iprot.readStructBegin()
+    while True:
+      (fname, ftype, fid) = iprot.readFieldBegin()
+      if ftype == TType.STOP:
+        break
+      if fid == 2:
+        if ftype == TType.MAP:
+          self.named_logger_level = {}
+          (_ktype457, _vtype458, _size456 ) = iprot.readMapBegin()
+          for _i460 in xrange(_size456):
+            _key461 = iprot.readString().decode('utf-8')
+            _val462 = LogLevel()
+            _val462.read(iprot)
+            self.named_logger_level[_key461] = _val462
+          iprot.readMapEnd()
+        else:
+          iprot.skip(ftype)
+      else:
+        iprot.skip(ftype)
+      iprot.readFieldEnd()
+    iprot.readStructEnd()
+
+  def write(self, oprot):
+    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
+      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
+      return
+    oprot.writeStructBegin('LogConfig')
+    if self.named_logger_level is not None:
+      oprot.writeFieldBegin('named_logger_level', TType.MAP, 2)
+      oprot.writeMapBegin(TType.STRING, TType.STRUCT, len(self.named_logger_level))
+      for kiter463,viter464 in self.named_logger_level.items():
+        oprot.writeString(kiter463.encode('utf-8'))
+        viter464.write(oprot)
+      oprot.writeMapEnd()
+      oprot.writeFieldEnd()
+    oprot.writeFieldStop()
+    oprot.writeStructEnd()
+
+  def validate(self):
+    return
+
+
+  def __hash__(self):
+    value = 17
+    value = (value * 31) ^ hash(self.named_logger_level)
+    return value
+
+  def __repr__(self):
+    L = ['%s=%r' % (key, value)
+      for key, value in self.__dict__.iteritems()]
+    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
+
+  def __eq__(self, other):
+    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
+
+  def __ne__(self, other):
+    return not (self == other)
+
 class DRPCRequest:
   """
   Attributes:

http://git-wip-us.apache.org/repos/asf/storm/blob/d13cfe2d/storm-core/src/storm.thrift
----------------------------------------------------------------------
diff --git a/storm-core/src/storm.thrift b/storm-core/src/storm.thrift
index f687681..eba9fcd 100644
--- a/storm-core/src/storm.thrift
+++ b/storm-core/src/storm.thrift
@@ -358,6 +358,38 @@ struct GetInfoOptions {
   1: optional NumErrorsChoice num_err_choice;
 }
 
+enum LogLevelAction {
+  UNCHANGED = 1,
+  UPDATE    = 2,
+  REMOVE    = 3
+}
+
+struct LogLevel {
+  1: required LogLevelAction action;
+
+  // during this thrift call, we'll move logger to target_log_level
+  2: optional string target_log_level;
+
+  // number of seconds that target_log_level should be kept
+  // after this timeout, the loggers will be reset to reset_log_level
+  // if timeout is 0, we will not reset 
+  3: optional i32 reset_log_level_timeout_secs;
+
+  // number of seconds since unix epoch corresponding to 
+  // current time (when message gets to nimbus) + reset_log_level_timeout_secs
+  // NOTE: this field gets set in Nimbus 
+  4: optional i64 reset_log_level_timeout_epoch;
+
+  // if reset timeout was set, then we would reset 
+  // to this level after timeout (or INFO by default)
+  5: optional string reset_log_level;
+}
+
+struct LogConfig { 
+  // logger name -> log level map
+  2: optional map<string, LogLevel> named_logger_level;
+}
+
 service Nimbus {
   void submitTopology(1: string name, 2: string uploadedJarLocation, 3: string jsonConf, 4: StormTopology topology) throws (1: AlreadyAliveException e, 2: InvalidTopologyException ite, 3: AuthorizationException aze);
   void submitTopologyWithOpts(1: string name, 2: string uploadedJarLocation, 3: string jsonConf, 4: StormTopology topology, 5: SubmitOptions options) throws (1: AlreadyAliveException e, 2: InvalidTopologyException ite, 3: AuthorizationException aze);
@@ -366,6 +398,11 @@ service Nimbus {
   void activate(1: string name) throws (1: NotAliveException e, 2: AuthorizationException aze);
   void deactivate(1: string name) throws (1: NotAliveException e, 2: AuthorizationException aze);
   void rebalance(1: string name, 2: RebalanceOptions options) throws (1: NotAliveException e, 2: InvalidTopologyException ite, 3: AuthorizationException aze);
+
+  // dynamic log levels
+  void setLogConfig(1: string name, 2: LogConfig config);
+  LogConfig getLogConfig(1: string name);
+
   /**
   * Enable/disable logging the tuples generated in topology via an internal EventLogger bolt. The component name is optional
   * and if null or empty, the debug flag will apply to the entire topology.


[6/9] storm git commit: Clearing logconfig zk node for topology as part of remove-storm

Posted by bo...@apache.org.
Clearing logconfig zk node for topology as part of remove-storm


Project: http://git-wip-us.apache.org/repos/asf/storm/repo
Commit: http://git-wip-us.apache.org/repos/asf/storm/commit/754017f1
Tree: http://git-wip-us.apache.org/repos/asf/storm/tree/754017f1
Diff: http://git-wip-us.apache.org/repos/asf/storm/diff/754017f1

Branch: refs/heads/master
Commit: 754017f1c16bc74797a39c2cbb64e8c5e50f3010
Parents: 51c3253
Author: Kishor Patil <kp...@yahoo-inc.com>
Authored: Wed Sep 30 07:50:44 2015 -0500
Committer: Kishor Patil <kp...@yahoo-inc.com>
Committed: Wed Sep 30 07:50:44 2015 -0500

----------------------------------------------------------------------
 storm-core/src/clj/backtype/storm/cluster.clj       | 9 +--------
 storm-core/src/clj/backtype/storm/daemon/nimbus.clj | 1 -
 2 files changed, 1 insertion(+), 9 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/storm/blob/754017f1/storm-core/src/clj/backtype/storm/cluster.clj
----------------------------------------------------------------------
diff --git a/storm-core/src/clj/backtype/storm/cluster.clj b/storm-core/src/clj/backtype/storm/cluster.clj
index 77c576f..e471e53 100644
--- a/storm-core/src/clj/backtype/storm/cluster.clj
+++ b/storm-core/src/clj/backtype/storm/cluster.clj
@@ -177,7 +177,6 @@
   (setup-heartbeats! [this storm-id])
   (teardown-heartbeats! [this storm-id])
   (teardown-topology-errors! [this storm-id])
-  (teardown-topology-log-config! [this storm-id])
   (heartbeat-storms [this])
   (error-topologies [this])
   (set-topology-log-config! [this storm-id log-config])
@@ -540,13 +539,6 @@
           (catch KeeperException e
             (log-warn-error e "Could not teardown errors for " storm-id))))
 
-      (teardown-topology-log-config!
-        [this storm-id]
-        (try-cause
-          (.delete_node cluster-state (log-config-path storm-id))
-          (catch KeeperException e
-            (log-warn-error e "Could not teardown log configs for " storm-id))))
-
       (supervisor-heartbeat!
         [this supervisor-id info]
         (let [thrift-supervisor-info (thriftify-supervisor-info info)]
@@ -599,6 +591,7 @@
         (delete-node cluster-state (assignment-path storm-id))
         (delete-node cluster-state (code-distributor-path storm-id))
         (delete-node cluster-state (credentials-path storm-id))
+        (delete-node cluster-state (log-config-path storm-id))
         (remove-storm-base! this storm-id))
 
       (set-credentials!

http://git-wip-us.apache.org/repos/asf/storm/blob/754017f1/storm-core/src/clj/backtype/storm/daemon/nimbus.clj
----------------------------------------------------------------------
diff --git a/storm-core/src/clj/backtype/storm/daemon/nimbus.clj b/storm-core/src/clj/backtype/storm/daemon/nimbus.clj
index c797c4b..19870f1 100644
--- a/storm-core/src/clj/backtype/storm/daemon/nimbus.clj
+++ b/storm-core/src/clj/backtype/storm/daemon/nimbus.clj
@@ -931,7 +931,6 @@
             (if (:code-distributor nimbus) (.cleanup (:code-distributor nimbus) id))
             (.teardown-heartbeats! storm-cluster-state id)
             (.teardown-topology-errors! storm-cluster-state id)
-            (.teardown-topology-log-config! storm-cluster-state id)
             (rmr (master-stormdist-root conf id))
             (swap! (:heartbeats-cache nimbus) dissoc id))
           )))


[8/9] storm git commit: Merge branch 'STORM-412' of https://github.com/kishorvpatil/incubator-storm into STORM-412

Posted by bo...@apache.org.
Merge branch 'STORM-412' of https://github.com/kishorvpatil/incubator-storm into STORM-412

STORM-412: Allow users to modify logging levels of running topologies


Project: http://git-wip-us.apache.org/repos/asf/storm/repo
Commit: http://git-wip-us.apache.org/repos/asf/storm/commit/dd78e452
Tree: http://git-wip-us.apache.org/repos/asf/storm/tree/dd78e452
Diff: http://git-wip-us.apache.org/repos/asf/storm/diff/dd78e452

Branch: refs/heads/master
Commit: dd78e45225865392aee630cbfd390c7872eece8f
Parents: 7cf4d25 6397850
Author: Robert (Bobby) Evans <ev...@yahoo-inc.com>
Authored: Mon Oct 5 14:13:42 2015 -0500
Committer: Robert (Bobby) Evans <ev...@yahoo-inc.com>
Committed: Mon Oct 5 14:13:42 2015 -0500

----------------------------------------------------------------------
 bin/storm.py                                    |   35 +-
 conf/defaults.yaml                              |    3 +
 docs/DYNAMIC_LOG_LEVEL_SETTINGS.md              |   41 +
 docs/images/dynamic_log_level_settings_1.png    |  Bin 0 -> 93689 bytes
 docs/images/dynamic_log_level_settings_2.png    |  Bin 0 -> 78785 bytes
 .../storm/starter/MultipleLoggerTopology.java   |  105 ++
 storm-core/src/clj/backtype/storm/cluster.clj   |   28 +-
 .../backtype/storm/command/set_log_level.clj    |   75 +
 .../src/clj/backtype/storm/daemon/nimbus.clj    |   46 +-
 .../clj/backtype/storm/daemon/supervisor.clj    |    2 +
 .../src/clj/backtype/storm/daemon/worker.clj    |  107 +-
 storm-core/src/clj/backtype/storm/ui/core.clj   |   55 +-
 storm-core/src/jvm/backtype/storm/Config.java   |    7 +
 .../jvm/backtype/storm/generated/LogConfig.java |  475 +++++
 .../jvm/backtype/storm/generated/LogLevel.java  |  836 +++++++++
 .../storm/generated/LogLevelAction.java         |   65 +
 .../jvm/backtype/storm/generated/Nimbus.java    | 1700 +++++++++++++++++-
 .../auth/authorizer/SimpleACLAuthorizer.java    |    2 +-
 storm-core/src/py/storm/Nimbus-remote           |   14 +
 storm-core/src/py/storm/Nimbus.py               |  359 +++-
 storm-core/src/py/storm/ttypes.py               |  213 +++
 storm-core/src/storm.thrift                     |   37 +
 storm-core/src/ui/public/css/style.css          |    8 +
 storm-core/src/ui/public/js/script.js           |   20 +
 .../templates/topology-page-template.html       |   70 +-
 storm-core/src/ui/public/topology.html          |  149 +-
 .../test/clj/backtype/storm/nimbus_test.clj     |   56 +-
 .../test/clj/backtype/storm/supervisor_test.clj |    4 +
 .../test/clj/backtype/storm/worker_test.clj     |  179 +-
 29 files changed, 4670 insertions(+), 21 deletions(-)
----------------------------------------------------------------------



[3/9] storm git commit: Adding Dynamic Logger to UI, cli and supervisors

Posted by bo...@apache.org.
Adding Dynamic Logger to UI, cli and supervisors


Project: http://git-wip-us.apache.org/repos/asf/storm/repo
Commit: http://git-wip-us.apache.org/repos/asf/storm/commit/250ab113
Tree: http://git-wip-us.apache.org/repos/asf/storm/tree/250ab113
Diff: http://git-wip-us.apache.org/repos/asf/storm/diff/250ab113

Branch: refs/heads/master
Commit: 250ab1132e5cb26ac9cf1af5c47cbaf98390923a
Parents: d13cfe2
Author: Kishor Patil <kp...@yahoo-inc.com>
Authored: Fri Sep 25 20:00:45 2015 +0000
Committer: Kishor Patil <kp...@yahoo-inc.com>
Committed: Mon Sep 28 15:26:22 2015 -0500

----------------------------------------------------------------------
 bin/storm.py                                    |  35 +++-
 conf/defaults.yaml                              |   3 +
 .../storm/starter/MultipleLoggerTopology.java   | 105 +++++++++++
 storm-core/src/clj/backtype/storm/cluster.clj   |  35 +++-
 .../backtype/storm/command/set_log_level.clj    |  75 ++++++++
 .../src/clj/backtype/storm/daemon/nimbus.clj    |  47 ++++-
 .../clj/backtype/storm/daemon/supervisor.clj    |   2 +
 .../src/clj/backtype/storm/daemon/worker.clj    | 107 ++++++++++-
 storm-core/src/clj/backtype/storm/ui/core.clj   |  55 +++++-
 storm-core/src/jvm/backtype/storm/Config.java   |   7 +
 .../auth/authorizer/SimpleACLAuthorizer.java    |   2 +-
 storm-core/src/ui/public/css/style.css          |   8 +
 storm-core/src/ui/public/js/script.js           |  20 +++
 .../templates/topology-page-template.html       |  70 +++++++-
 storm-core/src/ui/public/topology.html          | 149 ++++++++++++++-
 .../test/clj/backtype/storm/nimbus_test.clj     |  56 +++++-
 .../test/clj/backtype/storm/supervisor_test.clj |   4 +
 .../test/clj/backtype/storm/worker_test.clj     | 179 ++++++++++++++++++-
 18 files changed, 941 insertions(+), 18 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/storm/blob/250ab113/bin/storm.py
----------------------------------------------------------------------
diff --git a/bin/storm.py b/bin/storm.py
index 024912f..7b1dc83 100755
--- a/bin/storm.py
+++ b/bin/storm.py
@@ -277,6 +277,39 @@ def activate(*args):
         jvmtype="-client",
         extrajars=[USER_CONF_DIR, STORM_BIN_DIR])
 
+def set_log_level(*args):
+    """
+    Dynamically change topology log levels
+
+    Syntax: [storm set_log_level -l [logger name]=[log level][:optional timeout] -r [logger name]
+    where log level is one of:
+        ALL, TRACE, DEBUG, INFO, WARN, ERROR, FATAL, OFF
+    and timeout is integer seconds.
+
+    e.g.
+        ./bin/storm set_log_level -l ROOT=DEBUG:30
+
+        Set the root logger's level to DEBUG for 30 seconds
+
+        ./bin/storm set_log_level -l com.myapp=WARN
+
+        Set the com.myapp logger's level to WARN indefinitely (no timeout given)
+
+        ./bin/storm set_log_level -l com.myapp=WARN -l com.myOtherLogger=ERROR:123
+
+        Set the com.myapp logger's level to WARN indefinitely, and com.myOtherLogger
+        to ERROR for 123 seconds
+
+        ./bin/storm set_log_level -r com.myOtherLogger
+
+        Clears settings, resetting back to the original level
+    """
+    exec_storm_class(
+        "backtype.storm.command.set_log_level",
+        args=args,
+        jvmtype="-client",
+        extrajars=[USER_CONF_DIR, STORM_BIN_DIR])
+
 def listtopos(*args):
     """Syntax: [storm list]
 
@@ -561,7 +594,7 @@ COMMANDS = {"jar": jar, "kill": kill, "shell": shell, "nimbus": nimbus, "ui": ui
             "remoteconfvalue": print_remoteconfvalue, "repl": repl, "classpath": print_classpath,
             "activate": activate, "deactivate": deactivate, "rebalance": rebalance, "help": print_usage,
             "list": listtopos, "dev-zookeeper": dev_zookeeper, "version": version, "monitor": monitor,
-            "upload-credentials": upload_credentials, "get-errors": get_errors }
+            "upload-credentials": upload_credentials, "get-errors": get_errors, "set_log_level": set_log_level }
 
 def parse_config(config_list):
     global CONFIG_OPTS

http://git-wip-us.apache.org/repos/asf/storm/blob/250ab113/conf/defaults.yaml
----------------------------------------------------------------------
diff --git a/conf/defaults.yaml b/conf/defaults.yaml
index 43ef8f4..4a8e354 100644
--- a/conf/defaults.yaml
+++ b/conf/defaults.yaml
@@ -138,6 +138,9 @@ worker.childopts: "-Xmx768m"
 worker.gc.childopts: ""
 worker.heartbeat.frequency.secs: 1
 
+# check whether dynamic log levels can be reset from DEBUG to INFO in workers
+worker.log.level.reset.poll.secs: 30
+
 # control how many worker receiver threads we need per worker
 topology.worker.receiver.thread.count: 1
 

http://git-wip-us.apache.org/repos/asf/storm/blob/250ab113/examples/storm-starter/src/jvm/storm/starter/MultipleLoggerTopology.java
----------------------------------------------------------------------
diff --git a/examples/storm-starter/src/jvm/storm/starter/MultipleLoggerTopology.java b/examples/storm-starter/src/jvm/storm/starter/MultipleLoggerTopology.java
new file mode 100644
index 0000000..4285ff9
--- /dev/null
+++ b/examples/storm-starter/src/jvm/storm/starter/MultipleLoggerTopology.java
@@ -0,0 +1,105 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package storm.starter;
+
+import backtype.storm.Config;
+import backtype.storm.LocalCluster;
+import backtype.storm.StormSubmitter;
+import backtype.storm.task.OutputCollector;
+import backtype.storm.task.TopologyContext;
+import backtype.storm.testing.TestWordSpout;
+import backtype.storm.topology.OutputFieldsDeclarer;
+import backtype.storm.topology.TopologyBuilder;
+import backtype.storm.topology.base.BaseRichBolt;
+import backtype.storm.tuple.Fields;
+import backtype.storm.tuple.Tuple;
+import backtype.storm.tuple.Values;
+import backtype.storm.utils.Utils;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.util.List;
+import java.util.Map;
+
+/**
+ * This is a basic example of a Storm topology.
+ */
+public class MultipleLoggerTopology {
+  public static class ExclamationLoggingBolt extends BaseRichBolt {
+    OutputCollector _collector;
+    Logger _rootLogger = LoggerFactory.getLogger (Logger.ROOT_LOGGER_NAME);
+    // ensure the loggers are configured in the worker.xml before
+    // trying to use them here
+    Logger _logger = LoggerFactory.getLogger ("com.myapp");
+    Logger _subLogger = LoggerFactory.getLogger ("com.myapp.sub");
+
+    @Override
+    public void prepare(Map conf, TopologyContext context, OutputCollector collector) {
+      _collector = collector;
+    }
+
+    @Override
+    public void execute(Tuple tuple) {
+      _rootLogger.debug ("root: This is a DEBUG message");
+      _rootLogger.info ("root: This is an INFO message");
+      _rootLogger.warn ("root: This is a WARN message");
+      _rootLogger.error ("root: This is an ERROR message");
+
+      _logger.debug ("myapp: This is a DEBUG message");
+      _logger.info ("myapp: This is an INFO message");
+      _logger.warn ("myapp: This is a WARN message");
+      _logger.error ("myapp: This is an ERROR message");
+
+      _subLogger.debug ("myapp.sub: This is a DEBUG message");
+      _subLogger.info ("myapp.sub: This is an INFO message");
+      _subLogger.warn ("myapp.sub: This is a WARN message");
+      _subLogger.error ("myapp.sub: This is an ERROR message");
+
+      _collector.emit(tuple, new Values(tuple.getString(0) + "!!!"));
+      _collector.ack(tuple);
+    }
+
+    @Override
+    public void declareOutputFields(OutputFieldsDeclarer declarer) {
+      declarer.declare(new Fields("word"));
+    }
+  }
+
+  public static void main(String[] args) throws Exception {
+    TopologyBuilder builder = new TopologyBuilder();
+
+    builder.setSpout("word", new TestWordSpout(), 10);
+    builder.setBolt("exclaim1", new ExclamationLoggingBolt(), 3).shuffleGrouping("word");
+    builder.setBolt("exclaim2", new ExclamationLoggingBolt(), 2).shuffleGrouping("exclaim1");
+
+    Config conf = new Config();
+    conf.setDebug(true);
+
+    if (args != null && args.length > 0) {
+      conf.setNumWorkers(2);
+      StormSubmitter.submitTopologyWithProgressBar(args[0], conf, builder.createTopology());
+    } else {
+      LocalCluster cluster = new LocalCluster();
+      cluster.submitTopology("test", conf, builder.createTopology());
+      Utils.sleep(10000);
+      cluster.killTopology("test");
+      cluster.shutdown();
+    }
+  }
+}

http://git-wip-us.apache.org/repos/asf/storm/blob/250ab113/storm-core/src/clj/backtype/storm/cluster.clj
----------------------------------------------------------------------
diff --git a/storm-core/src/clj/backtype/storm/cluster.clj b/storm-core/src/clj/backtype/storm/cluster.clj
index 99fb49e..77c576f 100644
--- a/storm-core/src/clj/backtype/storm/cluster.clj
+++ b/storm-core/src/clj/backtype/storm/cluster.clj
@@ -16,7 +16,8 @@
 
 (ns backtype.storm.cluster
   (:import [org.apache.zookeeper.data Stat ACL Id]
-           [backtype.storm.generated SupervisorInfo Assignment StormBase ClusterWorkerHeartbeat ErrorInfo Credentials NimbusSummary]
+           [backtype.storm.generated SupervisorInfo Assignment StormBase ClusterWorkerHeartbeat ErrorInfo Credentials NimbusSummary
+            LogConfig]
            [java.io Serializable])
   (:import [org.apache.zookeeper KeeperException KeeperException$NoNodeException ZooDefs ZooDefs$Ids ZooDefs$Perms])
   (:import [org.apache.curator.framework.state ConnectionStateListener ConnectionState])
@@ -176,8 +177,11 @@
   (setup-heartbeats! [this storm-id])
   (teardown-heartbeats! [this storm-id])
   (teardown-topology-errors! [this storm-id])
+  (teardown-topology-log-config! [this storm-id])
   (heartbeat-storms [this])
   (error-topologies [this])
+  (set-topology-log-config! [this storm-id log-config])
+  (topology-log-config [this storm-id cb])
   (worker-heartbeat! [this storm-id node port info])
   (remove-worker-heartbeat! [this storm-id node port])
   (supervisor-heartbeat! [this supervisor-id info])
@@ -209,7 +213,7 @@
 (def CODE-DISTRIBUTOR-ROOT "code-distributor")
 (def NIMBUSES-ROOT "nimbuses")
 (def CREDENTIALS-ROOT "credentials")
-
+(def LOGCONFIG-ROOT "logconfigs")
 
 (def ASSIGNMENTS-SUBTREE (str "/" ASSIGNMENTS-ROOT))
 (def STORMS-SUBTREE (str "/" STORMS-ROOT))
@@ -220,6 +224,7 @@
 (def CODE-DISTRIBUTOR-SUBTREE (str "/" CODE-DISTRIBUTOR-ROOT))
 (def NIMBUSES-SUBTREE (str "/" NIMBUSES-ROOT))
 (def CREDENTIALS-SUBTREE (str "/" CREDENTIALS-ROOT))
+(def LOGCONFIG-SUBTREE (str "/" LOGCONFIG-ROOT))
 
 (defn supervisor-path
   [id]
@@ -279,6 +284,10 @@
   [storm-id]
   (str CREDENTIALS-SUBTREE "/" storm-id))
 
+(defn log-config-path
+  [storm-id]
+  (str LOGCONFIG-SUBTREE "/" storm-id))
+
 (defn- issue-callback!
   [cb-atom]
   (let [cb @cb-atom]
@@ -332,6 +341,7 @@
         storm-base-callback (atom {})
         code-distributor-callback (atom nil)
         credentials-callback (atom {})
+        log-config-callback (atom {})
         state-id (register
                   cluster-state
                   (fn [type path]
@@ -347,10 +357,12 @@
                          CODE-DISTRIBUTOR-ROOT (issue-callback! code-distributor-callback)
                          STORMS-ROOT (issue-map-callback! storm-base-callback (first args))
                          CREDENTIALS-ROOT (issue-map-callback! credentials-callback (first args))
+                         LOGCONFIG-ROOT (issue-map-callback! log-config-callback (first args))
                          BACKPRESSURE-ROOT (issue-map-callback! backpressure-callback (first args))
                          ;; this should never happen
                          (exit-process! 30 "Unknown callback for subtree " subtree args)))))]
-    (doseq [p [ASSIGNMENTS-SUBTREE STORMS-SUBTREE SUPERVISORS-SUBTREE WORKERBEATS-SUBTREE ERRORS-SUBTREE CODE-DISTRIBUTOR-SUBTREE NIMBUSES-SUBTREE]]
+    (doseq [p [ASSIGNMENTS-SUBTREE STORMS-SUBTREE SUPERVISORS-SUBTREE WORKERBEATS-SUBTREE ERRORS-SUBTREE CODE-DISTRIBUTOR-SUBTREE NIMBUSES-SUBTREE
+               LOGCONFIG-SUBTREE]]
       (mkdirs cluster-state p acls))
     (reify
       StormClusterState
@@ -461,6 +473,16 @@
         [this supervisor-id]
         (clojurify-supervisor-info (maybe-deserialize (get-data cluster-state (supervisor-path supervisor-id) false) SupervisorInfo)))
 
+      (topology-log-config
+        [this storm-id cb]
+        (when cb
+          (swap! log-config-callback assoc storm-id cb))
+        (maybe-deserialize (.get_data cluster-state (log-config-path storm-id) (not-nil? cb)) LogConfig))
+
+      (set-topology-log-config!
+        [this storm-id log-config]
+        (.set_data cluster-state (log-config-path storm-id) (Utils/serialize log-config) acls))
+
       (worker-heartbeat!
         [this storm-id node port info]
         (let [thrift-worker-hb (thriftify-zk-worker-hb info)]
@@ -518,6 +540,13 @@
           (catch KeeperException e
             (log-warn-error e "Could not teardown errors for " storm-id))))
 
+      (teardown-topology-log-config!
+        [this storm-id]
+        (try-cause
+          (.delete_node cluster-state (log-config-path storm-id))
+          (catch KeeperException e
+            (log-warn-error e "Could not teardown log configs for " storm-id))))
+
       (supervisor-heartbeat!
         [this supervisor-id info]
         (let [thrift-supervisor-info (thriftify-supervisor-info info)]

http://git-wip-us.apache.org/repos/asf/storm/blob/250ab113/storm-core/src/clj/backtype/storm/command/set_log_level.clj
----------------------------------------------------------------------
diff --git a/storm-core/src/clj/backtype/storm/command/set_log_level.clj b/storm-core/src/clj/backtype/storm/command/set_log_level.clj
new file mode 100644
index 0000000..3b50c3a
--- /dev/null
+++ b/storm-core/src/clj/backtype/storm/command/set_log_level.clj
@@ -0,0 +1,75 @@
+;; Licensed to the Apache Software Foundation (ASF) under one
+;; or more contributor license agreements.  See the NOTICE file
+;; distributed with this work for additional information
+;; regarding copyright ownership.  The ASF licenses this file
+;; to you under the Apache License, Version 2.0 (the
+;; "License"); you may not use this file except in compliance
+;; with the License.  You may obtain a copy of the License at
+;;
+;; http://www.apache.org/licenses/LICENSE-2.0
+;;
+;; Unless required by applicable law or agreed to in writing, software
+;; distributed under the License is distributed on an "AS IS" BASIS,
+;; WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+;; See the License for the specific language governing permissions and
+;; limitations under the License.
+(ns backtype.storm.command.set-log-level
+  (:use [clojure.tools.cli :only [cli]])
+  (:use [backtype.storm thrift log])
+  (:import [org.apache.logging.log4j Level])
+  (:import [backtype.storm.generated LogConfig LogLevel LogLevelAction])
+  (:gen-class))
+
+(defn- get-storm-id
+  "Get topology id for a running topology from the topology name."
+  [nimbus name]
+  (let [info (.getClusterInfo nimbus)
+        topologies (.get_topologies info)
+        topology (first (filter (fn [topo] (= name (.get_name topo))) topologies))]
+    (if topology 
+      (.get_id topology)
+      (throw (IllegalArgumentException. (str name " is not a running topology"))))))
+
+(defn- parse-named-log-levels
+  "Parses [logger name]=[level string]:[optional timeout],[logger name2]...
+
+   e.g. ROOT=DEBUG:30
+        root logger, debug for 30 seconds
+
+        org.apache.foo=WARN
+        org.apache.foo set to WARN indefinitely"
+  [action]
+  (fn [^String s]
+    (let [log-args (re-find #"(.*)=([A-Z]+):?(\d*)" s)
+          name (if (= action LogLevelAction/REMOVE) s (nth log-args 1))
+          level (Level/toLevel (nth log-args 2))
+          timeout-str (nth log-args 3)
+          log-level (LogLevel.)]
+      (if (= action LogLevelAction/REMOVE)
+        (.set_action log-level action)
+        (do
+          (.set_action log-level action)
+          (.set_target_log_level log-level (.toString level))
+          (.set_reset_log_level_timeout_secs log-level
+            (Integer. (if (= timeout-str "") "0" timeout-str)))))
+      {name log-level})))
+
+(defn- merge-together [previous key val]
+   (assoc previous key
+      (if-let [oldval (get previous key)]
+         (merge oldval val)
+         val)))
+
+(defn -main [& args] 
+  (let [[{log-setting :log-setting remove-log-setting :remove-log-setting} [name] _]
+          (cli args ["-l" "--log-setting"
+                        :parse-fn (parse-named-log-levels LogLevelAction/UPDATE)
+                        :assoc-fn merge-together]
+                    ["-r" "--remove-log-setting"
+                        :parse-fn (parse-named-log-levels LogLevelAction/REMOVE)
+                        :assoc-fn merge-together])]
+    (with-configured-nimbus-connection nimbus
+      (let [log-config (LogConfig.)]
+        (doseq [[log-name log-val] (merge log-setting remove-log-setting)]
+          (.put_to_named_logger_level log-config log-name log-val))
+        (log-message "Sent log config " log-config " for topology " name)
+        (.setLogConfig nimbus (get-storm-id nimbus name) log-config)))))

http://git-wip-us.apache.org/repos/asf/storm/blob/250ab113/storm-core/src/clj/backtype/storm/daemon/nimbus.clj
----------------------------------------------------------------------
diff --git a/storm-core/src/clj/backtype/storm/daemon/nimbus.clj b/storm-core/src/clj/backtype/storm/daemon/nimbus.clj
index 7ea515b..c797c4b 100644
--- a/storm-core/src/clj/backtype/storm/daemon/nimbus.clj
+++ b/storm-core/src/clj/backtype/storm/daemon/nimbus.clj
@@ -35,7 +35,8 @@
   (:import [backtype.storm.generated NotAliveException AlreadyAliveException StormTopology ErrorInfo
             ExecutorInfo InvalidTopologyException Nimbus$Iface Nimbus$Processor SubmitOptions TopologyInitialStatus
             KillOptions RebalanceOptions ClusterSummary SupervisorSummary TopologySummary TopologyInfo
-            ExecutorSummary AuthorizationException GetInfoOptions NumErrorsChoice])
+            ExecutorSummary AuthorizationException GetInfoOptions NumErrorsChoice
+            LogConfig LogLevel LogLevelAction])
   (:import [backtype.storm.daemon Shutdownable])
   (:use [backtype.storm util config log timer zookeeper])
   (:require [backtype.storm [cluster :as cluster] [stats :as stats] [converter :as converter]])
@@ -44,6 +45,8 @@
   (:use [backtype.storm.daemon common])
   (:import [org.apache.zookeeper data.ACL ZooDefs$Ids ZooDefs$Perms])
   (:import [backtype.storm.utils VersionInfo])
+  (:require [clj-time.core :as time])
+  (:require [clj-time.coerce :as coerce])
   (:gen-class
     :methods [^{:static true} [launch [backtype.storm.scheduler.INimbus] void]]))
 
@@ -102,6 +105,7 @@
                                                                        NIMBUS-ZK-ACLS))
      :submit-lock (Object.)
      :cred-update-lock (Object.)
+     :log-update-lock (Object.)
      :heartbeats-cache (atom {})
      :downloaders (file-cache-map conf)
      :uploaders (file-cache-map conf)
@@ -927,6 +931,7 @@
             (if (:code-distributor nimbus) (.cleanup (:code-distributor nimbus) id))
             (.teardown-heartbeats! storm-cluster-state id)
             (.teardown-topology-errors! storm-cluster-state id)
+            (.teardown-topology-log-config! storm-cluster-state id)
             (rmr (master-stormdist-root conf id))
             (swap! (:heartbeats-cache nimbus) dissoc id))
           )))
@@ -1067,6 +1072,13 @@
        (InvalidTopologyException. 
         (str "Failed to submit topology. Topology requests more than " workers-allowed " workers."))))))
 
+(defn- set-logger-timeouts [log-config]
+  (let [timeout-secs (.get_reset_log_level_timeout_secs log-config)
+       timeout (time/plus (time/now) (time/secs timeout-secs))]
+   (if (time/after? timeout (time/now))
+     (.set_reset_log_level_timeout_epoch log-config (coerce/to-long timeout))
+     (.unset_reset_log_level_timeout_epoch log-config))))
+
 (defserverfn service-handler [conf inimbus]
   (.prepare inimbus conf (master-inimbus-dir conf))
   (log-message "Starting Nimbus with conf " conf)
@@ -1265,6 +1277,31 @@
           (locking (:submit-lock nimbus)
             (.update-storm! storm-cluster-state storm-id storm-base-updates))))
 
+      (^void setLogConfig [this ^String id ^LogConfig log-config-msg]
+        (let [topology-conf (try-read-storm-conf conf id)
+              storm-name (topology-conf TOPOLOGY-NAME)
+              _ (check-authorization! nimbus storm-name topology-conf "setLogConfig")
+              storm-cluster-state (:storm-cluster-state nimbus)
+              merged-log-config (or (.topology-log-config storm-cluster-state id nil) (LogConfig.))
+              named-loggers (.get_named_logger_level merged-log-config)]
+            (doseq [[_ level] named-loggers]
+              (.set_action level LogLevelAction/UNCHANGED))
+            (doseq [[logger-name log-config] (.get_named_logger_level log-config-msg)]
+              (let [action (.get_action log-config)]
+                (if (clojure.string/blank? logger-name)
+                  (throw (RuntimeException. "Named loggers need a valid name. Use ROOT for the root logger")))
+                (condp = action
+                  LogLevelAction/UPDATE
+                    (do (set-logger-timeouts log-config)
+                          (.put_to_named_logger_level merged-log-config logger-name log-config))
+                  LogLevelAction/REMOVE
+                    (let [named-loggers (.get_named_logger_level merged-log-config)]
+                      (if (and (not (nil? named-loggers))
+                               (.containsKey named-loggers logger-name))
+                        (.remove named-loggers logger-name))))))
+            (log-message "Setting log config for " storm-name ":" merged-log-config)
+            (.set-topology-log-config! storm-cluster-state id merged-log-config)))
+
       (uploadNewCredentials [this storm-name credentials]
         (let [storm-cluster-state (:storm-cluster-state nimbus)
               storm-id (get-storm-id storm-cluster-state storm-name)
@@ -1333,6 +1370,14 @@
         (check-authorization! nimbus nil nil "getNimbusConf")
         (to-json (:conf nimbus)))
 
+      (^LogConfig getLogConfig [this ^String id]
+        (let [topology-conf (try-read-storm-conf conf id)
+              storm-name (topology-conf TOPOLOGY-NAME)
+              _ (check-authorization! nimbus storm-name topology-conf "getLogConfig")
+             storm-cluster-state (:storm-cluster-state nimbus)
+             log-config (.topology-log-config storm-cluster-state id nil)]
+           (if log-config log-config (LogConfig.))))
+
       (^String getTopologyConf [this ^String id]
         (let [topology-conf (try-read-storm-conf conf id)
               storm-name (topology-conf TOPOLOGY-NAME)]

http://git-wip-us.apache.org/repos/asf/storm/blob/250ab113/storm-core/src/clj/backtype/storm/daemon/supervisor.clj
----------------------------------------------------------------------
diff --git a/storm-core/src/clj/backtype/storm/daemon/supervisor.clj b/storm-core/src/clj/backtype/storm/daemon/supervisor.clj
index 2a7613d..f7c4c0b 100644
--- a/storm-core/src/clj/backtype/storm/daemon/supervisor.clj
+++ b/storm-core/src/clj/backtype/storm/daemon/supervisor.clj
@@ -698,6 +698,7 @@
                      (str "-Dworker.port=" port)
                      (str "-Dstorm.log.dir=" storm-log-dir)
                      (str "-Dlog4j.configurationFile=" storm-log4j2-conf-dir file-path-separator "worker.xml")
+                     (str "-DLog4jContextSelector=org.apache.logging.log4j.core.selector.BasicContextSelector")
                      "backtype.storm.LogWriter"]
                     [(java-cmd) "-server"]
                     worker-childopts
@@ -711,6 +712,7 @@
                      (str "-Dstorm.log.dir=" storm-log-dir)
                      (str "-Dlogging.sensitivity=" logging-sensitivity)
                      (str "-Dlog4j.configurationFile=" storm-log4j2-conf-dir file-path-separator "worker.xml")
+                     (str "-DLog4jContextSelector=org.apache.logging.log4j.core.selector.BasicContextSelector")
                      (str "-Dstorm.id=" storm-id)
                      (str "-Dworker.id=" worker-id)
                      (str "-Dworker.port=" port)

http://git-wip-us.apache.org/repos/asf/storm/blob/250ab113/storm-core/src/clj/backtype/storm/daemon/worker.clj
----------------------------------------------------------------------
diff --git a/storm-core/src/clj/backtype/storm/daemon/worker.clj b/storm-core/src/clj/backtype/storm/daemon/worker.clj
index 781a959..40b43ef 100644
--- a/storm-core/src/clj/backtype/storm/daemon/worker.clj
+++ b/storm-core/src/clj/backtype/storm/daemon/worker.clj
@@ -16,6 +16,8 @@
 (ns backtype.storm.daemon.worker
   (:use [backtype.storm.daemon common])
   (:use [backtype.storm config log util timer local-state])
+  (:require [clj-time.core :as time])
+  (:require [clj-time.coerce :as coerce])
   (:require [backtype.storm.daemon [executor :as executor]])
   (:require [backtype.storm [disruptor :as disruptor] [cluster :as cluster]])
   (:require [clojure.set :as set])
@@ -34,6 +36,10 @@
   (:import [backtype.storm.security.auth AuthUtils])
   (:import [javax.security.auth Subject])
   (:import [java.security PrivilegedExceptionAction])
+  (:import [org.apache.logging.log4j LogManager])
+  (:import [org.apache.logging.log4j Level])
+  (:import [org.apache.logging.log4j.core.config LoggerConfig])
+  (:import [backtype.storm.generated LogConfig LogLevelAction])
   (:gen-class))
 
 (defmulti mk-suicide-fn cluster-mode)
@@ -265,6 +271,7 @@
       :heartbeat-timer (mk-halting-timer "heartbeat-timer")
       :refresh-connections-timer (mk-halting-timer "refresh-connections-timer")
       :refresh-credentials-timer (mk-halting-timer "refresh-credentials-timer")
+      :reset-log-levels-timer (mk-halting-timer "reset-log-levels-timer")
       :refresh-active-timer (mk-halting-timer "refresh-active-timer")
       :executor-heartbeat-timer (mk-halting-timer "executor-heartbeat-timer")
       :user-timer (mk-halting-timer "user-timer")
@@ -439,6 +446,85 @@
     (assoc conf "java.security.auth.login.config" login_conf_file)
     conf))
 
+(defn- get-logger-levels []
+  (into {}
+    (let [logger-config (.getConfiguration (LogManager/getContext false))]
+      (for [[logger-name logger] (.getLoggers logger-config)]
+        {logger-name (.getLevel logger)}))))
+
+(defn set-logger-level [logger-context logger-name new-level]
+  (let [config (.getConfiguration logger-context)
+        logger-config (.getLoggerConfig config logger-name)]
+    (if (not (= (.getName logger-config) logger-name))
+      ;; create a new config. Make it additive (true) s.t. inherit
+      ;; parents appenders
+      (let [new-logger-config (LoggerConfig. logger-name new-level true)]
+        (log-message "Adding config for: " new-logger-config " with level: " new-level)
+        (.addLogger config logger-name new-logger-config))
+      (do
+        (log-message "Setting " logger-config " log level to: " new-level)
+        (.setLevel logger-config new-level)))))
+
+;; function called on timer to reset log levels last set to DEBUG
+;; also called from process-log-config-change
+(defn reset-log-levels [latest-log-config-atom]
+  (let [latest-log-config @latest-log-config-atom
+        logger-context (LogManager/getContext false)]
+    (doseq [[logger-name logger-setting] (sort latest-log-config)]
+      (let [timeout (:timeout logger-setting)
+            target-log-level (:target-log-level logger-setting)
+            reset-log-level (:reset-log-level logger-setting)]
+        (when (> (coerce/to-long (time/now)) timeout)
+          (log-message logger-name ": Resetting level to " reset-log-level) 
+          (set-logger-level logger-context logger-name reset-log-level)
+          (swap! latest-log-config-atom
+            (fn [prev]
+              (dissoc prev logger-name))))))
+    (.updateLoggers logger-context)))
+
+;; when a new log level is received from zookeeper, this function is called
+(defn process-log-config-change [latest-log-config original-log-levels log-config]
+  (when log-config
+    (log-debug "Processing received log config: " log-config)
+    ;; merge log configs together
+    (let [loggers (.get_named_logger_level log-config)
+          logger-context (LogManager/getContext false)]
+      (def new-log-configs
+        (into {}
+         ;; merge named log levels
+         (for [[msg-logger-name logger-level] loggers]
+           (let [logger-name (if (= msg-logger-name "ROOT")
+                                LogManager/ROOT_LOGGER_NAME
+                                msg-logger-name)]
+             ;; the new-timeouts map now contains logger => timeout 
+             (when (.is_set_reset_log_level_timeout_epoch logger-level)
+               {logger-name {:action (.get_action logger-level)
+                             :target-log-level (Level/toLevel (.get_target_log_level logger-level))
+                             :reset-log-level (or (.get @original-log-levels logger-name) (Level/INFO))
+                             :timeout (.get_reset_log_level_timeout_epoch logger-level)}})))))
+
+      ;; look for deleted log timeouts
+      (doseq [[logger-name logger-val] (sort @latest-log-config)]
+        (when (not (contains? new-log-configs logger-name))
+          ;; if we had a timeout, but the timeout is no longer active
+          (set-logger-level
+            logger-context logger-name (:reset-log-level logger-val))))
+
+      ;; apply new log settings we just received
+      ;; the merged configs are only for the reset logic
+      (doseq [[msg-logger-name logger-level] (sort (into {} (.get_named_logger_level log-config)))]
+        (let [logger-name (if (= msg-logger-name "ROOT")
+                                LogManager/ROOT_LOGGER_NAME
+                                msg-logger-name)
+              level (Level/toLevel (.get_target_log_level logger-level))
+              action (.get_action logger-level)]
+          (if (= action LogLevelAction/UPDATE)
+            (set-logger-level logger-context logger-name level))))
+   
+      (.updateLoggers logger-context)
+      (reset! latest-log-config new-log-configs)
+      (log-debug "New merged log config is " @latest-log-config))))
+
 ;; TODO: should worker even take the storm-id as input? this should be
 ;; deducable from cluster state (by searching through assignments)
 ;; what about if there's inconsistency in assignments? -> but nimbus
@@ -452,6 +538,13 @@
   ;; process. supervisor will register it in this case
   (when (= :distributed (cluster-mode conf))
     (touch (worker-pid-path conf worker-id (process-pid))))
+
+  (declare establish-log-setting-callback)
+
+  ;; start out with empty list of timeouts 
+  (def latest-log-config (atom {}))
+  (def original-log-levels (atom {}))
+
   (let [storm-conf (read-supervisor-storm-conf conf storm-id)
         storm-conf (override-login-config-with-system-property storm-conf)
         acls (Utils/getWorkerACL storm-conf)
@@ -577,7 +670,18 @@
                                                  (let [throttle-on (.topology-backpressure (:storm-cluster-state worker) storm-id cb)]
                                                    (reset! (:throttle-on worker) throttle-on)))
                                       new-throttle-on (.topology-backpressure (:storm-cluster-state worker) storm-id callback)]
-                                    (reset! (:throttle-on worker) new-throttle-on)))]
+                                    (reset! (:throttle-on worker) new-throttle-on)))
+        check-log-config-changed (fn []
+                                  (let [log-config (.topology-log-config (:storm-cluster-state worker) storm-id nil)]
+                                    (process-log-config-change latest-log-config original-log-levels log-config)
+                                    (establish-log-setting-callback)))]
+    (reset! original-log-levels (get-logger-levels))
+    (log-message "Started with log levels: " @original-log-levels)
+  
+    (defn establish-log-setting-callback []
+      (.topology-log-config (:storm-cluster-state worker) storm-id (fn [args] (check-log-config-changed))))
+
+    (establish-log-setting-callback)
     (.credentials (:storm-cluster-state worker) storm-id (fn [args] (check-credentials-changed)))
     (schedule-recurring (:refresh-credentials-timer worker) 0 (conf TASK-CREDENTIALS-POLL-SECS)
                         (fn [& args]
@@ -585,6 +689,7 @@
                           (if ((:storm-conf worker) TOPOLOGY-BACKPRESSURE-ENABLE)
                             (check-throttle-changed))))
     (schedule-recurring (:refresh-connections-timer worker) 0 (conf TASK-REFRESH-POLL-SECS) refresh-connections)
+    (schedule-recurring (:reset-log-levels-timer worker) 0 (conf WORKER-LOG-LEVEL-RESET-POLL-SECS) (fn [] (reset-log-levels latest-log-config)))
     (schedule-recurring (:refresh-active-timer worker) 0 (conf TASK-REFRESH-POLL-SECS) (partial refresh-storm-active worker))
 
     (log-message "Worker has topology config " (redact-value (:storm-conf worker) STORM-ZOOKEEPER-TOPOLOGY-AUTH-PAYLOAD))

http://git-wip-us.apache.org/repos/asf/storm/blob/250ab113/storm-core/src/clj/backtype/storm/ui/core.clj
----------------------------------------------------------------------
diff --git a/storm-core/src/clj/backtype/storm/ui/core.clj b/storm-core/src/clj/backtype/storm/ui/core.clj
index 414bfb1..8d68f41 100644
--- a/storm-core/src/clj/backtype/storm/ui/core.clj
+++ b/storm-core/src/clj/backtype/storm/ui/core.clj
@@ -32,7 +32,8 @@
             ExecutorStats ExecutorSummary ExecutorInfo TopologyInfo SpoutStats BoltStats
             ErrorInfo ClusterSummary SupervisorSummary TopologySummary
             Nimbus$Client StormTopology GlobalStreamId RebalanceOptions
-            KillOptions GetInfoOptions NumErrorsChoice DebugOptions])
+            KillOptions GetInfoOptions NumErrorsChoice DebugOptions
+            LogConfig LogLevel LogLevelAction])
   (:import [backtype.storm.security.auth AuthUtils ReqContext])
   (:import [backtype.storm.generated AuthorizationException])
   (:import [backtype.storm.security.auth AuthUtils])
@@ -43,6 +44,7 @@
             [ring.util.response :as resp]
             [backtype.storm [thrift :as thrift]])
   (:import [org.apache.commons.lang StringEscapeUtils])
+  (:import [org.apache.logging.log4j Level])
   (:gen-class))
 
 (def ^:dynamic *STORM-CONF* (read-storm-config))
@@ -991,6 +993,26 @@
          "eventLogLink" (event-log-link topology-id summ topology component secure?)}
        spec errors))))
 
+(defn- level-to-dict
+  "Converts a LogLevel thrift struct into a JSON-friendly map holding the
+   target/reset level names and the timeout settings. Returns nil when the
+   level is nil so absent loggers serialize as null."
+  [level]
+  (when level
+    ;; Level/toLevel normalizes the stored string into a valid log4j level name
+    {"target_level" (.toString (Level/toLevel (.get_target_log_level level)))
+     "reset_level" (.toString (Level/toLevel (.get_reset_log_level level)))
+     "timeout" (.get_reset_log_level_timeout_secs level)
+     "timeout_epoch" (.get_reset_log_level_timeout_epoch level)}))
+
+(defn log-config
+  "Fetches the dynamic log configuration for the given topology from Nimbus
+   and renders it as a map keyed by logger name, suitable for a JSON response."
+  [topology-id]
+  (thrift/with-configured-nimbus-connection nimbus
+    (let [conf (.getLogConfig ^Nimbus$Client nimbus topology-id)]
+      {"namedLoggerLevels"
+       (into {}
+             (map (fn [[logger-name level]]
+                    [(str logger-name) (level-to-dict level)])
+                  (.get_named_logger_level conf)))})))
+
 (defn topology-config [topology-id]
   (thrift/with-configured-nimbus-connection nimbus
      (from-json (.getTopologyConf ^Nimbus$Client nimbus topology-id))))
@@ -1059,6 +1081,9 @@
          (json-response
           (component-page id component (:window m) (check-include-sys? (:sys m)) user (= scheme :https))
           (:callback m))))
+  ;; Returns the topology's current dynamic log configuration as JSON.
+  ;; Reuses the "getTopology" ACL command for authorization.
+  (GET "/api/v1/topology/:id/logconfig" [:as {:keys [cookies servlet-request]} id & m]
+       (assert-authorized-user servlet-request "getTopology" (topology-config id))
+       (json-response (log-config id) (:callback m)))
   (POST "/api/v1/topology/:id/activate" [:as {:keys [cookies servlet-request]} id & m]
     (thrift/with-configured-nimbus-connection nimbus
     (assert-authorized-user servlet-request "activate" (topology-config id))
@@ -1137,6 +1162,34 @@
         (.killTopologyWithOpts nimbus name options)
         (log-message "Killing topology '" name "' with wait time: " wait-time " secs")))
     (json-response (topology-op-response id "kill") (m "callback")))
+
+  ;; Applies a new dynamic log configuration to the topology. Each entry of
+  ;; namedLoggerLevels either updates a logger's level (with an optional reset
+  ;; timeout) or, when target_level is null, removes the override entirely.
+  ;; Responds with the freshly-saved configuration.
+  (POST "/api/v1/topology/:id/logconfig" [:as {:keys [cookies servlet-request]} id namedLoggerLevels & m]
+    (assert-authorized-user servlet-request "setLogConfig" (topology-config id))
+    (thrift/with-configured-nimbus-connection
+      nimbus
+      (let [new-log-config (LogConfig.)]
+        (doseq [[key level] namedLoggerLevels]
+            (let [logger-name (str key)
+                  target-level (.get level "target_level")
+                  timeout (or (.get level "timeout") 0)
+                  named-logger-level (LogLevel.)]
+              ;; if target-level is nil, do not set it, user wants to clear
+              (log-message "The target level for " logger-name " is " target-level)
+              (if (nil? target-level)
+                (do
+                  (.set_action named-logger-level LogLevelAction/REMOVE)
+                  (.unset_target_log_level named-logger-level))
+                (do
+                  (.set_action named-logger-level LogLevelAction/UPDATE)
+                  ;; the toLevel here ensures the string we get is valid
+                  (.set_target_log_level named-logger-level (.name (Level/toLevel target-level)))
+                  (.set_reset_log_level_timeout_secs named-logger-level timeout)))
+              (log-message "Adding this " logger-name " " named-logger-level " to " new-log-config)
+              (.put_to_named_logger_level new-log-config logger-name named-logger-level)))
+        (log-message "Setting topology " id " log config " new-log-config)
+        (.setLogConfig nimbus id new-log-config)
+        ;; echo back what nimbus now has stored for this topology
+        (json-response (log-config id) (m "callback")))))
+
   (GET "/" [:as {cookies :cookies}]
        (resp/redirect "/index.html"))
   (route/resources "/")

http://git-wip-us.apache.org/repos/asf/storm/blob/250ab113/storm-core/src/jvm/backtype/storm/Config.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/backtype/storm/Config.java b/storm-core/src/jvm/backtype/storm/Config.java
index 0eff51b..7d60008 100644
--- a/storm-core/src/jvm/backtype/storm/Config.java
+++ b/storm-core/src/jvm/backtype/storm/Config.java
@@ -1038,6 +1038,13 @@ public class Config extends HashMap<String, Object> {
     public static final String TASK_REFRESH_POLL_SECS = "task.refresh.poll.secs";
     public static final Object TASK_REFRESH_POLL_SECS_SCHEMA = ConfigValidation.IntegerValidator;
 
+    /**
+     * How often (in seconds) a worker should check dynamic log level timeouts for
+     * expiration. For expired logger settings, the clean-up polling task resets the
+     * log levels to the original levels (captured at worker startup) and removes
+     * the corresponding entries from the timeout map.
+     */
+    public static final String WORKER_LOG_LEVEL_RESET_POLL_SECS = "worker.log.level.reset.poll.secs";
+    public static final Object WORKER_LOG_LEVEL_RESET_POLL_SECS_SCHEMA = ConfigValidation.PositiveIntegerValidator;
 
     /**
      * How often a task should sync credentials, worst case.

http://git-wip-us.apache.org/repos/asf/storm/blob/250ab113/storm-core/src/jvm/backtype/storm/security/auth/authorizer/SimpleACLAuthorizer.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/backtype/storm/security/auth/authorizer/SimpleACLAuthorizer.java b/storm-core/src/jvm/backtype/storm/security/auth/authorizer/SimpleACLAuthorizer.java
index e50a587..e332a13 100644
--- a/storm-core/src/jvm/backtype/storm/security/auth/authorizer/SimpleACLAuthorizer.java
+++ b/storm-core/src/jvm/backtype/storm/security/auth/authorizer/SimpleACLAuthorizer.java
@@ -44,7 +44,7 @@ public class SimpleACLAuthorizer implements IAuthorizer {
 
     protected Set<String> _userCommands = new HashSet<String>(Arrays.asList("submitTopology", "fileUpload", "getNimbusConf", "getClusterInfo"));
     protected Set<String> _supervisorCommands = new HashSet<String>(Arrays.asList("fileDownload"));
-    protected Set<String> _topoCommands = new HashSet<String>(Arrays.asList("killTopology","rebalance","activate","deactivate","getTopologyConf","getTopology","getUserTopology","getTopologyInfo","uploadNewCredentials"));
+    protected Set<String> _topoCommands = new HashSet<String>(Arrays.asList("killTopology","rebalance","activate","deactivate","getTopologyConf","getTopology","getUserTopology","getTopologyInfo","uploadNewCredentials","setLogConfig","getLogConfig"));
 
     protected Set<String> _admins;
     protected Set<String> _supervisors;

http://git-wip-us.apache.org/repos/asf/storm/blob/250ab113/storm-core/src/ui/public/css/style.css
----------------------------------------------------------------------
diff --git a/storm-core/src/ui/public/css/style.css b/storm-core/src/ui/public/css/style.css
index e058850..b438457 100644
--- a/storm-core/src/ui/public/css/style.css
+++ b/storm-core/src/ui/public/css/style.css
@@ -24,6 +24,14 @@
     padding: 0.5em;
 }
 
+/* Width of the log-level drop-downs in the Change Log Level form. */
+#change-log-level select{
+    width:10em;
+}
+
+/* Width of the timeout text inputs in the Change Log Level form. */
+#change-log-level input.timeout_input{
+    width:5em;
+}
+
 body,
 .dataTables_wrapper label,
 div.dataTables_info[role="status"],

http://git-wip-us.apache.org/repos/asf/storm/blob/250ab113/storm-core/src/ui/public/js/script.js
----------------------------------------------------------------------
diff --git a/storm-core/src/ui/public/js/script.js b/storm-core/src/ui/public/js/script.js
index fe8643d..a880205 100644
--- a/storm-core/src/ui/public/js/script.js
+++ b/storm-core/src/ui/public/js/script.js
@@ -81,6 +81,26 @@ function ensureInt(n) {
     return isInt;
 }
 
+function sendRequest(id, action, extra, body, cb){
+   var opts = {
+        type:'POST',
+        url:'/api/v1/topology/' + id + '/' + action
+    };
+
+    if (body) {
+        opts.data = JSON.stringify(body);
+        opts.contentType = 'application/json; charset=utf-8';
+    }
+
+    opts.url += extra ? "/" + extra : "";
+
+    $.ajax(opts).always(function(data){
+        cb (data);
+    }).fail (function(){
+        alert("Error while communicating with Nimbus.");
+    });
+}
+
 function confirmComponentAction(topologyId, componentId, componentName, action, param, defaultParamValue, paramText, actionText) {
     var opts = {
         type:'POST',

http://git-wip-us.apache.org/repos/asf/storm/blob/250ab113/storm-core/src/ui/public/templates/topology-page-template.html
----------------------------------------------------------------------
diff --git a/storm-core/src/ui/public/templates/topology-page-template.html b/storm-core/src/ui/public/templates/topology-page-template.html
index 3292ee0..f0cc822 100644
--- a/storm-core/src/ui/public/templates/topology-page-template.html
+++ b/storm-core/src/ui/public/templates/topology-page-template.html
@@ -362,5 +362,73 @@
     <input {{killStatus}} onclick="confirmAction('{{encodedId}}', '{{name}}', 'kill', true, 30, 'wait time in seconds')" type="button" value="Kill" class="btn btn-default">
     <input {{startDebugStatus}} onclick="confirmAction('{{encodedId}}', '{{name}}', 'debug/enable', true, {{currentSamplingPct}}, 'sampling percentage', 'debug')" type="button" value="Debug" class="btn btn-default">
     <input {{stopDebugStatus}} onclick="confirmAction('{{encodedId}}', '{{name}}', 'debug/disable', false, 0, 'sampling percentage', 'stop debugging')" type="button" value="Stop Debug" class="btn btn-default">
+    <input type="button" {{logLevel}} value="Change Log Level" onclick="toggleChangeLogLevel()" class="btn btn-default">
   </p>
-</script>
\ No newline at end of file
+  <!--
+    The contents of topology-change-log-level-template will be inserted into
+    the div below once the user clicks "Change Log Level".
+  -->
+  <div id="change-log-level" style="display:none"></div>
+</script>
+
+<script id="log-level-and-timeout" type="text/html" class="partials">
+    <tr id="logger-{{loggerId}}" class="{{cls}}">
+        {{#loggerName}}
+          <td>{{loggerName}}</td>
+          <input type="hidden" id="loggerName-{{loggerId}}" value="{{loggerName}}"/>
+          <input type="hidden" id="loggerRemove-{{loggerId}}" value="false"/>
+        {{/loggerName}}
+
+        {{#isNew}}
+          <td>
+            <input type="text" id="loggerName-{{loggerId}}" placeholder="com.your.organization.LoggerName"/>
+          </td>
+        {{/isNew}}
+
+        <td>
+            <select id="loggerLevel-{{loggerId}}">
+                {{#levels}} 
+                    <option value="{{name}}" {{levelSelected}}>{{name}}</option>
+                {{/levels}}
+            </select>
+        </td>
+        <td>
+            <input type="text" class="timeout_input" id="loggerTimeout-{{loggerId}}" placeholder="30" value="{{timeout}}"/> 
+        </td>
+        <td>
+            {{absoluteTimeout}}
+        </td>
+        <td>
+            {{#isNew}}
+                <input type="button" value="Add" name="clear" onClick='sendLoggerLevel({{loggerId}})' class="btn btn-secondary"/>
+            {{/isNew}}
+            {{#loggerName}}
+                <input type="button" value="Apply" name="clear" onClick='sendLoggerLevel({{loggerId}})' class="btn btn-secondary"/>
+                {{#canClear}}
+                    <input type="button" value="Clear" name="clear" onClick='clearLoggerLevel({{loggerId}})' class="btn btn-secondary"/>
+                {{/canClear}}
+            {{/loggerName}}
+        </td>
+    </tr>
+</script>
+
+<script id="topology-change-log-level-template" type="text/html">
+  <div id="change-log-level">
+      <h3>Change Log Level</h3>
+      Modify the logger levels for topology. Note that applying a setting restarts the timer in the workers. To configure the root logger, use the name ROOT. 
+      <table class="table table-striped compact">
+          <thead>
+              <tr>
+                  <th class="header">Logger</th>
+                  <th class="header">Level</th>
+                  <th class="header">Timeout (sec)</th>
+                  <th class="header">Expires at</th>
+                  <th class="header">Actions</th>
+              </tr>
+          </thead>
+      {{#loggers}}
+          {{ > log-level-and-timeout }}
+      {{/loggers}}
+      </table>
+  </div>
+</script>

http://git-wip-us.apache.org/repos/asf/storm/blob/250ab113/storm-core/src/ui/public/topology.html
----------------------------------------------------------------------
diff --git a/storm-core/src/ui/public/topology.html b/storm-core/src/ui/public/topology.html
index 2b1214c..e873bb6 100644
--- a/storm-core/src/ui/public/topology.html
+++ b/storm-core/src/ui/public/topology.html
@@ -82,6 +82,146 @@
 </div>
 </body>
 <script>
+
+var toggleChangeLogLevel;
+
+function closeChangeLogLevel (){
+    var container = $("#change-log-level");
+    container.hide();
+}
+
+function clearLoggerLevel(id){
+    $("#loggerRemove-" + id).val("true");
+    sendLoggerLevel(id);
+    $("#logger-" + id).remove();
+}
+
+function sendLoggerLevel(id){
+    var topologyId = $.url("?id");
+    var shouldRemove = $("#loggerRemove-" + id).val() === "true";
+    var level = $("#loggerLevel-" + id).val();
+    var timeout = parseInt($("#loggerTimeout-" + id).val());
+        timeout = isNaN(timeout) ? 0 : timeout;
+    var loggerName = $("#loggerName-" + id).val();
+    if (level === 'Pick Level'){
+        alert ('Please pick a valid log level');
+        return;
+    }
+    var levelBelowInfo = level === 'DEBUG';
+
+    if (!shouldRemove && levelBelowInfo && timeout <= 0){
+        timeout = parseInt (
+            prompt ("You must provide a timeout > 0 for DEBUG log level. What timeout would you like (secs)?", 30));
+        if (!timeout){
+            return;
+        }
+    }
+    var data = {};
+    var loggerSetting;
+
+    if (id === 0) {
+        data["namedLoggerLevels"] = {};
+        data["namedLoggerLevels"]["ROOT"] = {};
+        loggerSetting = data["namedLoggerLevels"]["ROOT"];
+    } else {
+        data["namedLoggerLevels"] = {};
+        data["namedLoggerLevels"][loggerName] = {};
+        loggerSetting = data["namedLoggerLevels"][loggerName];
+    }
+
+    loggerSetting.target_level = shouldRemove ? null : level;
+    loggerSetting.reset_level  = "INFO";
+    loggerSetting.timeout = timeout;
+
+    sendRequest (topologyId, "logconfig", null, data, toggleChangeLogLevel);
+};
+
+function renderLogLevelForm (template, responseData){
+    var topologyId = $.url("?id");
+    var container = $("#change-log-level");
+
+    var levels = [
+        {name: "Pick Level"},
+        {name: "ALL"},
+        {name: "TRACE"},
+        {name: "DEBUG"},
+        {name: "INFO" },
+        {name: "WARN" },
+        {name: "ERROR"},
+        {name: "FATAL"},
+        {name: "OFF"}
+    ];
+    var partialTemplates = $(template).filter('.partials');
+    var partials = {};
+
+    $.each(partialTemplates, function (ix, partial){
+        var obj = $(partial);
+        partials [obj.attr('id')] = obj.html(); 
+    });
+
+    var logLevelTemplate = $(template).filter("#topology-change-log-level-template").html();
+
+    var calcAbsoluteTimeout = function (timeout_epoch) {
+        var absoluteTimeout = "";
+        if (timeout_epoch) {
+            var d = new Date(0); 
+            d.setUTCSeconds(timeout_epoch / 1000);
+            absoluteTimeout = d.toLocaleDateString() + " " + d.toLocaleTimeString();
+        }
+        return absoluteTimeout;
+    };
+    var renderImpl = function (data){
+        var loggers = [];
+        var loggerCount = 1;
+        var obj = data.namedLoggerLevels;
+        if (!obj) {
+            obj = {};
+            data.namedLoggerLevels = obj;
+        }
+
+        var sortedLoggers = Object.keys(obj).sort(function (l1, l2){
+            if (l1 === "ROOT") return -1;
+            if (l2 === "ROOT") return 1;
+            return l1 > l2;
+        });
+
+        sortedLoggers.forEach (function (l){
+            var obj = data.namedLoggerLevels[l];
+            obj.loggerId = loggerCount++;
+            obj.loggerName = l;
+            obj.named = l != "ROOT";
+            obj.cls = "namedLoggers";
+            obj.levelSelected = function (obj){
+                return function (){
+                    return this.name === obj.target_level ? "selected" : "";
+                }
+            }(obj);
+            obj.absoluteTimeout = calcAbsoluteTimeout (obj.timeout_epoch);
+            obj.canClear = true;
+            loggers.push(obj);
+        });
+
+        loggers.push({
+            loggerId: loggerCount,
+            isNew: true,
+            cls: 'newLogger'
+        });
+
+        var tmplData = {
+            loggers: loggers,
+            levels: levels
+        };
+
+        container.html(Mustache.render(logLevelTemplate, tmplData, partials));
+        container.show('fast');
+    };
+    if (!responseData) {
+        var topologyId = $.url("?id");
+        $.get ('/api/v1/topology/' + topologyId + '/logconfig', renderImpl);
+    } else {
+        renderImpl (responseData);
+    }
+} 
 $(document).ajaxStop($.unblockUI);
 $(document).ajaxStart(function(){
     if ($("#topology-visualization").children().size() == 0) {
@@ -134,7 +274,10 @@ $(document).ready(function() {
         var formattedConfig = formatConfigData(response["configuration"]);
         var buttonJsonData = topologyActionJson(response["id"],response["encodedId"],response["name"],response["status"]
                                                 ,response["msgTimeout"],response["debug"],response["samplingPct"]);
-        $.get("/templates/topology-page-template.html", function(template) {
+        $.ajax ({url: "/templates/topology-page-template.html", success: function(template) {
+          toggleChangeLogLevel = function (data) {
+              renderLogLevelForm (template, data);
+            }
             topologySummary.append(Mustache.render($(template).filter("#topology-summary-template").html(),response));
             topologyActions.append(Mustache.render($(template).filter("#topology-actions-template").html(),buttonJsonData));
             topologyStats.append(Mustache.render($(template).filter("#topology-stats-template").html(),response));
@@ -222,8 +365,8 @@ $(document).ready(function() {
             $('#topology-configuration [data-toggle="tooltip"]').tooltip();
             $('#topology-actions [data-toggle="tooltip"]').tooltip();
             $('#topology-visualization [data-toggle="tooltip"]').tooltip();
-        });
+      }});
     });
  });
 </script>
-</html>
\ No newline at end of file
+</html>

http://git-wip-us.apache.org/repos/asf/storm/blob/250ab113/storm-core/test/clj/backtype/storm/nimbus_test.clj
----------------------------------------------------------------------
diff --git a/storm-core/test/clj/backtype/storm/nimbus_test.clj b/storm-core/test/clj/backtype/storm/nimbus_test.clj
index 0645f39..387a691 100644
--- a/storm-core/test/clj/backtype/storm/nimbus_test.clj
+++ b/storm-core/test/clj/backtype/storm/nimbus_test.clj
@@ -24,7 +24,8 @@
   (:import [backtype.storm.nimbus ILeaderElector NimbusInfo])
   (:import [backtype.storm.generated Credentials NotAliveException SubmitOptions
             TopologyInitialStatus TopologyStatus AlreadyAliveException KillOptions RebalanceOptions
-            InvalidTopologyException AuthorizationException])
+            InvalidTopologyException AuthorizationException
+            LogConfig LogLevel LogLevelAction])
   (:import [java.util HashMap])
   (:import [java.io File])
   (:import [backtype.storm.utils Time])
@@ -1329,3 +1330,56 @@
                      {})]
       (submit-local-topology nimbus "t1" {TOPOLOGY-WORKERS 1} topology)
       (.debug nimbus "t1" "" true 100))))
+
+;; Saving an empty log config should cause nimbus to mark every logger level
+;; it already stores for the topology as LogLevelAction/UNCHANGED.
+(deftest empty-save-config-results-in-all-unchanged-actions
+  (with-local-cluster [cluster]
+    (let [nimbus (:nimbus cluster)
+          previous-config (LogConfig.)
+          level (LogLevel.)
+          mock-config (LogConfig.)]
+      ;; seed nimbus with a non-empty config beforehand
+      (.set_target_log_level level "ERROR")
+      (.set_action level LogLevelAction/UPDATE)
+      (.put_to_named_logger_level previous-config "test" level)
+      (stubbing [nimbus/check-storm-active! nil
+                 nimbus/try-read-storm-conf {}]
+        (.setLogConfig nimbus "foo" previous-config)
+        ;; then save an empty config on top of it
+        (.setLogConfig nimbus "foo" mock-config)
+        (let [saved-config (.getLogConfig nimbus "foo")
+              levels (.get_named_logger_level saved-config)]
+           (is (= (.get_action (.get levels "test")) LogLevelAction/UNCHANGED)))))))
+
+;; Saving a config that touches only one logger should merge into the stored
+;; config: the touched entry is flagged UPDATE, untouched entries UNCHANGED.
+(deftest log-level-update-merges-and-flags-existent-log-level
+  (with-local-cluster [cluster]
+    (stubbing [nimbus/check-storm-active! nil
+               nimbus/try-read-storm-conf {}]
+      (let [nimbus (:nimbus cluster)
+            previous-config (LogConfig.)
+            level (LogLevel.)
+            other-level (LogLevel.)
+            mock-config (LogConfig.)]
+        ;; seed nimbus with two named logger levels beforehand
+        (.set_target_log_level level "ERROR")
+        (.set_action level LogLevelAction/UPDATE)
+        (.put_to_named_logger_level previous-config "test" level)
+
+        (.set_target_log_level other-level "DEBUG")
+        (.set_action other-level LogLevelAction/UPDATE)
+        (.put_to_named_logger_level previous-config "other-test" other-level)
+        (.setLogConfig nimbus "foo" previous-config)
+      
+        ;; only change "test"
+        (.set_target_log_level level "INFO")
+        (.set_action level LogLevelAction/UPDATE)
+        (.put_to_named_logger_level mock-config "test" level)
+        (.setLogConfig nimbus "foo" mock-config)
+
+        (let [saved-config (.getLogConfig nimbus "foo")
+              levels (.get_named_logger_level saved-config)]
+           (is (= (.get_action (.get levels "test")) LogLevelAction/UPDATE))
+           (is (= (.get_target_log_level (.get levels "test")) "INFO"))
+
+           ;; "other-test" was not in the new config, so it is merely carried over
+           (is (= (.get_action (.get levels "other-test")) LogLevelAction/UNCHANGED))
+           (is (= (.get_target_log_level (.get levels "other-test")) "DEBUG")))))))

http://git-wip-us.apache.org/repos/asf/storm/blob/250ab113/storm-core/test/clj/backtype/storm/supervisor_test.clj
----------------------------------------------------------------------
diff --git a/storm-core/test/clj/backtype/storm/supervisor_test.clj b/storm-core/test/clj/backtype/storm/supervisor_test.clj
index b17ea5e..64dd1cb 100644
--- a/storm-core/test/clj/backtype/storm/supervisor_test.clj
+++ b/storm-core/test/clj/backtype/storm/supervisor_test.clj
@@ -263,6 +263,7 @@
                                 (str "-Dworker.port=" mock-port)
                                "-Dstorm.log.dir=/logs"
                                "-Dlog4j.configurationFile=/log4j2/worker.xml"
+                               "-DLog4jContextSelector=org.apache.logging.log4j.core.selector.BasicContextSelector"
                                "backtype.storm.LogWriter"]
                                [(supervisor/java-cmd) "-server"]
                                opts
@@ -275,6 +276,7 @@
                                 (str "-Dstorm.log.dir=" file-path-separator "logs")
                                 (str "-Dlogging.sensitivity=" mock-sensitivity)
                                 (str "-Dlog4j.configurationFile=" file-path-separator "log4j2" file-path-separator "worker.xml")
+                                "-DLog4jContextSelector=org.apache.logging.log4j.core.selector.BasicContextSelector"
                                 (str "-Dstorm.id=" mock-storm-id)
                                 (str "-Dworker.id=" mock-worker-id)
                                 (str "-Dworker.port=" mock-port)
@@ -395,6 +397,7 @@
                                 " '-Dworker.port=" mock-port "'"
                                 " '-Dstorm.log.dir=/logs'"
                                 " '-Dlog4j.configurationFile=/log4j2/worker.xml'"
+                                " '-DLog4jContextSelector=org.apache.logging.log4j.core.selector.BasicContextSelector'"
                                 " 'backtype.storm.LogWriter'"
                                 " 'java' '-server'"
                                 " " (shell-cmd opts)
@@ -407,6 +410,7 @@
                                 " '-Dstorm.log.dir=/logs'"
                                 " '-Dlogging.sensitivity=" mock-sensitivity "'"
                                 " '-Dlog4j.configurationFile=/log4j2/worker.xml'"
+                                " '-DLog4jContextSelector=org.apache.logging.log4j.core.selector.BasicContextSelector'"
                                 " '-Dstorm.id=" mock-storm-id "'"
                                 " '-Dworker.id=" mock-worker-id "'"
                                 " '-Dworker.port=" mock-port "'"

http://git-wip-us.apache.org/repos/asf/storm/blob/250ab113/storm-core/test/clj/backtype/storm/worker_test.clj
----------------------------------------------------------------------
diff --git a/storm-core/test/clj/backtype/storm/worker_test.clj b/storm-core/test/clj/backtype/storm/worker_test.clj
index 2e0533d..b8e5f0f 100644
--- a/storm-core/test/clj/backtype/storm/worker_test.clj
+++ b/storm-core/test/clj/backtype/storm/worker_test.clj
@@ -15,15 +15,184 @@
 ;; limitations under the License.
 (ns backtype.storm.worker-test
   (:use [clojure test])
+  (:require [backtype.storm.daemon [worker :as worker]])
+  (:require [backtype.storm [util :as util]])
+  (:require [conjure.core])
+  (:require [clj-time.core :as time])
+  (:require [clj-time.coerce :as coerce])
+  (:import [backtype.storm.generated LogConfig LogLevel LogLevelAction])
+  (:import [org.apache.logging.log4j Level LogManager])
+  (:import [org.slf4j Logger])
+  (:use [conjure core])
+  (:use [backtype.storm testing log])
+  (:use [backtype.storm.daemon common])
+  (:use [clojure.string :only [join]])
   (:import [backtype.storm.messaging TaskMessage IContext IConnection ConnectionWithStatus ConnectionWithStatus$Status])
   (:import [org.mockito Mockito])
-  (:use [backtype.storm testing])
-  (:use [backtype.storm.daemon common])
-
-  (:require [backtype.storm.daemon [worker :as worker]])
   )
 
 
+;; A timeout in the future must not trigger a reset: the entry stays in the
+;; timeout map.
+(deftest test-log-reset-should-not-trigger-for-future-time
+  (with-local-cluster [cluster]
+    (let [worker (:worker cluster)
+          present (time/now)
+          the-future (coerce/to-long (time/plus present (time/secs 1)))
+          mock-config {"foo" {:timeout the-future}}
+          mock-config-atom (atom mock-config)]
+      ;; pin "now" so the timeout comparison is deterministic
+      (stubbing [time/now present]
+        (worker/reset-log-levels mock-config-atom)
+        ;; if the worker doesn't reset log levels, the atom should not be nil
+        (is (not(= @mock-config-atom nil)))))))
+
+;; A timeout in the past must trigger a reset and remove the entry from the
+;; timeout map.
+(deftest test-log-reset-triggers-for-past-time
+  (with-local-cluster [cluster]
+    (let [worker (:worker cluster)
+          present (time/now)
+          past (time/plus present (time/secs -1))
+          mock-config {"foo" { :timeout (coerce/to-long past)
+                               :target-log-level Level/INFO
+                               :reset-log-level Level/WARN}}
+          mock-config-atom (atom mock-config)]
+      ;; pin "now" so the timeout comparison is deterministic
+      (stubbing [time/now present]
+        (worker/reset-log-levels mock-config-atom)
+        ;; the logger config is removed from atom
+        (is (= @mock-config-atom {}))))))
+
+;; With no timeout entries there is nothing to reset: the atom stays an empty
+;; map and set-logger-level must never be invoked.
+;; (Cleanup: the original bound `worker` and `past` without using them.)
+(deftest test-log-reset-resets-does-nothing-for-empty-log-config
+  (with-local-cluster [cluster]
+    (let [present (time/now)
+          mock-config-atom (atom {})]
+      ;; stub the logger mutation and pin "now" for determinism
+      (stubbing [worker/set-logger-level nil
+                 time/now present]
+        (worker/reset-log-levels mock-config-atom)
+        ;; the atom remains an empty map
+        (is (= @mock-config-atom {}))
+        ;; test that the set-logger-level function was not called
+        (verify-call-times-for worker/set-logger-level 0)))))
+
+;; An expired entry for the root logger is reset back to its recorded
+;; reset-log-level (here WARN) and removed from the timeout map.
+(deftest test-log-reset-resets-root-logger-if-set
+  (with-local-cluster [cluster]
+    (let [worker (:worker cluster)
+          present (time/now)
+          past (coerce/to-long (time/plus present (time/secs -1)))
+          mock-config {LogManager/ROOT_LOGGER_NAME  {:timeout past
+                                                     :target-log-level Level/DEBUG
+                                                     :reset-log-level Level/WARN}}
+          mock-config-atom (atom mock-config)]
+      ;; stub the logger mutation and pin "now" for determinism
+      (stubbing [worker/set-logger-level nil
+                 time/now present]
+        (worker/reset-log-levels mock-config-atom)
+        ;; if the worker resets log level, the atom is reset to {}
+        (is (= @mock-config-atom {}))
+        ;; ensure we reset back to WARN level
+        (verify-call-times-for worker/set-logger-level 1)
+        (verify-first-call-args-for-indices worker/set-logger-level [1 2] LogManager/ROOT_LOGGER_NAME Level/WARN)))))
+
+;; This should be removed when it goes into conjure
+;; BUGFIX: the original reported every failure as coming from
+;; "verify-first-call-args-for-indices" (a copy-paste of the macro this was
+;; derived from); all four reporting strings now name this macro correctly.
+(defmacro verify-nth-call-args-for-indices
+  "Asserts that the function was called at least once, and the nth call was
+   passed the args specified, into the indices of the arglist specified. In
+   other words, it checks only the particular args you care about."
+  [n fn-name indices & args]
+  `(do
+     (assert-in-fake-context "verify-nth-call-args-for-indices")
+     (assert-conjurified-fn "verify-nth-call-args-for-indices" ~fn-name)
+     (is (< ~n (count (get @call-times ~fn-name)))
+         (str "(verify-nth-call-args-for-indices " ~n " " ~fn-name " " ~indices " " ~(join " " args) ")"))
+     (let [nth-call-args# (nth (get @call-times ~fn-name) ~n)
+           indices-in-range?# (< (apply max ~indices) (count nth-call-args#))]
+       (if indices-in-range?#
+         (is (= ~(vec args) (map #(nth nth-call-args# %) ~indices))
+             (str "(verify-nth-call-args-for-indices " ~n " " ~fn-name " " ~indices " " ~(join " " args) ")"))
+         (is (= :fail (format "indices %s are out of range for the args, %s" ~indices ~(vec args)))
+             (str "(verify-nth-call-args-for-indices " ~n " " ~fn-name " " ~indices " " ~(join " " args) ")"))))))
+
+;; All expired named-logger entries are reset (in sorted logger-name order)
+;; to their recorded reset levels, and the timeout map is emptied.
+;; (Cleanup: the original bound `worker` and a `result` atom without using them.)
+(deftest test-log-resets-named-loggers-with-past-timeout
+  (with-local-cluster [cluster]
+    (let [present (time/now)
+          past (coerce/to-long (time/plus present (time/secs -1)))
+          mock-config {"my_debug_logger" {:timeout past
+                                          :target-log-level Level/DEBUG
+                                          :reset-log-level Level/INFO}
+                       "my_info_logger" {:timeout past
+                                         :target-log-level Level/INFO
+                                         :reset-log-level Level/WARN}
+                       "my_error_logger" {:timeout past
+                                          :target-log-level Level/ERROR
+                                          :reset-log-level Level/INFO}}
+          mock-config-atom (atom mock-config)]
+      ;; stub the logger mutation and pin "now" for determinism
+      (stubbing [worker/set-logger-level nil
+                 time/now present]
+          (worker/reset-log-levels mock-config-atom)
+          ;; if the worker resets log level, the atom is reset to {}
+          (is (= @mock-config-atom {}))
+          (verify-call-times-for worker/set-logger-level 3)
+          ;; resets happen in sorted logger-name order
+          (verify-nth-call-args-for-indices 0 worker/set-logger-level [1 2] "my_debug_logger" Level/INFO)
+          (verify-nth-call-args-for-indices 1 worker/set-logger-level [1 2] "my_error_logger" Level/INFO)
+          (verify-nth-call-args-for-indices 2 worker/set-logger-level [1 2] "my_info_logger" Level/WARN)))))
+
+(deftest test-process-root-log-level-to-debug-sets-logger-and-timeout-2
+  (with-local-cluster [cluster]
+    (let [worker (:worker cluster)
+          mock-config (LogConfig.)
+          root-level (LogLevel.)
+          mock-config-atom (atom nil)
+          orig-levels (atom {})
+          present (time/now)
+          in-thirty-seconds (coerce/to-long (time/plus present (time/secs 30)))]
+      ;; configure the root logger to be debug
+      (.set_reset_log_level_timeout_epoch root-level in-thirty-seconds)
+      (.set_target_log_level root-level "DEBUG")
+      (.set_action root-level LogLevelAction/UPDATE)
+      (.put_to_named_logger_level mock-config "ROOT" root-level)
+      (stubbing [worker/set-logger-level nil
+                 time/now present]
+          (worker/process-log-config-change mock-config-atom orig-levels mock-config)
+          ;; only the root logger is in the config, so set-logger-level is
+          ;; called exactly once, with an empty logger name (the root logger)
+          (log-message "Tests " @mock-config-atom)
+          (verify-call-times-for worker/set-logger-level 1)
+          (verify-nth-call-args-for-indices 0 worker/set-logger-level [1 2] "" Level/DEBUG)
+          (let [root-result (get @mock-config-atom LogManager/ROOT_LOGGER_NAME)]
+            (is (= (:action root-result) LogLevelAction/UPDATE))
+            (is (= (:target-log-level root-result) Level/DEBUG))
+            ;; defaults to INFO level when the logger isn't found previously
+            (is (= (:reset-log-level root-result) Level/INFO))
+            (is (= (:timeout root-result) in-thirty-seconds)))))))
+
+(deftest test-process-root-log-level-to-debug-sets-logger-and-timeout
+  (with-local-cluster [cluster]
+    (let [worker (:worker cluster)
+          mock-config (LogConfig.)
+          root-level (LogLevel.)
+          orig-levels (atom {})
+          present (time/now)
+          in-thirty-seconds (coerce/to-long (time/plus present (time/secs 30)))
+          mock-config-atom (atom {})]
+      ;; configure the root logger plus three named loggers, each with an
+      ;; UPDATE action and a reset timeout thirty seconds in the future
+      (doseq [named {"ROOT" "DEBUG"
+                     "my_debug_logger" "DEBUG"
+                     "my_info_logger" "INFO"
+                     "my_error_logger" "ERROR"}]
+        (let [level (LogLevel.)]
+          (.set_action level LogLevelAction/UPDATE)
+          (.set_reset_log_level_timeout_epoch level in-thirty-seconds)
+          (.set_target_log_level level (val named))
+          (.put_to_named_logger_level mock-config (key named) level)))
+      (log-message "Tests " mock-config)
+      (stubbing [worker/set-logger-level nil
+                 time/now present]
+          (worker/process-log-config-change mock-config-atom orig-levels mock-config)
+          ;; one call per configured logger; "ROOT" maps to the empty name
+          (verify-call-times-for worker/set-logger-level 4)
+          (verify-nth-call-args-for-indices 0 worker/set-logger-level [1 2] "" Level/DEBUG)
+          (verify-nth-call-args-for-indices 1 worker/set-logger-level [1 2] "my_debug_logger" Level/DEBUG)
+          (verify-nth-call-args-for-indices 2 worker/set-logger-level [1 2] "my_error_logger" Level/ERROR)
+          (verify-nth-call-args-for-indices 3 worker/set-logger-level [1 2] "my_info_logger" Level/INFO)))))
+
 (deftest test-worker-is-connection-ready
   (let [connection (Mockito/mock ConnectionWithStatus)]
     (. (Mockito/when (.status connection)) thenReturn ConnectionWithStatus$Status/Ready)
@@ -34,4 +203,4 @@
 
     (. (Mockito/when (.status connection)) thenReturn ConnectionWithStatus$Status/Closed)
     (is (= false (worker/is-connection-ready connection)))
-  ))
\ No newline at end of file
+  ))


[9/9] storm git commit: Added STORM-412 to changelog

Posted by bo...@apache.org.
Added STORM-412 to changelog


Project: http://git-wip-us.apache.org/repos/asf/storm/repo
Commit: http://git-wip-us.apache.org/repos/asf/storm/commit/9ac9eb5a
Tree: http://git-wip-us.apache.org/repos/asf/storm/tree/9ac9eb5a
Diff: http://git-wip-us.apache.org/repos/asf/storm/diff/9ac9eb5a

Branch: refs/heads/master
Commit: 9ac9eb5a26bf5e834e728c29673d26b65bacfc2f
Parents: dd78e45
Author: Robert (Bobby) Evans <ev...@yahoo-inc.com>
Authored: Mon Oct 5 14:14:10 2015 -0500
Committer: Robert (Bobby) Evans <ev...@yahoo-inc.com>
Committed: Mon Oct 5 14:14:10 2015 -0500

----------------------------------------------------------------------
 CHANGELOG.md | 1 +
 1 file changed, 1 insertion(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/storm/blob/9ac9eb5a/CHANGELOG.md
----------------------------------------------------------------------
diff --git a/CHANGELOG.md b/CHANGELOG.md
index bc3eac1..2c57d59 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,4 +1,5 @@
 ## 0.11.0
+ * STORM-412: Allow users to modify logging levels of running topologies
  * STORM-1078: Updated RateTracker to be thread safe
  * STORM-1082: fix nits for properties in kafka tests
  * STORM-993: include uptimeSeconds as JSON integer field


[7/9] storm git commit: Merge branch 'master' of github.com:apache/storm into STORM-412

Posted by bo...@apache.org.
Merge branch 'master' of github.com:apache/storm into STORM-412

Conflicts:
	bin/storm.py


Project: http://git-wip-us.apache.org/repos/asf/storm/repo
Commit: http://git-wip-us.apache.org/repos/asf/storm/commit/63978509
Tree: http://git-wip-us.apache.org/repos/asf/storm/tree/63978509
Diff: http://git-wip-us.apache.org/repos/asf/storm/diff/63978509

Branch: refs/heads/master
Commit: 63978509decc42d3922fea3b98a20e54b928e571
Parents: 754017f 7cf4d25
Author: Kishor Patil <kp...@yahoo-inc.com>
Authored: Mon Oct 5 18:53:51 2015 +0000
Committer: Kishor Patil <kp...@yahoo-inc.com>
Committed: Mon Oct 5 18:53:51 2015 +0000

----------------------------------------------------------------------
 .gitignore                                      |   3 +
 CHANGELOG.md                                    |  18 +-
 DEVELOPER.md                                    |  33 +--
 README.markdown                                 |   2 +
 STORM-UI-REST-API.md                            |  17 +-
 bin/storm.py                                    |  18 +-
 dev-tools/travis/travis-install.sh              |   4 +-
 dev-tools/travis/travis-script.sh               |   5 +-
 examples/storm-starter/pom.xml                  |   1 -
 external/flux/flux-core/pom.xml                 |   1 -
 external/flux/flux-examples/pom.xml             |   1 -
 external/storm-elasticsearch/pom.xml            |   5 +
 external/storm-eventhubs/pom.xml                |   1 -
 external/storm-hdfs/pom.xml                     |  56 +++-
 .../org/apache/storm/hdfs/bolt/HdfsBolt.java    | 143 ++++++++--
 .../apache/storm/hdfs/bolt/TestHdfsBolt.java    | 242 +++++++++++++++++
 external/storm-kafka/README.md                  |  16 +-
 external/storm-kafka/pom.xml                    |   5 +
 .../src/jvm/storm/kafka/KafkaSpout.java         |  32 ++-
 .../kafka/trident/OpaqueTridentKafkaSpout.java  |   4 +-
 .../trident/TransactionalTridentKafkaSpout.java |   4 +-
 .../src/test/storm/kafka/KafkaUtilsTest.java    |   2 +-
 .../test/storm/kafka/bolt/KafkaBoltTest.java    |   9 +-
 external/storm-solr/pom.xml                     |   4 +-
 pom.xml                                         | 125 ++++++---
 storm-core/pom.xml                              | 259 +++++++++++--------
 .../clj/backtype/storm/command/kill_workers.clj |  33 +++
 storm-core/src/clj/backtype/storm/config.clj    |   4 +-
 .../src/clj/backtype/storm/daemon/executor.clj  |   1 -
 .../clj/backtype/storm/daemon/supervisor.clj    |  24 +-
 storm-core/src/clj/backtype/storm/log.clj       |   2 +-
 storm-core/src/clj/backtype/storm/ui/core.clj   |  14 +-
 storm-core/src/clj/backtype/storm/util.clj      |   7 +-
 storm-core/src/jvm/backtype/storm/Config.java   |   2 +
 .../backtype/storm/utils/DisruptorQueue.java    |  51 +---
 .../jvm/backtype/storm/utils/RateTracker.java   | 147 +++++++----
 .../public/templates/index-page-template.html   |   2 +-
 .../backtype/storm/utils/RateTrackerTest.java   |  66 +++--
 storm-dist/binary/src/main/assembly/binary.xml  |  32 ---
 39 files changed, 991 insertions(+), 404 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/storm/blob/63978509/bin/storm.py
----------------------------------------------------------------------
diff --cc bin/storm.py
index 7b1dc83,87f62fd..1b9617e
--- a/bin/storm.py
+++ b/bin/storm.py
@@@ -594,7 -576,8 +609,8 @@@ COMMANDS = {"jar": jar, "kill": kill, "
              "remoteconfvalue": print_remoteconfvalue, "repl": repl, "classpath": print_classpath,
              "activate": activate, "deactivate": deactivate, "rebalance": rebalance, "help": print_usage,
              "list": listtopos, "dev-zookeeper": dev_zookeeper, "version": version, "monitor": monitor,
-             "upload-credentials": upload_credentials, "get-errors": get_errors, "set_log_level": set_log_level }
 -            "upload-credentials": upload_credentials, "get-errors": get_errors,
++            "upload-credentials": upload_credentials, "get-errors": get_errors, "set_log_level": set_log_level,
+             "kill_workers": kill_workers }
  
  def parse_config(config_list):
      global CONFIG_OPTS

http://git-wip-us.apache.org/repos/asf/storm/blob/63978509/storm-core/src/clj/backtype/storm/daemon/supervisor.clj
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/storm/blob/63978509/storm-core/src/clj/backtype/storm/ui/core.clj
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/storm/blob/63978509/storm-core/src/jvm/backtype/storm/Config.java
----------------------------------------------------------------------


[2/9] storm git commit: Adding thrift changes for Dynamic Logging

Posted by bo...@apache.org.
Adding thrift changes for Dynamic Logging


Project: http://git-wip-us.apache.org/repos/asf/storm/repo
Commit: http://git-wip-us.apache.org/repos/asf/storm/commit/d13cfe2d
Tree: http://git-wip-us.apache.org/repos/asf/storm/tree/d13cfe2d
Diff: http://git-wip-us.apache.org/repos/asf/storm/diff/d13cfe2d

Branch: refs/heads/master
Commit: d13cfe2dc9beda8fc4fda6fa6536d87b9ce514a1
Parents: 397d2d8
Author: Kishor Patil <kp...@yahoo-inc.com>
Authored: Thu Sep 24 14:22:37 2015 +0000
Committer: Kishor Patil <kp...@yahoo-inc.com>
Committed: Thu Sep 24 14:22:37 2015 +0000

----------------------------------------------------------------------
 .../jvm/backtype/storm/generated/LogConfig.java |  475 +++++
 .../jvm/backtype/storm/generated/LogLevel.java  |  836 +++++++++
 .../storm/generated/LogLevelAction.java         |   65 +
 .../jvm/backtype/storm/generated/Nimbus.java    | 1700 +++++++++++++++++-
 storm-core/src/py/storm/Nimbus-remote           |   14 +
 storm-core/src/py/storm/Nimbus.py               |  359 +++-
 storm-core/src/py/storm/ttypes.py               |  213 +++
 storm-core/src/storm.thrift                     |   37 +
 8 files changed, 3696 insertions(+), 3 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/storm/blob/d13cfe2d/storm-core/src/jvm/backtype/storm/generated/LogConfig.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/backtype/storm/generated/LogConfig.java b/storm-core/src/jvm/backtype/storm/generated/LogConfig.java
new file mode 100644
index 0000000..318ed86
--- /dev/null
+++ b/storm-core/src/jvm/backtype/storm/generated/LogConfig.java
@@ -0,0 +1,475 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+/**
+ * Autogenerated by Thrift Compiler (0.9.2)
+ *
+ * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
+ *  @generated
+ */
+package backtype.storm.generated;
+
+import org.apache.thrift.scheme.IScheme;
+import org.apache.thrift.scheme.SchemeFactory;
+import org.apache.thrift.scheme.StandardScheme;
+
+import org.apache.thrift.scheme.TupleScheme;
+import org.apache.thrift.protocol.TTupleProtocol;
+import org.apache.thrift.protocol.TProtocolException;
+import org.apache.thrift.EncodingUtils;
+import org.apache.thrift.TException;
+import org.apache.thrift.async.AsyncMethodCallback;
+import org.apache.thrift.server.AbstractNonblockingServer.*;
+import java.util.List;
+import java.util.ArrayList;
+import java.util.Map;
+import java.util.HashMap;
+import java.util.EnumMap;
+import java.util.Set;
+import java.util.HashSet;
+import java.util.EnumSet;
+import java.util.Collections;
+import java.util.BitSet;
+import java.nio.ByteBuffer;
+import java.util.Arrays;
+import javax.annotation.Generated;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+@SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-9-24")
+public class LogConfig implements org.apache.thrift.TBase<LogConfig, LogConfig._Fields>, java.io.Serializable, Cloneable, Comparable<LogConfig> {
+  private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("LogConfig");
+
+  private static final org.apache.thrift.protocol.TField NAMED_LOGGER_LEVEL_FIELD_DESC = new org.apache.thrift.protocol.TField("named_logger_level", org.apache.thrift.protocol.TType.MAP, (short)2);
+
+  private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>();
+  static {
+    schemes.put(StandardScheme.class, new LogConfigStandardSchemeFactory());
+    schemes.put(TupleScheme.class, new LogConfigTupleSchemeFactory());
+  }
+
+  private Map<String,LogLevel> named_logger_level; // optional
+
+  /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
+  public enum _Fields implements org.apache.thrift.TFieldIdEnum {
+    NAMED_LOGGER_LEVEL((short)2, "named_logger_level");
+
+    private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();
+
+    static {
+      for (_Fields field : EnumSet.allOf(_Fields.class)) {
+        byName.put(field.getFieldName(), field);
+      }
+    }
+
+    /**
+     * Find the _Fields constant that matches fieldId, or null if its not found.
+     */
+    public static _Fields findByThriftId(int fieldId) {
+      switch(fieldId) {
+        case 2: // NAMED_LOGGER_LEVEL
+          return NAMED_LOGGER_LEVEL;
+        default:
+          return null;
+      }
+    }
+
+    /**
+     * Find the _Fields constant that matches fieldId, throwing an exception
+     * if it is not found.
+     */
+    public static _Fields findByThriftIdOrThrow(int fieldId) {
+      _Fields fields = findByThriftId(fieldId);
+      if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
+      return fields;
+    }
+
+    /**
+     * Find the _Fields constant that matches name, or null if its not found.
+     */
+    public static _Fields findByName(String name) {
+      return byName.get(name);
+    }
+
+    private final short _thriftId;
+    private final String _fieldName;
+
+    _Fields(short thriftId, String fieldName) {
+      _thriftId = thriftId;
+      _fieldName = fieldName;
+    }
+
+    public short getThriftFieldId() {
+      return _thriftId;
+    }
+
+    public String getFieldName() {
+      return _fieldName;
+    }
+  }
+
+  // isset id assignments
+  private static final _Fields optionals[] = {_Fields.NAMED_LOGGER_LEVEL};
+  public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
+  static {
+    Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
+    tmpMap.put(_Fields.NAMED_LOGGER_LEVEL, new org.apache.thrift.meta_data.FieldMetaData("named_logger_level", org.apache.thrift.TFieldRequirementType.OPTIONAL, 
+        new org.apache.thrift.meta_data.MapMetaData(org.apache.thrift.protocol.TType.MAP, 
+            new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING), 
+            new org.apache.thrift.meta_data.StructMetaData(org.apache.thrift.protocol.TType.STRUCT, LogLevel.class))));
+    metaDataMap = Collections.unmodifiableMap(tmpMap);
+    org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(LogConfig.class, metaDataMap);
+  }
+
+  public LogConfig() {
+  }
+
+  /**
+   * Performs a deep copy on <i>other</i>.
+   */
+  public LogConfig(LogConfig other) {
+    if (other.is_set_named_logger_level()) {
+      Map<String,LogLevel> __this__named_logger_level = new HashMap<String,LogLevel>(other.named_logger_level.size());
+      for (Map.Entry<String, LogLevel> other_element : other.named_logger_level.entrySet()) {
+
+        String other_element_key = other_element.getKey();
+        LogLevel other_element_value = other_element.getValue();
+
+        String __this__named_logger_level_copy_key = other_element_key;
+
+        LogLevel __this__named_logger_level_copy_value = new LogLevel(other_element_value);
+
+        __this__named_logger_level.put(__this__named_logger_level_copy_key, __this__named_logger_level_copy_value);
+      }
+      this.named_logger_level = __this__named_logger_level;
+    }
+  }
+
+  public LogConfig deepCopy() {
+    return new LogConfig(this);
+  }
+
+  @Override
+  public void clear() {
+    this.named_logger_level = null;
+  }
+
+  public int get_named_logger_level_size() {
+    return (this.named_logger_level == null) ? 0 : this.named_logger_level.size();
+  }
+
+  public void put_to_named_logger_level(String key, LogLevel val) {
+    if (this.named_logger_level == null) {
+      this.named_logger_level = new HashMap<String,LogLevel>();
+    }
+    this.named_logger_level.put(key, val);
+  }
+
+  public Map<String,LogLevel> get_named_logger_level() {
+    return this.named_logger_level;
+  }
+
+  public void set_named_logger_level(Map<String,LogLevel> named_logger_level) {
+    this.named_logger_level = named_logger_level;
+  }
+
+  public void unset_named_logger_level() {
+    this.named_logger_level = null;
+  }
+
+  /** Returns true if field named_logger_level is set (has been assigned a value) and false otherwise */
+  public boolean is_set_named_logger_level() {
+    return this.named_logger_level != null;
+  }
+
+  public void set_named_logger_level_isSet(boolean value) {
+    if (!value) {
+      this.named_logger_level = null;
+    }
+  }
+
+  public void setFieldValue(_Fields field, Object value) {
+    switch (field) {
+    case NAMED_LOGGER_LEVEL:
+      if (value == null) {
+        unset_named_logger_level();
+      } else {
+        set_named_logger_level((Map<String,LogLevel>)value);
+      }
+      break;
+
+    }
+  }
+
+  public Object getFieldValue(_Fields field) {
+    switch (field) {
+    case NAMED_LOGGER_LEVEL:
+      return get_named_logger_level();
+
+    }
+    throw new IllegalStateException();
+  }
+
+  /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
+  public boolean isSet(_Fields field) {
+    if (field == null) {
+      throw new IllegalArgumentException();
+    }
+
+    switch (field) {
+    case NAMED_LOGGER_LEVEL:
+      return is_set_named_logger_level();
+    }
+    throw new IllegalStateException();
+  }
+
+  @Override
+  public boolean equals(Object that) {
+    if (that == null)
+      return false;
+    if (that instanceof LogConfig)
+      return this.equals((LogConfig)that);
+    return false;
+  }
+
+  public boolean equals(LogConfig that) {
+    if (that == null)
+      return false;
+
+    boolean this_present_named_logger_level = true && this.is_set_named_logger_level();
+    boolean that_present_named_logger_level = true && that.is_set_named_logger_level();
+    if (this_present_named_logger_level || that_present_named_logger_level) {
+      if (!(this_present_named_logger_level && that_present_named_logger_level))
+        return false;
+      if (!this.named_logger_level.equals(that.named_logger_level))
+        return false;
+    }
+
+    return true;
+  }
+
+  @Override
+  public int hashCode() {
+    List<Object> list = new ArrayList<Object>();
+
+    boolean present_named_logger_level = true && (is_set_named_logger_level());
+    list.add(present_named_logger_level);
+    if (present_named_logger_level)
+      list.add(named_logger_level);
+
+    return list.hashCode();
+  }
+
+  @Override
+  public int compareTo(LogConfig other) {
+    if (!getClass().equals(other.getClass())) {
+      return getClass().getName().compareTo(other.getClass().getName());
+    }
+
+    int lastComparison = 0;
+
+    lastComparison = Boolean.valueOf(is_set_named_logger_level()).compareTo(other.is_set_named_logger_level());
+    if (lastComparison != 0) {
+      return lastComparison;
+    }
+    if (is_set_named_logger_level()) {
+      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.named_logger_level, other.named_logger_level);
+      if (lastComparison != 0) {
+        return lastComparison;
+      }
+    }
+    return 0;
+  }
+
+  public _Fields fieldForId(int fieldId) {
+    return _Fields.findByThriftId(fieldId);
+  }
+
+  public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException {
+    schemes.get(iprot.getScheme()).getScheme().read(iprot, this);
+  }
+
+  public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {
+    schemes.get(oprot.getScheme()).getScheme().write(oprot, this);
+  }
+
+  @Override
+  public String toString() {
+    StringBuilder sb = new StringBuilder("LogConfig(");
+    boolean first = true;
+
+    if (is_set_named_logger_level()) {
+      sb.append("named_logger_level:");
+      if (this.named_logger_level == null) {
+        sb.append("null");
+      } else {
+        sb.append(this.named_logger_level);
+      }
+      first = false;
+    }
+    sb.append(")");
+    return sb.toString();
+  }
+
+  public void validate() throws org.apache.thrift.TException {
+    // check for required fields
+    // check for sub-struct validity
+  }
+
+  private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
+    try {
+      write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out)));
+    } catch (org.apache.thrift.TException te) {
+      throw new java.io.IOException(te);
+    }
+  }
+
+  private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
+    try {
+      read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));
+    } catch (org.apache.thrift.TException te) {
+      throw new java.io.IOException(te);
+    }
+  }
+
+  private static class LogConfigStandardSchemeFactory implements SchemeFactory {
+    public LogConfigStandardScheme getScheme() {
+      return new LogConfigStandardScheme();
+    }
+  }
+
+  private static class LogConfigStandardScheme extends StandardScheme<LogConfig> {
+
+    public void read(org.apache.thrift.protocol.TProtocol iprot, LogConfig struct) throws org.apache.thrift.TException {
+      org.apache.thrift.protocol.TField schemeField;
+      iprot.readStructBegin();
+      while (true)
+      {
+        schemeField = iprot.readFieldBegin();
+        if (schemeField.type == org.apache.thrift.protocol.TType.STOP) { 
+          break;
+        }
+        switch (schemeField.id) {
+          case 2: // NAMED_LOGGER_LEVEL
+            if (schemeField.type == org.apache.thrift.protocol.TType.MAP) {
+              {
+                org.apache.thrift.protocol.TMap _map510 = iprot.readMapBegin();
+                struct.named_logger_level = new HashMap<String,LogLevel>(2*_map510.size);
+                String _key511;
+                LogLevel _val512;
+                for (int _i513 = 0; _i513 < _map510.size; ++_i513)
+                {
+                  _key511 = iprot.readString();
+                  _val512 = new LogLevel();
+                  _val512.read(iprot);
+                  struct.named_logger_level.put(_key511, _val512);
+                }
+                iprot.readMapEnd();
+              }
+              struct.set_named_logger_level_isSet(true);
+            } else { 
+              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+            }
+            break;
+          default:
+            org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+        }
+        iprot.readFieldEnd();
+      }
+      iprot.readStructEnd();
+      struct.validate();
+    }
+
+    public void write(org.apache.thrift.protocol.TProtocol oprot, LogConfig struct) throws org.apache.thrift.TException {
+      struct.validate();
+
+      oprot.writeStructBegin(STRUCT_DESC);
+      if (struct.named_logger_level != null) {
+        if (struct.is_set_named_logger_level()) {
+          oprot.writeFieldBegin(NAMED_LOGGER_LEVEL_FIELD_DESC);
+          {
+            oprot.writeMapBegin(new org.apache.thrift.protocol.TMap(org.apache.thrift.protocol.TType.STRING, org.apache.thrift.protocol.TType.STRUCT, struct.named_logger_level.size()));
+            for (Map.Entry<String, LogLevel> _iter514 : struct.named_logger_level.entrySet())
+            {
+              oprot.writeString(_iter514.getKey());
+              _iter514.getValue().write(oprot);
+            }
+            oprot.writeMapEnd();
+          }
+          oprot.writeFieldEnd();
+        }
+      }
+      oprot.writeFieldStop();
+      oprot.writeStructEnd();
+    }
+
+  }
+
+  private static class LogConfigTupleSchemeFactory implements SchemeFactory {
+    public LogConfigTupleScheme getScheme() {
+      return new LogConfigTupleScheme();
+    }
+  }
+
+  private static class LogConfigTupleScheme extends TupleScheme<LogConfig> {
+
+    @Override
+    public void write(org.apache.thrift.protocol.TProtocol prot, LogConfig struct) throws org.apache.thrift.TException {
+      TTupleProtocol oprot = (TTupleProtocol) prot;
+      BitSet optionals = new BitSet();
+      if (struct.is_set_named_logger_level()) {
+        optionals.set(0);
+      }
+      oprot.writeBitSet(optionals, 1);
+      if (struct.is_set_named_logger_level()) {
+        {
+          oprot.writeI32(struct.named_logger_level.size());
+          for (Map.Entry<String, LogLevel> _iter515 : struct.named_logger_level.entrySet())
+          {
+            oprot.writeString(_iter515.getKey());
+            _iter515.getValue().write(oprot);
+          }
+        }
+      }
+    }
+
+    @Override
+    public void read(org.apache.thrift.protocol.TProtocol prot, LogConfig struct) throws org.apache.thrift.TException {
+      TTupleProtocol iprot = (TTupleProtocol) prot;
+      BitSet incoming = iprot.readBitSet(1);
+      if (incoming.get(0)) {
+        {
+          org.apache.thrift.protocol.TMap _map516 = new org.apache.thrift.protocol.TMap(org.apache.thrift.protocol.TType.STRING, org.apache.thrift.protocol.TType.STRUCT, iprot.readI32());
+          struct.named_logger_level = new HashMap<String,LogLevel>(2*_map516.size);
+          String _key517;
+          LogLevel _val518;
+          for (int _i519 = 0; _i519 < _map516.size; ++_i519)
+          {
+            _key517 = iprot.readString();
+            _val518 = new LogLevel();
+            _val518.read(iprot);
+            struct.named_logger_level.put(_key517, _val518);
+          }
+        }
+        struct.set_named_logger_level_isSet(true);
+      }
+    }
+  }
+
+}
+

http://git-wip-us.apache.org/repos/asf/storm/blob/d13cfe2d/storm-core/src/jvm/backtype/storm/generated/LogLevel.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/backtype/storm/generated/LogLevel.java b/storm-core/src/jvm/backtype/storm/generated/LogLevel.java
new file mode 100644
index 0000000..dba55d0
--- /dev/null
+++ b/storm-core/src/jvm/backtype/storm/generated/LogLevel.java
@@ -0,0 +1,836 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+/**
+ * Autogenerated by Thrift Compiler (0.9.2)
+ *
+ * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
+ *  @generated
+ */
+package backtype.storm.generated;
+
+import org.apache.thrift.scheme.IScheme;
+import org.apache.thrift.scheme.SchemeFactory;
+import org.apache.thrift.scheme.StandardScheme;
+
+import org.apache.thrift.scheme.TupleScheme;
+import org.apache.thrift.protocol.TTupleProtocol;
+import org.apache.thrift.protocol.TProtocolException;
+import org.apache.thrift.EncodingUtils;
+import org.apache.thrift.TException;
+import org.apache.thrift.async.AsyncMethodCallback;
+import org.apache.thrift.server.AbstractNonblockingServer.*;
+import java.util.List;
+import java.util.ArrayList;
+import java.util.Map;
+import java.util.HashMap;
+import java.util.EnumMap;
+import java.util.Set;
+import java.util.HashSet;
+import java.util.EnumSet;
+import java.util.Collections;
+import java.util.BitSet;
+import java.nio.ByteBuffer;
+import java.util.Arrays;
+import javax.annotation.Generated;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+@SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-9-24")
+public class LogLevel implements org.apache.thrift.TBase<LogLevel, LogLevel._Fields>, java.io.Serializable, Cloneable, Comparable<LogLevel> {
+  private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("LogLevel");
+
+  private static final org.apache.thrift.protocol.TField ACTION_FIELD_DESC = new org.apache.thrift.protocol.TField("action", org.apache.thrift.protocol.TType.I32, (short)1);
+  private static final org.apache.thrift.protocol.TField TARGET_LOG_LEVEL_FIELD_DESC = new org.apache.thrift.protocol.TField("target_log_level", org.apache.thrift.protocol.TType.STRING, (short)2);
+  private static final org.apache.thrift.protocol.TField RESET_LOG_LEVEL_TIMEOUT_SECS_FIELD_DESC = new org.apache.thrift.protocol.TField("reset_log_level_timeout_secs", org.apache.thrift.protocol.TType.I32, (short)3);
+  private static final org.apache.thrift.protocol.TField RESET_LOG_LEVEL_TIMEOUT_EPOCH_FIELD_DESC = new org.apache.thrift.protocol.TField("reset_log_level_timeout_epoch", org.apache.thrift.protocol.TType.I64, (short)4);
+  private static final org.apache.thrift.protocol.TField RESET_LOG_LEVEL_FIELD_DESC = new org.apache.thrift.protocol.TField("reset_log_level", org.apache.thrift.protocol.TType.STRING, (short)5);
+
+  private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>();
+  static {
+    schemes.put(StandardScheme.class, new LogLevelStandardSchemeFactory());
+    schemes.put(TupleScheme.class, new LogLevelTupleSchemeFactory());
+  }
+
+  private LogLevelAction action; // required
+  private String target_log_level; // optional
+  private int reset_log_level_timeout_secs; // optional
+  private long reset_log_level_timeout_epoch; // optional
+  private String reset_log_level; // optional
+
+  /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
+  public enum _Fields implements org.apache.thrift.TFieldIdEnum {
+    /**
+     * 
+     * @see LogLevelAction
+     */
+    ACTION((short)1, "action"),
+    TARGET_LOG_LEVEL((short)2, "target_log_level"),
+    RESET_LOG_LEVEL_TIMEOUT_SECS((short)3, "reset_log_level_timeout_secs"),
+    RESET_LOG_LEVEL_TIMEOUT_EPOCH((short)4, "reset_log_level_timeout_epoch"),
+    RESET_LOG_LEVEL((short)5, "reset_log_level");
+
+    private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();
+
+    static {
+      for (_Fields field : EnumSet.allOf(_Fields.class)) {
+        byName.put(field.getFieldName(), field);
+      }
+    }
+
+    /**
+     * Find the _Fields constant that matches fieldId, or null if its not found.
+     */
+    public static _Fields findByThriftId(int fieldId) {
+      switch(fieldId) {
+        case 1: // ACTION
+          return ACTION;
+        case 2: // TARGET_LOG_LEVEL
+          return TARGET_LOG_LEVEL;
+        case 3: // RESET_LOG_LEVEL_TIMEOUT_SECS
+          return RESET_LOG_LEVEL_TIMEOUT_SECS;
+        case 4: // RESET_LOG_LEVEL_TIMEOUT_EPOCH
+          return RESET_LOG_LEVEL_TIMEOUT_EPOCH;
+        case 5: // RESET_LOG_LEVEL
+          return RESET_LOG_LEVEL;
+        default:
+          return null;
+      }
+    }
+
+    /**
+     * Find the _Fields constant that matches fieldId, throwing an exception
+     * if it is not found.
+     */
+    public static _Fields findByThriftIdOrThrow(int fieldId) {
+      _Fields fields = findByThriftId(fieldId);
+      if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
+      return fields;
+    }
+
+    /**
+     * Find the _Fields constant that matches name, or null if its not found.
+     */
+    public static _Fields findByName(String name) {
+      return byName.get(name);
+    }
+
+    private final short _thriftId;
+    private final String _fieldName;
+
+    _Fields(short thriftId, String fieldName) {
+      _thriftId = thriftId;
+      _fieldName = fieldName;
+    }
+
+    public short getThriftFieldId() {
+      return _thriftId;
+    }
+
+    public String getFieldName() {
+      return _fieldName;
+    }
+  }
+
+  // isset id assignments
+  private static final int __RESET_LOG_LEVEL_TIMEOUT_SECS_ISSET_ID = 0;
+  private static final int __RESET_LOG_LEVEL_TIMEOUT_EPOCH_ISSET_ID = 1;
+  private byte __isset_bitfield = 0;
+  private static final _Fields optionals[] = {_Fields.TARGET_LOG_LEVEL,_Fields.RESET_LOG_LEVEL_TIMEOUT_SECS,_Fields.RESET_LOG_LEVEL_TIMEOUT_EPOCH,_Fields.RESET_LOG_LEVEL};
+  public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
+  static {
+    Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
+    tmpMap.put(_Fields.ACTION, new org.apache.thrift.meta_data.FieldMetaData("action", org.apache.thrift.TFieldRequirementType.REQUIRED, 
+        new org.apache.thrift.meta_data.EnumMetaData(org.apache.thrift.protocol.TType.ENUM, LogLevelAction.class)));
+    tmpMap.put(_Fields.TARGET_LOG_LEVEL, new org.apache.thrift.meta_data.FieldMetaData("target_log_level", org.apache.thrift.TFieldRequirementType.OPTIONAL, 
+        new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)));
+    tmpMap.put(_Fields.RESET_LOG_LEVEL_TIMEOUT_SECS, new org.apache.thrift.meta_data.FieldMetaData("reset_log_level_timeout_secs", org.apache.thrift.TFieldRequirementType.OPTIONAL, 
+        new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.I32)));
+    tmpMap.put(_Fields.RESET_LOG_LEVEL_TIMEOUT_EPOCH, new org.apache.thrift.meta_data.FieldMetaData("reset_log_level_timeout_epoch", org.apache.thrift.TFieldRequirementType.OPTIONAL, 
+        new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.I64)));
+    tmpMap.put(_Fields.RESET_LOG_LEVEL, new org.apache.thrift.meta_data.FieldMetaData("reset_log_level", org.apache.thrift.TFieldRequirementType.OPTIONAL, 
+        new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)));
+    metaDataMap = Collections.unmodifiableMap(tmpMap);
+    org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(LogLevel.class, metaDataMap);
+  }
+
+  public LogLevel() {
+  }
+
+  public LogLevel(
+    LogLevelAction action)
+  {
+    this();
+    this.action = action;
+  }
+
+  /**
+   * Performs a deep copy on <i>other</i>.
+   */
+  public LogLevel(LogLevel other) {
+    __isset_bitfield = other.__isset_bitfield;
+    if (other.is_set_action()) {
+      this.action = other.action;
+    }
+    if (other.is_set_target_log_level()) {
+      this.target_log_level = other.target_log_level;
+    }
+    this.reset_log_level_timeout_secs = other.reset_log_level_timeout_secs;
+    this.reset_log_level_timeout_epoch = other.reset_log_level_timeout_epoch;
+    if (other.is_set_reset_log_level()) {
+      this.reset_log_level = other.reset_log_level;
+    }
+  }
+
+  public LogLevel deepCopy() {
+    return new LogLevel(this);
+  }
+
+  @Override
+  public void clear() {
+    this.action = null;
+    this.target_log_level = null;
+    set_reset_log_level_timeout_secs_isSet(false);
+    this.reset_log_level_timeout_secs = 0;
+    set_reset_log_level_timeout_epoch_isSet(false);
+    this.reset_log_level_timeout_epoch = 0;
+    this.reset_log_level = null;
+  }
+
+  /**
+   * 
+   * @see LogLevelAction
+   */
+  public LogLevelAction get_action() {
+    return this.action;
+  }
+
+  /**
+   * 
+   * @see LogLevelAction
+   */
+  public void set_action(LogLevelAction action) {
+    this.action = action;
+  }
+
+  public void unset_action() {
+    this.action = null;
+  }
+
+  /** Returns true if field action is set (has been assigned a value) and false otherwise */
+  public boolean is_set_action() {
+    return this.action != null;
+  }
+
+  public void set_action_isSet(boolean value) {
+    if (!value) {
+      this.action = null;
+    }
+  }
+
+  public String get_target_log_level() {
+    return this.target_log_level;
+  }
+
+  public void set_target_log_level(String target_log_level) {
+    this.target_log_level = target_log_level;
+  }
+
+  public void unset_target_log_level() {
+    this.target_log_level = null;
+  }
+
+  /** Returns true if field target_log_level is set (has been assigned a value) and false otherwise */
+  public boolean is_set_target_log_level() {
+    return this.target_log_level != null;
+  }
+
+  public void set_target_log_level_isSet(boolean value) {
+    if (!value) {
+      this.target_log_level = null;
+    }
+  }
+
+  public int get_reset_log_level_timeout_secs() {
+    return this.reset_log_level_timeout_secs;
+  }
+
+  public void set_reset_log_level_timeout_secs(int reset_log_level_timeout_secs) {
+    this.reset_log_level_timeout_secs = reset_log_level_timeout_secs;
+    set_reset_log_level_timeout_secs_isSet(true);
+  }
+
+  public void unset_reset_log_level_timeout_secs() {
+    __isset_bitfield = EncodingUtils.clearBit(__isset_bitfield, __RESET_LOG_LEVEL_TIMEOUT_SECS_ISSET_ID);
+  }
+
+  /** Returns true if field reset_log_level_timeout_secs is set (has been assigned a value) and false otherwise */
+  public boolean is_set_reset_log_level_timeout_secs() {
+    return EncodingUtils.testBit(__isset_bitfield, __RESET_LOG_LEVEL_TIMEOUT_SECS_ISSET_ID);
+  }
+
+  public void set_reset_log_level_timeout_secs_isSet(boolean value) {
+    __isset_bitfield = EncodingUtils.setBit(__isset_bitfield, __RESET_LOG_LEVEL_TIMEOUT_SECS_ISSET_ID, value);
+  }
+
+  public long get_reset_log_level_timeout_epoch() {
+    return this.reset_log_level_timeout_epoch;
+  }
+
+  public void set_reset_log_level_timeout_epoch(long reset_log_level_timeout_epoch) {
+    this.reset_log_level_timeout_epoch = reset_log_level_timeout_epoch;
+    set_reset_log_level_timeout_epoch_isSet(true);
+  }
+
+  public void unset_reset_log_level_timeout_epoch() {
+    __isset_bitfield = EncodingUtils.clearBit(__isset_bitfield, __RESET_LOG_LEVEL_TIMEOUT_EPOCH_ISSET_ID);
+  }
+
+  /** Returns true if field reset_log_level_timeout_epoch is set (has been assigned a value) and false otherwise */
+  public boolean is_set_reset_log_level_timeout_epoch() {
+    return EncodingUtils.testBit(__isset_bitfield, __RESET_LOG_LEVEL_TIMEOUT_EPOCH_ISSET_ID);
+  }
+
+  public void set_reset_log_level_timeout_epoch_isSet(boolean value) {
+    __isset_bitfield = EncodingUtils.setBit(__isset_bitfield, __RESET_LOG_LEVEL_TIMEOUT_EPOCH_ISSET_ID, value);
+  }
+
+  public String get_reset_log_level() {
+    return this.reset_log_level;
+  }
+
+  public void set_reset_log_level(String reset_log_level) {
+    this.reset_log_level = reset_log_level;
+  }
+
+  public void unset_reset_log_level() {
+    this.reset_log_level = null;
+  }
+
+  /** Returns true if field reset_log_level is set (has been assigned a value) and false otherwise */
+  public boolean is_set_reset_log_level() {
+    return this.reset_log_level != null;
+  }
+
+  public void set_reset_log_level_isSet(boolean value) {
+    if (!value) {
+      this.reset_log_level = null;
+    }
+  }
+
+  public void setFieldValue(_Fields field, Object value) {
+    switch (field) {
+    case ACTION:
+      if (value == null) {
+        unset_action();
+      } else {
+        set_action((LogLevelAction)value);
+      }
+      break;
+
+    case TARGET_LOG_LEVEL:
+      if (value == null) {
+        unset_target_log_level();
+      } else {
+        set_target_log_level((String)value);
+      }
+      break;
+
+    case RESET_LOG_LEVEL_TIMEOUT_SECS:
+      if (value == null) {
+        unset_reset_log_level_timeout_secs();
+      } else {
+        set_reset_log_level_timeout_secs((Integer)value);
+      }
+      break;
+
+    case RESET_LOG_LEVEL_TIMEOUT_EPOCH:
+      if (value == null) {
+        unset_reset_log_level_timeout_epoch();
+      } else {
+        set_reset_log_level_timeout_epoch((Long)value);
+      }
+      break;
+
+    case RESET_LOG_LEVEL:
+      if (value == null) {
+        unset_reset_log_level();
+      } else {
+        set_reset_log_level((String)value);
+      }
+      break;
+
+    }
+  }
+
+  public Object getFieldValue(_Fields field) {
+    switch (field) {
+    case ACTION:
+      return get_action();
+
+    case TARGET_LOG_LEVEL:
+      return get_target_log_level();
+
+    case RESET_LOG_LEVEL_TIMEOUT_SECS:
+      return Integer.valueOf(get_reset_log_level_timeout_secs());
+
+    case RESET_LOG_LEVEL_TIMEOUT_EPOCH:
+      return Long.valueOf(get_reset_log_level_timeout_epoch());
+
+    case RESET_LOG_LEVEL:
+      return get_reset_log_level();
+
+    }
+    throw new IllegalStateException();
+  }
+
+  /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
+  public boolean isSet(_Fields field) {
+    if (field == null) {
+      throw new IllegalArgumentException();
+    }
+
+    switch (field) {
+    case ACTION:
+      return is_set_action();
+    case TARGET_LOG_LEVEL:
+      return is_set_target_log_level();
+    case RESET_LOG_LEVEL_TIMEOUT_SECS:
+      return is_set_reset_log_level_timeout_secs();
+    case RESET_LOG_LEVEL_TIMEOUT_EPOCH:
+      return is_set_reset_log_level_timeout_epoch();
+    case RESET_LOG_LEVEL:
+      return is_set_reset_log_level();
+    }
+    throw new IllegalStateException();
+  }
+
+  @Override
+  public boolean equals(Object that) {
+    if (that == null)
+      return false;
+    if (that instanceof LogLevel)
+      return this.equals((LogLevel)that);
+    return false;
+  }
+
+  public boolean equals(LogLevel that) {
+    if (that == null)
+      return false;
+
+    boolean this_present_action = true && this.is_set_action();
+    boolean that_present_action = true && that.is_set_action();
+    if (this_present_action || that_present_action) {
+      if (!(this_present_action && that_present_action))
+        return false;
+      if (!this.action.equals(that.action))
+        return false;
+    }
+
+    boolean this_present_target_log_level = true && this.is_set_target_log_level();
+    boolean that_present_target_log_level = true && that.is_set_target_log_level();
+    if (this_present_target_log_level || that_present_target_log_level) {
+      if (!(this_present_target_log_level && that_present_target_log_level))
+        return false;
+      if (!this.target_log_level.equals(that.target_log_level))
+        return false;
+    }
+
+    boolean this_present_reset_log_level_timeout_secs = true && this.is_set_reset_log_level_timeout_secs();
+    boolean that_present_reset_log_level_timeout_secs = true && that.is_set_reset_log_level_timeout_secs();
+    if (this_present_reset_log_level_timeout_secs || that_present_reset_log_level_timeout_secs) {
+      if (!(this_present_reset_log_level_timeout_secs && that_present_reset_log_level_timeout_secs))
+        return false;
+      if (this.reset_log_level_timeout_secs != that.reset_log_level_timeout_secs)
+        return false;
+    }
+
+    boolean this_present_reset_log_level_timeout_epoch = true && this.is_set_reset_log_level_timeout_epoch();
+    boolean that_present_reset_log_level_timeout_epoch = true && that.is_set_reset_log_level_timeout_epoch();
+    if (this_present_reset_log_level_timeout_epoch || that_present_reset_log_level_timeout_epoch) {
+      if (!(this_present_reset_log_level_timeout_epoch && that_present_reset_log_level_timeout_epoch))
+        return false;
+      if (this.reset_log_level_timeout_epoch != that.reset_log_level_timeout_epoch)
+        return false;
+    }
+
+    boolean this_present_reset_log_level = true && this.is_set_reset_log_level();
+    boolean that_present_reset_log_level = true && that.is_set_reset_log_level();
+    if (this_present_reset_log_level || that_present_reset_log_level) {
+      if (!(this_present_reset_log_level && that_present_reset_log_level))
+        return false;
+      if (!this.reset_log_level.equals(that.reset_log_level))
+        return false;
+    }
+
+    return true;
+  }
+
+  @Override
+  public int hashCode() {
+    List<Object> list = new ArrayList<Object>();
+
+    boolean present_action = true && (is_set_action());
+    list.add(present_action);
+    if (present_action)
+      list.add(action.getValue());
+
+    boolean present_target_log_level = true && (is_set_target_log_level());
+    list.add(present_target_log_level);
+    if (present_target_log_level)
+      list.add(target_log_level);
+
+    boolean present_reset_log_level_timeout_secs = true && (is_set_reset_log_level_timeout_secs());
+    list.add(present_reset_log_level_timeout_secs);
+    if (present_reset_log_level_timeout_secs)
+      list.add(reset_log_level_timeout_secs);
+
+    boolean present_reset_log_level_timeout_epoch = true && (is_set_reset_log_level_timeout_epoch());
+    list.add(present_reset_log_level_timeout_epoch);
+    if (present_reset_log_level_timeout_epoch)
+      list.add(reset_log_level_timeout_epoch);
+
+    boolean present_reset_log_level = true && (is_set_reset_log_level());
+    list.add(present_reset_log_level);
+    if (present_reset_log_level)
+      list.add(reset_log_level);
+
+    return list.hashCode();
+  }
+
+  @Override
+  public int compareTo(LogLevel other) {
+    if (!getClass().equals(other.getClass())) {
+      return getClass().getName().compareTo(other.getClass().getName());
+    }
+
+    int lastComparison = 0;
+
+    lastComparison = Boolean.valueOf(is_set_action()).compareTo(other.is_set_action());
+    if (lastComparison != 0) {
+      return lastComparison;
+    }
+    if (is_set_action()) {
+      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.action, other.action);
+      if (lastComparison != 0) {
+        return lastComparison;
+      }
+    }
+    lastComparison = Boolean.valueOf(is_set_target_log_level()).compareTo(other.is_set_target_log_level());
+    if (lastComparison != 0) {
+      return lastComparison;
+    }
+    if (is_set_target_log_level()) {
+      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.target_log_level, other.target_log_level);
+      if (lastComparison != 0) {
+        return lastComparison;
+      }
+    }
+    lastComparison = Boolean.valueOf(is_set_reset_log_level_timeout_secs()).compareTo(other.is_set_reset_log_level_timeout_secs());
+    if (lastComparison != 0) {
+      return lastComparison;
+    }
+    if (is_set_reset_log_level_timeout_secs()) {
+      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.reset_log_level_timeout_secs, other.reset_log_level_timeout_secs);
+      if (lastComparison != 0) {
+        return lastComparison;
+      }
+    }
+    lastComparison = Boolean.valueOf(is_set_reset_log_level_timeout_epoch()).compareTo(other.is_set_reset_log_level_timeout_epoch());
+    if (lastComparison != 0) {
+      return lastComparison;
+    }
+    if (is_set_reset_log_level_timeout_epoch()) {
+      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.reset_log_level_timeout_epoch, other.reset_log_level_timeout_epoch);
+      if (lastComparison != 0) {
+        return lastComparison;
+      }
+    }
+    lastComparison = Boolean.valueOf(is_set_reset_log_level()).compareTo(other.is_set_reset_log_level());
+    if (lastComparison != 0) {
+      return lastComparison;
+    }
+    if (is_set_reset_log_level()) {
+      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.reset_log_level, other.reset_log_level);
+      if (lastComparison != 0) {
+        return lastComparison;
+      }
+    }
+    return 0;
+  }
+
+  public _Fields fieldForId(int fieldId) {
+    return _Fields.findByThriftId(fieldId);
+  }
+
+  public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException {
+    schemes.get(iprot.getScheme()).getScheme().read(iprot, this);
+  }
+
+  public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {
+    schemes.get(oprot.getScheme()).getScheme().write(oprot, this);
+  }
+
+  @Override
+  public String toString() {
+    StringBuilder sb = new StringBuilder("LogLevel(");
+    boolean first = true;
+
+    sb.append("action:");
+    if (this.action == null) {
+      sb.append("null");
+    } else {
+      sb.append(this.action);
+    }
+    first = false;
+    if (is_set_target_log_level()) {
+      if (!first) sb.append(", ");
+      sb.append("target_log_level:");
+      if (this.target_log_level == null) {
+        sb.append("null");
+      } else {
+        sb.append(this.target_log_level);
+      }
+      first = false;
+    }
+    if (is_set_reset_log_level_timeout_secs()) {
+      if (!first) sb.append(", ");
+      sb.append("reset_log_level_timeout_secs:");
+      sb.append(this.reset_log_level_timeout_secs);
+      first = false;
+    }
+    if (is_set_reset_log_level_timeout_epoch()) {
+      if (!first) sb.append(", ");
+      sb.append("reset_log_level_timeout_epoch:");
+      sb.append(this.reset_log_level_timeout_epoch);
+      first = false;
+    }
+    if (is_set_reset_log_level()) {
+      if (!first) sb.append(", ");
+      sb.append("reset_log_level:");
+      if (this.reset_log_level == null) {
+        sb.append("null");
+      } else {
+        sb.append(this.reset_log_level);
+      }
+      first = false;
+    }
+    sb.append(")");
+    return sb.toString();
+  }
+
+  public void validate() throws org.apache.thrift.TException {
+    // check for required fields
+    if (!is_set_action()) {
+      throw new org.apache.thrift.protocol.TProtocolException("Required field 'action' is unset! Struct:" + toString());
+    }
+
+    // check for sub-struct validity
+  }
+
+  private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
+    try {
+      write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out)));
+    } catch (org.apache.thrift.TException te) {
+      throw new java.io.IOException(te);
+    }
+  }
+
+  private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
+    try {
+      // it doesn't seem like you should have to do this, but java serialization is wacky, and doesn't call the default constructor.
+      __isset_bitfield = 0;
+      read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));
+    } catch (org.apache.thrift.TException te) {
+      throw new java.io.IOException(te);
+    }
+  }
+
+  private static class LogLevelStandardSchemeFactory implements SchemeFactory {
+    public LogLevelStandardScheme getScheme() {
+      return new LogLevelStandardScheme();
+    }
+  }
+
+  private static class LogLevelStandardScheme extends StandardScheme<LogLevel> {
+
+    public void read(org.apache.thrift.protocol.TProtocol iprot, LogLevel struct) throws org.apache.thrift.TException {
+      org.apache.thrift.protocol.TField schemeField;
+      iprot.readStructBegin();
+      while (true)
+      {
+        schemeField = iprot.readFieldBegin();
+        if (schemeField.type == org.apache.thrift.protocol.TType.STOP) { 
+          break;
+        }
+        switch (schemeField.id) {
+          case 1: // ACTION
+            if (schemeField.type == org.apache.thrift.protocol.TType.I32) {
+              struct.action = backtype.storm.generated.LogLevelAction.findByValue(iprot.readI32());
+              struct.set_action_isSet(true);
+            } else { 
+              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+            }
+            break;
+          case 2: // TARGET_LOG_LEVEL
+            if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {
+              struct.target_log_level = iprot.readString();
+              struct.set_target_log_level_isSet(true);
+            } else { 
+              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+            }
+            break;
+          case 3: // RESET_LOG_LEVEL_TIMEOUT_SECS
+            if (schemeField.type == org.apache.thrift.protocol.TType.I32) {
+              struct.reset_log_level_timeout_secs = iprot.readI32();
+              struct.set_reset_log_level_timeout_secs_isSet(true);
+            } else { 
+              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+            }
+            break;
+          case 4: // RESET_LOG_LEVEL_TIMEOUT_EPOCH
+            if (schemeField.type == org.apache.thrift.protocol.TType.I64) {
+              struct.reset_log_level_timeout_epoch = iprot.readI64();
+              struct.set_reset_log_level_timeout_epoch_isSet(true);
+            } else { 
+              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+            }
+            break;
+          case 5: // RESET_LOG_LEVEL
+            if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {
+              struct.reset_log_level = iprot.readString();
+              struct.set_reset_log_level_isSet(true);
+            } else { 
+              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+            }
+            break;
+          default:
+            org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+        }
+        iprot.readFieldEnd();
+      }
+      iprot.readStructEnd();
+      struct.validate();
+    }
+
+    public void write(org.apache.thrift.protocol.TProtocol oprot, LogLevel struct) throws org.apache.thrift.TException {
+      struct.validate();
+
+      oprot.writeStructBegin(STRUCT_DESC);
+      if (struct.action != null) {
+        oprot.writeFieldBegin(ACTION_FIELD_DESC);
+        oprot.writeI32(struct.action.getValue());
+        oprot.writeFieldEnd();
+      }
+      if (struct.target_log_level != null) {
+        if (struct.is_set_target_log_level()) {
+          oprot.writeFieldBegin(TARGET_LOG_LEVEL_FIELD_DESC);
+          oprot.writeString(struct.target_log_level);
+          oprot.writeFieldEnd();
+        }
+      }
+      if (struct.is_set_reset_log_level_timeout_secs()) {
+        oprot.writeFieldBegin(RESET_LOG_LEVEL_TIMEOUT_SECS_FIELD_DESC);
+        oprot.writeI32(struct.reset_log_level_timeout_secs);
+        oprot.writeFieldEnd();
+      }
+      if (struct.is_set_reset_log_level_timeout_epoch()) {
+        oprot.writeFieldBegin(RESET_LOG_LEVEL_TIMEOUT_EPOCH_FIELD_DESC);
+        oprot.writeI64(struct.reset_log_level_timeout_epoch);
+        oprot.writeFieldEnd();
+      }
+      if (struct.reset_log_level != null) {
+        if (struct.is_set_reset_log_level()) {
+          oprot.writeFieldBegin(RESET_LOG_LEVEL_FIELD_DESC);
+          oprot.writeString(struct.reset_log_level);
+          oprot.writeFieldEnd();
+        }
+      }
+      oprot.writeFieldStop();
+      oprot.writeStructEnd();
+    }
+
+  }
+
+  private static class LogLevelTupleSchemeFactory implements SchemeFactory {
+    public LogLevelTupleScheme getScheme() {
+      return new LogLevelTupleScheme();
+    }
+  }
+
+  private static class LogLevelTupleScheme extends TupleScheme<LogLevel> {
+
+    @Override
+    public void write(org.apache.thrift.protocol.TProtocol prot, LogLevel struct) throws org.apache.thrift.TException {
+      TTupleProtocol oprot = (TTupleProtocol) prot;
+      oprot.writeI32(struct.action.getValue());
+      BitSet optionals = new BitSet();
+      if (struct.is_set_target_log_level()) {
+        optionals.set(0);
+      }
+      if (struct.is_set_reset_log_level_timeout_secs()) {
+        optionals.set(1);
+      }
+      if (struct.is_set_reset_log_level_timeout_epoch()) {
+        optionals.set(2);
+      }
+      if (struct.is_set_reset_log_level()) {
+        optionals.set(3);
+      }
+      oprot.writeBitSet(optionals, 4);
+      if (struct.is_set_target_log_level()) {
+        oprot.writeString(struct.target_log_level);
+      }
+      if (struct.is_set_reset_log_level_timeout_secs()) {
+        oprot.writeI32(struct.reset_log_level_timeout_secs);
+      }
+      if (struct.is_set_reset_log_level_timeout_epoch()) {
+        oprot.writeI64(struct.reset_log_level_timeout_epoch);
+      }
+      if (struct.is_set_reset_log_level()) {
+        oprot.writeString(struct.reset_log_level);
+      }
+    }
+
+    @Override
+    public void read(org.apache.thrift.protocol.TProtocol prot, LogLevel struct) throws org.apache.thrift.TException {
+      TTupleProtocol iprot = (TTupleProtocol) prot;
+      struct.action = backtype.storm.generated.LogLevelAction.findByValue(iprot.readI32());
+      struct.set_action_isSet(true);
+      BitSet incoming = iprot.readBitSet(4);
+      if (incoming.get(0)) {
+        struct.target_log_level = iprot.readString();
+        struct.set_target_log_level_isSet(true);
+      }
+      if (incoming.get(1)) {
+        struct.reset_log_level_timeout_secs = iprot.readI32();
+        struct.set_reset_log_level_timeout_secs_isSet(true);
+      }
+      if (incoming.get(2)) {
+        struct.reset_log_level_timeout_epoch = iprot.readI64();
+        struct.set_reset_log_level_timeout_epoch_isSet(true);
+      }
+      if (incoming.get(3)) {
+        struct.reset_log_level = iprot.readString();
+        struct.set_reset_log_level_isSet(true);
+      }
+    }
+  }
+
+}
+

http://git-wip-us.apache.org/repos/asf/storm/blob/d13cfe2d/storm-core/src/jvm/backtype/storm/generated/LogLevelAction.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/backtype/storm/generated/LogLevelAction.java b/storm-core/src/jvm/backtype/storm/generated/LogLevelAction.java
new file mode 100644
index 0000000..c649bcf
--- /dev/null
+++ b/storm-core/src/jvm/backtype/storm/generated/LogLevelAction.java
@@ -0,0 +1,65 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+/**
+ * Autogenerated by Thrift Compiler (0.9.2)
+ *
+ * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
+ *  @generated
+ */
+package backtype.storm.generated;
+
+
+import java.util.Map;
+import java.util.HashMap;
+import org.apache.thrift.TEnum;
+
+public enum LogLevelAction implements org.apache.thrift.TEnum {
+  UNCHANGED(1),
+  UPDATE(2),
+  REMOVE(3);
+
+  private final int value;
+
+  private LogLevelAction(int value) {
+    this.value = value;
+  }
+
+  /**
+   * Get the integer value of this enum value, as defined in the Thrift IDL.
+   */
+  public int getValue() {
+    return value;
+  }
+
+  /**
+   * Find the enum type by its integer value, as defined in the Thrift IDL.
+   * @return null if the value is not found.
+   */
+  public static LogLevelAction findByValue(int value) { 
+    switch (value) {
+      case 1:
+        return UNCHANGED;
+      case 2:
+        return UPDATE;
+      case 3:
+        return REMOVE;
+      default:
+        return null;
+    }
+  }
+}


[4/9] storm git commit: Documentation brief for dynamic log level settings feature

Posted by bo...@apache.org.
Documentation brief for dynamic log level settings feature


Project: http://git-wip-us.apache.org/repos/asf/storm/repo
Commit: http://git-wip-us.apache.org/repos/asf/storm/commit/087f08ea
Tree: http://git-wip-us.apache.org/repos/asf/storm/tree/087f08ea
Diff: http://git-wip-us.apache.org/repos/asf/storm/diff/087f08ea

Branch: refs/heads/master
Commit: 087f08ea314df2055bce61abbb54cc7443da41a9
Parents: 250ab11
Author: Alessandro Bellina <ab...@yahoo-inc.com>
Authored: Fri Apr 10 12:07:22 2015 -0500
Committer: Kishor Patil <kp...@yahoo-inc.com>
Committed: Mon Sep 28 15:57:37 2015 -0500

----------------------------------------------------------------------
 docs/DYNAMIC_LOG_LEVEL_SETTINGS.md           |  41 ++++++++++++++++++++++
 docs/images/dynamic_log_level_settings_1.png | Bin 0 -> 93689 bytes
 docs/images/dynamic_log_level_settings_2.png | Bin 0 -> 78785 bytes
 3 files changed, 41 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/storm/blob/087f08ea/docs/DYNAMIC_LOG_LEVEL_SETTINGS.md
----------------------------------------------------------------------
diff --git a/docs/DYNAMIC_LOG_LEVEL_SETTINGS.md b/docs/DYNAMIC_LOG_LEVEL_SETTINGS.md
new file mode 100644
index 0000000..f38b708
--- /dev/null
+++ b/docs/DYNAMIC_LOG_LEVEL_SETTINGS.md
@@ -0,0 +1,41 @@
+Dynamic Log Level Settings
+==========================
+
+We have added the ability to set log level settings for a running topology using the Storm UI and the Storm CLI. 
+
+The log level settings apply the same way as you'd expect from log4j, as all we are doing is telling log4j to set the level of the logger you provide. If you set the log level of a parent logger, the children loggers start using that level (unless the children have a more restrictive level already). A timeout can optionally be provided (except for DEBUG mode, where it’s required in the UI), if workers should reset log levels automatically.
+
+This revert action is triggered by a polling mechanism (every 30 seconds by default, but this is configurable), so the actual reset can occur anywhere from the timeout you provided up to that timeout plus the polling interval.
+
+Using the Storm UI
+-------------
+
+In order to set a level, click on a running topology, and then click on “Change Log Level” in the Topology Actions section.
+
+![Change Log Level dialog](images/dynamic_log_level_settings_1.png "Change Log Level dialog")
+
+Next, provide the logger name, select the desired level (e.g. WARN), and a timeout in seconds (or 0 if not needed). Then click on “Add”.
+
+![After adding a log level setting](images/dynamic_log_level_settings_2.png "After adding a log level setting")
+
+To clear the log level click on the “Clear” button. This reverts the log level back to what it was before you added the setting. The log level line will disappear from the UI.
+
+While there is a delay resetting log levels back, setting the log level in the first place is immediate (or as quickly as the message can travel from the UI/CLI to the workers by way of nimbus and zookeeper).
+
+Using the CLI
+-------------
+
+Using the CLI, issue the command:
+
+`./bin/storm set_log_level [topology name] -l [logger name]=[LEVEL]:[TIMEOUT]`
+
+For example:
+
+`./bin/storm set_log_level my_topology -l ROOT=DEBUG:30`
+
+Sets the ROOT logger to DEBUG for 30 seconds.
+
+`./bin/storm set_log_level my_topology -r ROOT`
+
+Clears the ROOT logger dynamic log level, resetting it to its original value.
+

http://git-wip-us.apache.org/repos/asf/storm/blob/087f08ea/docs/images/dynamic_log_level_settings_1.png
----------------------------------------------------------------------
diff --git a/docs/images/dynamic_log_level_settings_1.png b/docs/images/dynamic_log_level_settings_1.png
new file mode 100644
index 0000000..71d42e7
Binary files /dev/null and b/docs/images/dynamic_log_level_settings_1.png differ

http://git-wip-us.apache.org/repos/asf/storm/blob/087f08ea/docs/images/dynamic_log_level_settings_2.png
----------------------------------------------------------------------
diff --git a/docs/images/dynamic_log_level_settings_2.png b/docs/images/dynamic_log_level_settings_2.png
new file mode 100644
index 0000000..d0e61a7
Binary files /dev/null and b/docs/images/dynamic_log_level_settings_2.png differ


[5/9] storm git commit: Minor fix to lower scope on nimbus client call

Posted by bo...@apache.org.
Minor fix to lower scope on nimbus client call


Project: http://git-wip-us.apache.org/repos/asf/storm/repo
Commit: http://git-wip-us.apache.org/repos/asf/storm/commit/51c32531
Tree: http://git-wip-us.apache.org/repos/asf/storm/tree/51c32531
Diff: http://git-wip-us.apache.org/repos/asf/storm/diff/51c32531

Branch: refs/heads/master
Commit: 51c32531ae72adc56d071745677e375326b50fe8
Parents: 087f08e
Author: Kishor Patil <kp...@yahoo-inc.com>
Authored: Tue Sep 29 09:31:25 2015 -0500
Committer: Kishor Patil <kp...@yahoo-inc.com>
Committed: Tue Sep 29 09:31:25 2015 -0500

----------------------------------------------------------------------
 .../backtype/storm/command/set_log_level.clj    | 24 ++++++++++----------
 1 file changed, 12 insertions(+), 12 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/storm/blob/51c32531/storm-core/src/clj/backtype/storm/command/set_log_level.clj
----------------------------------------------------------------------
diff --git a/storm-core/src/clj/backtype/storm/command/set_log_level.clj b/storm-core/src/clj/backtype/storm/command/set_log_level.clj
index 3b50c3a..88b297d 100644
--- a/storm-core/src/clj/backtype/storm/command/set_log_level.clj
+++ b/storm-core/src/clj/backtype/storm/command/set_log_level.clj
@@ -59,17 +59,17 @@
          (merge oldval val)
          val)))
 
-(defn -main [& args] 
+(defn -main [& args]
   (let [[{log-setting :log-setting remove-log-setting :remove-log-setting} [name] _]
-          (cli args ["-l" "--log-setting"
-                        :parse-fn (parse-named-log-levels LogLevelAction/UPDATE)
-                        :assoc-fn merge-together]
-                    ["-r" "--remove-log-setting"
-                        :parse-fn (parse-named-log-levels LogLevelAction/REMOVE)
-                        :assoc-fn merge-together])]
+        (cli args ["-l" "--log-setting"
+                   :parse-fn (parse-named-log-levels LogLevelAction/UPDATE)
+                   :assoc-fn merge-together]
+                  ["-r" "--remove-log-setting"
+                   :parse-fn (parse-named-log-levels LogLevelAction/REMOVE)
+                   :assoc-fn merge-together])
+        log-config (LogConfig.)]
+    (doseq [[log-name log-val] (merge log-setting remove-log-setting)]
+      (.put_to_named_logger_level log-config log-name log-val))
+    (log-message "Sent log config " log-config " for topology " name)
     (with-configured-nimbus-connection nimbus
-      (let [log-config (LogConfig.)]
-        (doseq [[log-name log-val] (merge log-setting remove-log-setting)]
-          (.put_to_named_logger_level log-config log-name log-val))
-        (log-message "Sent log config " log-config " for topology " name)
-        (.setLogConfig nimbus (get-storm-id nimbus name) log-config)))))
+      (.setLogConfig nimbus (get-storm-id nimbus name) log-config))))